Commit b474ea2f authored by Joffrey F, committed by GitHub

Merge pull request #1519 from shin-/2.2.0-release

2.2.0 release
......@@ -35,7 +35,7 @@ def buildImages = { ->
}
def getAPIVersion = { engineVersion ->
def versionMap = ['1.12': '1.24', '1.13': '1.25']
def versionMap = ['1.12': '1.24', '1.13': '1.26']
return versionMap[engineVersion.substring(0, 4)]
}
......
......@@ -44,11 +44,11 @@ integration-test-py3: build-py3
.PHONY: integration-dind
integration-dind: build build-py3
docker rm -vf dpy-dind || :
docker run -d --name dpy-dind --privileged dockerswarm/dind:1.13.0 docker daemon\
-H tcp://0.0.0.0:2375
docker run --rm --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TEST_API_VERSION=1.25"\
docker run -d --name dpy-dind --privileged dockerswarm/dind:1.13.1 docker daemon\
-H tcp://0.0.0.0:2375 --experimental
docker run --rm --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TEST_API_VERSION=1.26"\
--link=dpy-dind:docker docker-sdk-python py.test tests/integration
docker run --rm --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TEST_API_VERSION=1.25"\
docker run --rm --env="DOCKER_HOST=tcp://docker:2375" --env="DOCKER_TEST_API_VERSION=1.26"\
--link=dpy-dind:docker docker-sdk-python3 py.test tests/integration
docker rm -vf dpy-dind
......@@ -57,14 +57,14 @@ integration-dind-ssl: build-dind-certs build build-py3
docker run -d --name dpy-dind-certs dpy-dind-certs
docker run -d --env="DOCKER_HOST=tcp://localhost:2375" --env="DOCKER_TLS_VERIFY=1"\
--env="DOCKER_CERT_PATH=/certs" --volumes-from dpy-dind-certs --name dpy-dind-ssl\
-v /tmp --privileged dockerswarm/dind:1.13.0 docker daemon --tlsverify\
-v /tmp --privileged dockerswarm/dind:1.13.1 docker daemon --tlsverify\
--tlscacert=/certs/ca.pem --tlscert=/certs/server-cert.pem\
--tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375
--tlskey=/certs/server-key.pem -H tcp://0.0.0.0:2375 --experimental
docker run --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\
--env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=1.25"\
--env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=1.26"\
--link=dpy-dind-ssl:docker docker-sdk-python py.test tests/integration
docker run --rm --volumes-from dpy-dind-ssl --env="DOCKER_HOST=tcp://docker:2375"\
--env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=1.25"\
--env="DOCKER_TLS_VERIFY=1" --env="DOCKER_CERT_PATH=/certs" --env="DOCKER_TEST_API_VERSION=1.26"\
--link=dpy-dind-ssl:docker docker-sdk-python3 py.test tests/integration
docker rm -vf dpy-dind-ssl dpy-dind-certs
......
......@@ -498,6 +498,9 @@ class ContainerApiMixin(object):
container, as a mapping of hostname to IP address.
group_add (:py:class:`list`): List of additional group names and/or
IDs that the container process will run as.
init (bool): Run an init inside the container that forwards
signals and reaps processes
init_path (str): Path to the docker-init binary
ipc_mode (str): Set the IPC mode for the container.
isolation (str): Isolation technology to use. Default: `None`.
links (dict or list of tuples): Either a dictionary mapping name
......
......@@ -7,6 +7,22 @@ from ..constants import INSECURE_REGISTRY_DEPRECATION_WARNING
class DaemonApiMixin(object):
@utils.minimum_version('1.25')
def df(self):
"""
Get data usage information.
Returns:
(dict): A dictionary representing different resource categories
and their respective data usage.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
url = self._url('/system/df')
return self._result(self._get(url), True)
def events(self, since=None, until=None, filters=None, decode=None):
"""
Get real-time events from the server. Similar to the ``docker events``
......@@ -54,7 +70,7 @@ class DaemonApiMixin(object):
}
return self._stream_helper(
self.get(self._url('/events'), params=params, stream=True),
self._get(self._url('/events'), params=params, stream=True),
decode=decode
)
......
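A minimal usage sketch of the new data-usage call, assuming a local Engine 1.13+ reachable through the default connection settings:

```python
import docker

# Low-level client; version='auto' negotiates the API version with the Engine.
api = docker.APIClient(version='auto')

usage = api.df()
print(usage['LayersSize'])       # total size of image layers, in bytes
print(len(usage['Images']))      # per-image usage entries
print(len(usage['Containers']))  # per-container usage entries
print(len(usage['Volumes']))     # per-volume usage entries
```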
......@@ -195,6 +195,7 @@ class PluginApiMixin(object):
return self._stream_helper(res, decode=True)
@utils.minimum_version('1.25')
@utils.check_resource
def remove_plugin(self, name, force=False):
"""
Remove an installed plugin.
......@@ -212,3 +213,39 @@ class PluginApiMixin(object):
res = self._delete(url, params={'force': force})
self._raise_for_status(res)
return True
@utils.minimum_version('1.26')
@utils.check_resource
def upgrade_plugin(self, name, remote, privileges):
"""
Upgrade an installed plugin.
Args:
name (string): Name of the plugin to upgrade. The ``:latest``
tag is optional and is the default if omitted.
remote (string): Remote reference to upgrade to. The
``:latest`` tag is optional and is the default if omitted.
privileges (list): A list of privileges the user consents to
grant to the plugin. Can be retrieved using
:py:meth:`~plugin_privileges`.
Returns:
An iterable object streaming the decoded API logs
"""
url = self._url('/plugins/{0}/upgrade', name)
params = {
'remote': remote,
}
headers = {}
registry, repo_name = auth.resolve_repository_name(remote)
header = auth.get_config_header(self, registry)
if header:
headers['X-Registry-Auth'] = header
response = self._post_json(
url, params=params, headers=headers, data=privileges,
stream=True
)
self._raise_for_status(response)
return self._stream_helper(response, decode=True)
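A sketch of how the low-level upgrade call might be used, assuming the `vieux/sshfs` plugin is already installed and currently disabled:

```python
import docker

api = docker.APIClient(version='auto')
plugin_ref = 'vieux/sshfs:latest'  # example plugin; any installed, disabled plugin works

# Grant the privileges the remote reference requires, then stream progress entries.
privileges = api.plugin_privileges(plugin_ref)
for chunk in api.upgrade_plugin(plugin_ref, plugin_ref, privileges):
    print(chunk)  # decoded progress entries (download status, etc.)
```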
......@@ -166,6 +166,56 @@ class ServiceApiMixin(object):
url = self._url('/services')
return self._result(self._get(url, params=params), True)
@utils.minimum_version('1.25')
@utils.check_resource
def service_logs(self, service, details=False, follow=False, stdout=False,
stderr=False, since=0, timestamps=False, tail='all',
is_tty=None):
"""
Get log stream for a service.
Note: This endpoint works only for services with the ``json-file``
or ``journald`` logging drivers.
Args:
service (str): ID or name of the service
details (bool): Show extra details provided to logs.
Default: ``False``
follow (bool): Keep connection open to read logs as they are
sent by the Engine. Default: ``False``
stdout (bool): Return logs from ``stdout``. Default: ``False``
stderr (bool): Return logs from ``stderr``. Default: ``False``
since (int): UNIX timestamp for the logs starting point.
Default: 0
timestamps (bool): Add timestamps to every log line.
tail (string or int): Number of log lines to be returned,
counting from the current end of the logs. Specify an
integer or ``'all'`` to output all log lines.
Default: ``all``
is_tty (bool): Whether the service's :py:class:`ContainerSpec`
enables the TTY option. If omitted, the method will query
the Engine for the information, causing an additional
roundtrip.
Returns (generator): Logs for the service.
"""
params = {
'details': details,
'follow': follow,
'stdout': stdout,
'stderr': stderr,
'since': since,
'timestamps': timestamps,
'tail': tail
}
url = self._url('/services/{0}/logs', service)
res = self._get(url, params=params, stream=True)
if is_tty is None:
is_tty = self.inspect_service(
service
)['Spec']['TaskTemplate']['ContainerSpec'].get('TTY', False)
return self._get_result_tty(True, res, is_tty)
@utils.minimum_version('1.24')
def tasks(self, filters=None):
"""
......
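A sketch of the new log stream, assuming a swarm-mode manager running an experimental Engine 1.13+ and a service using the `json-file` or `journald` logging driver:

```python
import docker

api = docker.APIClient(version='auto')
service_ref = 'my_service'  # placeholder: ID or name of an existing service

# Passing is_tty explicitly avoids the extra inspect_service roundtrip.
for chunk in api.service_logs(service_ref, stdout=True, stderr=True, is_tty=False):
    print(chunk.decode('utf-8'), end='')  # chunks are raw bytes on Python 3
```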
......@@ -155,6 +155,10 @@ class DockerClient(object):
return self.api.events(*args, **kwargs)
events.__doc__ = APIClient.events.__doc__
def df(self):
return self.api.df()
df.__doc__ = APIClient.df.__doc__
def info(self, *args, **kwargs):
return self.api.info(*args, **kwargs)
info.__doc__ = APIClient.info.__doc__
......
import sys
from .version import version
DEFAULT_DOCKER_API_VERSION = '1.24'
DEFAULT_DOCKER_API_VERSION = '1.26'
MINIMUM_DOCKER_API_VERSION = '1.21'
DEFAULT_TIMEOUT_SECONDS = 60
STREAM_HEADER_SIZE_BYTES = 8
......
......@@ -491,6 +491,9 @@ class ContainerCollection(Collection):
group_add (:py:class:`list`): List of additional group names and/or
IDs that the container process will run as.
hostname (str): Optional hostname for the container.
init (bool): Run an init inside the container that forwards
signals and reaps processes
init_path (str): Path to the docker-init binary
ipc_mode (str): Set the IPC mode for the container.
isolation (str): Isolation technology to use. Default: `None`.
labels (dict or list): A dictionary of name-value labels (e.g.
......@@ -726,7 +729,7 @@ class ContainerCollection(Collection):
Args:
all (bool): Show all containers. Only running containers are shown
by default trunc (bool): Truncate output
by default
since (str): Show only containers created since Id or Name, include
non-running ones
before (str): Show only container created before Id or Name,
......@@ -814,6 +817,8 @@ RUN_HOST_CONFIG_KWARGS = [
'dns',
'extra_hosts',
'group_add',
'init',
'init_path',
'ipc_mode',
'isolation',
'kernel_memory',
......
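A sketch of the high-level equivalent, assuming Engine API 1.25+ and the `busybox` image available locally:

```python
import docker

client = docker.from_env(version='auto')

# init=True asks the Engine to run its docker-init binary as PID 1,
# forwarding signals and reaping zombie processes.
container = client.containers.run('busybox', 'sleep 30', init=True, detach=True)
print(container.attrs['HostConfig']['Init'])  # True
container.remove(force=True)
```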
from .. import errors
from .resource import Collection, Model
......@@ -96,6 +97,30 @@ class Plugin(Model):
"""
return self.client.api.remove_plugin(self.name, force=force)
def upgrade(self, remote=None):
"""
Upgrade the plugin.
Args:
remote (string): Remote reference to upgrade to. The
``:latest`` tag is optional and is the default if omitted.
Default: this plugin's name.
Returns:
A generator streaming the decoded API logs
"""
if self.enabled:
raise errors.DockerError(
'Plugin must be disabled before upgrading.'
)
if remote is None:
remote = self.name
privileges = self.client.api.plugin_privileges(remote)
for d in self.client.api.upgrade_plugin(self.name, remote, privileges):
yield d
self._reload()
class PluginCollection(Collection):
model = Plugin
......
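A sketch of the model-level flow, again assuming `vieux/sshfs` is installed:

```python
import docker

client = docker.from_env(version='auto')
plugin = client.plugins.get('vieux/sshfs:latest')

if plugin.enabled:
    plugin.disable()            # upgrade refuses to run on an enabled plugin
for entry in plugin.upgrade():  # no remote given: upgrades to the plugin's own name
    print(entry)
plugin.enable()
```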
......@@ -77,6 +77,34 @@ class Service(Model):
**create_kwargs
)
def logs(self, **kwargs):
"""
Get log stream for the service.
Note: This method works only for services with the ``json-file``
or ``journald`` logging drivers.
Args:
details (bool): Show extra details provided to logs.
Default: ``False``
follow (bool): Keep connection open to read logs as they are
sent by the Engine. Default: ``False``
stdout (bool): Return logs from ``stdout``. Default: ``False``
stderr (bool): Return logs from ``stderr``. Default: ``False``
since (int): UNIX timestamp for the logs starting point.
Default: 0
timestamps (bool): Add timestamps to every log line.
tail (string or int): Number of log lines to be returned,
counting from the current end of the logs. Specify an
integer or ``'all'`` to output all log lines.
Default: ``all``
Returns (generator): Logs for the service.
"""
is_tty = self.attrs['Spec']['TaskTemplate']['ContainerSpec'].get(
'TTY', False
)
return self.client.api.service_logs(self.id, is_tty=is_tty, **kwargs)
class ServiceCollection(Collection):
"""Services on the Docker server."""
......@@ -96,11 +124,12 @@ class ServiceCollection(Collection):
access and load balance a service. Default: ``None``.
env (list of str): Environment variables, in the form
``KEY=val``.
hostname (string): Hostname to set on the container.
labels (dict): Labels to apply to the service.
log_driver (str): Log driver to use for containers.
log_driver_options (dict): Log driver options.
mode (str): Scheduling mode for the service (``replicated`` or
``global``). Defaults to ``replicated``.
mode (ServiceMode): Scheduling mode for the service.
Default: ``None``
mounts (list of str): Mounts for the containers, in the form
``source:target:options``, where options is either
``ro`` or ``rw``.
......@@ -176,6 +205,7 @@ CONTAINER_SPEC_KWARGS = [
'command',
'args',
'env',
'hostname',
'workdir',
'user',
'labels',
......
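A sketch combining the two model-level additions, assuming a swarm-mode manager (the log endpoint needs an experimental Engine 1.13+):

```python
import docker

client = docker.from_env(version='auto')

service = client.services.create(
    'busybox',
    command='sh -c "while true; do echo hello; sleep 1; done"',
    name='hello-svc',
    hostname='hello-host',  # new ContainerSpec hostname support
)

# Once a task is running, stream its output.
for chunk in service.logs(stdout=True, tail=10):
    print(chunk)
```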
......@@ -117,7 +117,8 @@ class HostConfig(dict):
oom_kill_disable=False, shm_size=None, sysctls=None,
tmpfs=None, oom_score_adj=None, dns_opt=None, cpu_shares=None,
cpuset_cpus=None, userns_mode=None, pids_limit=None,
isolation=None, auto_remove=False, storage_opt=None):
isolation=None, auto_remove=False, storage_opt=None,
init=None, init_path=None):
if mem_limit is not None:
self['Memory'] = parse_bytes(mem_limit)
......@@ -417,6 +418,16 @@ class HostConfig(dict):
raise host_config_version_error('storage_opt', '1.24')
self['StorageOpt'] = storage_opt
if init is not None:
if version_lt(version, '1.25'):
raise host_config_version_error('init', '1.25')
self['Init'] = init
if init_path is not None:
if version_lt(version, '1.25'):
raise host_config_version_error('init_path', '1.25')
self['InitPath'] = init_path
def host_config_type_error(param, param_value, expected):
error_msg = 'Invalid type for {0} param: expected {1} but found {2}'
......
......@@ -70,6 +70,7 @@ class ContainerSpec(dict):
image (string): The image name to use for the container.
command (string or list): The command to be run in the image.
args (:py:class:`list`): Arguments to the command.
hostname (string): The hostname to set on the container.
env (dict): Environment variables.
dir (string): The working directory for commands to run in.
user (string): The user inside the container.
......@@ -82,9 +83,9 @@ class ContainerSpec(dict):
secrets (list of py:class:`SecretReference`): List of secrets to be
made available inside the containers.
"""
def __init__(self, image, command=None, args=None, env=None, workdir=None,
user=None, labels=None, mounts=None, stop_grace_period=None,
secrets=None):
def __init__(self, image, command=None, args=None, hostname=None, env=None,
workdir=None, user=None, labels=None, mounts=None,
stop_grace_period=None, secrets=None):
self['Image'] = image
if isinstance(command, six.string_types):
......@@ -92,6 +93,8 @@ class ContainerSpec(dict):
self['Command'] = command
self['Args'] = args
if hostname is not None:
self['Hostname'] = hostname
if env is not None:
if isinstance(env, dict):
self['Env'] = format_environment(env)
......
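The same hostname option through the low-level API, sketched with a hypothetical service name:

```python
import docker

api = docker.APIClient(version='auto')

spec = docker.types.ContainerSpec(
    'busybox', command=['echo', 'hello'], hostname='task-host'
)
api.create_service(docker.types.TaskTemplate(spec), name='hello-lowlevel')
```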
......@@ -24,7 +24,7 @@ def build_port_bindings(ports):
return port_bindings
def to_port_range(port):
def to_port_range(port, randomly_available_port=False):
if not port:
return None
......@@ -37,6 +37,9 @@ def to_port_range(port):
port, protocol = parts
protocol = "/" + protocol
if randomly_available_port:
return ["%s%s" % (port, protocol)]
parts = str(port).split('-')
if len(parts) == 1:
......@@ -64,14 +67,18 @@ def split_port(port):
if len(parts) == 1:
internal_port, = parts
if not internal_port:
_raise_invalid_port(port)
return to_port_range(internal_port), None
if len(parts) == 2:
external_port, internal_port = parts
internal_range = to_port_range(internal_port)
external_range = to_port_range(external_port)
if internal_range is None:
_raise_invalid_port(port)
if internal_range is None or external_range is None:
external_range = to_port_range(external_port, len(internal_range) == 1)
if external_range is None:
_raise_invalid_port(port)
if len(internal_range) != len(external_range):
......@@ -80,8 +87,13 @@ def split_port(port):
return internal_range, external_range
external_ip, external_port, internal_port = parts
if not internal_port:
_raise_invalid_port(port)
internal_range = to_port_range(internal_port)
external_range = to_port_range(external_port)
external_range = to_port_range(external_port, len(internal_range) == 1)
if not external_range:
external_range = [None] * len(internal_range)
......
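A worked example of the new range-to-single-port handling, consistent with the unit tests further down:

```python
from docker.utils.ports import split_port

# One container port published on a host port range: the Engine picks
# a free host port from the range at runtime.
internal, external = split_port('8000-8010:80')
assert internal == ['80']
assert external == ['8000-8010']

# Ranges of equal length are still expanded one-to-one.
internal, external = split_port('1000-1001:2000-2001')
assert internal == ['2000', '2001']
assert external == ['1000', '1001']
```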
version = "2.1.0"
version = "2.2.0"
version_info = tuple([int(d) for d in version.split("-")[0].split(".")])
Change log
==========
2.2.0
-----
[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/30?closed=1)
### Features
* Default API version has been bumped to `1.26` (Engine 1.13.1+)
* Upgrade plugin:
* Added the `upgrade_plugin` method to the `APIClient` class
* Added the `upgrade` method to the `Plugin` class
* Service logs:
* Added the `service_logs` method to the `APIClient` class
* Added the `logs` method to the `Service` class
* Added the `df` method to `APIClient` and `DockerClient`
* Added support for `init` and `init_path` parameters in `HostConfig`
and `DockerClient.containers.run`
* Added support for `hostname` parameter in `ContainerSpec` and
`DockerClient.services.create`
* Added support for port range to single port in port mappings
(e.g. `8000-8010:80`)
### Bugfixes
* Fixed a bug where a missing container port in a port mapping would raise
an unexpected `TypeError`
* Fixed a bug where the `events` method in `APIClient` and `DockerClient`
would not respect custom headers set in `config.json`
2.1.0
-----
......
......@@ -25,6 +25,7 @@ Client reference
.. autoattribute:: swarm
.. autoattribute:: volumes
.. automethod:: df()
.. automethod:: events()
.. automethod:: info()
.. automethod:: login()
......
......@@ -35,3 +35,4 @@ Plugin objects
.. automethod:: reload
.. automethod:: push
.. automethod:: remove
.. automethod:: upgrade
import functools
import os
import os.path
import random
......@@ -53,6 +54,15 @@ def requires_api_version(version):
)
def requires_experimental(f):
@functools.wraps(f)
def wrapped(self, *args, **kwargs):
if not self.client.info()['ExperimentalBuild']:
pytest.skip('Feature requires Docker Engine experimental mode')
return f(self, *args, **kwargs)
return wrapped
def wait_on_condition(condition, delay=0.1, timeout=40):
start_time = time.time()
while not condition():
......
......@@ -8,7 +8,7 @@ import warnings
import docker
from docker.utils import kwargs_from_env
from .base import BaseAPIIntegrationTest, BUSYBOX
from .base import BaseAPIIntegrationTest
class InformationTest(BaseAPIIntegrationTest):
......@@ -25,48 +25,6 @@ class InformationTest(BaseAPIIntegrationTest):
self.assertIn('Debug', res)
class LinkTest(BaseAPIIntegrationTest):
def test_remove_link(self):
# Create containers
container1 = self.client.create_container(
BUSYBOX, 'cat', detach=True, stdin_open=True
)
container1_id = container1['Id']
self.tmp_containers.append(container1_id)
self.client.start(container1_id)
# Create Link
# we don't want the first /
link_path = self.client.inspect_container(container1_id)['Name'][1:]
link_alias = 'mylink'
container2 = self.client.create_container(
BUSYBOX, 'cat', host_config=self.client.create_host_config(
links={link_path: link_alias}
)
)
container2_id = container2['Id']
self.tmp_containers.append(container2_id)
self.client.start(container2_id)
# Remove link
linked_name = self.client.inspect_container(container2_id)['Name'][1:]
link_name = '%s/%s' % (linked_name, link_alias)
self.client.remove_container(link_name, link=True)
# Link is gone
containers = self.client.containers(all=True)
retrieved = [x for x in containers if link_name in x['Names']]
self.assertEqual(len(retrieved), 0)
# Containers are still there
retrieved = [
x for x in containers if x['Id'].startswith(container1_id) or
x['Id'].startswith(container2_id)
]
self.assertEqual(len(retrieved), 2)
class LoadConfigTest(BaseAPIIntegrationTest):
def test_load_legacy_config(self):
folder = tempfile.mkdtemp()
......@@ -150,7 +108,7 @@ class UnixconnTest(unittest.TestCase):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
client = docker.APIClient(**kwargs_from_env())
client = docker.APIClient(version='auto', **kwargs_from_env())
client.images()
client.close()
del client
......
......@@ -122,7 +122,7 @@ class CreateContainerTest(BaseAPIIntegrationTest):
self.client.remove_container(id)
err = exc.exception.explanation
self.assertIn(
'You cannot remove a running container', err
'You cannot remove ', err
)
self.client.remove_container(id, force=True)
......@@ -439,6 +439,30 @@ class CreateContainerTest(BaseAPIIntegrationTest):
'size': '120G'
}
@requires_api_version('1.25')
def test_create_with_init(self):
ctnr = self.client.create_container(
BUSYBOX, 'true',
host_config=self.client.create_host_config(
init=True
)
)
self.tmp_containers.append(ctnr['Id'])
config = self.client.inspect_container(ctnr)
assert config['HostConfig']['Init'] is True
@requires_api_version('1.25')
def test_create_with_init_path(self):
ctnr = self.client.create_container(
BUSYBOX, 'true',
host_config=self.client.create_host_config(
init_path="/usr/libexec/docker-init"
)
)
self.tmp_containers.append(ctnr['Id'])
config = self.client.inspect_container(ctnr)
assert config['HostConfig']['InitPath'] == "/usr/libexec/docker-init"
class VolumeBindTest(BaseAPIIntegrationTest):
def setUp(self):
......@@ -1229,3 +1253,45 @@ class ContainerCPUTest(BaseAPIIntegrationTest):
self.client.start(container)
inspect_data = self.client.inspect_container(container)
self.assertEqual(inspect_data['HostConfig']['CpusetCpus'], cpuset_cpus)
class LinkTest(BaseAPIIntegrationTest):
def test_remove_link(self):
# Create containers
container1 = self.client.create_container(
BUSYBOX, 'cat', detach=True, stdin_open=True
)
container1_id = container1['Id']
self.tmp_containers.append(container1_id)
self.client.start(container1_id)
# Create Link
# we don't want the first /
link_path = self.client.inspect_container(container1_id)['Name'][1:]
link_alias = 'mylink'
container2 = self.client.create_container(
BUSYBOX, 'cat', host_config=self.client.create_host_config(
links={link_path: link_alias}
)
)
container2_id = container2['Id']
self.tmp_containers.append(container2_id)
self.client.start(container2_id)
# Remove link
linked_name = self.client.inspect_container(container2_id)['Name'][1:]
link_name = '%s/%s' % (linked_name, link_alias)
self.client.remove_container(link_name, link=True)
# Link is gone
containers = self.client.containers(all=True)
retrieved = [x for x in containers if link_name in x['Names']]
self.assertEqual(len(retrieved), 0)
# Containers are still there
retrieved = [
x for x in containers if x['Id'].startswith(container1_id) or
x['Id'].startswith(container2_id)
]
self.assertEqual(len(retrieved), 2)
......@@ -123,6 +123,16 @@ class PluginTest(BaseAPIIntegrationTest):
assert self.client.inspect_plugin(SSHFS)
assert self.client.enable_plugin(SSHFS)
@requires_api_version('1.26')
def test_upgrade_plugin(self):
pl_data = self.ensure_plugin_installed(SSHFS)
assert pl_data['Enabled'] is False
prv = self.client.plugin_privileges(SSHFS)
logs = [d for d in self.client.upgrade_plugin(SSHFS, SSHFS, prv)]
assert any(x.get('status') == 'Download complete' for x in logs)
assert self.client.inspect_plugin(SSHFS)
assert self.client.enable_plugin(SSHFS)
def test_create_plugin(self):
plugin_data_dir = os.path.join(
os.path.dirname(__file__), 'testdata/dummy-plugin'
......
......@@ -4,8 +4,11 @@ import random
import time
import docker
import six
from ..helpers import force_leave_swarm, requires_api_version
from ..helpers import (
force_leave_swarm, requires_api_version, requires_experimental
)
from .base import BaseAPIIntegrationTest, BUSYBOX
......@@ -27,13 +30,15 @@ class ServiceTest(BaseAPIIntegrationTest):
def get_service_name(self):
return 'dockerpytest_{0:x}'.format(random.getrandbits(64))
def get_service_container(self, service_name, attempts=20, interval=0.5):
def get_service_container(self, service_name, attempts=20, interval=0.5,
include_stopped=False):
# There is some delay between the service's creation and the creation
# of the service's containers. This method deals with the uncertainty
# when trying to retrieve the container associated with a service.
while True:
containers = self.client.containers(
filters={'name': [service_name]}, quiet=True
filters={'name': [service_name]}, quiet=True,
all=include_stopped
)
if len(containers) > 0:
return containers[0]
......@@ -97,6 +102,20 @@ class ServiceTest(BaseAPIIntegrationTest):
assert len(services) == 1
assert services[0]['ID'] == svc_id['ID']
@requires_api_version('1.25')
@requires_experimental
def test_service_logs(self):
name, svc_id = self.create_simple_service()
assert self.get_service_container(name, include_stopped=True)
logs = self.client.service_logs(svc_id, stdout=True, is_tty=False)
log_line = next(logs)
if six.PY3:
log_line = log_line.decode('utf-8')
assert 'hello\n' in log_line
assert 'com.docker.swarm.service.id={}'.format(
svc_id['ID']
) in log_line
def test_create_service_custom_log_driver(self):
container_spec = docker.types.ContainerSpec(
BUSYBOX, ['echo', 'hello']
......
......@@ -2,21 +2,28 @@ import unittest
import docker
from ..helpers import requires_api_version
from .base import TEST_API_VERSION
class ClientTest(unittest.TestCase):
client = docker.from_env(version=TEST_API_VERSION)
def test_info(self):
client = docker.from_env(version=TEST_API_VERSION)
info = client.info()
info = self.client.info()
assert 'ID' in info
assert 'Name' in info
def test_ping(self):
client = docker.from_env(version=TEST_API_VERSION)
assert client.ping() is True
assert self.client.ping() is True
def test_version(self):
client = docker.from_env(version=TEST_API_VERSION)
assert 'Version' in client.version()
assert 'Version' in self.client.version()
@requires_api_version('1.25')
def test_df(self):
data = self.client.df()
assert 'LayersSize' in data
assert 'Containers' in data
assert 'Volumes' in data
assert 'Images' in data
......@@ -228,7 +228,8 @@ class DockerApiTest(BaseAPIClientTest):
'GET',
url_prefix + 'events',
params={'since': None, 'until': None, 'filters': None},
stream=True
stream=True,
timeout=DEFAULT_TIMEOUT_SECONDS
)
def test_events_with_since_until(self):
......@@ -247,7 +248,8 @@ class DockerApiTest(BaseAPIClientTest):
'until': ts + 10,
'filters': None
},
stream=True
stream=True,
timeout=DEFAULT_TIMEOUT_SECONDS
)
def test_events_with_filters(self):
......@@ -265,7 +267,8 @@ class DockerApiTest(BaseAPIClientTest):
'until': None,
'filters': expected_filters
},
stream=True
stream=True,
timeout=DEFAULT_TIMEOUT_SECONDS
)
def _socket_path_for_client_session(self, client):
......
......@@ -9,6 +9,7 @@ class CreateServiceKwargsTest(unittest.TestCase):
'command': 'true',
'name': 'somename',
'labels': {'key': 'value'},
'hostname': 'test_host',
'mode': 'global',
'update_config': {'update': 'config'},
'networks': ['somenet'],
......@@ -47,6 +48,6 @@ class CreateServiceKwargsTest(unittest.TestCase):
'Options': {'foo': 'bar'}
}
assert set(task_template['ContainerSpec'].keys()) == set([
'Image', 'Command', 'Args', 'Env', 'Dir', 'User', 'Labels',
'Mounts', 'StopGracePeriod'
'Image', 'Command', 'Args', 'Hostname', 'Env', 'Dir', 'User',
'Labels', 'Mounts', 'StopGracePeriod'
])
......@@ -530,6 +530,11 @@ class PortsTest(unittest.TestCase):
self.assertEqual(internal_port, ["2000", "2001"])
self.assertEqual(external_port, ["1000", "1001"])
def test_split_port_random_port_range_with_host_port(self):
internal_port, external_port = split_port("1000-1001:2000")
self.assertEqual(internal_port, ["2000"])
self.assertEqual(external_port, ["1000-1001"])
def test_split_port_no_host_port(self):
internal_port, external_port = split_port("2000")
self.assertEqual(internal_port, ["2000"])
......@@ -569,6 +574,13 @@ class PortsTest(unittest.TestCase):
self.assertRaises(ValueError,
lambda: split_port("localhost:"))
def test_with_no_container_port(self):
self.assertRaises(ValueError,
lambda: split_port("localhost:80:"))
def test_split_port_empty_string(self):
self.assertRaises(ValueError, lambda: split_port(""))
def test_build_port_bindings_with_one_port(self):
port_bindings = build_port_bindings(["127.0.0.1:1000:1000"])
self.assertEqual(port_bindings["1000"], [("127.0.0.1", "1000")])
......