
Commit d74bfa6 (authored Nov 28, 2018)

Merge pull request #2187 from docker/3.6.0-release

3.6.0 release

Tags: rtd-test, 3.6.0
2 parents 7cc0a1b + 24ed2f3; commit d74bfa6

36 files changed: +643 -186 lines changed
 

‎docker/api/build.py

Lines changed: 8 additions & 1 deletion
@@ -339,7 +339,14 @@ def process_dockerfile(dockerfile, path):
     abs_dockerfile = dockerfile
     if not os.path.isabs(dockerfile):
         abs_dockerfile = os.path.join(path, dockerfile)
-
+        if constants.IS_WINDOWS_PLATFORM and path.startswith(
+                constants.WINDOWS_LONGPATH_PREFIX):
+            abs_dockerfile = '{}{}'.format(
+                constants.WINDOWS_LONGPATH_PREFIX,
+                os.path.normpath(
+                    abs_dockerfile[len(constants.WINDOWS_LONGPATH_PREFIX):]
+                )
+            )
     if (os.path.splitdrive(path)[0] != os.path.splitdrive(abs_dockerfile)[0] or
             os.path.relpath(abs_dockerfile, path).startswith('..')):
         # Dockerfile not in context - read data to insert into tar later

‎docker/api/client.py

Lines changed: 19 additions & 0 deletions
@@ -39,6 +39,11 @@
 except ImportError:
     pass
 
+try:
+    from ..transport import SSHAdapter
+except ImportError:
+    pass
+
 
 class APIClient(
         requests.Session,
@@ -141,6 +146,18 @@ def __init__(self, base_url=None, version=None,
             )
             self.mount('http+docker://', self._custom_adapter)
             self.base_url = 'http+docker://localnpipe'
+        elif base_url.startswith('ssh://'):
+            try:
+                self._custom_adapter = SSHAdapter(
+                    base_url, timeout, pool_connections=num_pools
+                )
+            except NameError:
+                raise DockerException(
+                    'Install paramiko package to enable ssh:// support'
+                )
+            self.mount('http+docker://ssh', self._custom_adapter)
+            self._unmount('http://', 'https://')
+            self.base_url = 'http+docker://ssh'
         else:
             # Use SSLAdapter for the ability to specify SSL version
             if isinstance(tls, TLSConfig):
@@ -279,6 +296,8 @@ def _get_raw_response_socket(self, response):
         self._raise_for_status(response)
         if self.base_url == "http+docker://localnpipe":
             sock = response.raw._fp.fp.raw.sock
+        elif self.base_url.startswith('http+docker://ssh'):
+            sock = response.raw._fp.fp.channel
         elif six.PY3:
             sock = response.raw._fp.fp.raw
             if self.base_url.startswith("https://"):
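In practice, the new branch above lets the low-level client be pointed at an SSH URL directly. A minimal sketch, assuming a daemon reachable at the hypothetical address ssh://user@example-host and paramiko installed:

    import docker

    # ssh:// base URLs are routed through SSHAdapter; without paramiko this
    # raises DockerException ('Install paramiko package to enable ssh:// support').
    api = docker.APIClient(base_url='ssh://user@example-host')
    print(api.version())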

‎docker/api/container.py

Lines changed: 30 additions & 16 deletions
@@ -473,16 +473,12 @@ def create_host_config(self, *args, **kwargs):
                 signals and reaps processes
             init_path (str): Path to the docker-init binary
             ipc_mode (str): Set the IPC mode for the container.
-            isolation (str): Isolation technology to use. Default: `None`.
-            links (dict or list of tuples): Either a dictionary mapping name
-                to alias or as a list of ``(name, alias)`` tuples.
-            log_config (dict): Logging configuration, as a dictionary with
-                keys:
-
-                - ``type`` The logging driver name.
-                - ``config`` A dictionary of configuration for the logging
-                  driver.
-
+            isolation (str): Isolation technology to use. Default: ``None``.
+            links (dict): Mapping of links using the
+                ``{'container': 'alias'}`` format. The alias is optional.
+                Containers declared in this dict will be linked to the new
+                container using the provided alias. Default: ``None``.
+            log_config (LogConfig): Logging configuration
             lxc_conf (dict): LXC config.
             mem_limit (float or str): Memory limit. Accepts float values
                 (which represent the memory limit of the created container in
@@ -543,7 +539,7 @@ def create_host_config(self, *args, **kwargs):
                     }
 
             ulimits (:py:class:`list`): Ulimits to set inside the container,
-                as a list of dicts.
+                as a list of :py:class:`docker.types.Ulimit` instances.
             userns_mode (str): Sets the user namespace mode for the container
                 when user namespace remapping option is enabled. Supported
                 values are: ``host``
@@ -611,9 +607,10 @@ def create_endpoint_config(self, *args, **kwargs):
             aliases (:py:class:`list`): A list of aliases for this endpoint.
                 Names in that list can be used within the network to reach the
                 container. Defaults to ``None``.
-            links (:py:class:`list`): A list of links for this endpoint.
-                Containers declared in this list will be linked to this
-                container. Defaults to ``None``.
+            links (dict): Mapping of links for this endpoint using the
+                ``{'container': 'alias'}`` format. The alias is optional.
+                Containers declared in this dict will be linked to this
+                container using the provided alias. Defaults to ``None``.
             ipv4_address (str): The IP address of this container on the
                 network, using the IPv4 protocol. Defaults to ``None``.
             ipv6_address (str): The IP address of this container on the
@@ -628,7 +625,7 @@ def create_endpoint_config(self, *args, **kwargs):
 
             >>> endpoint_config = client.create_endpoint_config(
                 aliases=['web', 'app'],
-                links=['app_db'],
+                links={'app_db': 'db', 'another': None},
                 ipv4_address='132.65.0.123'
             )
 
@@ -697,6 +694,18 @@ def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
         Raises:
             :py:class:`docker.errors.APIError`
                 If the server returns an error.
+
+        Example:
+
+            >>> c = docker.APIClient()
+            >>> f = open('./sh_bin.tar', 'wb')
+            >>> bits, stat = c.get_archive(container, '/bin/sh')
+            >>> print(stat)
+            {'name': 'sh', 'size': 1075464, 'mode': 493,
+             'mtime': '2018-10-01T15:37:48-07:00', 'linkTarget': ''}
+            >>> for chunk in bits:
+            ...    f.write(chunk)
+            >>> f.close()
         """
         params = {
             'path': path
@@ -1074,7 +1083,8 @@ def stats(self, container, decode=None, stream=True):
         Args:
             container (str): The container to stream statistics from
             decode (bool): If set to true, stream will be decoded into dicts
-                on the fly. False by default.
+                on the fly. Only applicable if ``stream`` is True.
+                False by default.
             stream (bool): If set to false, only the current stats will be
                 returned instead of a stream. True by default.
 
@@ -1088,6 +1098,10 @@ def stats(self, container, decode=None, stream=True):
             return self._stream_helper(self._get(url, stream=True),
                                        decode=decode)
         else:
+            if decode:
+                raise errors.InvalidArgument(
+                    "decode is only available in conjuction with stream=True"
+                )
             return self._result(self._get(url, params={'stream': False}),
                                 json=True)
 
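The new ``InvalidArgument`` guard above changes how ``stats`` may be called. A short sketch of the resulting behaviour (the container name is illustrative):

    import docker
    from docker.errors import InvalidArgument

    api = docker.APIClient()
    try:
        api.stats('my-container', stream=False, decode=True)
    except InvalidArgument:
        # decode=True is only accepted together with stream=True
        pass
    snapshot = api.stats('my-container', stream=False)  # single dict, no stream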

‎docker/api/image.py

Lines changed: 3 additions & 2 deletions
@@ -32,7 +32,7 @@ def get_image(self, image, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
         Example:
 
             >>> image = cli.get_image("busybox:latest")
-            >>> f = open('/tmp/busybox-latest.tar', 'w')
+            >>> f = open('/tmp/busybox-latest.tar', 'wb')
             >>> for chunk in image:
             >>>   f.write(chunk)
             >>> f.close()
@@ -334,7 +334,8 @@ def pull(self, repository, tag=None, stream=False, auth_config=None,
         Args:
             repository (str): The repository to pull
             tag (str): The tag to pull
-            stream (bool): Stream the output as a generator
+            stream (bool): Stream the output as a generator. Make sure to
+                consume the generator, otherwise pull might get cancelled.
             auth_config (dict): Override the credentials that
                 :py:meth:`~docker.api.daemon.DaemonApiMixin.login` has set for
                 this request. ``auth_config`` should contain the ``username``

‎docker/api/service.py

Lines changed: 2 additions & 1 deletion
@@ -197,7 +197,8 @@ def inspect_service(self, service, insert_defaults=None):
                 into the service inspect output.
 
         Returns:
-            ``True`` if successful.
+            (dict): A dictionary of the server-side representation of the
+                service, including all relevant properties.
 
         Raises:
             :py:class:`docker.errors.APIError`

‎docker/auth.py

Lines changed: 1 addition & 1 deletion
@@ -267,7 +267,7 @@ def load_config(config_path=None, config_dict=None):
         return res
 
     log.debug(
-        "Couldn't find auth-related section ; attempting to interpret"
+        "Couldn't find auth-related section ; attempting to interpret "
        "as auth-only file"
    )
    return {'auths': parse_auth(config_dict)}

‎docker/constants.py

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@
     'is deprecated and non-functional. Please remove it.'
 
 IS_WINDOWS_PLATFORM = (sys.platform == 'win32')
+WINDOWS_LONGPATH_PREFIX = '\\\\?\\'
 
 DEFAULT_USER_AGENT = "docker-sdk-python/{0}".format(version)
 DEFAULT_NUM_POOLS = 25
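For clarity: the Python literal '\\\\?\\' encodes the four-character Windows extended-length path prefix \\?\ (each backslash is escaped), which the process_dockerfile() change earlier in this commit strips and re-applies around os.path.normpath().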

‎docker/models/containers.py

Lines changed: 26 additions & 13 deletions
@@ -15,7 +15,12 @@
 
 
 class Container(Model):
-
+    """ Local representation of a container object. Detailed configuration may
+    be accessed through the :py:attr:`attrs` attribute. Note that local
+    attributes are cached; users may call :py:meth:`reload` to
+    query the Docker daemon for the current properties, causing
+    :py:attr:`attrs` to be refreshed.
+    """
     @property
     def name(self):
         """
@@ -228,6 +233,17 @@ def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
         Raises:
             :py:class:`docker.errors.APIError`
                 If the server returns an error.
+
+        Example:
+
+            >>> f = open('./sh_bin.tar', 'wb')
+            >>> bits, stat = container.get_archive('/bin/sh')
+            >>> print(stat)
+            {'name': 'sh', 'size': 1075464, 'mode': 493,
+             'mtime': '2018-10-01T15:37:48-07:00', 'linkTarget': ''}
+            >>> for chunk in bits:
+            ...    f.write(chunk)
+            >>> f.close()
         """
         return self.client.api.get_archive(self.id, path, chunk_size)
 
@@ -380,7 +396,8 @@ def stats(self, **kwargs):
 
         Args:
             decode (bool): If set to true, stream will be decoded into dicts
-                on the fly. False by default.
+                on the fly. Only applicable if ``stream`` is True.
+                False by default.
             stream (bool): If set to false, only the current stats will be
                 returned instead of a stream. True by default.
 
@@ -574,15 +591,11 @@ def run(self, image, command=None, stdout=True, stderr=False,
                 ``{"label1": "value1", "label2": "value2"}``) or a list of
                 names of labels to set with empty values (e.g.
                 ``["label1", "label2"]``)
-            links (dict or list of tuples): Either a dictionary mapping name
-                to alias or as a list of ``(name, alias)`` tuples.
-            log_config (dict): Logging configuration, as a dictionary with
-                keys:
-
-                - ``type`` The logging driver name.
-                - ``config`` A dictionary of configuration for the logging
-                  driver.
-
+            links (dict): Mapping of links using the
+                ``{'container': 'alias'}`` format. The alias is optional.
+                Containers declared in this dict will be linked to the new
+                container using the provided alias. Default: ``None``.
+            log_config (LogConfig): Logging configuration.
             mac_address (str): MAC address to assign to the container.
             mem_limit (int or str): Memory limit. Accepts float values
                 (which represent the memory limit of the created container in
@@ -691,8 +704,8 @@ def run(self, image, command=None, stdout=True, stderr=False,
                     }
 
             tty (bool): Allocate a pseudo-TTY.
-            ulimits (:py:class:`list`): Ulimits to set inside the container, as
-                a list of dicts.
+            ulimits (:py:class:`list`): Ulimits to set inside the container,
+                as a list of :py:class:`docker.types.Ulimit` instances.
             user (str or int): Username or UID to run commands as inside the
                 container.
             userns_mode (str): Sets the user namespace mode for the container

‎docker/models/images.py

Lines changed: 35 additions & 4 deletions
@@ -1,5 +1,6 @@
 import itertools
 import re
+import warnings
 
 import six
 
@@ -59,14 +60,20 @@ def history(self):
         """
         return self.client.api.history(self.id)
 
-    def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
+    def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE, named=False):
         """
         Get a tarball of an image. Similar to the ``docker save`` command.
 
         Args:
             chunk_size (int): The generator will return up to that much data
                 per iteration, but may return less. If ``None``, data will be
                 streamed as it is received. Default: 2 MB
+            named (str or bool): If ``False`` (default), the tarball will not
+                retain repository and tag information for this image. If set
+                to ``True``, the first tag in the :py:attr:`~tags` list will
+                be used to identify the image. Alternatively, any element of
+                the :py:attr:`~tags` list can be used as an argument to use
+                that specific tag as the saved identifier.
 
         Returns:
             (generator): A stream of raw archive data.
@@ -78,12 +85,22 @@ def save(self, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
         Example:
 
             >>> image = cli.get_image("busybox:latest")
-            >>> f = open('/tmp/busybox-latest.tar', 'w')
+            >>> f = open('/tmp/busybox-latest.tar', 'wb')
             >>> for chunk in image:
             >>>   f.write(chunk)
             >>> f.close()
         """
-        return self.client.api.get_image(self.id, chunk_size)
+        img = self.id
+        if named:
+            img = self.tags[0] if self.tags else img
+            if isinstance(named, six.string_types):
+                if named not in self.tags:
+                    raise InvalidArgument(
+                        "{} is not a valid tag for this image".format(named)
+                    )
+                img = named
+
+        return self.client.api.get_image(img, chunk_size)
 
     def tag(self, repository, tag=None, **kwargs):
         """
@@ -409,7 +426,21 @@ def pull(self, repository, tag=None, **kwargs):
         if not tag:
             repository, tag = parse_repository_tag(repository)
 
-        self.client.api.pull(repository, tag=tag, **kwargs)
+        if 'stream' in kwargs:
+            warnings.warn(
+                '`stream` is not a valid parameter for this method'
+                ' and will be overridden'
+            )
+            del kwargs['stream']
+
+        pull_log = self.client.api.pull(
+            repository, tag=tag, stream=True, **kwargs
+        )
+        for _ in pull_log:
+            # We don't do anything with the logs, but we need
+            # to keep the connection alive and wait for the image
+            # to be pulled.
+            pass
         if tag:
             return self.get('{0}{2}{1}'.format(
                 repository, tag, '@' if tag.startswith('sha256:') else ':'
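A brief usage sketch of the new ``named`` argument (the tag below is only an example; any entry of ``image.tags`` is accepted, and an unknown tag raises ``InvalidArgument``):

    import docker

    client = docker.from_env()
    image = client.images.get('busybox:latest')
    # named=True keeps the first entry of image.tags in the tarball manifest
    with open('/tmp/busybox-latest.tar', 'wb') as f:
        for chunk in image.save(named=True):
            f.write(chunk)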

‎docker/transport/__init__.py

Lines changed: 5 additions & 0 deletions
@@ -6,3 +6,8 @@
     from .npipesocket import NpipeSocket
 except ImportError:
     pass
+
+try:
+    from .sshconn import SSHAdapter
+except ImportError:
+    pass

‎docker/transport/npipesocket.py

Lines changed: 0 additions & 4 deletions
@@ -87,10 +87,6 @@ def detach(self):
     def dup(self):
         return NpipeSocket(self._handle)
 
-    @check_closed
-    def fileno(self):
-        return int(self._handle)
-
     def getpeername(self):
         return self._address
 

‎docker/transport/sshconn.py

Lines changed: 107 additions & 0 deletions
@@ -0,0 +1,107 @@
+import paramiko
+import requests.adapters
+import six
+
+from .. import constants
+
+if six.PY3:
+    import http.client as httplib
+else:
+    import httplib
+
+try:
+    import requests.packages.urllib3 as urllib3
+except ImportError:
+    import urllib3
+
+RecentlyUsedContainer = urllib3._collections.RecentlyUsedContainer
+
+
+class SSHConnection(httplib.HTTPConnection, object):
+    def __init__(self, ssh_transport, timeout=60):
+        super(SSHConnection, self).__init__(
+            'localhost', timeout=timeout
+        )
+        self.ssh_transport = ssh_transport
+        self.timeout = timeout
+
+    def connect(self):
+        sock = self.ssh_transport.open_session()
+        sock.settimeout(self.timeout)
+        sock.exec_command('docker system dial-stdio')
+        self.sock = sock
+
+
+class SSHConnectionPool(urllib3.connectionpool.HTTPConnectionPool):
+    scheme = 'ssh'
+
+    def __init__(self, ssh_client, timeout=60, maxsize=10):
+        super(SSHConnectionPool, self).__init__(
+            'localhost', timeout=timeout, maxsize=maxsize
+        )
+        self.ssh_transport = ssh_client.get_transport()
+        self.timeout = timeout
+
+    def _new_conn(self):
+        return SSHConnection(self.ssh_transport, self.timeout)
+
+    # When re-using connections, urllib3 calls fileno() on our
+    # SSH channel instance, quickly overloading our fd limit. To avoid this,
+    # we override _get_conn
+    def _get_conn(self, timeout):
+        conn = None
+        try:
+            conn = self.pool.get(block=self.block, timeout=timeout)
+
+        except AttributeError:  # self.pool is None
+            raise urllib3.exceptions.ClosedPoolError(self, "Pool is closed.")
+
+        except six.moves.queue.Empty:
+            if self.block:
+                raise urllib3.exceptions.EmptyPoolError(
+                    self,
+                    "Pool reached maximum size and no more "
+                    "connections are allowed."
+                )
+            pass  # Oh well, we'll create a new connection then
+
+        return conn or self._new_conn()
+
+
+class SSHAdapter(requests.adapters.HTTPAdapter):
+
+    __attrs__ = requests.adapters.HTTPAdapter.__attrs__ + [
+        'pools', 'timeout', 'ssh_client',
+    ]
+
+    def __init__(self, base_url, timeout=60,
+                 pool_connections=constants.DEFAULT_NUM_POOLS):
+        self.ssh_client = paramiko.SSHClient()
+        self.ssh_client.load_system_host_keys()
+
+        parsed = six.moves.urllib_parse.urlparse(base_url)
+        self.ssh_client.connect(
+            parsed.hostname, parsed.port, parsed.username,
+        )
+        self.timeout = timeout
+        self.pools = RecentlyUsedContainer(
+            pool_connections, dispose_func=lambda p: p.close()
+        )
+        super(SSHAdapter, self).__init__()
+
+    def get_connection(self, url, proxies=None):
+        with self.pools.lock:
+            pool = self.pools.get(url)
+            if pool:
+                return pool
+
+            pool = SSHConnectionPool(
+                self.ssh_client, self.timeout
+            )
+            self.pools[url] = pool
+
+        return pool
+
+    def close(self):
+        self.pools.clear()
+        self.ssh_client.close()
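Two observations about the adapter above: it only loads the system host keys (load_system_host_keys()), so the remote host generally needs to be present in the local known_hosts already, and authentication relies on paramiko's defaults (SSH agent or default key files) since no password is passed. It is also an ordinary requests HTTPAdapter, so it is mounted on a session exactly as the APIClient changes do; a minimal sketch with a hypothetical host:

    import requests
    from docker.transport import SSHAdapter

    session = requests.Session()
    # mirrors the mount performed in docker/api/client.py for ssh:// base URLs
    session.mount('http+docker://ssh', SSHAdapter('ssh://user@example-host'))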

‎docker/types/containers.py

Lines changed: 56 additions & 0 deletions
@@ -23,6 +23,36 @@ class LogConfigTypesEnum(object):
 
 
 class LogConfig(DictType):
+    """
+    Configure logging for a container, when provided as an argument to
+    :py:meth:`~docker.api.container.ContainerApiMixin.create_host_config`.
+    You may refer to the
+    `official logging driver documentation <https://docs.docker.com/config/containers/logging/configure/>`_
+    for more information.
+
+    Args:
+        type (str): Indicate which log driver to use. A set of valid drivers
+            is provided as part of the :py:attr:`LogConfig.types`
+            enum. Other values may be accepted depending on the engine version
+            and available logging plugins.
+        config (dict): A driver-dependent configuration dictionary. Please
+            refer to the driver's documentation for a list of valid config
+            keys.
+
+    Example:
+
+        >>> from docker.types import LogConfig
+        >>> lc = LogConfig(type=LogConfig.types.JSON, config={
+        ...   'max-size': '1g',
+        ...   'labels': 'production_status,geo'
+        ... })
+        >>> hc = client.create_host_config(log_config=lc)
+        >>> container = client.create_container('busybox', 'true',
+        ...    host_config=hc)
+        >>> client.inspect_container(container)['HostConfig']['LogConfig']
+        {'Type': 'json-file', 'Config': {'labels': 'production_status,geo', 'max-size': '1g'}}
+
+    """ # flake8: noqa
     types = LogConfigTypesEnum
 
     def __init__(self, **kwargs):
@@ -50,14 +80,40 @@ def config(self):
         return self['Config']
 
     def set_config_value(self, key, value):
+        """ Set a the value for ``key`` to ``value`` inside the ``config``
+        dict.
+        """
         self.config[key] = value
 
     def unset_config(self, key):
+        """ Remove the ``key`` property from the ``config`` dict. """
         if key in self.config:
             del self.config[key]
 
 
 class Ulimit(DictType):
+    """
+    Create a ulimit declaration to be used with
+    :py:meth:`~docker.api.container.ContainerApiMixin.create_host_config`.
+
+    Args:
+
+        name (str): Which ulimit will this apply to. A list of valid names can
+            be found `here <http://tinyurl.me/ZWRkM2Ztwlykf>`_.
+        soft (int): The soft limit for this ulimit. Optional.
+        hard (int): The hard limit for this ulimit. Optional.
+
+    Example:
+
+        >>> nproc_limit = docker.types.Ulimit(name='nproc', soft=1024)
+        >>> hc = client.create_host_config(ulimits=[nproc_limit])
+        >>> container = client.create_container(
+                'busybox', 'true', host_config=hc
+            )
+        >>> client.inspect_container(container)['HostConfig']['Ulimits']
+        [{'Name': 'nproc', 'Hard': 0, 'Soft': 1024}]
+
+    """
     def __init__(self, **kwargs):
         name = kwargs.get('name', kwargs.get('Name'))
         soft = kwargs.get('soft', kwargs.get('Soft'))

‎docker/types/daemon.py

Lines changed: 11 additions & 1 deletion
@@ -5,6 +5,8 @@
 except ImportError:
     import urllib3
 
+from ..errors import DockerException
+
 
 class CancellableStream(object):
     """
@@ -55,9 +57,17 @@ def close(self):
         elif hasattr(sock_raw, '_sock'):
             sock = sock_raw._sock
 
+        elif hasattr(sock_fp, 'channel'):
+            # We're working with a paramiko (SSH) channel, which doesn't
+            # support cancelable streams with the current implementation
+            raise DockerException(
+                'Cancellable streams not supported for the SSH protocol'
+            )
         else:
             sock = sock_fp._sock
-        if isinstance(sock, urllib3.contrib.pyopenssl.WrappedSocket):
+
+        if hasattr(urllib3.contrib, 'pyopenssl') and isinstance(
+                sock, urllib3.contrib.pyopenssl.WrappedSocket):
             sock = sock.socket
 
         sock.shutdown(socket.SHUT_RDWR)

‎docker/utils/utils.py

Lines changed: 80 additions & 57 deletions
@@ -1,10 +1,11 @@
 import base64
+import json
 import os
 import os.path
-import json
 import shlex
-from distutils.version import StrictVersion
+import string
 from datetime import datetime
+from distutils.version import StrictVersion
 
 import six
 
@@ -13,11 +14,12 @@
 
 if six.PY2:
     from urllib import splitnport
+    from urlparse import urlparse
 else:
-    from urllib.parse import splitnport
+    from urllib.parse import splitnport, urlparse
 
 DEFAULT_HTTP_HOST = "127.0.0.1"
-DEFAULT_UNIX_SOCKET = "http+unix://var/run/docker.sock"
+DEFAULT_UNIX_SOCKET = "http+unix:///var/run/docker.sock"
 DEFAULT_NPIPE = 'npipe:////./pipe/docker_engine'
 
 BYTE_UNITS = {
@@ -212,75 +214,93 @@ def parse_repository_tag(repo_name):
     return repo_name, None
 
 
-# Based on utils.go:ParseHost http://tinyurl.com/nkahcfh
-# fd:// protocol unsupported (for obvious reasons)
-# Added support for http and https
-# Protocol translation: tcp -> http, unix -> http+unix
 def parse_host(addr, is_win32=False, tls=False):
-    proto = "http+unix"
-    port = None
     path = ''
+    port = None
+    host = None
 
+    # Sensible defaults
     if not addr and is_win32:
-        addr = DEFAULT_NPIPE
-
+        return DEFAULT_NPIPE
     if not addr or addr.strip() == 'unix://':
         return DEFAULT_UNIX_SOCKET
 
     addr = addr.strip()
-    if addr.startswith('http://'):
-        addr = addr.replace('http://', 'tcp://')
-    if addr.startswith('http+unix://'):
-        addr = addr.replace('http+unix://', 'unix://')
 
-    if addr == 'tcp://':
+    parsed_url = urlparse(addr)
+    proto = parsed_url.scheme
+    if not proto or any([x not in string.ascii_letters + '+' for x in proto]):
+        # https://bugs.python.org/issue754016
+        parsed_url = urlparse('//' + addr, 'tcp')
+        proto = 'tcp'
+
+    if proto == 'fd':
+        raise errors.DockerException('fd protocol is not implemented')
+
+    # These protos are valid aliases for our library but not for the
+    # official spec
+    if proto == 'http' or proto == 'https':
+        tls = proto == 'https'
+        proto = 'tcp'
+    elif proto == 'http+unix':
+        proto = 'unix'
+
+    if proto not in ('tcp', 'unix', 'npipe', 'ssh'):
         raise errors.DockerException(
-            "Invalid bind address format: {0}".format(addr)
+            "Invalid bind address protocol: {}".format(addr)
        )
-    elif addr.startswith('unix://'):
-        addr = addr[7:]
-    elif addr.startswith('tcp://'):
-        proto = 'http{0}'.format('s' if tls else '')
-        addr = addr[6:]
-    elif addr.startswith('https://'):
-        proto = "https"
-        addr = addr[8:]
-    elif addr.startswith('npipe://'):
-        proto = 'npipe'
-        addr = addr[8:]
-    elif addr.startswith('fd://'):
-        raise errors.DockerException("fd protocol is not implemented")
-    else:
-        if "://" in addr:
-            raise errors.DockerException(
-                "Invalid bind address protocol: {0}".format(addr)
-            )
-        proto = "https" if tls else "http"
 
-    if proto in ("http", "https"):
-        address_parts = addr.split('/', 1)
-        host = address_parts[0]
-        if len(address_parts) == 2:
-            path = '/' + address_parts[1]
-        host, port = splitnport(host)
+    if proto == 'tcp' and not parsed_url.netloc:
+        # "tcp://" is exceptionally disallowed by convention;
+        # omitting a hostname for other protocols is fine
+        raise errors.DockerException(
+            'Invalid bind address format: {}'.format(addr)
+        )
 
-        if port is None:
-            raise errors.DockerException(
-                "Invalid port: {0}".format(addr)
-            )
+    if any([
+        parsed_url.params, parsed_url.query, parsed_url.fragment,
+        parsed_url.password
+    ]):
+        raise errors.DockerException(
+            'Invalid bind address format: {}'.format(addr)
+        )
+
+    if parsed_url.path and proto == 'ssh':
+        raise errors.DockerException(
+            'Invalid bind address format: no path allowed for this protocol:'
+            ' {}'.format(addr)
+        )
+    else:
+        path = parsed_url.path
+        if proto == 'unix' and parsed_url.hostname is not None:
+            # For legacy reasons, we consider unix://path
+            # to be valid and equivalent to unix:///path
+            path = '/'.join((parsed_url.hostname, path))
+
+    if proto in ('tcp', 'ssh'):
+        # parsed_url.hostname strips brackets from IPv6 addresses,
+        # which can be problematic hence our use of splitnport() instead.
+        host, port = splitnport(parsed_url.netloc)
+        if port is None or port < 0:
+            if proto != 'ssh':
+                raise errors.DockerException(
+                    'Invalid bind address format: port is required:'
+                    ' {}'.format(addr)
+                )
+            port = 22
 
     if not host:
         host = DEFAULT_HTTP_HOST
-    else:
-        host = addr
 
-    if proto in ("http", "https") and port == -1:
-        raise errors.DockerException(
-            "Bind address needs a port: {0}".format(addr))
+    # Rewrite schemes to fit library internals (requests adapters)
+    if proto == 'tcp':
+        proto = 'http{}'.format('s' if tls else '')
+    elif proto == 'unix':
+        proto = 'http+unix'
 
-    if proto == "http+unix" or proto == 'npipe':
-        return "{0}://{1}".format(proto, host).rstrip('/')
-    return "{0}://{1}:{2}{3}".format(proto, host, port, path).rstrip('/')
+    if proto in ('http+unix', 'npipe'):
+        return "{}://{}".format(proto, path).rstrip('/')
+    return '{0}://{1}:{2}{3}'.format(proto, host, port, path).rstrip('/')
 
 
 def parse_devices(devices):
@@ -366,7 +386,10 @@ def convert_filters(filters):
             v = 'true' if v else 'false'
         if not isinstance(v, list):
            v = [v, ]
-        result[k] = v
+        result[k] = [
+            str(item) if not isinstance(item, six.string_types) else item
+            for item in v
+        ]
     return json.dumps(result)
 
 
@@ -421,7 +444,7 @@ def normalize_links(links):
     if isinstance(links, dict):
         links = six.iteritems(links)
 
-    return ['{0}:{1}'.format(k, v) for k, v in sorted(links)]
+    return ['{0}:{1}'.format(k, v) if v else k for k, v in sorted(links)]
 
 
 def parse_env_file(env_file):
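A few concrete input/output pairs for the rewritten parse_host, taken from the expectations in tests/unit/utils_test.py later in this diff:

    from docker.utils import parse_host

    assert parse_host('http://:7777') == 'http://127.0.0.1:7777'
    assert parse_host('unix://') == 'http+unix:///var/run/docker.sock'
    # ssh:// addresses default to port 22 when none is given
    assert parse_host('ssh://user@remote') == 'ssh://user@remote:22'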

‎docker/version.py

Lines changed: 1 addition & 1 deletion
@@ -1,2 +1,2 @@
-version = "3.5.1"
+version = "3.6.0"
 version_info = tuple([int(d) for d in version.split("-")[0].split(".")])

‎docs/api.rst

Lines changed: 2 additions & 0 deletions
@@ -140,6 +140,7 @@ Configuration types
 .. autoclass:: Healthcheck
 .. autoclass:: IPAMConfig
 .. autoclass:: IPAMPool
+.. autoclass:: LogConfig
 .. autoclass:: Mount
 .. autoclass:: Placement
 .. autoclass:: Privileges
@@ -151,4 +152,5 @@ Configuration types
 .. autoclass:: SwarmExternalCA
 .. autoclass:: SwarmSpec(*args, **kwargs)
 .. autoclass:: TaskTemplate
+.. autoclass:: Ulimit
 .. autoclass:: UpdateConfig

‎docs/change-log.md

Lines changed: 25 additions & 0 deletions
@@ -1,6 +1,31 @@
 Change log
 ==========
 
+3.6.0
+-----
+
+[List of PRs / issues for this release](https://github.com/docker/docker-py/milestone/55?closed=1)
+
+### Features
+
+* Added support for connecting to the Docker Engine over SSH. Additional
+  dependencies for this feature can be installed with
+  `pip install "docker[ssh]"`
+* Added support for the `named` parameter in `Image.save`, which may be
+  used to ensure the resulting tarball retains the image's name on save.
+
+### Bugfixes
+
+* Fixed a bug where builds on Windows with a context path using the `\\?\`
+  prefix would fail with some relative Dockerfile paths.
+* Fixed an issue where pulls made with the `DockerClient` would fail when
+  setting the `stream` parameter to `True`.
+
+### Miscellaneous
+
+* The minimum requirement for the `requests` dependency has been bumped
+  to 2.20.0
+
 3.5.1
 -----
 
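To make the SSH feature listed in the 3.6.0 changelog concrete, a minimal connection sketch (the address is a placeholder and requires `pip install "docker[ssh]"`):

    import docker

    client = docker.DockerClient(base_url='ssh://user@example-host')
    print(client.version())
    # high-level pulls now consume the progress stream internally
    client.images.pull('busybox:latest')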

‎requirements.txt

Lines changed: 2 additions & 1 deletion
@@ -9,12 +9,13 @@ enum34==1.1.6
 idna==2.5
 ipaddress==1.0.18
 packaging==16.8
+paramiko==2.4.2
 pycparser==2.17
 pyOpenSSL==18.0.0
 pyparsing==2.2.0
 pypiwin32==219; sys_platform == 'win32' and python_version < '3.6'
 pypiwin32==223; sys_platform == 'win32' and python_version >= '3.6'
-requests==2.14.2
+requests==2.20.0
 six==1.10.0
 websocket-client==0.40.0
 urllib3==1.21.1; python_version == '3.3'

‎scripts/versions.py

Lines changed: 25 additions & 16 deletions
@@ -11,23 +11,24 @@
     'test'
 ]
 
+STAGES = ['tp', 'beta', 'rc']
 
-class Version(namedtuple('_Version', 'major minor patch rc edition')):
+
+class Version(namedtuple('_Version', 'major minor patch stage edition')):
 
     @classmethod
     def parse(cls, version):
         edition = None
         version = version.lstrip('v')
-        version, _, rc = version.partition('-')
-        if rc:
-            if 'rc' not in rc:
-                edition = rc
-                rc = None
-            elif '-' in rc:
-                edition, rc = rc.split('-')
-
+        version, _, stage = version.partition('-')
+        if stage:
+            if not any(marker in stage for marker in STAGES):
+                edition = stage
+                stage = None
+            elif '-' in stage:
+                edition, stage = stage.split('-')
         major, minor, patch = version.split('.', 3)
-        return cls(major, minor, patch, rc, edition)
+        return cls(major, minor, patch, stage, edition)
 
     @property
     def major_minor(self):
@@ -38,14 +39,22 @@ def order(self):
         """Return a representation that allows this object to be sorted
         correctly with the default comparator.
         """
-        # rc releases should appear before official releases
-        rc = (0, self.rc) if self.rc else (1, )
-        return (int(self.major), int(self.minor), int(self.patch)) + rc
+        # non-GA releases should appear before GA releases
+        # Order: tp -> beta -> rc -> GA
+        if self.stage:
+            for st in STAGES:
+                if st in self.stage:
+                    stage = (STAGES.index(st), self.stage)
+                    break
+        else:
+            stage = (len(STAGES),)
+
+        return (int(self.major), int(self.minor), int(self.patch)) + stage
 
     def __str__(self):
-        rc = '-{}'.format(self.rc) if self.rc else ''
+        stage = '-{}'.format(self.stage) if self.stage else ''
         edition = '-{}'.format(self.edition) if self.edition else ''
-        return '.'.join(map(str, self[:3])) + edition + rc
+        return '.'.join(map(str, self[:3])) + edition + stage
 
 
 def main():
@@ -57,7 +66,7 @@ def main():
             Version.parse(
                 v.strip('"').lstrip('docker-').rstrip('.tgz').rstrip('-x86_64')
             ) for v in re.findall(
-                r'"docker-[0-9]+\.[0-9]+\.[0-9]+-.*tgz"', content
+                r'"docker-[0-9]+\.[0-9]+\.[0-9]+-?.*tgz"', content
            )
        ]
        sorted_versions = sorted(

‎setup.py

Lines changed: 3 additions & 0 deletions
@@ -42,6 +42,9 @@
     # 'requests[security] >= 2.5.2, != 2.11.0, != 2.12.2'
     'tls': ['pyOpenSSL>=17.5.0', 'cryptography>=1.3.4', 'idna>=2.0.0'],
 
+    # Only required when connecting using the ssh:// protocol
+    'ssh': ['paramiko>=2.4.2'],
+
 }
 
 version = None

‎test-requirements.txt

Lines changed: 1 addition & 1 deletion
@@ -4,4 +4,4 @@ mock==1.0.1
 pytest==2.9.1; python_version == '3.3'
 pytest==3.6.3; python_version > '3.3'
 pytest-cov==2.1.0
-pytest-timeout==1.2.1
+pytest-timeout==1.3.3

‎tests/helpers.py

Lines changed: 4 additions & 0 deletions
@@ -10,6 +10,7 @@
 import socket
 
 import docker
+import paramiko
 import pytest
 
 
@@ -121,6 +122,9 @@ def assert_cat_socket_detached_with_keys(sock, inputs):
     if getattr(sock, 'family', -9) == getattr(socket, 'AF_UNIX', -1):
         with pytest.raises(socket.error):
             sock.sendall(b'make sure the socket is closed\n')
+    elif isinstance(sock, paramiko.Channel):
+        with pytest.raises(OSError):
+            sock.sendall(b'make sure the socket is closed\n')
     else:
         sock.sendall(b"make sure the socket is closed\n")
         data = sock.recv(128)

‎tests/integration/api_build_test.py

Lines changed: 5 additions & 0 deletions
@@ -540,6 +540,11 @@ def test_build_in_context_abs_dockerfile(self):
         ) == sorted(lsdata)
 
     @requires_api_version('1.31')
+    @pytest.mark.xfail(
+        True,
+        reason='Currently fails on 18.09: '
+               'https://github.com/moby/moby/issues/37920'
+    )
     def test_prune_builds(self):
         prune_result = self.client.prune_builds()
         assert 'SpaceReclaimed' in prune_result

‎tests/integration/api_container_test.py

Lines changed: 4 additions & 0 deletions
@@ -883,6 +883,8 @@ def test_logs_streaming_and_follow(self):
         assert logs == (snippet + '\n').encode(encoding='ascii')
 
     @pytest.mark.timeout(5)
+    @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'),
+                        reason='No cancellable streams over SSH')
     def test_logs_streaming_and_follow_and_cancel(self):
         snippet = 'Flowering Nights (Sakuya Iyazoi)'
         container = self.client.create_container(
@@ -1255,6 +1257,8 @@ def test_attach_no_stream(self):
         assert output == 'hello\n'.encode(encoding='ascii')
 
     @pytest.mark.timeout(5)
+    @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'),
+                        reason='No cancellable streams over SSH')
     def test_attach_stream_and_cancel(self):
         container = self.client.create_container(
             BUSYBOX, 'sh -c "echo hello && sleep 60"',

‎tests/integration/api_network_test.py

Lines changed: 1 addition & 1 deletion
@@ -8,8 +8,8 @@
 
 class TestNetworks(BaseAPIIntegrationTest):
     def tearDown(self):
-        super(TestNetworks, self).tearDown()
         self.client.leave_swarm(force=True)
+        super(TestNetworks, self).tearDown()
 
     def create_network(self, *args, **kwargs):
         net_name = random_name()

‎tests/integration/api_plugin_test.py

Lines changed: 8 additions & 8 deletions
@@ -3,7 +3,7 @@
 import docker
 import pytest
 
-from .base import BaseAPIIntegrationTest, TEST_API_VERSION
+from .base import BaseAPIIntegrationTest
 from ..helpers import requires_api_version
 
 SSHFS = 'vieux/sshfs:latest'
@@ -13,27 +13,27 @@
 class PluginTest(BaseAPIIntegrationTest):
     @classmethod
     def teardown_class(cls):
-        c = docker.APIClient(
-            version=TEST_API_VERSION, timeout=60,
-            **docker.utils.kwargs_from_env()
-        )
+        client = cls.get_client_instance()
         try:
-            c.remove_plugin(SSHFS, force=True)
+            client.remove_plugin(SSHFS, force=True)
         except docker.errors.APIError:
             pass
 
     def teardown_method(self, method):
+        client = self.get_client_instance()
         try:
-            self.client.disable_plugin(SSHFS)
+            client.disable_plugin(SSHFS)
        except docker.errors.APIError:
            pass
 
        for p in self.tmp_plugins:
            try:
-                self.client.remove_plugin(p, force=True)
+                client.remove_plugin(p, force=True)
            except docker.errors.APIError:
                pass
 
+        client.close()
+
    def ensure_plugin_installed(self, plugin_name):
        try:
            return self.client.inspect_plugin(plugin_name)

‎tests/integration/api_swarm_test.py

Lines changed: 1 addition & 2 deletions
@@ -13,14 +13,13 @@ def setUp(self):
         self._unlock_key = None
 
     def tearDown(self):
-        super(SwarmTest, self).tearDown()
         try:
             if self._unlock_key:
                 self.client.unlock_swarm(self._unlock_key)
         except docker.errors.APIError:
             pass
-
         force_leave_swarm(self.client)
+        super(SwarmTest, self).tearDown()
 
     @requires_api_version('1.24')
     def test_init_swarm_simple(self):
‎tests/integration/base.py

Lines changed: 38 additions & 35 deletions
Original file line numberDiff line numberDiff line change
@@ -29,41 +29,44 @@ def setUp(self):
2929

3030
def tearDown(self):
3131
client = docker.from_env(version=TEST_API_VERSION)
32-
for img in self.tmp_imgs:
33-
try:
34-
client.api.remove_image(img)
35-
except docker.errors.APIError:
36-
pass
37-
for container in self.tmp_containers:
38-
try:
39-
client.api.remove_container(container, force=True, v=True)
40-
except docker.errors.APIError:
41-
pass
42-
for network in self.tmp_networks:
43-
try:
44-
client.api.remove_network(network)
45-
except docker.errors.APIError:
46-
pass
47-
for volume in self.tmp_volumes:
48-
try:
49-
client.api.remove_volume(volume)
50-
except docker.errors.APIError:
51-
pass
52-
53-
for secret in self.tmp_secrets:
54-
try:
55-
client.api.remove_secret(secret)
56-
except docker.errors.APIError:
57-
pass
58-
59-
for config in self.tmp_configs:
60-
try:
61-
client.api.remove_config(config)
62-
except docker.errors.APIError:
63-
pass
64-
65-
for folder in self.tmp_folders:
66-
shutil.rmtree(folder)
32+
try:
33+
for img in self.tmp_imgs:
34+
try:
35+
client.api.remove_image(img)
36+
except docker.errors.APIError:
37+
pass
38+
for container in self.tmp_containers:
39+
try:
40+
client.api.remove_container(container, force=True, v=True)
41+
except docker.errors.APIError:
42+
pass
43+
for network in self.tmp_networks:
44+
try:
45+
client.api.remove_network(network)
46+
except docker.errors.APIError:
47+
pass
48+
for volume in self.tmp_volumes:
49+
try:
50+
client.api.remove_volume(volume)
51+
except docker.errors.APIError:
52+
pass
53+
54+
for secret in self.tmp_secrets:
55+
try:
56+
client.api.remove_secret(secret)
57+
except docker.errors.APIError:
58+
pass
59+
60+
for config in self.tmp_configs:
61+
try:
62+
client.api.remove_config(config)
63+
except docker.errors.APIError:
64+
pass
65+
66+
for folder in self.tmp_folders:
67+
shutil.rmtree(folder)
68+
finally:
69+
client.close()
6770

6871

6972
class BaseAPIIntegrationTest(BaseIntegrationTest):

‎tests/integration/models_containers_test.py

Lines changed: 3 additions & 0 deletions
@@ -1,3 +1,4 @@
+import os
 import tempfile
 import threading
 
@@ -146,6 +147,8 @@ def test_run_with_streamed_logs(self):
         assert logs[1] == b'world\n'
 
     @pytest.mark.timeout(5)
+    @pytest.mark.skipif(os.environ.get('DOCKER_HOST', '').startswith('ssh://'),
+                        reason='No cancellable streams over SSH')
     def test_run_with_streamed_logs_and_cancel(self):
         client = docker.from_env(version=TEST_API_VERSION)
         out = client.containers.run(

‎tests/integration/models_images_test.py

Lines changed: 27 additions & 0 deletions
@@ -5,6 +5,7 @@
 import pytest
 
 from .base import BaseIntegrationTest, BUSYBOX, TEST_API_VERSION
+from ..helpers import random_name
 
 
 class ImageCollectionTest(BaseIntegrationTest):
@@ -108,6 +109,32 @@ def test_save_and_load(self):
         assert len(result) == 1
         assert result[0].id == image.id
 
+    def test_save_and_load_repo_name(self):
+        client = docker.from_env(version=TEST_API_VERSION)
+        image = client.images.get(BUSYBOX)
+        additional_tag = random_name()
+        image.tag(additional_tag)
+        self.tmp_imgs.append(additional_tag)
+        image.reload()
+        with tempfile.TemporaryFile() as f:
+            stream = image.save(named='{}:latest'.format(additional_tag))
+            for chunk in stream:
+                f.write(chunk)
+
+            f.seek(0)
+            client.images.remove(additional_tag, force=True)
+            result = client.images.load(f.read())
+
+        assert len(result) == 1
+        assert result[0].id == image.id
+        assert '{}:latest'.format(additional_tag) in result[0].tags
+
+    def test_save_name_error(self):
+        client = docker.from_env(version=TEST_API_VERSION)
+        image = client.images.get(BUSYBOX)
+        with pytest.raises(docker.errors.InvalidArgument):
+            image.save(named='sakuya/izayoi')
+
 
 class ImageTest(BaseIntegrationTest):
 

‎tests/unit/api_build_test.py

Lines changed: 63 additions & 1 deletion
@@ -1,12 +1,16 @@
 import gzip
 import io
+import shutil
 
 import docker
 from docker import auth
+from docker.api.build import process_dockerfile
 
-from .api_test import BaseAPIClientTest, fake_request, url_prefix
 import pytest
 
+from ..helpers import make_tree
+from .api_test import BaseAPIClientTest, fake_request, url_prefix
+
 
 class BuildTest(BaseAPIClientTest):
     def test_build_container(self):
@@ -161,3 +165,61 @@ def test_set_auth_headers_with_dict_and_no_auth_configs(self):
 
         self.client._set_auth_headers(headers)
         assert headers == expected_headers
+
+    @pytest.mark.skipif(
+        not docker.constants.IS_WINDOWS_PLATFORM,
+        reason='Windows-specific syntax')
+    def test_process_dockerfile_win_longpath_prefix(self):
+        dirs = [
+            'foo', 'foo/bar', 'baz',
+        ]
+
+        files = [
+            'Dockerfile', 'foo/Dockerfile.foo', 'foo/bar/Dockerfile.bar',
+            'baz/Dockerfile.baz',
+        ]
+
+        base = make_tree(dirs, files)
+        self.addCleanup(shutil.rmtree, base)
+
+        def pre(path):
+            return docker.constants.WINDOWS_LONGPATH_PREFIX + path
+
+        assert process_dockerfile(None, pre(base)) == (None, None)
+        assert process_dockerfile('Dockerfile', pre(base)) == (
+            'Dockerfile', None
+        )
+        assert process_dockerfile('foo/Dockerfile.foo', pre(base)) == (
+            'foo/Dockerfile.foo', None
+        )
+        assert process_dockerfile(
+            '../Dockerfile', pre(base + '\\foo')
+        )[1] is not None
+        assert process_dockerfile(
+            '../baz/Dockerfile.baz', pre(base + '/baz')
+        ) == ('../baz/Dockerfile.baz', None)
+
+    def test_process_dockerfile(self):
+        dirs = [
+            'foo', 'foo/bar', 'baz',
+        ]
+
+        files = [
+            'Dockerfile', 'foo/Dockerfile.foo', 'foo/bar/Dockerfile.bar',
+            'baz/Dockerfile.baz',
+        ]
+
+        base = make_tree(dirs, files)
+        self.addCleanup(shutil.rmtree, base)
+
+        assert process_dockerfile(None, base) == (None, None)
+        assert process_dockerfile('Dockerfile', base) == ('Dockerfile', None)
+        assert process_dockerfile('foo/Dockerfile.foo', base) == (
+            'foo/Dockerfile.foo', None
+        )
+        assert process_dockerfile(
+            '../Dockerfile', base + '/foo'
+        )[1] is not None
+        assert process_dockerfile('../baz/Dockerfile.baz', base + '/baz') == (
+            '../baz/Dockerfile.baz', None
+        )

‎tests/unit/models_containers_test.py

Lines changed: 3 additions & 1 deletion
@@ -232,7 +232,9 @@ def test_run_pull(self):
         container = client.containers.run('alpine', 'sleep 300', detach=True)
 
         assert container.id == FAKE_CONTAINER_ID
-        client.api.pull.assert_called_with('alpine', platform=None, tag=None)
+        client.api.pull.assert_called_with(
+            'alpine', platform=None, tag=None, stream=True
+        )
 
     def test_run_with_error(self):
         client = make_fake_client()

‎tests/unit/models_images_test.py

Lines changed: 19 additions & 3 deletions
@@ -1,6 +1,8 @@
+import unittest
+import warnings
+
 from docker.constants import DEFAULT_DATA_CHUNK_SIZE
 from docker.models.images import Image
-import unittest
 
 from .fake_api import FAKE_IMAGE_ID
 from .fake_api_client import make_fake_client
@@ -43,15 +45,19 @@ def test_load(self):
     def test_pull(self):
         client = make_fake_client()
         image = client.images.pull('test_image:latest')
-        client.api.pull.assert_called_with('test_image', tag='latest')
+        client.api.pull.assert_called_with(
+            'test_image', tag='latest', stream=True
+        )
         client.api.inspect_image.assert_called_with('test_image:latest')
         assert isinstance(image, Image)
         assert image.id == FAKE_IMAGE_ID
 
     def test_pull_multiple(self):
         client = make_fake_client()
         images = client.images.pull('test_image')
-        client.api.pull.assert_called_with('test_image', tag=None)
+        client.api.pull.assert_called_with(
+            'test_image', tag=None, stream=True
+        )
         client.api.images.assert_called_with(
             all=False, name='test_image', filters=None
         )
@@ -61,6 +67,16 @@ def test_pull_multiple(self):
         assert isinstance(image, Image)
         assert image.id == FAKE_IMAGE_ID
 
+    def test_pull_with_stream_param(self):
+        client = make_fake_client()
+        with warnings.catch_warnings(record=True) as w:
+            client.images.pull('test_image', stream=True)
+
+        assert len(w) == 1
+        assert str(w[0].message).startswith(
+            '`stream` is not a valid parameter'
+        )
+
     def test_push(self):
         client = make_fake_client()
         client.images.push('foobar', insecure_registry=True)

‎tests/unit/utils_config_test.py

Lines changed: 12 additions & 12 deletions
@@ -4,8 +4,8 @@
 import tempfile
 import json
 
-from py.test import ensuretemp
-from pytest import mark
+from pytest import mark, fixture
+
 from docker.utils import config
 
 try:
@@ -15,49 +15,49 @@
 
 
 class FindConfigFileTest(unittest.TestCase):
-    def tmpdir(self, name):
-        tmpdir = ensuretemp(name)
-        self.addCleanup(tmpdir.remove)
-        return tmpdir
+
+    @fixture(autouse=True)
+    def tmpdir(self, tmpdir):
+        self.mkdir = tmpdir.mkdir
 
     def test_find_config_fallback(self):
-        tmpdir = self.tmpdir('test_find_config_fallback')
+        tmpdir = self.mkdir('test_find_config_fallback')
 
         with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
             assert config.find_config_file() is None
 
     def test_find_config_from_explicit_path(self):
-        tmpdir = self.tmpdir('test_find_config_from_explicit_path')
+        tmpdir = self.mkdir('test_find_config_from_explicit_path')
         config_path = tmpdir.ensure('my-config-file.json')
 
         assert config.find_config_file(str(config_path)) == str(config_path)
 
     def test_find_config_from_environment(self):
-        tmpdir = self.tmpdir('test_find_config_from_environment')
+        tmpdir = self.mkdir('test_find_config_from_environment')
         config_path = tmpdir.ensure('config.json')
 
         with mock.patch.dict(os.environ, {'DOCKER_CONFIG': str(tmpdir)}):
             assert config.find_config_file() == str(config_path)
 
     @mark.skipif("sys.platform == 'win32'")
     def test_find_config_from_home_posix(self):
-        tmpdir = self.tmpdir('test_find_config_from_home_posix')
+        tmpdir = self.mkdir('test_find_config_from_home_posix')
         config_path = tmpdir.ensure('.docker', 'config.json')
 
         with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
             assert config.find_config_file() == str(config_path)
 
     @mark.skipif("sys.platform == 'win32'")
     def test_find_config_from_home_legacy_name(self):
-        tmpdir = self.tmpdir('test_find_config_from_home_legacy_name')
+        tmpdir = self.mkdir('test_find_config_from_home_legacy_name')
         config_path = tmpdir.ensure('.dockercfg')
 
         with mock.patch.dict(os.environ, {'HOME': str(tmpdir)}):
             assert config.find_config_file() == str(config_path)
 
     @mark.skipif("sys.platform != 'win32'")
     def test_find_config_from_home_windows(self):
-        tmpdir = self.tmpdir('test_find_config_from_home_windows')
+        tmpdir = self.mkdir('test_find_config_from_home_windows')
         config_path = tmpdir.ensure('.docker', 'config.json')
 
         with mock.patch.dict(os.environ, {'USERPROFILE': str(tmpdir)}):

‎tests/unit/utils_test.py

Lines changed: 12 additions & 4 deletions
@@ -272,6 +272,11 @@ def test_parse_host(self):
             'tcp://',
             'udp://127.0.0.1',
             'udp://127.0.0.1:2375',
+            'ssh://:22/path',
+            'tcp://netloc:3333/path?q=1',
+            'unix:///sock/path#fragment',
+            'https://netloc:3333/path;params',
+            'ssh://:clearpassword@host:22',
         ]
 
         valid_hosts = {
@@ -281,7 +286,7 @@ def test_parse_host(self):
             'http://:7777': 'http://127.0.0.1:7777',
             'https://kokia.jp:2375': 'https://kokia.jp:2375',
             'unix:///var/run/docker.sock': 'http+unix:///var/run/docker.sock',
-            'unix://': 'http+unix://var/run/docker.sock',
+            'unix://': 'http+unix:///var/run/docker.sock',
             '12.234.45.127:2375/docker/engine': (
                 'http://12.234.45.127:2375/docker/engine'
             ),
@@ -294,6 +299,9 @@ def test_parse_host(self):
             '[fd12::82d1]:2375/docker/engine': (
                 'http://[fd12::82d1]:2375/docker/engine'
            ),
+            'ssh://': 'ssh://127.0.0.1:22',
+            'ssh://user@localhost:22': 'ssh://user@localhost:22',
+            'ssh://user@remote': 'ssh://user@remote:22',
         }
 
         for host in invalid_hosts:
@@ -304,7 +312,7 @@ def test_parse_host(self):
             assert parse_host(host, None) == expected
 
     def test_parse_host_empty_value(self):
-        unix_socket = 'http+unix://var/run/docker.sock'
+        unix_socket = 'http+unix:///var/run/docker.sock'
         npipe = 'npipe:////./pipe/docker_engine'
 
         for val in [None, '']:
@@ -449,8 +457,8 @@ def test_convert_filters(self):
         tests = [
             ({'dangling': True}, '{"dangling": ["true"]}'),
             ({'dangling': "true"}, '{"dangling": ["true"]}'),
-            ({'exited': 0}, '{"exited": [0]}'),
-            ({'exited': [0, 1]}, '{"exited": [0, 1]}'),
+            ({'exited': 0}, '{"exited": ["0"]}'),
+            ({'exited': [0, 1]}, '{"exited": ["0", "1"]}'),
         ]
 
         for filters, expected in tests:
