
Commit 1569889

Upgrade joblib to version 0.13.2
1 parent a2d1b07 commit 1569889

7 files changed: +155 -80 lines changed

sklearn/externals/joblib/__init__.py

Lines changed: 2 additions & 2 deletions
@@ -14,7 +14,7 @@
 ==================== ===============================================
 **Documentation:** https://joblib.readthedocs.io

-**Download:** http://pypi.python.org/pypi/joblib#downloads
+**Download:** https://pypi.python.org/pypi/joblib#downloads

 **Source code:** https://github.com/joblib/joblib

@@ -106,7 +106,7 @@
 # Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
 # 'X.Y.dev0' is the canonical version of 'X.Y.dev'
 #
-__version__ = '0.13.0'
+__version__ = '0.13.2'


 from .memory import Memory, MemorizedResult, register_store_backend

sklearn/externals/joblib/_dask.py

Lines changed: 42 additions & 1 deletion
@@ -30,6 +30,14 @@ def is_weakrefable(obj):
         return False


+try:
+    TimeoutError = TimeoutError
+except NameError:
+    # Python 2 backward compat
+    class TimeoutError(OSError):
+        pass
+
+
 class _WeakKeyDictionary:
     """A variant of weakref.WeakKeyDictionary for unhashable objects.

@@ -102,12 +110,24 @@ def __reduce__(self):
         return Batch, (self.tasks,)


+def _joblib_probe_task():
+    # Noop used by the joblib connector to probe when workers are ready.
+    pass
+
+
 class DaskDistributedBackend(ParallelBackendBase, AutoBatchingMixin):
     MIN_IDEAL_BATCH_DURATION = 0.2
     MAX_IDEAL_BATCH_DURATION = 1.0

     def __init__(self, scheduler_host=None, scatter=None,
-                 client=None, loop=None, **submit_kwargs):
+                 client=None, loop=None, wait_for_workers_timeout=10,
+                 **submit_kwargs):
+        if distributed is None:
+            msg = ("You are trying to use 'dask' as a joblib parallel backend "
+                   "but dask is not installed. Please install dask "
+                   "to fix this error.")
+            raise ValueError(msg)
+
         if client is None:
             if scheduler_host:
                 client = Client(scheduler_host, loop=loop,
@@ -139,6 +159,7 @@ def __init__(self, scheduler_host=None, scatter=None,
         self._scatter = []
         self.data_futures = {}
         self.task_futures = set()
+        self.wait_for_workers_timeout = wait_for_workers_timeout
         self.submit_kwargs = submit_kwargs

     def __reduce__(self):
@@ -159,6 +180,26 @@ def stop_call(self):
         self.call_data_futures.clear()

     def effective_n_jobs(self, n_jobs):
+        effective_n_jobs = sum(self.client.ncores().values())
+        if effective_n_jobs != 0 or not self.wait_for_workers_timeout:
+            return effective_n_jobs
+
+        # If there is no worker, schedule a probe task to wait for the workers
+        # to come up and be available. If the dask cluster is in adaptive mode
+        # this task might cause the cluster to provision some workers.
+        try:
+            self.client.submit(_joblib_probe_task).result(
+                timeout=self.wait_for_workers_timeout)
+        except gen.TimeoutError:
+            error_msg = (
+                "DaskDistributedBackend has no worker after {} seconds. "
+                "Make sure that workers are started and can properly connect "
+                "to the scheduler and increase the joblib/dask connection "
+                "timeout with:\n\n"
+                "parallel_backend('dask', wait_for_workers_timeout={})"
+            ).format(self.wait_for_workers_timeout,
+                     max(10, 2 * self.wait_for_workers_timeout))
+            raise TimeoutError(error_msg)
         return sum(self.client.ncores().values())

     def _to_func_args(self, func):
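
Taken together, the _dask.py changes make the backend wait up to wait_for_workers_timeout seconds for dask workers to connect before reporting the number of effective jobs, and fail loudly with a TimeoutError otherwise. Below is a minimal usage sketch, assuming joblib 0.13.2 (the version vendored here) and dask.distributed are importable; the scheduler address and the sqrt workload are illustrative placeholders, not part of this commit.

# Minimal usage sketch for the new wait_for_workers_timeout parameter.
# Assumes joblib 0.13.2 and dask.distributed are installed; the scheduler
# address and the sqrt workload are illustrative placeholders.
from math import sqrt

from distributed import Client
from joblib import Parallel, delayed, parallel_backend

# Connect to a dask scheduler that may not have any workers yet
# (hypothetical address).
client = Client('tcp://127.0.0.1:8786')

# Wait up to 60 seconds for at least one worker before running; if none
# shows up, the backend now raises TimeoutError with a hint to increase
# the timeout, instead of reporting zero effective jobs.
with parallel_backend('dask', wait_for_workers_timeout=60):
    results = Parallel(n_jobs=-1)(delayed(sqrt)(i ** 2) for i in range(10))

print(results)

Note that passing wait_for_workers_timeout=0 skips the probe entirely and restores the previous return-immediately behaviour, since the added guard returns as soon as the timeout is falsy.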

sklearn/externals/joblib/externals/cloudpickle/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -2,4 +2,4 @@

 from .cloudpickle import *

-__version__ = '0.6.1'
+__version__ = '0.8.0'
