# -*- coding: utf-8 -*-

"""
requests.adapters
~~~~~~~~~~~~~~~~~

This module contains the transport adapters that Requests uses to define
and maintain connections.
"""

import socket

from .models import Response
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce
from .compat import urlparse, basestring, urldefrag, unquote
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
                    prepend_scheme_if_needed, get_auth_from_url)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import TimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .cookies import extract_cookies_to_jar
from .exceptions import ConnectionError, Timeout, SSLError, ProxyError
from .auth import _basic_auth_str

# Defaults for HTTPAdapter construction; referenced by __init__'s signature.
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
class BaseAdapter(object):
    """The Base Transport Adapter.

    Defines the interface every transport adapter must implement: a
    ``send`` method that performs a request and a ``close`` method that
    disposes of any adapter state.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self):
        """Send a PreparedRequest. Subclasses must override."""
        raise NotImplementedError

    def close(self):
        """Dispose of any internal state. Subclasses must override."""
        raise NotImplementedError
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param int max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed connections and
        timeouts, never to requests where the server returns a response.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    # Attributes that are pickled by __getstate__; everything else
    # (poolmanager, proxy_manager) is rebuilt in __setstate__.
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
74
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
75
pool_block=DEFAULT_POOLBLOCK):
76
self.max_retries = max_retries
78
self.proxy_manager = {}
80
super(HTTPAdapter, self).__init__()
82
self._pool_connections = pool_connections
83
self._pool_maxsize = pool_maxsize
84
self._pool_block = pool_block
86
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
88
def __getstate__(self):
89
return dict((attr, getattr(self, attr, None)) for attr in
92
def __setstate__(self, state):
93
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
94
# because self.poolmanager uses a lambda function, which isn't pickleable.
95
self.proxy_manager = {}
98
for attr, value in state.items():
99
setattr(self, attr, value)
101
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
102
block=self._pool_block)
104
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
105
"""Initializes a urllib3 PoolManager. This method should not be called
106
from user code, and is only exposed for use when subclassing the
107
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
109
:param connections: The number of urllib3 connection pools to cache.
110
:param maxsize: The maximum number of connections to save in the pool.
111
:param block: Block when no free connections are available.
113
# save these values for pickling
114
self._pool_connections = connections
115
self._pool_maxsize = maxsize
116
self._pool_block = block
118
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
121
def cert_verify(self, conn, url, verify, cert):
122
"""Verify a SSL certificate. This method should not be called from user
123
code, and is only exposed for use when subclassing the
124
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
126
:param conn: The urllib3 connection object associated with the cert.
127
:param url: The requested URL.
128
:param verify: Whether we should actually verify the certificate.
129
:param cert: The SSL certificate to verify.
131
if url.lower().startswith('https') and verify:
135
# Allow self-specified cert location.
136
if verify is not True:
140
cert_loc = DEFAULT_CA_BUNDLE_PATH
143
raise Exception("Could not find a suitable SSL CA certificate bundle.")
145
conn.cert_reqs = 'CERT_REQUIRED'
146
conn.ca_certs = cert_loc
148
conn.cert_reqs = 'CERT_NONE'
152
if not isinstance(cert, basestring):
153
conn.cert_file = cert[0]
154
conn.key_file = cert[1]
156
conn.cert_file = cert
158
def build_response(self, req, resp):
159
"""Builds a :class:`Response <requests.Response>` object from a urllib3
160
response. This should not be called from user code, and is only exposed
161
for use when subclassing the
162
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
164
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
165
:param resp: The urllib3 response object.
167
response = Response()
169
# Fallback to None if there's no status_code, for whatever reason.
170
response.status_code = getattr(resp, 'status', None)
172
# Make headers case-insensitive.
173
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
176
response.encoding = get_encoding_from_headers(response.headers)
178
response.reason = response.raw.reason
180
if isinstance(req.url, bytes):
181
response.url = req.url.decode('utf-8')
183
response.url = req.url
185
# Add new cookies from the server.
186
extract_cookies_to_jar(response.cookies, req, resp)
188
# Give the Response some context.
189
response.request = req
190
response.connection = self
194
def get_connection(self, url, proxies=None):
195
"""Returns a urllib3 connection for the given URL. This should not be
196
called from user code, and is only exposed for use when subclassing the
197
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
199
:param url: The URL to connect to.
200
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
202
proxies = proxies or {}
203
proxy = proxies.get(urlparse(url.lower()).scheme)
206
proxy = prepend_scheme_if_needed(proxy, 'http')
207
proxy_headers = self.proxy_headers(proxy)
209
if not proxy in self.proxy_manager:
210
self.proxy_manager[proxy] = proxy_from_url(
212
proxy_headers=proxy_headers,
213
num_pools=self._pool_connections,
214
maxsize=self._pool_maxsize,
215
block=self._pool_block)
217
conn = self.proxy_manager[proxy].connection_from_url(url)
219
# Only scheme should be lower case
220
parsed = urlparse(url)
221
url = parsed.geturl()
222
conn = self.poolmanager.connection_from_url(url)
227
"""Disposes of any internal state.
229
Currently, this just closes the PoolManager, which closes pooled
232
self.poolmanager.clear()
234
def request_url(self, request, proxies):
235
"""Obtain the url to use when making the final request.
237
If the message is being sent through a HTTP proxy, the full URL has to
238
be used. Otherwise, we should only use the path portion of the URL.
240
This should not be called from user code, and is only exposed for use
242
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
244
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
245
:param proxies: A dictionary of schemes to proxy URLs.
247
proxies = proxies or {}
248
scheme = urlparse(request.url).scheme
249
proxy = proxies.get(scheme)
251
if proxy and scheme != 'https':
252
url, _ = urldefrag(request.url)
254
url = request.path_url
258
def add_headers(self, request, **kwargs):
259
"""Add any headers needed by the connection. As of v2.0 this does
260
nothing by default, but is left for overriding by users that subclass
261
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
263
This should not be called from user code, and is only exposed for use
265
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
267
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
268
:param kwargs: The keyword arguments from the call to send().
272
def proxy_headers(self, proxy):
273
"""Returns a dictionary of the headers to add to any request sent
274
through a proxy. This works with urllib3 magic to ensure that they are
275
correctly sent to the proxy, rather than in a tunnelled request if
276
CONNECT is being used.
278
This should not be called from user code, and is only exposed for use
280
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
282
:param proxies: The url of the proxy being used for this request.
283
:param kwargs: Optional additional keyword arguments.
286
username, password = get_auth_from_url(proxy)
288
if username and password:
289
headers['Proxy-Authorization'] = _basic_auth_str(username,
294
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
295
"""Sends PreparedRequest object. Returns Response object.
297
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
298
:param stream: (optional) Whether to stream the request content.
299
:param timeout: (optional) The timeout on the request.
300
:param verify: (optional) Whether to verify SSL certificates.
301
:param cert: (optional) Any user-provided SSL certificate to be trusted.
302
:param proxies: (optional) The proxies dictionary to apply to the request.
305
conn = self.get_connection(request.url, proxies)
307
self.cert_verify(conn, request.url, verify, cert)
308
url = self.request_url(request, proxies)
309
self.add_headers(request)
311
chunked = not (request.body is None or 'Content-Length' in request.headers)
313
timeout = TimeoutSauce(connect=timeout, read=timeout)
318
method=request.method,
321
headers=request.headers,
323
assert_same_host=False,
324
preload_content=False,
325
decode_content=False,
326
retries=self.max_retries,
332
if hasattr(conn, 'proxy_pool'):
333
conn = conn.proxy_pool
335
low_conn = conn._get_conn(timeout=timeout)
338
low_conn.putrequest(request.method,
340
skip_accept_encoding=True)
342
for header, value in request.headers.items():
343
low_conn.putheader(header, value)
345
low_conn.endheaders()
347
for i in request.body:
348
low_conn.send(hex(len(i))[2:].encode('utf-8'))
349
low_conn.send(b'\r\n')
351
low_conn.send(b'\r\n')
352
low_conn.send(b'0\r\n\r\n')
354
r = low_conn.getresponse()
355
resp = HTTPResponse.from_httplib(
359
preload_content=False,
363
# If we hit any problems here, clean up the connection.
364
# Then, reraise so that we can handle the actual exception.
368
# All is well, return the connection to the pool.
369
conn._put_conn(low_conn)
371
except socket.error as sockerr:
372
raise ConnectionError(sockerr, request=request)
374
except MaxRetryError as e:
375
raise ConnectionError(e, request=request)
377
except _ProxyError as e:
380
except (_SSLError, _HTTPError) as e:
381
if isinstance(e, _SSLError):
382
raise SSLError(e, request=request)
383
elif isinstance(e, TimeoutError):
384
raise Timeout(e, request=request)
388
return self.build_response(request, resp)