Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(111)

Side by Side Diff: third_party/google-endpoints/requests/adapters.py

Issue 2666783008: Add google-endpoints to third_party/. (Closed)
Patch Set: Created 3 years, 10 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 # -*- coding: utf-8 -*-
2
3 """
4 requests.adapters
5 ~~~~~~~~~~~~~~~~~
6
7 This module contains the transport adapters that Requests uses to define
8 and maintain connections.
9 """
10
11 import os.path
12 import socket
13
14 from .models import Response
15 from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
16 from .packages.urllib3.response import HTTPResponse
17 from .packages.urllib3.util import Timeout as TimeoutSauce
18 from .packages.urllib3.util.retry import Retry
19 from .compat import urlparse, basestring
20 from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
21 prepend_scheme_if_needed, get_auth_from_url, urldefragauth,
22 select_proxy, to_native_string)
23 from .structures import CaseInsensitiveDict
24 from .packages.urllib3.exceptions import ClosedPoolError
25 from .packages.urllib3.exceptions import ConnectTimeoutError
26 from .packages.urllib3.exceptions import HTTPError as _HTTPError
27 from .packages.urllib3.exceptions import MaxRetryError
28 from .packages.urllib3.exceptions import NewConnectionError
29 from .packages.urllib3.exceptions import ProxyError as _ProxyError
30 from .packages.urllib3.exceptions import ProtocolError
31 from .packages.urllib3.exceptions import ReadTimeoutError
32 from .packages.urllib3.exceptions import SSLError as _SSLError
33 from .packages.urllib3.exceptions import ResponseError
34 from .cookies import extract_cookies_to_jar
35 from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
36 ProxyError, RetryError, InvalidSchema)
37 from .auth import _basic_auth_str
38
try:
    from .packages.urllib3.contrib.socks import SOCKSProxyManager
except ImportError:
    # PySocks (urllib3's socks contrib module) is an optional dependency;
    # without it, using a SOCKS proxy fails at request time rather than
    # at import time.
    def SOCKSProxyManager(*args, **kwargs):
        raise InvalidSchema("Missing dependencies for SOCKS support.")

# Defaults consumed by HTTPAdapter.__init__ / init_poolmanager.
DEFAULT_POOLBLOCK = False
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
DEFAULT_POOL_TIMEOUT = None
49
50
class BaseAdapter(object):
    """The Base Transport Adapter.

    Defines the interface every transport adapter must implement:
    :meth:`send` and :meth:`close`. Subclasses override both.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def send(self, request, stream=False, timeout=None, verify=True,
             cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) How long to wait for the server to send
            data before giving up, as a float, or a :ref:`(connect timeout,
            read timeout) <timeouts>` tuple.
        :type timeout: float or tuple
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """
        raise NotImplementedError

    def close(self):
        """Cleans up adapter specific items."""
        raise NotImplementedError
77
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection
        should attempt. Note, this applies only to failed DNS lookups, socket
        connections and connection timeouts, never to requests where data has
        made it to the server. By default, Requests does not retry failed
        connections. If you need granular control over the conditions under
        which we retry a request, import urllib3's ``Retry`` class and pass
        that instead.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter(max_retries=3)
      >>> s.mount('http://', a)
    """
    # The attribute names snapshotted by __getstate__ for pickling.
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        super(HTTPAdapter, self).__init__()

        # The DEFAULT_RETRIES sentinel keeps the historical behaviour of not
        # retrying reads; anything else is normalised through Retry.from_int.
        if max_retries == DEFAULT_RETRIES:
            self.max_retries = Retry(0, read=False)
        else:
            self.max_retries = Retry.from_int(max_retries)

        self.config = {}
        self.proxy_manager = {}

        # init_poolmanager also records _pool_connections/_pool_maxsize/
        # _pool_block for pickling.
        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
124
125 def __getstate__(self):
126 return dict((attr, getattr(self, attr, None)) for attr in
127 self.__attrs__)
128
129 def __setstate__(self, state):
130 # Can't handle by adding 'proxy_manager' to self.__attrs__ because
131 # self.poolmanager uses a lambda function, which isn't pickleable.
132 self.proxy_manager = {}
133 self.config = {}
134
135 for attr, value in state.items():
136 setattr(self, attr, value)
137
138 self.init_poolmanager(self._pool_connections, self._pool_maxsize,
139 block=self._pool_block)
140
141 def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, ** pool_kwargs):
142 """Initializes a urllib3 PoolManager.
143
144 This method should not be called from user code, and is only
145 exposed for use when subclassing the
146 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
147
148 :param connections: The number of urllib3 connection pools to cache.
149 :param maxsize: The maximum number of connections to save in the pool.
150 :param block: Block when no free connections are available.
151 :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
152 """
153 # save these values for pickling
154 self._pool_connections = connections
155 self._pool_maxsize = maxsize
156 self._pool_block = block
157
158 self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
159 block=block, strict=True, **pool_kwargs)
160
161 def proxy_manager_for(self, proxy, **proxy_kwargs):
162 """Return urllib3 ProxyManager for the given proxy.
163
164 This method should not be called from user code, and is only
165 exposed for use when subclassing the
166 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
167
168 :param proxy: The proxy to return a urllib3 ProxyManager for.
169 :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
170 :returns: ProxyManager
171 :rtype: requests.packages.urllib3.ProxyManager
172 """
173 if proxy in self.proxy_manager:
174 manager = self.proxy_manager[proxy]
175 elif proxy.lower().startswith('socks'):
176 username, password = get_auth_from_url(proxy)
177 manager = self.proxy_manager[proxy] = SOCKSProxyManager(
178 proxy,
179 username=username,
180 password=password,
181 num_pools=self._pool_connections,
182 maxsize=self._pool_maxsize,
183 block=self._pool_block,
184 **proxy_kwargs
185 )
186 else:
187 proxy_headers = self.proxy_headers(proxy)
188 manager = self.proxy_manager[proxy] = proxy_from_url(
189 proxy,
190 proxy_headers=proxy_headers,
191 num_pools=self._pool_connections,
192 maxsize=self._pool_maxsize,
193 block=self._pool_block,
194 **proxy_kwargs)
195
196 return manager
197
198 def cert_verify(self, conn, url, verify, cert):
199 """Verify a SSL certificate. This method should not be called from user
200 code, and is only exposed for use when subclassing the
201 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
202
203 :param conn: The urllib3 connection object associated with the cert.
204 :param url: The requested URL.
205 :param verify: Whether we should actually verify the certificate.
206 :param cert: The SSL certificate to verify.
207 """
208 if url.lower().startswith('https') and verify:
209
210 cert_loc = None
211
212 # Allow self-specified cert location.
213 if verify is not True:
214 cert_loc = verify
215
216 if not cert_loc:
217 cert_loc = DEFAULT_CA_BUNDLE_PATH
218
219 if not cert_loc:
220 raise Exception("Could not find a suitable SSL CA certificate bu ndle.")
221
222 conn.cert_reqs = 'CERT_REQUIRED'
223
224 if not os.path.isdir(cert_loc):
225 conn.ca_certs = cert_loc
226 else:
227 conn.ca_cert_dir = cert_loc
228 else:
229 conn.cert_reqs = 'CERT_NONE'
230 conn.ca_certs = None
231 conn.ca_cert_dir = None
232
233 if cert:
234 if not isinstance(cert, basestring):
235 conn.cert_file = cert[0]
236 conn.key_file = cert[1]
237 else:
238 conn.cert_file = cert
239
240 def build_response(self, req, resp):
241 """Builds a :class:`Response <requests.Response>` object from a urllib3
242 response. This should not be called from user code, and is only exposed
243 for use when subclassing the
244 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
245
246 :param req: The :class:`PreparedRequest <PreparedRequest>` used to gener ate the response.
247 :param resp: The urllib3 response object.
248 :rtype: requests.Response
249 """
250 response = Response()
251
252 # Fallback to None if there's no status_code, for whatever reason.
253 response.status_code = getattr(resp, 'status', None)
254
255 # Make headers case-insensitive.
256 response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
257
258 # Set encoding.
259 response.encoding = get_encoding_from_headers(response.headers)
260 response.raw = resp
261 response.reason = response.raw.reason
262
263 if isinstance(req.url, bytes):
264 response.url = req.url.decode('utf-8')
265 else:
266 response.url = req.url
267
268 # Add new cookies from the server.
269 extract_cookies_to_jar(response.cookies, req, resp)
270
271 # Give the Response some context.
272 response.request = req
273 response.connection = self
274
275 return response
276
277 def get_connection(self, url, proxies=None):
278 """Returns a urllib3 connection for the given URL. This should not be
279 called from user code, and is only exposed for use when subclassing the
280 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
281
282 :param url: The URL to connect to.
283 :param proxies: (optional) A Requests-style dictionary of proxies used o n this request.
284 :rtype: requests.packages.urllib3.ConnectionPool
285 """
286 proxy = select_proxy(url, proxies)
287
288 if proxy:
289 proxy = prepend_scheme_if_needed(proxy, 'http')
290 proxy_manager = self.proxy_manager_for(proxy)
291 conn = proxy_manager.connection_from_url(url)
292 else:
293 # Only scheme should be lower case
294 parsed = urlparse(url)
295 url = parsed.geturl()
296 conn = self.poolmanager.connection_from_url(url)
297
298 return conn
299
300 def close(self):
301 """Disposes of any internal state.
302
303 Currently, this closes the PoolManager and any active ProxyManager,
304 which closes any pooled connections.
305 """
306 self.poolmanager.clear()
307 for proxy in self.proxy_manager.values():
308 proxy.clear()
309
310 def request_url(self, request, proxies):
311 """Obtain the url to use when making the final request.
312
313 If the message is being sent through a HTTP proxy, the full URL has to
314 be used. Otherwise, we should only use the path portion of the URL.
315
316 This should not be called from user code, and is only exposed for use
317 when subclassing the
318 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
319
320 :param request: The :class:`PreparedRequest <PreparedRequest>` being sen t.
321 :param proxies: A dictionary of schemes or schemes and hosts to proxy UR Ls.
322 :rtype: str
323 """
324 proxy = select_proxy(request.url, proxies)
325 scheme = urlparse(request.url).scheme
326
327 is_proxied_http_request = (proxy and scheme != 'https')
328 using_socks_proxy = False
329 if proxy:
330 proxy_scheme = urlparse(proxy).scheme.lower()
331 using_socks_proxy = proxy_scheme.startswith('socks')
332
333 url = request.path_url
334 if is_proxied_http_request and not using_socks_proxy:
335 url = urldefragauth(request.url)
336
337 return url
338
339 def add_headers(self, request, **kwargs):
340 """Add any headers needed by the connection. As of v2.0 this does
341 nothing by default, but is left for overriding by users that subclass
342 the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
343
344 This should not be called from user code, and is only exposed for use
345 when subclassing the
346 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
347
348 :param request: The :class:`PreparedRequest <PreparedRequest>` to add he aders to.
349 :param kwargs: The keyword arguments from the call to send().
350 """
351 pass
352
353 def proxy_headers(self, proxy):
354 """Returns a dictionary of the headers to add to any request sent
355 through a proxy. This works with urllib3 magic to ensure that they are
356 correctly sent to the proxy, rather than in a tunnelled request if
357 CONNECT is being used.
358
359 This should not be called from user code, and is only exposed for use
360 when subclassing the
361 :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
362
363 :param proxies: The url of the proxy being used for this request.
364 :rtype: dict
365 """
366 headers = {}
367 username, password = get_auth_from_url(proxy)
368
369 if username:
370 headers['Proxy-Authorization'] = _basic_auth_str(username,
371 password)
372
373 return headers
374
375 def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
376 """Sends PreparedRequest object. Returns Response object.
377
378 :param request: The :class:`PreparedRequest <PreparedRequest>` being sen t.
379 :param stream: (optional) Whether to stream the request content.
380 :param timeout: (optional) How long to wait for the server to send
381 data before giving up, as a float, or a :ref:`(connect timeout,
382 read timeout) <timeouts>` tuple.
383 :type timeout: float or tuple
384 :param verify: (optional) Whether to verify SSL certificates.
385 :param cert: (optional) Any user-provided SSL certificate to be trusted.
386 :param proxies: (optional) The proxies dictionary to apply to the reques t.
387 :rtype: requests.Response
388 """
389
390 conn = self.get_connection(request.url, proxies)
391
392 self.cert_verify(conn, request.url, verify, cert)
393 url = self.request_url(request, proxies)
394 self.add_headers(request)
395
396 chunked = not (request.body is None or 'Content-Length' in request.heade rs)
397
398 if isinstance(timeout, tuple):
399 try:
400 connect, read = timeout
401 timeout = TimeoutSauce(connect=connect, read=read)
402 except ValueError as e:
403 # this may raise a string formatting error.
404 err = ("Invalid timeout {0}. Pass a (connect, read) "
405 "timeout tuple, or a single float to set "
406 "both timeouts to the same value".format(timeout))
407 raise ValueError(err)
408 else:
409 timeout = TimeoutSauce(connect=timeout, read=timeout)
410
411 try:
412 if not chunked:
413 resp = conn.urlopen(
414 method=request.method,
415 url=url,
416 body=request.body,
417 headers=request.headers,
418 redirect=False,
419 assert_same_host=False,
420 preload_content=False,
421 decode_content=False,
422 retries=self.max_retries,
423 timeout=timeout
424 )
425
426 # Send the request.
427 else:
428 if hasattr(conn, 'proxy_pool'):
429 conn = conn.proxy_pool
430
431 low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
432
433 try:
434 low_conn.putrequest(request.method,
435 url,
436 skip_accept_encoding=True)
437
438 for header, value in request.headers.items():
439 low_conn.putheader(header, value)
440
441 low_conn.endheaders()
442
443 for i in request.body:
444 low_conn.send(hex(len(i))[2:].encode('utf-8'))
445 low_conn.send(b'\r\n')
446 low_conn.send(i)
447 low_conn.send(b'\r\n')
448 low_conn.send(b'0\r\n\r\n')
449
450 # Receive the response from the server
451 try:
452 # For Python 2.7+ versions, use buffering of HTTP
453 # responses
454 r = low_conn.getresponse(buffering=True)
455 except TypeError:
456 # For compatibility with Python 2.6 versions and back
457 r = low_conn.getresponse()
458
459 resp = HTTPResponse.from_httplib(
460 r,
461 pool=conn,
462 connection=low_conn,
463 preload_content=False,
464 decode_content=False
465 )
466 except:
467 # If we hit any problems here, clean up the connection.
468 # Then, reraise so that we can handle the actual exception.
469 low_conn.close()
470 raise
471
472 except (ProtocolError, socket.error) as err:
473 raise ConnectionError(err, request=request)
474
475 except MaxRetryError as e:
476 if isinstance(e.reason, ConnectTimeoutError):
477 # TODO: Remove this in 3.0.0: see #2811
478 if not isinstance(e.reason, NewConnectionError):
479 raise ConnectTimeout(e, request=request)
480
481 if isinstance(e.reason, ResponseError):
482 raise RetryError(e, request=request)
483
484 if isinstance(e.reason, _ProxyError):
485 raise ProxyError(e, request=request)
486
487 raise ConnectionError(e, request=request)
488
489 except ClosedPoolError as e:
490 raise ConnectionError(e, request=request)
491
492 except _ProxyError as e:
493 raise ProxyError(e)
494
495 except (_SSLError, _HTTPError) as e:
496 if isinstance(e, _SSLError):
497 raise SSLError(e, request=request)
498 elif isinstance(e, ReadTimeoutError):
499 raise ReadTimeout(e, request=request)
500 else:
501 raise
502
503 return self.build_response(request, resp)
OLDNEW
« no previous file with comments | « third_party/google-endpoints/requests/_internal_utils.py ('k') | third_party/google-endpoints/requests/api.py » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698