OLD | NEW |
| (Empty) |
1 # -*- coding: utf-8 -*- | |
2 | |
3 """ | |
4 requests.adapters | |
5 ~~~~~~~~~~~~~~~~~ | |
6 | |
7 This module contains the transport adapters that Requests uses to define | |
8 and maintain connections. | |
9 """ | |
10 | |
11 import socket | |
12 | |
13 from .models import Response | |
14 from .packages.urllib3.poolmanager import PoolManager, proxy_from_url | |
15 from .packages.urllib3.response import HTTPResponse | |
16 from .packages.urllib3.util import Timeout as TimeoutSauce | |
17 from .compat import urlparse, basestring, urldefrag, unquote | |
18 from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, | |
19 except_on_missing_scheme, get_auth_from_url) | |
20 from .structures import CaseInsensitiveDict | |
21 from .packages.urllib3.exceptions import MaxRetryError | |
22 from .packages.urllib3.exceptions import TimeoutError | |
23 from .packages.urllib3.exceptions import SSLError as _SSLError | |
24 from .packages.urllib3.exceptions import HTTPError as _HTTPError | |
25 from .cookies import extract_cookies_to_jar | |
26 from .exceptions import ConnectionError, Timeout, SSLError | |
27 from .auth import _basic_auth_str | |
28 | |
# Defaults for HTTPAdapter's urllib3 pool manager configuration.
DEFAULT_POOLBLOCK = False  # don't block waiting for a free connection
DEFAULT_POOLSIZE = 10      # pools to cache, and max connections per pool
DEFAULT_RETRIES = 0        # no automatic retries
32 | |
33 | |
class BaseAdapter(object):
    """The Base Transport Adapter.

    Defines the minimal interface every transport adapter must provide:
    ``send`` to dispatch a request and ``close`` to release resources.
    Both are abstract here and raise ``NotImplementedError``.
    """

    def __init__(self):
        super(BaseAdapter, self).__init__()

    def close(self):
        """Release any resources held by the adapter. Must be overridden."""
        raise NotImplementedError

    def send(self):
        """Dispatch a prepared request. Must be overridden by subclasses."""
        raise NotImplementedError
45 | |
46 | |
class HTTPAdapter(BaseAdapter):
    """The built-in HTTP Adapter for urllib3.

    Provides a general-case interface for Requests sessions to contact HTTP and
    HTTPS urls by implementing the Transport Adapter interface. This class will
    usually be created by the :class:`Session <Session>` class under the
    covers.

    :param pool_connections: The number of urllib3 connection pools to cache.
    :param pool_maxsize: The maximum number of connections to save in the pool.
    :param max_retries: The maximum number of retries each connection should attempt.
    :param pool_block: Whether the connection pool should block for connections.

    Usage::

      >>> import requests
      >>> s = requests.Session()
      >>> a = requests.adapters.HTTPAdapter()
      >>> s.mount('http://', a)
    """
    # Only these attributes survive pickling; the live pool manager is
    # rebuilt from them in __setstate__.
    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
                 '_pool_block']

    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
                 pool_block=DEFAULT_POOLBLOCK):
        self.max_retries = max_retries
        self.config = {}
        # Cache of proxy URL -> urllib3 proxy manager, filled lazily in
        # get_connection().
        self.proxy_manager = {}

        super(HTTPAdapter, self).__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)

    def __getstate__(self):
        # urllib3 pool managers hold live sockets and cannot be pickled, so
        # only the plain configuration values in __attrs__ are serialized.
        return dict((attr, getattr(self, attr, None)) for attr in
                    self.__attrs__)

    def __setstate__(self, state):
        for attr, value in state.items():
            setattr(self, attr, value)

        # Recreate the pool manager that __getstate__ intentionally dropped.
        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block)

    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK):
        """Initializes a urllib3 PoolManager. This method should not be called
        from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param connections: The number of urllib3 connection pools to cache.
        :param maxsize: The maximum number of connections to save in the pool.
        :param block: Block when no free connections are available.
        """
        # save these values for pickling
        self._pool_connections = connections
        self._pool_maxsize = maxsize
        self._pool_block = block

        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                       block=block)

    def cert_verify(self, conn, url, verify, cert):
        """Verify a SSL certificate. This method should not be called from user
        code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param conn: The urllib3 connection object associated with the cert.
        :param url: The requested URL.
        :param verify: Whether we should actually verify the certificate; may
            also be a path to a user-specified CA bundle.
        :param cert: The SSL certificate to verify (a path, or a
            (cert, key) pair).
        :raises IOError: If no CA certificate bundle can be located.
        """
        if url.lower().startswith('https') and verify:

            cert_loc = None

            # Allow self-specified cert location.
            if verify is not True:
                cert_loc = verify

            if not cert_loc:
                cert_loc = DEFAULT_CA_BUNDLE_PATH

            if not cert_loc:
                # IOError rather than a bare Exception: this is a
                # missing-file condition, and IOError remains catchable by
                # any caller that previously caught Exception.
                raise IOError("Could not find a suitable SSL CA certificate bundle.")

            conn.cert_reqs = 'CERT_REQUIRED'
            conn.ca_certs = cert_loc
        else:
            conn.cert_reqs = 'CERT_NONE'
            conn.ca_certs = None

        if cert:
            if not isinstance(cert, basestring):
                # A (cert_file, key_file) pair.
                conn.cert_file = cert[0]
                conn.key_file = cert[1]
            else:
                conn.cert_file = cert

    def build_response(self, req, resp):
        """Builds a :class:`Response <requests.Response>` object from a urllib3
        response. This should not be called from user code, and is only exposed
        for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`

        :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
        :param resp: The urllib3 response object.
        """
        response = Response()

        # Fallback to None if there's no status_code, for whatever reason.
        response.status_code = getattr(resp, 'status', None)

        # Make headers case-insensitive.
        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))

        # Set encoding.
        response.encoding = get_encoding_from_headers(response.headers)
        response.raw = resp
        response.reason = response.raw.reason

        if isinstance(req.url, bytes):
            response.url = req.url.decode('utf-8')
        else:
            response.url = req.url

        # Add new cookies from the server.
        extract_cookies_to_jar(response.cookies, req, resp)

        # Give the Response some context.
        response.request = req
        response.connection = self

        return response

    def get_connection(self, url, proxies=None):
        """Returns a urllib3 connection for the given URL. This should not be
        called from user code, and is only exposed for use when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param url: The URL to connect to.
        :param proxies: (optional) A Requests-style dictionary of proxies used on this request.
        """
        proxies = proxies or {}
        proxy = proxies.get(urlparse(url.lower()).scheme)

        if proxy:
            except_on_missing_scheme(proxy)
            proxy_headers = self.proxy_headers(proxy)

            # Lazily create and cache one proxy manager per proxy URL.
            if proxy not in self.proxy_manager:
                self.proxy_manager[proxy] = proxy_from_url(
                    proxy,
                    proxy_headers=proxy_headers)

            conn = self.proxy_manager[proxy].connection_from_url(url)
        else:
            conn = self.poolmanager.connection_from_url(url.lower())

        return conn

    def close(self):
        """Disposes of any internal state.

        Currently, this just closes the PoolManager, which closes pooled
        connections.
        """
        self.poolmanager.clear()

    def request_url(self, request, proxies):
        """Obtain the url to use when making the final request.

        If the message is being sent through a proxy, the full URL has to be
        used. Otherwise, we should only use the path portion of the URL.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param proxies: A dictionary of schemes to proxy URLs.
        """
        proxies = proxies or {}
        proxy = proxies.get(urlparse(request.url).scheme)

        if proxy:
            # Proxies need the absolute URL (minus any fragment).
            url, _ = urldefrag(request.url)
        else:
            url = request.path_url

        return url

    def add_headers(self, request, **kwargs):
        """Add any headers needed by the connection. As of v2.0 this does
        nothing by default, but is left for overriding by users that subclass
        the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
        :param kwargs: The keyword arguments from the call to send().
        """
        pass

    def proxy_headers(self, proxy):
        """Returns a dictionary of the headers to add to any request sent
        through a proxy. This works with urllib3 magic to ensure that they are
        correctly sent to the proxy, rather than in a tunnelled request if
        CONNECT is being used.

        This should not be called from user code, and is only exposed for use
        when subclassing the
        :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.

        :param proxy: The url of the proxy being used for this request.
        """
        headers = {}
        username, password = get_auth_from_url(proxy)

        if username and password:
            # Proxy auth usernames and passwords will be urlencoded, we need
            # to decode them.
            username = unquote(username)
            password = unquote(password)
            headers['Proxy-Authorization'] = _basic_auth_str(username,
                                                             password)

        return headers

    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
        """Sends PreparedRequest object. Returns Response object.

        :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
        :param stream: (optional) Whether to stream the request content.
        :param timeout: (optional) The timeout on the request.
        :param verify: (optional) Whether to verify SSL certificates.
        :param cert: (optional) Any user-provided SSL certificate to be trusted.
        :param proxies: (optional) The proxies dictionary to apply to the request.
        """

        conn = self.get_connection(request.url, proxies)

        self.cert_verify(conn, request.url, verify, cert)
        url = self.request_url(request, proxies)
        self.add_headers(request)

        # A request is sent chunked when it has a body but no Content-Length
        # header (i.e. the body length is unknown up front).
        chunked = not (request.body is None or 'Content-Length' in request.headers)

        if stream:
            # Streaming responses: only bound the connect phase.
            timeout = TimeoutSauce(connect=timeout)
        else:
            timeout = TimeoutSauce(connect=timeout, read=timeout)

        try:
            if not chunked:
                resp = conn.urlopen(
                    method=request.method,
                    url=url,
                    body=request.body,
                    headers=request.headers,
                    redirect=False,
                    assert_same_host=False,
                    preload_content=False,
                    decode_content=False,
                    retries=self.max_retries,
                    timeout=timeout
                )

            # Send the request.
            else:
                if hasattr(conn, 'proxy_pool'):
                    conn = conn.proxy_pool

                low_conn = conn._get_conn(timeout=timeout)

                try:
                    low_conn.putrequest(request.method, url, skip_accept_encoding=True)

                    for header, value in request.headers.items():
                        low_conn.putheader(header, value)

                    low_conn.endheaders()

                    # Hand-roll chunked transfer encoding: hex chunk size,
                    # CRLF, chunk data, CRLF; terminated by a zero-size chunk.
                    for i in request.body:
                        low_conn.send(hex(len(i))[2:].encode('utf-8'))
                        low_conn.send(b'\r\n')
                        low_conn.send(i)
                        low_conn.send(b'\r\n')
                    low_conn.send(b'0\r\n\r\n')

                    r = low_conn.getresponse()
                    resp = HTTPResponse.from_httplib(r,
                        pool=conn,
                        connection=low_conn,
                        preload_content=False,
                        decode_content=False
                    )
                except:
                    # If anything fails mid-send, the low-level connection is
                    # in an unknown state: close it rather than leaking it or
                    # returning it to the pool, then re-raise for the outer
                    # handlers below.
                    low_conn.close()
                    raise

        except socket.error as sockerr:
            raise ConnectionError(sockerr)

        except MaxRetryError as e:
            raise ConnectionError(e)

        except (_SSLError, _HTTPError) as e:
            if isinstance(e, _SSLError):
                raise SSLError(e)
            elif isinstance(e, TimeoutError):
                raise Timeout(e)
            else:
                raise

        r = self.build_response(request, resp)

        if not stream:
            # Eagerly consume the body so the connection can be released
            # back to the pool.
            r.content

        return r
OLD | NEW |