Chromium Code Reviews

Side by Side Diff: third_party/google-endpoints/requests/models.py

Issue 2666783008: Add google-endpoints to third_party/. (Closed)
Patch Set: Created 3 years, 10 months ago
1 # -*- coding: utf-8 -*-
2
3 """
4 requests.models
5 ~~~~~~~~~~~~~~~
6
7 This module contains the primary objects that power Requests.
8 """
9
10 import collections
11 import datetime
12 import sys
13
14 # Import encoding now, to avoid implicit import later.
15 # Implicit import within threads may cause LookupError when standard library is in a ZIP,
16 # such as in Embedded Python. See https://github.com/kennethreitz/requests/issues/3578.
17 import encodings.idna
18
19 from io import BytesIO, UnsupportedOperation
20 from .hooks import default_hooks
21 from .structures import CaseInsensitiveDict
22
23 from .auth import HTTPBasicAuth
24 from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
25 from .packages.urllib3.fields import RequestField
26 from .packages.urllib3.filepost import encode_multipart_formdata
27 from .packages.urllib3.util import parse_url
28 from .packages.urllib3.exceptions import (
29 DecodeError, ReadTimeoutError, ProtocolError, LocationParseError)
30 from .exceptions import (
31 HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
32 ContentDecodingError, ConnectionError, StreamConsumedError)
33 from ._internal_utils import to_native_string, unicode_is_ascii
34 from .utils import (
35 guess_filename, get_auth_from_url, requote_uri,
36 stream_decode_response_unicode, to_key_val_list, parse_header_links,
37 iter_slices, guess_json_utf, super_len, check_header_validity)
38 from .compat import (
39 cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
40 is_py2, chardet, builtin_str, basestring)
41 from .compat import json as complexjson
42 from .status_codes import codes
43
44 #: The set of HTTP status codes that indicate an automatically
45 #: processable redirect.
46 REDIRECT_STATI = (
47 codes.moved, # 301
48 codes.found, # 302
49 codes.other, # 303
50 codes.temporary_redirect, # 307
51 codes.permanent_redirect, # 308
52 )
53
54 DEFAULT_REDIRECT_LIMIT = 30
55 CONTENT_CHUNK_SIZE = 10 * 1024
56 ITER_CHUNK_SIZE = 512
57
58
59 class RequestEncodingMixin(object):
60 @property
61 def path_url(self):
62 """Build the path URL to use."""
63
64 url = []
65
66 p = urlsplit(self.url)
67
68 path = p.path
69 if not path:
70 path = '/'
71
72 url.append(path)
73
74 query = p.query
75 if query:
76 url.append('?')
77 url.append(query)
78
79 return ''.join(url)
80
81 @staticmethod
82 def _encode_params(data):
83 """Encode parameters in a piece of data.
84
85 Will successfully encode parameters when passed as a dict or a list of
86 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
87 if parameters are supplied as a dict.
88 """
89
90 if isinstance(data, (str, bytes)):
91 return data
92 elif hasattr(data, 'read'):
93 return data
94 elif hasattr(data, '__iter__'):
95 result = []
96 for k, vs in to_key_val_list(data):
97 if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
98 vs = [vs]
99 for v in vs:
100 if v is not None:
101 result.append(
102 (k.encode('utf-8') if isinstance(k, str) else k,
103 v.encode('utf-8') if isinstance(v, str) else v))
104 return urlencode(result, doseq=True)
105 else:
106 return data
107
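The encoding above is what the public `params` and form `data` arguments ultimately go through. A minimal sketch of the observable behaviour, assuming only the standard `requests` import (the httpbin URL is illustrative):

import requests

# dict params: encoded, but in arbitrary key order
r1 = requests.Request('GET', 'http://httpbin.org/get',
                      params={'a': '1', 'b': '2'}).prepare()

# list of 2-tuples: order preserved, repeated keys allowed
r2 = requests.Request('GET', 'http://httpbin.org/get',
                      params=[('a', '1'), ('a', '2'), ('b', '3')]).prepare()

print(r1.url)  # e.g. http://httpbin.org/get?a=1&b=2 (dict order not guaranteed)
print(r2.url)  # http://httpbin.org/get?a=1&a=2&b=3
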
108 @staticmethod
109 def _encode_files(files, data):
110 """Build the body for a multipart/form-data request.
111
112 Will successfully encode files when passed as a dict or a list of
113 tuples. Order is retained if data is a list of tuples but arbitrary
114 if parameters are supplied as a dict.
115 The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype)
116 or 4-tuples (filename, fileobj, contentype, custom_headers).
117 """
118 if (not files):
119 raise ValueError("Files must be provided.")
120 elif isinstance(data, basestring):
121 raise ValueError("Data must not be a string.")
122
123 new_fields = []
124 fields = to_key_val_list(data or {})
125 files = to_key_val_list(files or {})
126
127 for field, val in fields:
128 if isinstance(val, basestring) or not hasattr(val, '__iter__'):
129 val = [val]
130 for v in val:
131 if v is not None:
132 # Don't call str() on bytestrings: in Py3 it all goes wrong.
133 if not isinstance(v, bytes):
134 v = str(v)
135
136 new_fields.append(
137 (field.decode('utf-8') if isinstance(field, bytes) else field,
138 v.encode('utf-8') if isinstance(v, str) else v))
139
140 for (k, v) in files:
141 # support for explicit filename
142 ft = None
143 fh = None
144 if isinstance(v, (tuple, list)):
145 if len(v) == 2:
146 fn, fp = v
147 elif len(v) == 3:
148 fn, fp, ft = v
149 else:
150 fn, fp, ft, fh = v
151 else:
152 fn = guess_filename(v) or k
153 fp = v
154
155 if isinstance(fp, (str, bytes, bytearray)):
156 fdata = fp
157 else:
158 fdata = fp.read()
159
160 rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
161 rf.make_multipart(content_type=ft)
162 new_fields.append(rf)
163
164 body, content_type = encode_multipart_formdata(new_fields)
165
166 return body, content_type
167
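The 2-, 3- and 4-tuple forms accepted above correspond directly to what the public `files=` argument takes. A minimal sketch with hypothetical field names and an illustrative URL:

import requests

files = {
    'plain': ('report.txt', b'plain body'),                         # (filename, fileobj)
    'typed': ('report.csv', b'a,b\n1,2\n', 'text/csv'),             # + content type
    'extra': ('blob.bin', b'\x00\x01', 'application/octet-stream',
              {'X-Meta': 'example'}),                               # + per-part headers
}
prepped = requests.Request('POST', 'http://httpbin.org/post', files=files).prepare()
print(prepped.headers['Content-Type'])   # multipart/form-data; boundary=...
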
168
169 class RequestHooksMixin(object):
170 def register_hook(self, event, hook):
171 """Properly register a hook."""
172
173 if event not in self.hooks:
174 raise ValueError('Unsupported event specified, with event name "%s"' % (event))
175
176 if isinstance(hook, collections.Callable):
177 self.hooks[event].append(hook)
178 elif hasattr(hook, '__iter__'):
179 self.hooks[event].extend(h for h in hook if isinstance(h, collections.Callable))
180
181 def deregister_hook(self, event, hook):
182 """Deregister a previously registered hook.
183 Returns True if the hook existed, False if not.
184 """
185
186 try:
187 self.hooks[event].remove(hook)
188 return True
189 except ValueError:
190 return False
191
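Hooks registered through this mixin usually arrive via the public `hooks=` argument; in practice 'response' is the event Requests dispatches. A minimal sketch with an illustrative URL:

import requests

def log_url(response, *args, **kwargs):
    # runs after the response arrives; returning None leaves it unchanged
    print('fetched', response.url)

requests.get('http://httpbin.org/get', hooks={'response': log_url})
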
192
193 class Request(RequestHooksMixin):
194 """A user-created :class:`Request <Request>` object.
195
196 Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.
197
198 :param method: HTTP method to use.
199 :param url: URL to send.
200 :param headers: dictionary of headers to send.
201 :param files: dictionary of {filename: fileobject} files to multipart upload.
202 :param data: the body to attach to the request. If a dictionary is provided, form-encoding will take place.
203 :param json: json for the body to attach to the request (if files or data is not specified).
204 :param params: dictionary of URL parameters to append to the URL.
205 :param auth: Auth handler or (user, pass) tuple.
206 :param cookies: dictionary or CookieJar of cookies to attach to this request.
207 :param hooks: dictionary of callback hooks, for internal usage.
208
209 Usage::
210
211 >>> import requests
212 >>> req = requests.Request('GET', 'http://httpbin.org/get')
213 >>> req.prepare()
214 <PreparedRequest [GET]>
215 """
216
217 def __init__(self, method=None, url=None, headers=None, files=None,
218 data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
219
220 # Default empty dicts for dict params.
221 data = [] if data is None else data
222 files = [] if files is None else files
223 headers = {} if headers is None else headers
224 params = {} if params is None else params
225 hooks = {} if hooks is None else hooks
226
227 self.hooks = default_hooks()
228 for (k, v) in list(hooks.items()):
229 self.register_hook(event=k, hook=v)
230
231 self.method = method
232 self.url = url
233 self.headers = headers
234 self.files = files
235 self.data = data
236 self.json = json
237 self.params = params
238 self.auth = auth
239 self.cookies = cookies
240
241 def __repr__(self):
242 return '<Request [%s]>' % (self.method)
243
244 def prepare(self):
245 """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmiss ion and returns it."""
246 p = PreparedRequest()
247 p.prepare(
248 method=self.method,
249 url=self.url,
250 headers=self.headers,
251 files=self.files,
252 data=self.data,
253 json=self.json,
254 params=self.params,
255 auth=self.auth,
256 cookies=self.cookies,
257 hooks=self.hooks,
258 )
259 return p
260
261
262 class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
263 """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
264 containing the exact bytes that will be sent to the server.
265
266 Generated from either a :class:`Request <Request>` object or manually.
267
268 Usage::
269
270 >>> import requests
271 >>> req = requests.Request('GET', 'http://httpbin.org/get')
272 >>> r = req.prepare()
273 <PreparedRequest [GET]>
274
275 >>> s = requests.Session()
276 >>> s.send(r)
277 <Response [200]>
278 """
279
280 def __init__(self):
281 #: HTTP verb to send to the server.
282 self.method = None
283 #: HTTP URL to send the request to.
284 self.url = None
285 #: dictionary of HTTP headers.
286 self.headers = None
287 # The `CookieJar` used to create the Cookie header will be stored here
288 # after prepare_cookies is called
289 self._cookies = None
290 #: request body to send to the server.
291 self.body = None
292 #: dictionary of callback hooks, for internal usage.
293 self.hooks = default_hooks()
294 #: integer denoting starting position of a readable file-like body.
295 self._body_position = None
296
297 def prepare(self, method=None, url=None, headers=None, files=None,
298 data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
299 """Prepares the entire request with the given parameters."""
300
301 self.prepare_method(method)
302 self.prepare_url(url, params)
303 self.prepare_headers(headers)
304 self.prepare_cookies(cookies)
305 self.prepare_body(data, files, json)
306 self.prepare_auth(auth, url)
307
308 # Note that prepare_auth must be last to enable authentication schemes
309 # such as OAuth to work on a fully prepared request.
310
311 # This MUST go after prepare_auth. Authenticators could add a hook
312 self.prepare_hooks(hooks)
313
314 def __repr__(self):
315 return '<PreparedRequest [%s]>' % (self.method)
316
317 def copy(self):
318 p = PreparedRequest()
319 p.method = self.method
320 p.url = self.url
321 p.headers = self.headers.copy() if self.headers is not None else None
322 p._cookies = _copy_cookie_jar(self._cookies)
323 p.body = self.body
324 p.hooks = self.hooks
325 p._body_position = self._body_position
326 return p
327
328 def prepare_method(self, method):
329 """Prepares the given HTTP method."""
330 self.method = method
331 if self.method is not None:
332 self.method = to_native_string(self.method.upper())
333
334 @staticmethod
335 def _get_idna_encoded_host(host):
336 try:
337 from .packages import idna
338 except ImportError:
339 # tolerate the possibility of downstream repackagers unvendoring `requests`
340 # For more information, read: packages/__init__.py
341 import idna
342 sys.modules['requests.packages.idna'] = idna
343
344 try:
345 host = idna.encode(host, uts46=True).decode('utf-8')
346 except idna.IDNAError:
347 raise UnicodeError
348 return host
349
350 def prepare_url(self, url, params):
351 """Prepares the given HTTP URL."""
352 #: Accept objects that have string representations.
353 #: We're unable to blindly call unicode/str functions
354 #: as this will include the bytestring indicator (b'')
355 #: on python 3.x.
356 #: https://github.com/kennethreitz/requests/pull/2238
357 if isinstance(url, bytes):
358 url = url.decode('utf8')
359 else:
360 url = unicode(url) if is_py2 else str(url)
361
362 # Remove leading whitespaces from url
363 url = url.lstrip()
364
365 # Don't do any URL preparation for non-HTTP schemes like `mailto`,
366 # `data` etc to work around exceptions from `url_parse`, which
367 # handles RFC 3986 only.
368 if ':' in url and not url.lower().startswith('http'):
369 self.url = url
370 return
371
372 # Support for unicode domain names and paths.
373 try:
374 scheme, auth, host, port, path, query, fragment = parse_url(url)
375 except LocationParseError as e:
376 raise InvalidURL(*e.args)
377
378 if not scheme:
379 error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant h ttp://{0}?")
380 error = error.format(to_native_string(url, 'utf8'))
381
382 raise MissingSchema(error)
383
384 if not host:
385 raise InvalidURL("Invalid URL %r: No host supplied" % url)
386
387 # In general, we want to try IDNA encoding the hostname if the string contains
388 # non-ASCII characters. This allows users to automatically get the correct IDNA
389 # behaviour. For strings containing only ASCII characters, we need to also verify
390 # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
391 if not unicode_is_ascii(host):
392 try:
393 host = self._get_idna_encoded_host(host)
394 except UnicodeError:
395 raise InvalidURL('URL has an invalid label.')
396 elif host.startswith(u'*'):
397 raise InvalidURL('URL has an invalid label.')
398
399 # Carefully reconstruct the network location
400 netloc = auth or ''
401 if netloc:
402 netloc += '@'
403 netloc += host
404 if port:
405 netloc += ':' + str(port)
406
407 # Bare domains aren't valid URLs.
408 if not path:
409 path = '/'
410
411 if is_py2:
412 if isinstance(scheme, str):
413 scheme = scheme.encode('utf-8')
414 if isinstance(netloc, str):
415 netloc = netloc.encode('utf-8')
416 if isinstance(path, str):
417 path = path.encode('utf-8')
418 if isinstance(query, str):
419 query = query.encode('utf-8')
420 if isinstance(fragment, str):
421 fragment = fragment.encode('utf-8')
422
423 if isinstance(params, (str, bytes)):
424 params = to_native_string(params)
425
426 enc_params = self._encode_params(params)
427 if enc_params:
428 if query:
429 query = '%s&%s' % (query, enc_params)
430 else:
431 query = enc_params
432
433 url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
434 self.url = url
435
436 def prepare_headers(self, headers):
437 """Prepares the given HTTP headers."""
438
439 self.headers = CaseInsensitiveDict()
440 if headers:
441 for header in headers.items():
442 # Raise exception on invalid header value.
443 check_header_validity(header)
444 name, value = header
445 self.headers[to_native_string(name)] = value
446
447 def prepare_body(self, data, files, json=None):
448 """Prepares the given HTTP body data."""
449
450 # Check if file, fo, generator, iterator.
451 # If not, run through normal process.
452
453 # Nottin' on you.
454 body = None
455 content_type = None
456
457 if not data and json is not None:
458 # urllib3 requires a bytes-like body. Python 2's json.dumps
459 # provides this natively, but Python 3 gives a Unicode string.
460 content_type = 'application/json'
461 body = complexjson.dumps(json)
462 if not isinstance(body, bytes):
463 body = body.encode('utf-8')
464
465 is_stream = all([
466 hasattr(data, '__iter__'),
467 not isinstance(data, (basestring, list, tuple, collections.Mapping))
468 ])
469
470 try:
471 length = super_len(data)
472 except (TypeError, AttributeError, UnsupportedOperation):
473 length = None
474
475 if is_stream:
476 body = data
477
478 if getattr(body, 'tell', None) is not None:
479 # Record the current file position before reading.
480 # This will allow us to rewind a file in the event
481 # of a redirect.
482 try:
483 self._body_position = body.tell()
484 except (IOError, OSError):
485 # This differentiates from None, allowing us to catch
486 # a failed `tell()` later when trying to rewind the body
487 self._body_position = object()
488
489 if files:
490 raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
491
492 if length:
493 self.headers['Content-Length'] = builtin_str(length)
494 else:
495 self.headers['Transfer-Encoding'] = 'chunked'
496 else:
497 # Multi-part file uploads.
498 if files:
499 (body, content_type) = self._encode_files(files, data)
500 else:
501 if data:
502 body = self._encode_params(data)
503 if isinstance(data, basestring) or hasattr(data, 'read'):
504 content_type = None
505 else:
506 content_type = 'application/x-www-form-urlencoded'
507
508 self.prepare_content_length(body)
509
510 # Add content-type if it wasn't explicitly provided.
511 if content_type and ('content-type' not in self.headers):
512 self.headers['Content-Type'] = content_type
513
514 self.body = body
515
516 def prepare_content_length(self, body):
517 """Prepare Content-Length header based on request method and body"""
518 if body is not None:
519 length = super_len(body)
520 if length:
521 # If length exists, set it. Otherwise, we fallback
522 # to Transfer-Encoding: chunked.
523 self.headers['Content-Length'] = builtin_str(length)
524 elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None:
525 # Set Content-Length to 0 for methods that can have a body
526 # but don't provide one. (i.e. not GET or HEAD)
527 self.headers['Content-Length'] = '0'
528
529 def prepare_auth(self, auth, url=''):
530 """Prepares the given HTTP auth data."""
531
532 # If no Auth is explicitly provided, extract it from the URL first.
533 if auth is None:
534 url_auth = get_auth_from_url(self.url)
535 auth = url_auth if any(url_auth) else None
536
537 if auth:
538 if isinstance(auth, tuple) and len(auth) == 2:
539 # special-case basic HTTP auth
540 auth = HTTPBasicAuth(*auth)
541
542 # Allow auth to make its changes.
543 r = auth(self)
544
545 # Update self to reflect the auth changes.
546 self.__dict__.update(r.__dict__)
547
548 # Recompute Content-Length
549 self.prepare_content_length(self.body)
550
551 def prepare_cookies(self, cookies):
552 """Prepares the given HTTP cookie data.
553
554 This function eventually generates a ``Cookie`` header from the
555 given cookies using cookielib. Due to cookielib's design, the header
556 will not be regenerated if it already exists, meaning this function
557 can only be called once for the life of the
558 :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
559 to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
560 header is removed beforehand.
561 """
562 if isinstance(cookies, cookielib.CookieJar):
563 self._cookies = cookies
564 else:
565 self._cookies = cookiejar_from_dict(cookies)
566
567 cookie_header = get_cookie_header(self._cookies, self)
568 if cookie_header is not None:
569 self.headers['Cookie'] = cookie_header
570
571 def prepare_hooks(self, hooks):
572 """Prepares the given hooks."""
573 # hooks can be passed as None to the prepare method and to this
574 # method. To prevent iterating over None, simply use an empty list
575 # if hooks is False-y
576 hooks = hooks or []
577 for event in hooks:
578 self.register_hook(event, hooks[event])
579
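The prepare_* pipeline above underpins the documented "prepared request" workflow: build a Request, prepare it through a Session so session state is merged, tweak the prepared object, then send it. A minimal sketch with an illustrative URL and a hypothetical header:

import requests

s = requests.Session()
req = requests.Request('POST', 'http://httpbin.org/post', data={'k': 'v'})

prepped = s.prepare_request(req)            # merges session cookies/headers/auth
prepped.headers['X-Debug'] = 'example'      # hypothetical last-minute tweak

resp = s.send(prepped, timeout=5)
print(resp.status_code)
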
580
581 class Response(object):
582 """The :class:`Response <Response>` object, which contains a
583 server's response to an HTTP request.
584 """
585
586 __attrs__ = [
587 '_content', 'status_code', 'headers', 'url', 'history',
588 'encoding', 'reason', 'cookies', 'elapsed', 'request'
589 ]
590
591 def __init__(self):
592 super(Response, self).__init__()
593
594 self._content = False
595 self._content_consumed = False
596
597 #: Integer Code of responded HTTP Status, e.g. 404 or 200.
598 self.status_code = None
599
600 #: Case-insensitive Dictionary of Response Headers.
601 #: For example, ``headers['content-encoding']`` will return the
602 #: value of a ``'Content-Encoding'`` response header.
603 self.headers = CaseInsensitiveDict()
604
605 #: File-like object representation of response (for advanced usage).
606 #: Use of ``raw`` requires that ``stream=True`` be set on the request.
607 # This requirement does not apply for use internally to Requests.
608 self.raw = None
609
610 #: Final URL location of Response.
611 self.url = None
612
613 #: Encoding to decode with when accessing r.text.
614 self.encoding = None
615
616 #: A list of :class:`Response <Response>` objects from
617 #: the history of the Request. Any redirect responses will end
618 #: up here. The list is sorted from the oldest to the most recent request.
619 self.history = []
620
621 #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
622 self.reason = None
623
624 #: A CookieJar of Cookies the server sent back.
625 self.cookies = cookiejar_from_dict({})
626
627 #: The amount of time elapsed between sending the request
628 #: and the arrival of the response (as a timedelta).
629 #: This property specifically measures the time taken between sending
630 #: the first byte of the request and finishing parsing the headers. It
631 #: is therefore unaffected by consuming the response content or the
632 #: value of the ``stream`` keyword argument.
633 self.elapsed = datetime.timedelta(0)
634
635 #: The :class:`PreparedRequest <PreparedRequest>` object to which this
636 #: is a response.
637 self.request = None
638
639 def __getstate__(self):
640 # Consume everything; accessing the content attribute makes
641 # sure the content has been fully read.
642 if not self._content_consumed:
643 self.content
644
645 return dict(
646 (attr, getattr(self, attr, None))
647 for attr in self.__attrs__
648 )
649
650 def __setstate__(self, state):
651 for name, value in state.items():
652 setattr(self, name, value)
653
654 # pickled objects do not have .raw
655 setattr(self, '_content_consumed', True)
656 setattr(self, 'raw', None)
657
658 def __repr__(self):
659 return '<Response [%s]>' % (self.status_code)
660
661 def __bool__(self):
662 """Returns true if :attr:`status_code` is 'OK'."""
663 return self.ok
664
665 def __nonzero__(self):
666 """Returns true if :attr:`status_code` is 'OK'."""
667 return self.ok
668
669 def __iter__(self):
670 """Allows you to use a response as an iterator."""
671 return self.iter_content(128)
672
673 @property
674 def ok(self):
675 try:
676 self.raise_for_status()
677 except HTTPError:
678 return False
679 return True
680
681 @property
682 def is_redirect(self):
683 """True if this Response is a well-formed HTTP redirect that could have
684 been processed automatically (by :meth:`Session.resolve_redirects`).
685 """
686 return ('location' in self.headers and self.status_code in REDIRECT_STATI)
687
688 @property
689 def is_permanent_redirect(self):
690 """True if this Response one of the permanent versions of redirect"""
691 return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
692
693 @property
694 def apparent_encoding(self):
695 """The apparent encoding, provided by the chardet library"""
696 return chardet.detect(self.content)['encoding']
697
698 def iter_content(self, chunk_size=1, decode_unicode=False):
699 """Iterates over the response data. When stream=True is set on the
700 request, this avoids reading the content at once into memory for
701 large responses. The chunk size is the number of bytes it should
702 read into memory. This is not necessarily the length of each item
703 returned as decoding can take place.
704
705 chunk_size must be of type int or None. A value of None will
706 function differently depending on the value of `stream`.
707 stream=True will read data as it arrives in whatever size the
708 chunks are received. If stream=False, data is returned as
709 a single chunk.
710
711 If decode_unicode is True, content will be decoded using the best
712 available encoding based on the response.
713 """
714
715 def generate():
716 # Special case for urllib3.
717 if hasattr(self.raw, 'stream'):
718 try:
719 for chunk in self.raw.stream(chunk_size, decode_content=True):
720 yield chunk
721 except ProtocolError as e:
722 raise ChunkedEncodingError(e)
723 except DecodeError as e:
724 raise ContentDecodingError(e)
725 except ReadTimeoutError as e:
726 raise ConnectionError(e)
727 else:
728 # Standard file-like object.
729 while True:
730 chunk = self.raw.read(chunk_size)
731 if not chunk:
732 break
733 yield chunk
734
735 self._content_consumed = True
736
737 if self._content_consumed and isinstance(self._content, bool):
738 raise StreamConsumedError()
739 elif chunk_size is not None and not isinstance(chunk_size, int):
740 raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
741 # simulate reading small chunks of the content
742 reused_chunks = iter_slices(self._content, chunk_size)
743
744 stream_chunks = generate()
745
746 chunks = reused_chunks if self._content_consumed else stream_chunks
747
748 if decode_unicode:
749 chunks = stream_decode_response_unicode(chunks, self)
750
751 return chunks
752
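The streaming branch above is reached by passing stream=True on the request and then iterating; a minimal sketch that writes the body to disk in 8 KiB chunks (URL and filename are illustrative):

import requests

resp = requests.get('http://httpbin.org/bytes/102400', stream=True)
with open('download.bin', 'wb') as fh:
    for chunk in resp.iter_content(chunk_size=8192):
        if chunk:                 # filter out keep-alive chunks
            fh.write(chunk)
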
753 def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
754 """Iterates over the response data, one line at a time. When
755 stream=True is set on the request, this avoids reading the
756 content at once into memory for large responses.
757
758 .. note:: This method is not reentrant safe.
759 """
760
761 pending = None
762
763 for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
764
765 if pending is not None:
766 chunk = pending + chunk
767
768 if delimiter:
769 lines = chunk.split(delimiter)
770 else:
771 lines = chunk.splitlines()
772
773 if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
774 pending = lines.pop()
775 else:
776 pending = None
777
778 for line in lines:
779 yield line
780
781 if pending is not None:
782 yield pending
783
784 @property
785 def content(self):
786 """Content of the response, in bytes."""
787
788 if self._content is False:
789 # Read the contents.
790 if self._content_consumed:
791 raise RuntimeError(
792 'The content for this response was already consumed')
793
794 if self.status_code == 0 or self.raw is None:
795 self._content = None
796 else:
797 self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()
798
799 self._content_consumed = True
800 # don't need to release the connection; that's been handled by urllib3
801 # since we exhausted the data.
802 return self._content
803
804 @property
805 def text(self):
806 """Content of the response, in unicode.
807
808 If Response.encoding is None, encoding will be guessed using
809 ``chardet``.
810
811 The encoding of the response content is determined based solely on HTTP
812 headers, following RFC 2616 to the letter. If you can take advantage of
813 non-HTTP knowledge to make a better guess at the encoding, you should
814 set ``r.encoding`` appropriately before accessing this property.
815 """
816
817 # Try charset from content-type
818 content = None
819 encoding = self.encoding
820
821 if not self.content:
822 return str('')
823
824 # Fallback to auto-detected encoding.
825 if self.encoding is None:
826 encoding = self.apparent_encoding
827
828 # Decode unicode from given encoding.
829 try:
830 content = str(self.content, encoding, errors='replace')
831 except (LookupError, TypeError):
832 # A LookupError is raised if the encoding was not found which could
833 # indicate a misspelling or similar mistake.
834 #
835 # A TypeError can be raised if encoding is None
836 #
837 # So we try blindly encoding.
838 content = str(self.content, errors='replace')
839
840 return content
841
842 def json(self, **kwargs):
843 """Returns the json-encoded content of a response, if any.
844
845 :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
846 :raises ValueError: If the response body does not contain valid json.
847 """
848
849 if not self.encoding and self.content and len(self.content) > 3:
850 # No encoding set. JSON RFC 4627 section 3 states we should expect
851 # UTF-8, -16 or -32. Detect which one to use; If the detection or
852 # decoding fails, fall back to `self.text` (using chardet to make
853 # a best guess).
854 encoding = guess_json_utf(self.content)
855 if encoding is not None:
856 try:
857 return complexjson.loads(
858 self.content.decode(encoding), **kwargs
859 )
860 except UnicodeDecodeError:
861 # Wrong UTF codec detected; usually because it's not UTF-8
862 # but some other 8-bit codec. This is an RFC violation,
863 # and the server didn't bother to tell us what codec *was*
864 # used.
865 pass
866 return complexjson.loads(self.text, **kwargs)
867
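The interaction between `encoding`, `text` and `json()` described above is easiest to see end to end; a minimal sketch against an illustrative JSON endpoint:

import requests

resp = requests.get('http://httpbin.org/json')
print(resp.encoding)        # from the Content-Type charset, may be None
resp.encoding = 'utf-8'     # override before reading resp.text if you know better
data = resp.json()          # raises ValueError if the body is not valid JSON
print(sorted(data.keys()))
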
868 @property
869 def links(self):
870 """Returns the parsed header links of the response, if any."""
871
872 header = self.headers.get('link')
873
874 # l = MultiDict()
875 l = {}
876
877 if header:
878 links = parse_header_links(header)
879
880 for link in links:
881 key = link.get('rel') or link.get('url')
882 l[key] = link
883
884 return l
885
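The parsed Link header returned above is typically used to walk paginated APIs that emit rel="next" links; a minimal sketch with an illustrative endpoint and a hypothetical handle() function:

import requests

url = 'https://api.github.com/repos/psf/requests/issues'   # illustrative endpoint
while url:
    resp = requests.get(url)
    handle(resp.json())                                     # hypothetical handler
    url = resp.links.get('next', {}).get('url')
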
886 def raise_for_status(self):
887 """Raises stored :class:`HTTPError`, if one occurred."""
888
889 http_error_msg = ''
890 if isinstance(self.reason, bytes):
891 # We attempt to decode utf-8 first because some servers
892 # choose to localize their reason strings. If the string
893 # isn't utf-8, we fall back to iso-8859-1 for all other
894 # encodings. (See PR #3538)
895 try:
896 reason = self.reason.decode('utf-8')
897 except UnicodeDecodeError:
898 reason = self.reason.decode('iso-8859-1')
899 else:
900 reason = self.reason
901
902 if 400 <= self.status_code < 500:
903 http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url)
904
905 elif 500 <= self.status_code < 600:
906 http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url)
907
908 if http_error_msg:
909 raise HTTPError(http_error_msg, response=self)
910
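A common calling pattern for the method above folds HTTP-level failures into ordinary exception handling; a minimal sketch with an illustrative URL:

import requests

try:
    resp = requests.get('http://httpbin.org/status/404', timeout=5)
    resp.raise_for_status()
except requests.exceptions.HTTPError as err:
    print('request failed:', err)
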
911 def close(self):
912 """Releases the connection back to the pool. Once this method has been
913 called the underlying ``raw`` object must not be accessed again.
914
915 *Note: Should not normally need to be called explicitly.*
916 """
917 if not self._content_consumed:
918 self.raw.close()
919
920 release_conn = getattr(self.raw, 'release_conn', None)
921 if release_conn is not None:
922 release_conn()