| OLD | NEW |
| (Empty) |
| 1 # Copyright 2014 Google Inc. All Rights Reserved. | |
| 2 # | |
| 3 # Licensed under the Apache License, Version 2.0 (the "License"); | |
| 4 # you may not use this file except in compliance with the License. | |
| 5 # You may obtain a copy of the License at | |
| 6 # | |
| 7 # http://www.apache.org/licenses/LICENSE-2.0 | |
| 8 # | |
| 9 # Unless required by applicable law or agreed to in writing, software | |
| 10 # distributed under the License is distributed on an "AS IS" BASIS, | |
| 11 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
| 12 # See the License for the specific language governing permissions and | |
| 13 # limitations under the License. | |
| 14 | |
| 15 """Classes to encapsulate a single HTTP request. | |
| 16 | |
| 17 The classes implement a command pattern, with every | |
| 18 object supporting an execute() method that does the | |
| 19 actual HTTP request. | |
| 20 """ | |
| 21 | |
| 22 __author__ = 'jcgregorio@google.com (Joe Gregorio)' | |
| 23 | |
| 24 import StringIO | |
| 25 import base64 | |
| 26 import copy | |
| 27 import gzip | |
| 28 import httplib2 | |
| 29 import json | |
| 30 import logging | |
| 31 import mimeparse | |
| 32 import mimetypes | |
| 33 import os | |
| 34 import random | |
| 35 import sys | |
| 36 import time | |
| 37 import urllib | |
| 38 import urlparse | |
| 39 import uuid | |
| 40 | |
| 41 from email.generator import Generator | |
| 42 from email.mime.multipart import MIMEMultipart | |
| 43 from email.mime.nonmultipart import MIMENonMultipart | |
| 44 from email.parser import FeedParser | |
| 45 from errors import BatchError | |
| 46 from errors import HttpError | |
| 47 from errors import InvalidChunkSizeError | |
| 48 from errors import ResumableUploadError | |
| 49 from errors import UnexpectedBodyError | |
| 50 from errors import UnexpectedMethodError | |
| 51 from model import JsonModel | |
| 52 from ...oauth2client import util | |
| 53 | |
| 54 | |
| 55 DEFAULT_CHUNK_SIZE = 512*1024 | |
| 56 | |
| 57 MAX_URI_LENGTH = 2048 | |
| 58 | |
| 59 | |
| 60 class MediaUploadProgress(object): | |
| 61 """Status of a resumable upload.""" | |
| 62 | |
| 63 def __init__(self, resumable_progress, total_size): | |
| 64 """Constructor. | |
| 65 | |
| 66 Args: | |
| 67 resumable_progress: int, bytes sent so far. | |
| 68 total_size: int, total bytes in complete upload, or None if the total | |
| 69 upload size isn't known ahead of time. | |
| 70 """ | |
| 71 self.resumable_progress = resumable_progress | |
| 72 self.total_size = total_size | |
| 73 | |
| 74 def progress(self): | |
| 75 """Percent of upload completed, as a float. | |
| 76 | |
| 77 Returns: | |
| 78 the fraction of the upload completed as a float between 0.0 and 1.0, | |
| 79 returning 0.0 if the total size of the upload is unknown. | |
| 80 """ | |
| 81 if self.total_size is not None: | |
| 82 return float(self.resumable_progress) / float(self.total_size) | |
| 83 else: | |
| 84 return 0.0 | |
| 85 | |
| 86 | |
| 87 class MediaDownloadProgress(object): | |
| 88 """Status of a resumable download.""" | |
| 89 | |
| 90 def __init__(self, resumable_progress, total_size): | |
| 91 """Constructor. | |
| 92 | |
| 93 Args: | |
| 94 resumable_progress: int, bytes received so far. | |
| 95 total_size: int, total bytes in complete download. | |
| 96 """ | |
| 97 self.resumable_progress = resumable_progress | |
| 98 self.total_size = total_size | |
| 99 | |
| 100 def progress(self): | |
| 101 """Percent of download completed, as a float. | |
| 102 | |
| 103 Returns: | |
| 104 the fraction of the download completed as a float between 0.0 and 1.0, | |
| 105 returning 0.0 if the total size of the download is unknown. | |
| 106 """ | |
| 107 if self.total_size is not None: | |
| 108 return float(self.resumable_progress) / float(self.total_size) | |
| 109 else: | |
| 110 return 0.0 | |
| 111 | |
| 112 | |
| 113 class MediaUpload(object): | |
| 114 """Describes a media object to upload. | |
| 115 | |
| 116 Base class that defines the interface of MediaUpload subclasses. | |
| 117 | |
| 118 Note that subclasses of MediaUpload may allow you to control the chunksize | |
| 119 when uploading a media object. It is important to keep the size of the chunk | |
| 120 as large as possible to keep the upload efficient. Other factors may influence | |
| 121 the size of the chunk you use, particularly if you are working in an | |
| 122 environment where individual HTTP requests may have a hardcoded time limit, | |
| 123 such as under certain classes of requests under Google App Engine. | |
| 124 | |
| 125 Streams are io.Base compatible objects that support seek(). Some MediaUpload | |
| 126 subclasses support using streams directly to upload data. A sub-class | |
| 127 indicates that it supports streaming by returning True from has_stream(), and | |
| 128 where appropriate for the platform that stream will be used to upload the | |
| 129 media object. The stream() method | |
| 130 should return an io.Base object that supports seek(). On platforms where the | |
| 131 underlying httplib module supports streaming, for example Python 2.6 and | |
| 132 later, the stream will be passed into the http library which will result in | |
| 133 less memory being used and possibly faster uploads. | |
| 134 | |
| 135 If you need to upload media that can't be uploaded using any of the existing | |
| 136 MediaUpload sub-classes then you can sub-class MediaUpload for your particular | |
| 137 needs. | |
| 138 """ | |
| 139 | |
| 140 def chunksize(self): | |
| 141 """Chunk size for resumable uploads. | |
| 142 | |
| 143 Returns: | |
| 144 Chunk size in bytes. | |
| 145 """ | |
| 146 raise NotImplementedError() | |
| 147 | |
| 148 def mimetype(self): | |
| 149 """Mime type of the body. | |
| 150 | |
| 151 Returns: | |
| 152 Mime type. | |
| 153 """ | |
| 154 return 'application/octet-stream' | |
| 155 | |
| 156 def size(self): | |
| 157 """Size of upload. | |
| 158 | |
| 159 Returns: | |
| 160 Size of the body, or None if the size is unknown. | |
| 161 """ | |
| 162 return None | |
| 163 | |
| 164 def resumable(self): | |
| 165 """Whether this upload is resumable. | |
| 166 | |
| 167 Returns: | |
| 168 True if this is a resumable upload, False otherwise. | |
| 169 """ | |
| 170 return False | |
| 171 | |
| 172 def getbytes(self, begin, length): | |
| 173 """Get bytes from the media. | |
| 174 | |
| 175 Args: | |
| 176 begin: int, offset from beginning of file. | |
| 177 length: int, number of bytes to read, starting at begin. | |
| 178 | |
| 179 Returns: | |
| 180 A string of bytes read. May be shorter than length if EOF was reached | |
| 181 first. | |
| 182 """ | |
| 183 raise NotImplementedError() | |
| 184 | |
| 185 def has_stream(self): | |
| 186 """Does the underlying upload support a streaming interface. | |
| 187 | |
| 188 Streaming means it is an io.IOBase subclass that supports seek, i.e. | |
| 189 seekable() returns True. | |
| 190 | |
| 191 Returns: | |
| 192 True if the call to stream() will return an instance of a seekable io.Base | |
| 193 subclass. | |
| 194 """ | |
| 195 return False | |
| 196 | |
| 197 def stream(self): | |
| 198 """A stream interface to the data being uploaded. | |
| 199 | |
| 200 Returns: | |
| 201 The returned value is an io.IOBase subclass that supports seek, i.e. | |
| 202 seekable() returns True. | |
| 203 """ | |
| 204 raise NotImplementedError() | |
| 205 | |
| 206 @util.positional(1) | |
| 207 def _to_json(self, strip=None): | |
| 208 """Utility function for creating a JSON representation of a MediaUpload. | |
| 209 | |
| 210 Args: | |
| 211 strip: array, An array of names of members to not include in the JSON. | |
| 212 | |
| 213 Returns: | |
| 214 string, a JSON representation of this instance, suitable to pass to | |
| 215 from_json(). | |
| 216 """ | |
| 217 t = type(self) | |
| 218 d = copy.copy(self.__dict__) | |
| 219 if strip is not None: | |
| 220 for member in strip: | |
| 221 del d[member] | |
| 222 d['_class'] = t.__name__ | |
| 223 d['_module'] = t.__module__ | |
| 224 return json.dumps(d) | |
| 225 | |
| 226 def to_json(self): | |
| 227 """Create a JSON representation of an instance of MediaUpload. | |
| 228 | |
| 229 Returns: | |
| 230 string, a JSON representation of this instance, suitable to pass to | |
| 231 from_json(). | |
| 232 """ | |
| 233 return self._to_json() | |
| 234 | |
| 235 @classmethod | |
| 236 def new_from_json(cls, s): | |
| 237 """Utility class method to instantiate a MediaUpload subclass from a JSON | |
| 238 representation produced by to_json(). | |
| 239 | |
| 240 Args: | |
| 241 s: string, JSON from to_json(). | |
| 242 | |
| 243 Returns: | |
| 244 An instance of the subclass of MediaUpload that was serialized with | |
| 245 to_json(). | |
| 246 """ | |
| 247 data = json.loads(s) | |
| 248 # Find and call the right classmethod from_json() to restore the object. | |
| 249 module = data['_module'] | |
| 250 m = __import__(module, fromlist=module.split('.')[:-1]) | |
| 251 kls = getattr(m, data['_class']) | |
| 252 from_json = getattr(kls, 'from_json') | |
| 253 return from_json(s) | |
| 254 | |
| 255 | |
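| # Illustrative sketch (hypothetical, not part of the library API): a minimal | |
| # MediaUpload sub-class over an in-memory byte string, showing the methods a | |
| # custom sub-class would typically override. The class name is made up, and | |
| # only the non-resumable parts of the interface are implemented. | |
| class _ExampleBytesUpload(MediaUpload): | |
|   """Hypothetical MediaUpload over an in-memory byte string (example only).""" | |
| | |
|   def __init__(self, data, mimetype='application/octet-stream'): | |
|     super(_ExampleBytesUpload, self).__init__() | |
|     self._data = data | |
|     self._mimetype = mimetype | |
| | |
|   def mimetype(self): | |
|     # Report the supplied mime type instead of the default octet-stream. | |
|     return self._mimetype | |
| | |
|   def size(self): | |
|     # The total size is known up front for an in-memory string. | |
|     return len(self._data) | |
| | |
|   def getbytes(self, begin, length): | |
|     # Return at most 'length' bytes starting at offset 'begin'. | |
|     return self._data[begin:begin + length] | |
| | |
| | |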
| 256 class MediaIoBaseUpload(MediaUpload): | |
| 257 """A MediaUpload for a io.Base objects. | |
| 258 | |
| 259 Note that the Python file object is compatible with io.Base and can be used | |
| 260 with this class also. | |
| 261 | |
| 262 fh = io.BytesIO('...Some data to upload...') | |
| 263 media = MediaIoBaseUpload(fh, mimetype='image/png', | |
| 264 chunksize=1024*1024, resumable=True) | |
| 265 farm.animals().insert( | |
| 266 id='cow', | |
| 267 name='cow.png', | |
| 268 media_body=media).execute() | |
| 269 | |
| 270 Depending on the platform you are working on, you may pass -1 as the | |
| 271 chunksize, which indicates that the entire file should be uploaded in a single | |
| 272 request. If the underlying platform supports streams, such as Python 2.6 or | |
| 273 later, then this can be very efficient as it avoids multiple connections, and | |
| 274 also avoids loading the entire file into memory before sending it. Note that | |
| 275 Google App Engine has a 5MB limit on request size, so you should never set | |
| 276 your chunksize larger than 5MB, or to -1. | |
| 277 """ | |
| 278 | |
| 279 @util.positional(3) | |
| 280 def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE, | |
| 281 resumable=False): | |
| 282 """Constructor. | |
| 283 | |
| 284 Args: | |
| 285 fd: io.Base or file object, The source of the bytes to upload. MUST be | |
| 286 opened in blocking mode, do not use streams opened in non-blocking mode. | |
| 287 The given stream must be seekable, that is, it must be able to call | |
| 288 seek() on fd. | |
| 289 mimetype: string, Mime-type of the file. | |
| 290 chunksize: int, File will be uploaded in chunks of this many bytes. Only | |
| 291 used if resumable=True. Pass in a value of -1 if the file is to be | |
| 292 uploaded as a single chunk. Note that Google App Engine has a 5MB limit | |
| 293 on request size, so you should never set your chunksize larger than 5MB, | |
| 294 or to -1. | |
| 295 resumable: bool, True if this is a resumable upload. False means upload | |
| 296 in a single request. | |
| 297 """ | |
| 298 super(MediaIoBaseUpload, self).__init__() | |
| 299 self._fd = fd | |
| 300 self._mimetype = mimetype | |
| 301 if not (chunksize == -1 or chunksize > 0): | |
| 302 raise InvalidChunkSizeError() | |
| 303 self._chunksize = chunksize | |
| 304 self._resumable = resumable | |
| 305 | |
| 306 self._fd.seek(0, os.SEEK_END) | |
| 307 self._size = self._fd.tell() | |
| 308 | |
| 309 def chunksize(self): | |
| 310 """Chunk size for resumable uploads. | |
| 311 | |
| 312 Returns: | |
| 313 Chunk size in bytes. | |
| 314 """ | |
| 315 return self._chunksize | |
| 316 | |
| 317 def mimetype(self): | |
| 318 """Mime type of the body. | |
| 319 | |
| 320 Returns: | |
| 321 Mime type. | |
| 322 """ | |
| 323 return self._mimetype | |
| 324 | |
| 325 def size(self): | |
| 326 """Size of upload. | |
| 327 | |
| 328 Returns: | |
| 329 Size of the body, or None if the size is unknown. | |
| 330 """ | |
| 331 return self._size | |
| 332 | |
| 333 def resumable(self): | |
| 334 """Whether this upload is resumable. | |
| 335 | |
| 336 Returns: | |
| 337 True if this is a resumable upload, False otherwise. | |
| 338 """ | |
| 339 return self._resumable | |
| 340 | |
| 341 def getbytes(self, begin, length): | |
| 342 """Get bytes from the media. | |
| 343 | |
| 344 Args: | |
| 345 begin: int, offset from beginning of file. | |
| 346 length: int, number of bytes to read, starting at begin. | |
| 347 | |
| 348 Returns: | |
| 349 A string of bytes read. May be shorter than length if EOF was reached | |
| 350 first. | |
| 351 """ | |
| 352 self._fd.seek(begin) | |
| 353 return self._fd.read(length) | |
| 354 | |
| 355 def has_stream(self): | |
| 356 """Does the underlying upload support a streaming interface. | |
| 357 | |
| 358 Streaming means it is an io.IOBase subclass that supports seek, i.e. | |
| 359 seekable() returns True. | |
| 360 | |
| 361 Returns: | |
| 362 True if the call to stream() will return an instance of a seekable io.Base | |
| 363 subclass. | |
| 364 """ | |
| 365 return True | |
| 366 | |
| 367 def stream(self): | |
| 368 """A stream interface to the data being uploaded. | |
| 369 | |
| 370 Returns: | |
| 371 The returned value is an io.IOBase subclass that supports seek, i.e. | |
| 372 seekable() returns True. | |
| 373 """ | |
| 374 return self._fd | |
| 375 | |
| 376 def to_json(self): | |
| 377 """This upload type is not serializable.""" | |
| 378 raise NotImplementedError('MediaIoBaseUpload is not serializable.') | |
| 379 | |
| 380 | |
| 381 class MediaFileUpload(MediaIoBaseUpload): | |
| 382 """A MediaUpload for a file. | |
| 383 | |
| 384 Construct a MediaFileUpload and pass as the media_body parameter of the | |
| 385 method. For example, if we had a service that allowed uploading images: | |
| 386 | |
| 387 | |
| 388 media = MediaFileUpload('cow.png', mimetype='image/png', | |
| 389 chunksize=1024*1024, resumable=True) | |
| 390 farm.animals().insert( | |
| 391 id='cow', | |
| 392 name='cow.png', | |
| 393 media_body=media).execute() | |
| 394 | |
| 395 Depending on the platform you are working on, you may pass -1 as the | |
| 396 chunksize, which indicates that the entire file should be uploaded in a single | |
| 397 request. If the underlying platform supports streams, such as Python 2.6 or | |
| 398 later, then this can be very efficient as it avoids multiple connections, and | |
| 399 also avoids loading the entire file into memory before sending it. Note that | |
| 400 Google App Engine has a 5MB limit on request size, so you should never set | |
| 401 your chunksize larger than 5MB, or to -1. | |
| 402 """ | |
| 403 | |
| 404 @util.positional(2) | |
| 405 def __init__(self, filename, mimetype=None, chunksize=DEFAULT_CHUNK_SIZE, | |
| 406 resumable=False): | |
| 407 """Constructor. | |
| 408 | |
| 409 Args: | |
| 410 filename: string, Name of the file. | |
| 411 mimetype: string, Mime-type of the file. If None then a mime-type will be | |
| 412 guessed from the file extension. | |
| 413 chunksize: int, File will be uploaded in chunks of this many bytes. Only | |
| 414 used if resumable=True. Pass in a value of -1 if the file is to be | |
| 415 uploaded in a single chunk. Note that Google App Engine has a 5MB limit | |
| 416 on request size, so you should never set your chunksize larger than 5MB, | |
| 417 or to -1. | |
| 418 resumable: bool, True if this is a resumable upload. False means upload | |
| 419 in a single request. | |
| 420 """ | |
| 421 self._filename = filename | |
| 422 fd = open(self._filename, 'rb') | |
| 423 if mimetype is None: | |
| 424 (mimetype, encoding) = mimetypes.guess_type(filename) | |
| 425 super(MediaFileUpload, self).__init__(fd, mimetype, chunksize=chunksize, | |
| 426 resumable=resumable) | |
| 427 | |
| 428 def to_json(self): | |
| 429 """Create a JSON representation of an instance of MediaFileUpload. | |
| 430 | |
| 431 Returns: | |
| 432 string, a JSON representation of this instance, suitable to pass to | |
| 433 from_json(). | |
| 434 """ | |
| 435 return self._to_json(strip=['_fd']) | |
| 436 | |
| 437 @staticmethod | |
| 438 def from_json(s): | |
| 439 d = json.loads(s) | |
| 440 return MediaFileUpload(d['_filename'], mimetype=d['_mimetype'], | |
| 441 chunksize=d['_chunksize'], resumable=d['_resumable']) | |
| 442 | |
| 443 | |
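| # Illustrative sketch (hypothetical, not part of the library API): a resumable | |
| # MediaFileUpload can be saved with to_json() and rebuilt later with | |
| # MediaUpload.new_from_json(), e.g. to resume an upload after a restart. The | |
| # file name 'cow.png' and the helper name below are made up. | |
| def _example_roundtrip_media_file_upload(filename='cow.png'): | |
|   media = MediaFileUpload(filename, mimetype='image/png', | |
|                           chunksize=1024*1024, resumable=True) | |
|   serialized = media.to_json() | |
|   # new_from_json() locates MediaFileUpload.from_json() and rebuilds the object. | |
|   return MediaUpload.new_from_json(serialized) | |
| | |
| | |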
| 444 class MediaInMemoryUpload(MediaIoBaseUpload): | |
| 445 """MediaUpload for a chunk of bytes. | |
| 446 | |
| 447 DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for | |
| 448 the stream. | |
| 449 """ | |
| 450 | |
| 451 @util.positional(2) | |
| 452 def __init__(self, body, mimetype='application/octet-stream', | |
| 453 chunksize=DEFAULT_CHUNK_SIZE, resumable=False): | |
| 454 """Create a new MediaInMemoryUpload. | |
| 455 | |
| 456 DEPRECATED: Use MediaIoBaseUpload with either io.TextIOBase or StringIO for | |
| 457 the stream. | |
| 458 | |
| 459 Args: | |
| 460 body: string, Bytes of body content. | |
| 461 mimetype: string, Mime-type of the file or default of | |
| 462 'application/octet-stream'. | |
| 463 chunksize: int, File will be uploaded in chunks of this many bytes. Only | |
| 464 used if resumable=True. | |
| 465 resumable: bool, True if this is a resumable upload. False means upload | |
| 466 in a single request. | |
| 467 """ | |
| 468 fd = StringIO.StringIO(body) | |
| 469 super(MediaInMemoryUpload, self).__init__(fd, mimetype, chunksize=chunksize, | |
| 470 resumable=resumable) | |
| 471 | |
| 472 | |
| 473 class MediaIoBaseDownload(object): | |
| 474 """"Download media resources. | |
| 475 | |
| 476 Note that the Python file object is compatible with io.Base and can be used | |
| 477 with this class also. | |
| 478 | |
| 479 | |
| 480 Example: | |
| 481 request = farms.animals().get_media(id='cow') | |
| 482 fh = io.FileIO('cow.png', mode='wb') | |
| 483 downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024) | |
| 484 | |
| 485 done = False | |
| 486 while done is False: | |
| 487 status, done = downloader.next_chunk() | |
| 488 if status: | |
| 489 print "Download %d%%." % int(status.progress() * 100) | |
| 490 print "Download Complete!" | |
| 491 """ | |
| 492 | |
| 493 @util.positional(3) | |
| 494 def __init__(self, fd, request, chunksize=DEFAULT_CHUNK_SIZE): | |
| 495 """Constructor. | |
| 496 | |
| 497 Args: | |
| 498 fd: io.Base or file object, The stream in which to write the downloaded | |
| 499 bytes. | |
| 500 request: googleapiclient.http.HttpRequest, the media request to perform in | |
| 501 chunks. | |
| 502 chunksize: int, File will be downloaded in chunks of this many bytes. | |
| 503 """ | |
| 504 self._fd = fd | |
| 505 self._request = request | |
| 506 self._uri = request.uri | |
| 507 self._chunksize = chunksize | |
| 508 self._progress = 0 | |
| 509 self._total_size = None | |
| 510 self._done = False | |
| 511 | |
| 512 # Stubs for testing. | |
| 513 self._sleep = time.sleep | |
| 514 self._rand = random.random | |
| 515 | |
| 516 @util.positional(1) | |
| 517 def next_chunk(self, num_retries=0): | |
| 518 """Get the next chunk of the download. | |
| 519 | |
| 520 Args: | |
| 521 num_retries: Integer, number of times to retry 500's with randomized | |
| 522 exponential backoff. If all retries fail, the raised HttpError | |
| 523 represents the last request. If zero (default), we attempt the | |
| 524 request only once. | |
| 525 | |
| 526 Returns: | |
| 527 (status, done): (MediaDownloadProgress, boolean) | |
| 528 The value of 'done' will be True when the media has been fully | |
| 529 downloaded. | |
| 530 | |
| 531 Raises: | |
| 532 googleapiclient.errors.HttpError if the response was not a 2xx. | |
| 533 httplib2.HttpLib2Error if a transport error has occurred. | |
| 534 """ | |
| 535 headers = { | |
| 536 'range': 'bytes=%d-%d' % ( | |
| 537 self._progress, self._progress + self._chunksize) | |
| 538 } | |
| 539 http = self._request.http | |
| 540 | |
| 541 for retry_num in xrange(num_retries + 1): | |
| 542 if retry_num > 0: | |
| 543 self._sleep(self._rand() * 2**retry_num) | |
| 544 logging.warning( | |
| 545 'Retry #%d for media download: GET %s, following status: %d' | |
| 546 % (retry_num, self._uri, resp.status)) | |
| 547 | |
| 548 resp, content = http.request(self._uri, headers=headers) | |
| 549 if resp.status < 500: | |
| 550 break | |
| 551 | |
| 552 if resp.status in [200, 206]: | |
| 553 if 'content-location' in resp and resp['content-location'] != self._uri: | |
| 554 self._uri = resp['content-location'] | |
| 555 self._progress += len(content) | |
| 556 self._fd.write(content) | |
| 557 | |
| 558 if 'content-range' in resp: | |
| 559 content_range = resp['content-range'] | |
| 560 length = content_range.rsplit('/', 1)[1] | |
| 561 self._total_size = int(length) | |
| 562 | |
| 563 if self._progress == self._total_size: | |
| 564 self._done = True | |
| 565 return MediaDownloadProgress(self._progress, self._total_size), self._done | |
| 566 else: | |
| 567 raise HttpError(resp, content, uri=self._uri) | |
| 568 | |
| 569 | |
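| # Illustrative sketch (hypothetical, not part of the library API): next_chunk() | |
| # accepts a num_retries keyword, so a download loop can retry transient 5xx | |
| # responses with randomized exponential backoff. 'fh' and 'request' are assumed | |
| # to come from io.FileIO and a service object as in the docstring example above. | |
| def _example_download_with_retries(fh, request): | |
|   downloader = MediaIoBaseDownload(fh, request, chunksize=1024*1024) | |
|   done = False | |
|   while not done: | |
|     # Retry each chunk up to 5 times before letting the HttpError propagate. | |
|     status, done = downloader.next_chunk(num_retries=5) | |
|   return status | |
| | |
| | |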
| 570 class _StreamSlice(object): | |
| 571 """Truncated stream. | |
| 572 | |
| 573 Takes a stream and presents a stream that is a slice of the original stream. | |
| 574 This is used when uploading media in chunks. In later versions of Python a | |
| 575 stream can be passed to httplib in place of the string of data to send. The | |
| 576 problem is that httplib just blindly reads to the end of the stream. This | |
| 577 wrapper presents a virtual stream that only reads to the end of the chunk. | |
| 578 """ | |
| 579 | |
| 580 def __init__(self, stream, begin, chunksize): | |
| 581 """Constructor. | |
| 582 | |
| 583 Args: | |
| 584 stream: (io.Base, file object), the stream to wrap. | |
| 585 begin: int, the seek position the chunk begins at. | |
| 586 chunksize: int, the size of the chunk. | |
| 587 """ | |
| 588 self._stream = stream | |
| 589 self._begin = begin | |
| 590 self._chunksize = chunksize | |
| 591 self._stream.seek(begin) | |
| 592 | |
| 593 def read(self, n=-1): | |
| 594 """Read n bytes. | |
| 595 | |
| 596 Args: | |
| 597 n: int, the number of bytes to read. | |
| 598 | |
| 599 Returns: | |
| 600 A string of length 'n', or less if EOF is reached. | |
| 601 """ | |
| 602 # The data left available to read sits in [cur, end) | |
| 603 cur = self._stream.tell() | |
| 604 end = self._begin + self._chunksize | |
| 605 if n == -1 or cur + n > end: | |
| 606 n = end - cur | |
| 607 return self._stream.read(n) | |
| 608 | |
| 609 | |
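| # Illustrative sketch (hypothetical, not part of the library API): _StreamSlice | |
| # presents a fixed window of a seekable stream, which is how resumable uploads | |
| # hand httplib one chunk at a time without copying the data into a string. | |
| def _example_stream_slice(): | |
|   stream = StringIO.StringIO('0123456789') | |
|   # Present bytes 2-5 of the underlying stream as if they were the whole body. | |
|   chunk = _StreamSlice(stream, 2, 4) | |
|   first = chunk.read()   # '2345' | |
|   second = chunk.read()  # '' because the slice is exhausted. | |
|   return first, second | |
| | |
| | |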
| 610 class HttpRequest(object): | |
| 611 """Encapsulates a single HTTP request.""" | |
| 612 | |
| 613 @util.positional(4) | |
| 614 def __init__(self, http, postproc, uri, | |
| 615 method='GET', | |
| 616 body=None, | |
| 617 headers=None, | |
| 618 methodId=None, | |
| 619 resumable=None): | |
| 620 """Constructor for an HttpRequest. | |
| 621 | |
| 622 Args: | |
| 623 http: httplib2.Http, the transport object to use to make a request | |
| 624 postproc: callable, called on the HTTP response and content to transform | |
| 625 it into a data object before returning, or raising an exception | |
| 626 on an error. | |
| 627 uri: string, the absolute URI to send the request to | |
| 628 method: string, the HTTP method to use | |
| 629 body: string, the request body of the HTTP request. | |
| 630 headers: dict, the HTTP request headers | |
| 631 methodId: string, a unique identifier for the API method being called. | |
| 632 resumable: MediaUpload, None if this is not a resumable request. | |
| 633 """ | |
| 634 self.uri = uri | |
| 635 self.method = method | |
| 636 self.body = body | |
| 637 self.headers = headers or {} | |
| 638 self.methodId = methodId | |
| 639 self.http = http | |
| 640 self.postproc = postproc | |
| 641 self.resumable = resumable | |
| 642 self.response_callbacks = [] | |
| 643 self._in_error_state = False | |
| 644 | |
| 645 # Pull the multipart boundary out of the content-type header. | |
| 646 major, minor, params = mimeparse.parse_mime_type( | |
| 647 headers.get('content-type', 'application/json')) | |
| 648 | |
| 649 # The size of the non-media part of the request. | |
| 650 self.body_size = len(self.body or '') | |
| 651 | |
| 652 # The resumable URI to send chunks to. | |
| 653 self.resumable_uri = None | |
| 654 | |
| 655 # The bytes that have been uploaded. | |
| 656 self.resumable_progress = 0 | |
| 657 | |
| 658 # Stubs for testing. | |
| 659 self._rand = random.random | |
| 660 self._sleep = time.sleep | |
| 661 | |
| 662 @util.positional(1) | |
| 663 def execute(self, http=None, num_retries=0): | |
| 664 """Execute the request. | |
| 665 | |
| 666 Args: | |
| 667 http: httplib2.Http, an http object to be used in place of the | |
| 668 one the HttpRequest request object was constructed with. | |
| 669 num_retries: Integer, number of times to retry 500's with randomized | |
| 670 exponential backoff. If all retries fail, the raised HttpError | |
| 671 represents the last request. If zero (default), we attempt the | |
| 672 request only once. | |
| 673 | |
| 674 Returns: | |
| 675 A deserialized object model of the response body as determined | |
| 676 by the postproc. | |
| 677 | |
| 678 Raises: | |
| 679 googleapiclient.errors.HttpError if the response was not a 2xx. | |
| 680 httplib2.HttpLib2Error if a transport error has occurred. | |
| 681 """ | |
| 682 if http is None: | |
| 683 http = self.http | |
| 684 | |
| 685 if self.resumable: | |
| 686 body = None | |
| 687 while body is None: | |
| 688 _, body = self.next_chunk(http=http, num_retries=num_retries) | |
| 689 return body | |
| 690 | |
| 691 # Non-resumable case. | |
| 692 | |
| 693 if 'content-length' not in self.headers: | |
| 694 self.headers['content-length'] = str(self.body_size) | |
| 695 # If the request URI is too long then turn it into a POST request. | |
| 696 if len(self.uri) > MAX_URI_LENGTH and self.method == 'GET': | |
| 697 self.method = 'POST' | |
| 698 self.headers['x-http-method-override'] = 'GET' | |
| 699 self.headers['content-type'] = 'application/x-www-form-urlencoded' | |
| 700 parsed = urlparse.urlparse(self.uri) | |
| 701 self.uri = urlparse.urlunparse( | |
| 702 (parsed.scheme, parsed.netloc, parsed.path, parsed.params, None, | |
| 703 None) | |
| 704 ) | |
| 705 self.body = parsed.query | |
| 706 self.headers['content-length'] = str(len(self.body)) | |
| 707 | |
| 708 # Handle retries for server-side errors. | |
| 709 for retry_num in xrange(num_retries + 1): | |
| 710 if retry_num > 0: | |
| 711 self._sleep(self._rand() * 2**retry_num) | |
| 712 logging.warning('Retry #%d for request: %s %s, following status: %d' | |
| 713 % (retry_num, self.method, self.uri, resp.status)) | |
| 714 | |
| 715 resp, content = http.request(str(self.uri), method=str(self.method), | |
| 716 body=self.body, headers=self.headers) | |
| 717 if resp.status < 500: | |
| 718 break | |
| 719 | |
| 720 for callback in self.response_callbacks: | |
| 721 callback(resp) | |
| 722 if resp.status >= 300: | |
| 723 raise HttpError(resp, content, uri=self.uri) | |
| 724 return self.postproc(resp, content) | |
| 725 | |
| 726 @util.positional(2) | |
| 727 def add_response_callback(self, cb): | |
| 728 """add_response_headers_callback | |
| 729 | |
| 730 Args: | |
| 731 cb: Callback to be called on receiving the response headers, of signature: | |
| 732 | |
| 733 def cb(resp): | |
| 734 # Where resp is an instance of httplib2.Response | |
| 735 """ | |
| 736 self.response_callbacks.append(cb) | |
| 737 | |
| 738 @util.positional(1) | |
| 739 def next_chunk(self, http=None, num_retries=0): | |
| 740 """Execute the next step of a resumable upload. | |
| 741 | |
| 742 Can only be used if the method being executed supports media uploads and | |
| 743 the MediaUpload object passed in was flagged as using resumable upload. | |
| 744 | |
| 745 Example: | |
| 746 | |
| 747 media = MediaFileUpload('cow.png', mimetype='image/png', | |
| 748 chunksize=1000, resumable=True) | |
| 749 request = farm.animals().insert( | |
| 750 id='cow', | |
| 751 name='cow.png', | |
| 752 media_body=media) | |
| 753 | |
| 754 response = None | |
| 755 while response is None: | |
| 756 status, response = request.next_chunk() | |
| 757 if status: | |
| 758 print "Upload %d%% complete." % int(status.progress() * 100) | |
| 759 | |
| 760 | |
| 761 Args: | |
| 762 http: httplib2.Http, an http object to be used in place of the | |
| 763 one the HttpRequest request object was constructed with. | |
| 764 num_retries: Integer, number of times to retry 500's with randomized | |
| 765 exponential backoff. If all retries fail, the raised HttpError | |
| 766 represents the last request. If zero (default), we attempt the | |
| 767 request only once. | |
| 768 | |
| 769 Returns: | |
| 770 (status, body): (MediaUploadProgress, object) | |
| 771 The body will be None until the resumable media is fully uploaded. | |
| 772 | |
| 773 Raises: | |
| 774 googleapiclient.errors.HttpError if the response was not a 2xx. | |
| 775 httplib2.HttpLib2Error if a transport error has occurred. | |
| 776 """ | |
| 777 if http is None: | |
| 778 http = self.http | |
| 779 | |
| 780 if self.resumable.size() is None: | |
| 781 size = '*' | |
| 782 else: | |
| 783 size = str(self.resumable.size()) | |
| 784 | |
| 785 if self.resumable_uri is None: | |
| 786 start_headers = copy.copy(self.headers) | |
| 787 start_headers['X-Upload-Content-Type'] = self.resumable.mimetype() | |
| 788 if size != '*': | |
| 789 start_headers['X-Upload-Content-Length'] = size | |
| 790 start_headers['content-length'] = str(self.body_size) | |
| 791 | |
| 792 for retry_num in xrange(num_retries + 1): | |
| 793 if retry_num > 0: | |
| 794 self._sleep(self._rand() * 2**retry_num) | |
| 795 logging.warning( | |
| 796 'Retry #%d for resumable URI request: %s %s, following status: %d' | |
| 797 % (retry_num, self.method, self.uri, resp.status)) | |
| 798 | |
| 799 resp, content = http.request(self.uri, method=self.method, | |
| 800 body=self.body, | |
| 801 headers=start_headers) | |
| 802 if resp.status < 500: | |
| 803 break | |
| 804 | |
| 805 if resp.status == 200 and 'location' in resp: | |
| 806 self.resumable_uri = resp['location'] | |
| 807 else: | |
| 808 raise ResumableUploadError(resp, content) | |
| 809 elif self._in_error_state: | |
| 810 # If we are in an error state then query the server for current state of | |
| 811 # the upload by sending an empty PUT and reading the 'range' header in | |
| 812 # the response. | |
| 813 headers = { | |
| 814 'Content-Range': 'bytes */%s' % size, | |
| 815 'content-length': '0' | |
| 816 } | |
| 817 resp, content = http.request(self.resumable_uri, 'PUT', | |
| 818 headers=headers) | |
| 819 status, body = self._process_response(resp, content) | |
| 820 if body: | |
| 821 # The upload was complete. | |
| 822 return (status, body) | |
| 823 | |
| 824 # The httplib.request method can take streams for the body parameter, but | |
| 825 # only in Python 2.6 or later. If a stream is available under those | |
| 826 # conditions then use it as the body argument. | |
| 827 if self.resumable.has_stream() and sys.version_info[1] >= 6: | |
| 828 data = self.resumable.stream() | |
| 829 if self.resumable.chunksize() == -1: | |
| 830 data.seek(self.resumable_progress) | |
| 831 chunk_end = self.resumable.size() - self.resumable_progress - 1 | |
| 832 else: | |
| 833 # Doing chunking with a stream, so wrap a slice of the stream. | |
| 834 data = _StreamSlice(data, self.resumable_progress, | |
| 835 self.resumable.chunksize()) | |
| 836 chunk_end = min( | |
| 837 self.resumable_progress + self.resumable.chunksize() - 1, | |
| 838 self.resumable.size() - 1) | |
| 839 else: | |
| 840 data = self.resumable.getbytes( | |
| 841 self.resumable_progress, self.resumable.chunksize()) | |
| 842 | |
| 843 # A short read implies that we are at EOF, so finish the upload. | |
| 844 if len(data) < self.resumable.chunksize(): | |
| 845 size = str(self.resumable_progress + len(data)) | |
| 846 | |
| 847 chunk_end = self.resumable_progress + len(data) - 1 | |
| 848 | |
| 849 headers = { | |
| 850 'Content-Range': 'bytes %d-%d/%s' % ( | |
| 851 self.resumable_progress, chunk_end, size), | |
| 852 # Must set the content-length header here because httplib can't | |
| 853 # calculate the size when working with _StreamSlice. | |
| 854 'Content-Length': str(chunk_end - self.resumable_progress + 1) | |
| 855 } | |
| 856 | |
| 857 for retry_num in xrange(num_retries + 1): | |
| 858 if retry_num > 0: | |
| 859 self._sleep(self._rand() * 2**retry_num) | |
| 860 logging.warning( | |
| 861 'Retry #%d for media upload: %s %s, following status: %d' | |
| 862 % (retry_num, self.method, self.uri, resp.status)) | |
| 863 | |
| 864 try: | |
| 865 resp, content = http.request(self.resumable_uri, method='PUT', | |
| 866 body=data, | |
| 867 headers=headers) | |
| 868 except: | |
| 869 self._in_error_state = True | |
| 870 raise | |
| 871 if resp.status < 500: | |
| 872 break | |
| 873 | |
| 874 return self._process_response(resp, content) | |
| 875 | |
| 876 def _process_response(self, resp, content): | |
| 877 """Process the response from a single chunk upload. | |
| 878 | |
| 879 Args: | |
| 880 resp: httplib2.Response, the response object. | |
| 881 content: string, the content of the response. | |
| 882 | |
| 883 Returns: | |
| 884 (status, body): (MediaUploadProgress, object) | |
| 885 The body will be None until the resumable media is fully uploaded. | |
| 886 | |
| 887 Raises: | |
| 888 googleapiclient.errors.HttpError if the response was not a 2xx or a 308. | |
| 889 """ | |
| 890 if resp.status in [200, 201]: | |
| 891 self._in_error_state = False | |
| 892 return None, self.postproc(resp, content) | |
| 893 elif resp.status == 308: | |
| 894 self._in_error_state = False | |
| 895 # A "308 Resume Incomplete" indicates we are not done. | |
| 896 self.resumable_progress = int(resp['range'].split('-')[1]) + 1 | |
| 897 if 'location' in resp: | |
| 898 self.resumable_uri = resp['location'] | |
| 899 else: | |
| 900 self._in_error_state = True | |
| 901 raise HttpError(resp, content, uri=self.uri) | |
| 902 | |
| 903 return (MediaUploadProgress(self.resumable_progress, self.resumable.size()), | |
| 904 None) | |
| 905 | |
| 906 def to_json(self): | |
| 907 """Returns a JSON representation of the HttpRequest.""" | |
| 908 d = copy.copy(self.__dict__) | |
| 909 if d['resumable'] is not None: | |
| 910 d['resumable'] = self.resumable.to_json() | |
| 911 del d['http'] | |
| 912 del d['postproc'] | |
| 913 del d['_sleep'] | |
| 914 del d['_rand'] | |
| 915 | |
| 916 return json.dumps(d) | |
| 917 | |
| 918 @staticmethod | |
| 919 def from_json(s, http, postproc): | |
| 920 """Returns an HttpRequest populated with info from a JSON object.""" | |
| 921 d = json.loads(s) | |
| 922 if d['resumable'] is not None: | |
| 923 d['resumable'] = MediaUpload.new_from_json(d['resumable']) | |
| 924 return HttpRequest( | |
| 925 http, | |
| 926 postproc, | |
| 927 uri=d['uri'], | |
| 928 method=d['method'], | |
| 929 body=d['body'], | |
| 930 headers=d['headers'], | |
| 931 methodId=d['methodId'], | |
| 932 resumable=d['resumable']) | |
| 933 | |
| 934 | |
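| # Illustrative sketch (hypothetical, not part of the library API): HttpRequest | |
| # objects are normally built by googleapiclient.discovery, but they can be | |
| # constructed directly. The URI and the trivial postproc below are made up for | |
| # illustration; execute(num_retries=3) retries 5xx responses with backoff. | |
| def _example_direct_http_request(): | |
|   def postproc(resp, content): | |
|     # A minimal post-processing callable that returns the raw response body. | |
|     return content | |
| | |
|   request = HttpRequest(httplib2.Http(), postproc, | |
|                         'https://www.googleapis.com/discovery/v1/apis', | |
|                         method='GET', headers={}) | |
|   return request.execute(num_retries=3) | |
| | |
| | |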
| 935 class BatchHttpRequest(object): | |
| 936 """Batches multiple HttpRequest objects into a single HTTP request. | |
| 937 | |
| 938 Example: | |
| 939 from googleapiclient.http import BatchHttpRequest | |
| 940 | |
| 941 def list_animals(request_id, response, exception): | |
| 942 \"\"\"Do something with the animals list response.\"\"\" | |
| 943 if exception is not None: | |
| 944 # Do something with the exception. | |
| 945 pass | |
| 946 else: | |
| 947 # Do something with the response. | |
| 948 pass | |
| 949 | |
| 950 def list_farmers(request_id, response, exception): | |
| 951 \"\"\"Do something with the farmers list response.\"\"\" | |
| 952 if exception is not None: | |
| 953 # Do something with the exception. | |
| 954 pass | |
| 955 else: | |
| 956 # Do something with the response. | |
| 957 pass | |
| 958 | |
| 959 service = build('farm', 'v2') | |
| 960 | |
| 961 batch = BatchHttpRequest() | |
| 962 | |
| 963 batch.add(service.animals().list(), list_animals) | |
| 964 batch.add(service.farmers().list(), list_farmers) | |
| 965 batch.execute(http=http) | |
| 966 """ | |
| 967 | |
| 968 @util.positional(1) | |
| 969 def __init__(self, callback=None, batch_uri=None): | |
| 970 """Constructor for a BatchHttpRequest. | |
| 971 | |
| 972 Args: | |
| 973 callback: callable, A callback to be called for each response, of the | |
| 974 form callback(id, response, exception). The first parameter is the | |
| 975 request id, and the second is the deserialized response object. The | |
| 976 third is a googleapiclient.errors.HttpError exception object if an HTTP error | |
| 977 occurred while processing the request, or None if no error occurred. | |
| 978 batch_uri: string, URI to send batch requests to. | |
| 979 """ | |
| 980 if batch_uri is None: | |
| 981 batch_uri = 'https://www.googleapis.com/batch' | |
| 982 self._batch_uri = batch_uri | |
| 983 | |
| 984 # Global callback to be called for each individual response in the batch. | |
| 985 self._callback = callback | |
| 986 | |
| 987 # A map from id to request. | |
| 988 self._requests = {} | |
| 989 | |
| 990 # A map from id to callback. | |
| 991 self._callbacks = {} | |
| 992 | |
| 993 # List of request ids, in the order in which they were added. | |
| 994 self._order = [] | |
| 995 | |
| 996 # The last auto generated id. | |
| 997 self._last_auto_id = 0 | |
| 998 | |
| 999 # Unique ID on which to base the Content-ID headers. | |
| 1000 self._base_id = None | |
| 1001 | |
| 1002 # A map from request id to (httplib2.Response, content) response pairs | |
| 1003 self._responses = {} | |
| 1004 | |
| 1005 # A map of id(Credentials) that have been refreshed. | |
| 1006 self._refreshed_credentials = {} | |
| 1007 | |
| 1008 def _refresh_and_apply_credentials(self, request, http): | |
| 1009 """Refresh the credentials and apply to the request. | |
| 1010 | |
| 1011 Args: | |
| 1012 request: HttpRequest, the request. | |
| 1013 http: httplib2.Http, the global http object for the batch. | |
| 1014 """ | |
| 1015 # Refresh the credentials, but only once per refresh_token. | |
| 1016 # If the request has no http of its own then refresh using the http passed | |
| 1017 # in via execute(). | |
| 1018 creds = None | |
| 1019 if request.http is not None and hasattr(request.http.request, | |
| 1020 'credentials'): | |
| 1021 creds = request.http.request.credentials | |
| 1022 elif http is not None and hasattr(http.request, 'credentials'): | |
| 1023 creds = http.request.credentials | |
| 1024 if creds is not None: | |
| 1025 if id(creds) not in self._refreshed_credentials: | |
| 1026 creds.refresh(http) | |
| 1027 self._refreshed_credentials[id(creds)] = 1 | |
| 1028 | |
| 1029 # Only apply the credentials if we are using the http object passed in, | |
| 1030 # otherwise apply() will get called during _serialize_request(). | |
| 1031 if request.http is None or not hasattr(request.http.request, | |
| 1032 'credentials'): | |
| 1033 creds.apply(request.headers) | |
| 1034 | |
| 1035 def _id_to_header(self, id_): | |
| 1036 """Convert an id to a Content-ID header value. | |
| 1037 | |
| 1038 Args: | |
| 1039 id_: string, identifier of individual request. | |
| 1040 | |
| 1041 Returns: | |
| 1042 A Content-ID header with the id_ encoded into it. A UUID is prepended to | |
| 1043 the value because Content-ID headers are supposed to be universally | |
| 1044 unique. | |
| 1045 """ | |
| 1046 if self._base_id is None: | |
| 1047 self._base_id = uuid.uuid4() | |
| 1048 | |
| 1049 return '<%s+%s>' % (self._base_id, urllib.quote(id_)) | |
| 1050 | |
| 1051 def _header_to_id(self, header): | |
| 1052 """Convert a Content-ID header value to an id. | |
| 1053 | |
| 1054 Presumes the Content-ID header conforms to the format that _id_to_header() | |
| 1055 returns. | |
| 1056 | |
| 1057 Args: | |
| 1058 header: string, Content-ID header value. | |
| 1059 | |
| 1060 Returns: | |
| 1061 The extracted id value. | |
| 1062 | |
| 1063 Raises: | |
| 1064 BatchError if the header is not in the expected format. | |
| 1065 """ | |
| 1066 if header[0] != '<' or header[-1] != '>': | |
| 1067 raise BatchError("Invalid value for Content-ID: %s" % header) | |
| 1068 if '+' not in header: | |
| 1069 raise BatchError("Invalid value for Content-ID: %s" % header) | |
| 1070 base, id_ = header[1:-1].rsplit('+', 1) | |
| 1071 | |
| 1072 return urllib.unquote(id_) | |
| 1073 | |
| 1074 def _serialize_request(self, request): | |
| 1075 """Convert an HttpRequest object into a string. | |
| 1076 | |
| 1077 Args: | |
| 1078 request: HttpRequest, the request to serialize. | |
| 1079 | |
| 1080 Returns: | |
| 1081 The request as a string in application/http format. | |
| 1082 """ | |
| 1083 # Construct status line | |
| 1084 parsed = urlparse.urlparse(request.uri) | |
| 1085 request_line = urlparse.urlunparse( | |
| 1086 (None, None, parsed.path, parsed.params, parsed.query, None) | |
| 1087 ) | |
| 1088 status_line = request.method + ' ' + request_line + ' HTTP/1.1\n' | |
| 1089 major, minor = request.headers.get('content-type', 'application/json').split('/') | |
| 1090 msg = MIMENonMultipart(major, minor) | |
| 1091 headers = request.headers.copy() | |
| 1092 | |
| 1093 if request.http is not None and hasattr(request.http.request, | |
| 1094 'credentials'): | |
| 1095 request.http.request.credentials.apply(headers) | |
| 1096 | |
| 1097 # MIMENonMultipart adds its own Content-Type header. | |
| 1098 if 'content-type' in headers: | |
| 1099 del headers['content-type'] | |
| 1100 | |
| 1101 for key, value in headers.iteritems(): | |
| 1102 msg[key] = value | |
| 1103 msg['Host'] = parsed.netloc | |
| 1104 msg.set_unixfrom(None) | |
| 1105 | |
| 1106 if request.body is not None: | |
| 1107 msg.set_payload(request.body) | |
| 1108 msg['content-length'] = str(len(request.body)) | |
| 1109 | |
| 1110 # Serialize the mime message. | |
| 1111 fp = StringIO.StringIO() | |
| 1112 # maxheaderlen=0 means don't line wrap headers. | |
| 1113 g = Generator(fp, maxheaderlen=0) | |
| 1114 g.flatten(msg, unixfrom=False) | |
| 1115 body = fp.getvalue() | |
| 1116 | |
| 1117 # Strip off the \n\n that the MIME lib tacks onto the end of the payload. | |
| 1118 if request.body is None: | |
| 1119 body = body[:-2] | |
| 1120 | |
| 1121 return status_line.encode('utf-8') + body | |
| 1122 | |
| 1123 def _deserialize_response(self, payload): | |
| 1124 """Convert string into httplib2 response and content. | |
| 1125 | |
| 1126 Args: | |
| 1127 payload: string, headers and body as a string. | |
| 1128 | |
| 1129 Returns: | |
| 1130 A pair (resp, content), such as would be returned from httplib2.request. | |
| 1131 """ | |
| 1132 # Strip off the status line | |
| 1133 status_line, payload = payload.split('\n', 1) | |
| 1134 protocol, status, reason = status_line.split(' ', 2) | |
| 1135 | |
| 1136 # Parse the rest of the response | |
| 1137 parser = FeedParser() | |
| 1138 parser.feed(payload) | |
| 1139 msg = parser.close() | |
| 1140 msg['status'] = status | |
| 1141 | |
| 1142 # Create httplib2.Response from the parsed headers. | |
| 1143 resp = httplib2.Response(msg) | |
| 1144 resp.reason = reason | |
| 1145 resp.version = int(protocol.split('/', 1)[1].replace('.', '')) | |
| 1146 | |
| 1147 content = payload.split('\r\n\r\n', 1)[1] | |
| 1148 | |
| 1149 return resp, content | |
| 1150 | |
| 1151 def _new_id(self): | |
| 1152 """Create a new id. | |
| 1153 | |
| 1154 Auto incrementing number that avoids conflicts with ids already used. | |
| 1155 | |
| 1156 Returns: | |
| 1157 string, a new unique id. | |
| 1158 """ | |
| 1159 self._last_auto_id += 1 | |
| 1160 while str(self._last_auto_id) in self._requests: | |
| 1161 self._last_auto_id += 1 | |
| 1162 return str(self._last_auto_id) | |
| 1163 | |
| 1164 @util.positional(2) | |
| 1165 def add(self, request, callback=None, request_id=None): | |
| 1166 """Add a new request. | |
| 1167 | |
| 1168 Every callback added will be paired with a unique id, the request_id. That | |
| 1169 unique id will be passed back to the callback when the response comes back | |
| 1170 from the server. The default behavior is to have the library generate its | |
| 1171 own unique id. If the caller passes in a request_id then they must ensure | |
| 1172 uniqueness for each request_id, and if they are not unique an exception is | |
| 1173 raised. Callers should either supply all request_ids or never supply a | |
| 1174 request id, to avoid such an error. | |
| 1175 | |
| 1176 Args: | |
| 1177 request: HttpRequest, Request to add to the batch. | |
| 1178 callback: callable, A callback to be called for this response, of the | |
| 1179 form callback(id, response, exception). The first parameter is the | |
| 1180 request id, and the second is the deserialized response object. The | |
| 1181 third is a googleapiclient.errors.HttpError exception object if an HTTP error | |
| 1182 occurred while processing the request, or None if no errors occurred. | |
| 1183 request_id: string, A unique id for the request. The id will be passed to | |
| 1184 the callback with the response. | |
| 1185 | |
| 1186 Returns: | |
| 1187 None | |
| 1188 | |
| 1189 Raises: | |
| 1190 BatchError if a media request is added to a batch. | |
| 1191 KeyError if the request_id is not unique. | |
| 1192 """ | |
| 1193 if request_id is None: | |
| 1194 request_id = self._new_id() | |
| 1195 if request.resumable is not None: | |
| 1196 raise BatchError("Media requests cannot be used in a batch request.") | |
| 1197 if request_id in self._requests: | |
| 1198 raise KeyError("A request with this ID already exists: %s" % request_id) | |
| 1199 self._requests[request_id] = request | |
| 1200 self._callbacks[request_id] = callback | |
| 1201 self._order.append(request_id) | |
| 1202 | |
| 1203 def _execute(self, http, order, requests): | |
| 1204 """Serialize batch request, send to server, process response. | |
| 1205 | |
| 1206 Args: | |
| 1207 http: httplib2.Http, an http object to be used to make the request with. | |
| 1208 order: list, list of request ids in the order they were added to the | |
| 1209 batch. | |
| 1210 requests: list, list of request objects to send. | |
| 1211 | |
| 1212 Raises: | |
| 1213 httplib2.HttpLib2Error if a transport error has occurred. | |
| 1214 googleapiclient.errors.BatchError if the response is the wrong format. | |
| 1215 """ | |
| 1216 message = MIMEMultipart('mixed') | |
| 1217 # Message should not write out its own headers. | |
| 1218 setattr(message, '_write_headers', lambda self: None) | |
| 1219 | |
| 1220 # Add all the individual requests. | |
| 1221 for request_id in order: | |
| 1222 request = requests[request_id] | |
| 1223 | |
| 1224 msg = MIMENonMultipart('application', 'http') | |
| 1225 msg['Content-Transfer-Encoding'] = 'binary' | |
| 1226 msg['Content-ID'] = self._id_to_header(request_id) | |
| 1227 | |
| 1228 body = self._serialize_request(request) | |
| 1229 msg.set_payload(body) | |
| 1230 message.attach(msg) | |
| 1231 | |
| 1232 # encode the body: note that we can't use `as_string`, because | |
| 1233 # it plays games with `From ` lines. | |
| 1234 fp = StringIO.StringIO() | |
| 1235 g = Generator(fp, mangle_from_=False) | |
| 1236 g.flatten(message, unixfrom=False) | |
| 1237 body = fp.getvalue() | |
| 1238 | |
| 1239 headers = {} | |
| 1240 headers['content-type'] = ('multipart/mixed; ' | |
| 1241 'boundary="%s"') % message.get_boundary() | |
| 1242 | |
| 1243 resp, content = http.request(self._batch_uri, method='POST', body=body, | |
| 1244 headers=headers) | |
| 1245 | |
| 1246 if resp.status >= 300: | |
| 1247 raise HttpError(resp, content, uri=self._batch_uri) | |
| 1248 | |
| 1249 # Now break out the individual responses and store each one. | |
| 1250 boundary, _ = content.split(None, 1) | |
| 1251 | |
| 1252 # Prepend with a content-type header so FeedParser can handle it. | |
| 1253 header = 'content-type: %s\r\n\r\n' % resp['content-type'] | |
| 1254 for_parser = header + content | |
| 1255 | |
| 1256 parser = FeedParser() | |
| 1257 parser.feed(for_parser) | |
| 1258 mime_response = parser.close() | |
| 1259 | |
| 1260 if not mime_response.is_multipart(): | |
| 1261 raise BatchError("Response not in multipart/mixed format.", resp=resp, | |
| 1262 content=content) | |
| 1263 | |
| 1264 for part in mime_response.get_payload(): | |
| 1265 request_id = self._header_to_id(part['Content-ID']) | |
| 1266 response, content = self._deserialize_response(part.get_payload()) | |
| 1267 self._responses[request_id] = (response, content) | |
| 1268 | |
| 1269 @util.positional(1) | |
| 1270 def execute(self, http=None): | |
| 1271 """Execute all the requests as a single batched HTTP request. | |
| 1272 | |
| 1273 Args: | |
| 1274 http: httplib2.Http, an http object to be used in place of the one the | |
| 1275 HttpRequest request object was constructed with. If one isn't supplied | |
| 1276 then use a http object from the requests in this batch. | |
| 1277 | |
| 1278 Returns: | |
| 1279 None | |
| 1280 | |
| 1281 Raises: | |
| 1282 httplib2.HttpLib2Error if a transport error has occurred. | |
| 1283 googleapiclient.errors.BatchError if the response is the wrong format. | |
| 1284 """ | |
| 1285 | |
| 1286 # If http is not supplied use the first valid one given in the requests. | |
| 1287 if http is None: | |
| 1288 for request_id in self._order: | |
| 1289 request = self._requests[request_id] | |
| 1290 if request is not None: | |
| 1291 http = request.http | |
| 1292 break | |
| 1293 | |
| 1294 if http is None: | |
| 1295 raise ValueError("Missing a valid http object.") | |
| 1296 | |
| 1297 self._execute(http, self._order, self._requests) | |
| 1298 | |
| 1299 # Loop over all the requests and check for 401s. For each 401 request the | |
| 1300 # credentials should be refreshed and then sent again in a separate batch. | |
| 1301 redo_requests = {} | |
| 1302 redo_order = [] | |
| 1303 | |
| 1304 for request_id in self._order: | |
| 1305 resp, content = self._responses[request_id] | |
| 1306 if resp['status'] == '401': | |
| 1307 redo_order.append(request_id) | |
| 1308 request = self._requests[request_id] | |
| 1309 self._refresh_and_apply_credentials(request, http) | |
| 1310 redo_requests[request_id] = request | |
| 1311 | |
| 1312 if redo_requests: | |
| 1313 self._execute(http, redo_order, redo_requests) | |
| 1314 | |
| 1315 # Now process all callbacks that are erroring, and raise an exception for | |
| 1316 # ones that return a non-2xx response? Or add extra parameter to callback | |
| 1317 # that contains an HttpError? | |
| 1318 | |
| 1319 for request_id in self._order: | |
| 1320 resp, content = self._responses[request_id] | |
| 1321 | |
| 1322 request = self._requests[request_id] | |
| 1323 callback = self._callbacks[request_id] | |
| 1324 | |
| 1325 response = None | |
| 1326 exception = None | |
| 1327 try: | |
| 1328 if resp.status >= 300: | |
| 1329 raise HttpError(resp, content, uri=request.uri) | |
| 1330 response = request.postproc(resp, content) | |
| 1331 except HttpError, e: | |
| 1332 exception = e | |
| 1333 | |
| 1334 if callback is not None: | |
| 1335 callback(request_id, response, exception) | |
| 1336 if self._callback is not None: | |
| 1337 self._callback(request_id, response, exception) | |
| 1338 | |
| 1339 | |
| 1340 class HttpRequestMock(object): | |
| 1341 """Mock of HttpRequest. | |
| 1342 | |
| 1343 Do not construct directly, instead use RequestMockBuilder. | |
| 1344 """ | |
| 1345 | |
| 1346 def __init__(self, resp, content, postproc): | |
| 1347 """Constructor for HttpRequestMock | |
| 1348 | |
| 1349 Args: | |
| 1350 resp: httplib2.Response, the response to emulate coming from the request | |
| 1351 content: string, the response body | |
| 1352 postproc: callable, the post processing function usually supplied by | |
| 1353 the model class. See model.JsonModel.response() as an example. | |
| 1354 """ | |
| 1355 self.resp = resp | |
| 1356 self.content = content | |
| 1357 self.postproc = postproc | |
| 1358 if resp is None: | |
| 1359 self.resp = httplib2.Response({'status': 200, 'reason': 'OK'}) | |
| 1360 if 'reason' in self.resp: | |
| 1361 self.resp.reason = self.resp['reason'] | |
| 1362 | |
| 1363 def execute(self, http=None): | |
| 1364 """Execute the request. | |
| 1365 | |
| 1366 Same behavior as HttpRequest.execute(), but the response is | |
| 1367 mocked and not really from an HTTP request/response. | |
| 1368 """ | |
| 1369 return self.postproc(self.resp, self.content) | |
| 1370 | |
| 1371 | |
| 1372 class RequestMockBuilder(object): | |
| 1373 """A simple mock of HttpRequest | |
| 1374 | |
| 1375 Pass in a dictionary to the constructor that maps request methodIds to | |
| 1376 tuples of (httplib2.Response, content, opt_expected_body) that should be | |
| 1377 returned when that method is called. None may also be passed in for the | |
| 1378 httplib2.Response, in which case a 200 OK response will be generated. | |
| 1379 If an opt_expected_body (str or dict) is provided, it will be compared to | |
| 1380 the body and UnexpectedBodyError will be raised on inequality. | |
| 1381 | |
| 1382 Example: | |
| 1383 response = '{"data": {"id": "tag:google.c...' | |
| 1384 requestBuilder = RequestMockBuilder( | |
| 1385 { | |
| 1386 'plus.activities.get': (None, response), | |
| 1387 } | |
| 1388 ) | |
| 1389 googleapiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder) | |
| 1390 | |
| 1391 Methods that you do not supply a response for will return a | |
| 1392 200 OK with an empty string as the response content or raise an exception | |
| 1393 if check_unexpected is set to True. The methodId is taken from the rpcName | |
| 1394 in the discovery document. | |
| 1395 | |
| 1396 For more details see the project wiki. | |
| 1397 """ | |
| 1398 | |
| 1399 def __init__(self, responses, check_unexpected=False): | |
| 1400 """Constructor for RequestMockBuilder | |
| 1401 | |
| 1402 The constructed object should be a callable object | |
| 1403 that can replace the class HttpRequest. | |
| 1404 | |
| 1405 responses - A dictionary that maps methodIds into tuples | |
| 1406 of (httplib2.Response, content). The methodId | |
| 1407 comes from the 'rpcName' field in the discovery | |
| 1408 document. | |
| 1409 check_unexpected - A boolean setting whether or not UnexpectedMethodError | |
| 1410 should be raised on unsupplied method. | |
| 1411 """ | |
| 1412 self.responses = responses | |
| 1413 self.check_unexpected = check_unexpected | |
| 1414 | |
| 1415 def __call__(self, http, postproc, uri, method='GET', body=None, | |
| 1416 headers=None, methodId=None, resumable=None): | |
| 1417 """Implements the callable interface that discovery.build() expects | |
| 1418 of requestBuilder, which is to build an object compatible with | |
| 1419 HttpRequest.execute(). See that method for the description of the | |
| 1420 parameters and the expected response. | |
| 1421 """ | |
| 1422 if methodId in self.responses: | |
| 1423 response = self.responses[methodId] | |
| 1424 resp, content = response[:2] | |
| 1425 if len(response) > 2: | |
| 1426 # Test the body against the supplied expected_body. | |
| 1427 expected_body = response[2] | |
| 1428 if bool(expected_body) != bool(body): | |
| 1429 # Not expecting a body and provided one | |
| 1430 # or expecting a body and not provided one. | |
| 1431 raise UnexpectedBodyError(expected_body, body) | |
| 1432 if isinstance(expected_body, str): | |
| 1433 expected_body = json.loads(expected_body) | |
| 1434 body = json.loads(body) | |
| 1435 if body != expected_body: | |
| 1436 raise UnexpectedBodyError(expected_body, body) | |
| 1437 return HttpRequestMock(resp, content, postproc) | |
| 1438 elif self.check_unexpected: | |
| 1439 raise UnexpectedMethodError(methodId=methodId) | |
| 1440 else: | |
| 1441 model = JsonModel(False) | |
| 1442 return HttpRequestMock(None, '{}', model.response) | |
| 1443 | |
| 1444 | |
| 1445 class HttpMock(object): | |
| 1446 """Mock of httplib2.Http""" | |
| 1447 | |
| 1448 def __init__(self, filename=None, headers=None): | |
| 1449 """ | |
| 1450 Args: | |
| 1451 filename: string, absolute filename to read response from | |
| 1452 headers: dict, headers to return with the response | |
| 1453 """ | |
| 1454 if headers is None: | |
| 1455 headers = {'status': '200 OK'} | |
| 1456 if filename: | |
| 1457 f = file(filename, 'r') | |
| 1458 self.data = f.read() | |
| 1459 f.close() | |
| 1460 else: | |
| 1461 self.data = None | |
| 1462 self.response_headers = headers | |
| 1463 self.headers = None | |
| 1464 self.uri = None | |
| 1465 self.method = None | |
| 1466 self.body = None | |
| 1467 self.headers = None | |
| 1468 | |
| 1469 | |
| 1470 def request(self, uri, | |
| 1471 method='GET', | |
| 1472 body=None, | |
| 1473 headers=None, | |
| 1474 redirections=1, | |
| 1475 connection_type=None): | |
| 1476 self.uri = uri | |
| 1477 self.method = method | |
| 1478 self.body = body | |
| 1479 self.headers = headers | |
| 1480 return httplib2.Response(self.response_headers), self.data | |
| 1481 | |
| 1482 | |
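| # Illustrative sketch (hypothetical, not part of the library API): HttpMock is | |
| # typically used in tests to serve a canned discovery document to | |
| # discovery.build(). 'plus.json' is a made-up file holding a saved discovery | |
| # response. | |
| def _example_build_service_from_mock(): | |
|   # Imported inside the function so the example adds no module-level imports. | |
|   from googleapiclient.discovery import build | |
| | |
|   http = HttpMock('plus.json', {'status': '200'}) | |
|   return build('plus', 'v1', http=http) | |
| | |
| | |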
| 1483 class HttpMockSequence(object): | |
| 1484 """Mock of httplib2.Http | |
| 1485 | |
| 1486 Mocks a sequence of calls to request returning different responses for each | |
| 1487 call. Create an instance initialized with the desired response headers | |
| 1488 and content and then use as if an httplib2.Http instance. | |
| 1489 | |
| 1490 http = HttpMockSequence([ | |
| 1491 ({'status': '401'}, ''), | |
| 1492 ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'), | |
| 1493 ({'status': '200'}, 'echo_request_headers'), | |
| 1494 ]) | |
| 1495 resp, content = http.request("http://examples.com") | |
| 1496 | |
| 1497 There are special values you can pass in for content to trigger | |
| 1498 behaviours that are helpful in testing. | |
| 1499 | |
| 1500 'echo_request_headers' means return the request headers in the response body | |
| 1501 'echo_request_headers_as_json' means return the request headers, encoded | |
| 1502 as JSON, in the response body | |
| 1503 'echo_request_body' means return the request body in the response body | |
| 1504 'echo_request_uri' means return the request uri in the response body | |
| 1505 """ | |
| 1506 | |
| 1507 def __init__(self, iterable): | |
| 1508 """ | |
| 1509 Args: | |
| 1510 iterable: iterable, a sequence of pairs of (headers, body) | |
| 1511 """ | |
| 1512 self._iterable = iterable | |
| 1513 self.follow_redirects = True | |
| 1514 | |
| 1515 def request(self, uri, | |
| 1516 method='GET', | |
| 1517 body=None, | |
| 1518 headers=None, | |
| 1519 redirections=1, | |
| 1520 connection_type=None): | |
| 1521 resp, content = self._iterable.pop(0) | |
| 1522 if content == 'echo_request_headers': | |
| 1523 content = headers | |
| 1524 elif content == 'echo_request_headers_as_json': | |
| 1525 content = json.dumps(headers) | |
| 1526 elif content == 'echo_request_body': | |
| 1527 if hasattr(body, 'read'): | |
| 1528 content = body.read() | |
| 1529 else: | |
| 1530 content = body | |
| 1531 elif content == 'echo_request_uri': | |
| 1532 content = uri | |
| 1533 return httplib2.Response(resp), content | |
| 1534 | |
| 1535 | |
| 1536 def set_user_agent(http, user_agent): | |
| 1537 """Set the user-agent on every request. | |
| 1538 | |
| 1539 Args: | |
| 1540 http - An instance of httplib2.Http | |
| 1541 or something that acts like it. | |
| 1542 user_agent: string, the value for the user-agent header. | |
| 1543 | |
| 1544 Returns: | |
| 1545 A modified instance of http that was passed in. | |
| 1546 | |
| 1547 Example: | |
| 1548 | |
| 1549 h = httplib2.Http() | |
| 1550 h = set_user_agent(h, "my-app-name/6.0") | |
| 1551 | |
| 1552 Most of the time the user-agent will be set when doing auth; this is for the | |
| 1553 rare cases where you are accessing an unauthenticated endpoint. | |
| 1554 """ | |
| 1555 request_orig = http.request | |
| 1556 | |
| 1557 # The closure that will replace 'httplib2.Http.request'. | |
| 1558 def new_request(uri, method='GET', body=None, headers=None, | |
| 1559 redirections=httplib2.DEFAULT_MAX_REDIRECTS, | |
| 1560 connection_type=None): | |
| 1561 """Modify the request headers to add the user-agent.""" | |
| 1562 if headers is None: | |
| 1563 headers = {} | |
| 1564 if 'user-agent' in headers: | |
| 1565 headers['user-agent'] = user_agent + ' ' + headers['user-agent'] | |
| 1566 else: | |
| 1567 headers['user-agent'] = user_agent | |
| 1568 resp, content = request_orig(uri, method, body, headers, | |
| 1569 redirections, connection_type) | |
| 1570 return resp, content | |
| 1571 | |
| 1572 http.request = new_request | |
| 1573 return http | |
| 1574 | |
| 1575 | |
| 1576 def tunnel_patch(http): | |
| 1577 """Tunnel PATCH requests over POST. | |
| 1578 Args: | |
| 1579 http - An instance of httplib2.Http | |
| 1580 or something that acts like it. | |
| 1581 | |
| 1582 Returns: | |
| 1583 A modified instance of http that was passed in. | |
| 1584 | |
| 1585 Example: | |
| 1586 | |
| 1587 h = httplib2.Http() | |
| 1588 h = tunnel_patch(h) | |
| 1589 | |
| 1590 Useful if you are running on a platform that doesn't support PATCH. | |
| 1591 Apply this last if you are using OAuth 1.0, as changing the method | |
| 1592 will result in a different signature. | |
| 1593 """ | |
| 1594 request_orig = http.request | |
| 1595 | |
| 1596 # The closure that will replace 'httplib2.Http.request'. | |
| 1597 def new_request(uri, method='GET', body=None, headers=None, | |
| 1598 redirections=httplib2.DEFAULT_MAX_REDIRECTS, | |
| 1599 connection_type=None): | |
| 1600 """Modify the request headers to add the user-agent.""" | |
| 1601 if headers is None: | |
| 1602 headers = {} | |
| 1603 if method == 'PATCH': | |
| 1604 if 'oauth_token' in headers.get('authorization', ''): | |
| 1605 logging.warning( | |
| 1606 'OAuth 1.0 request made with Credentials after tunnel_patch.') | |
| 1607 headers['x-http-method-override'] = "PATCH" | |
| 1608 method = 'POST' | |
| 1609 resp, content = request_orig(uri, method, body, headers, | |
| 1610 redirections, connection_type) | |
| 1611 return resp, content | |
| 1612 | |
| 1613 http.request = new_request | |
| 1614 return http | |