| Index: examples/geturl/geturl_handler.cc
|
| ===================================================================
|
| --- examples/geturl/geturl_handler.cc (revision 1387)
|
| +++ examples/geturl/geturl_handler.cc (working copy)
|
| @@ -28,12 +28,16 @@
|
| url_(url),
|
| url_request_(instance),
|
| url_loader_(instance),
|
| + buffer_(new char[READ_BUFFER_SIZE]),
|
| cc_factory_(this) {
|
| url_request_.SetURL(url);
|
| url_request_.SetMethod("GET");
|
| + url_request_.SetRecordDownloadProgress(true);
|
| }
|
|
|
| GetURLHandler::~GetURLHandler() {
|
| + delete [] buffer_;
|
| + buffer_ = NULL;
|
| }
|
|
|
| void GetURLHandler::Start() {
|
| @@ -51,6 +55,23 @@
|
| // check the HTTP code and potentially cancel the request.
|
| // pp::URLResponseInfo response = loader_.GetResponseInfo();
|
|
|
| + // Try to figure out how many bytes of data are going to be downloaded in
|
| + // order to allocate memory for the response body in advance (this will
|
| + // reduce heap traffic and also the amount of memory allocated).
|
| + // It is not a problem if this fails; it just means that the
|
| + // url_response_body_.insert() call in GetURLHandler::AppendDataBytes()
|
| + // will allocate the memory later on.
|
| + int64_t bytes_received = 0;
|
| + int64_t total_bytes_to_be_received = 0;
|
| + if (url_loader_.GetDownloadProgress(&bytes_received,
|
| + &total_bytes_to_be_received)) {
|
| + if (total_bytes_to_be_received > 0) {
|
| + url_response_body_.reserve(total_bytes_to_be_received);
|
| + }
|
| + }
|
| + // We will not use the download progress anymore, so just disable it.
|
| + url_request_.SetRecordDownloadProgress(false);
|
| +
|
| // Start streaming.
|
| ReadBody();
|
| }
|
| @@ -60,7 +81,9 @@
|
| return;
|
| // Make sure we don't get a buffer overrun.
|
| num_bytes = std::min(READ_BUFFER_SIZE, num_bytes);
|
| - url_response_body_.reserve(url_response_body_.size() + num_bytes);
|
| + // Note that we do *not* try to minimally increase the amount of allocated
|
| + // memory here by calling url_response_body_.reserve(). Doing so causes a
|
| + // lot of string reallocations that kill performance for large files.
|
| url_response_body_.insert(url_response_body_.end(),
|
| buffer,
|
| buffer + num_bytes);
|
| @@ -68,7 +91,10 @@
|
|
|
| void GetURLHandler::OnRead(int32_t result) {
|
| if (result == PP_OK) {
|
| - // Streaming the file is complete.
|
| + // Streaming the file is complete, delete the read buffer since it is
|
| + // no longer needed.
|
| + delete [] buffer_;
|
| + buffer_ = NULL;
|
| ReportResultAndDie(url_, url_response_body_, true);
|
| } else if (result > 0) {
|
| // The URLLoader just filled "result" number of bytes into our buffer.
|
| @@ -94,7 +120,7 @@
|
| cc_factory_.NewOptionalCallback(&GetURLHandler::OnRead);
|
| int32_t result = PP_OK;
|
| do {
|
| - result = url_loader_.ReadResponseBody(buffer_, sizeof(buffer_), cc);
|
| + result = url_loader_.ReadResponseBody(buffer_, READ_BUFFER_SIZE, cc);
|
| // Handle streaming data directly. Note that we *don't* want to call
|
| // OnRead here, since in the case of result > 0 it will schedule
|
| // another call to this function. If the network is very fast, we could
|
|
|