Index: content/browser/download/download_request_model.cc |
diff --git a/content/browser/download/download_request_model.cc b/content/browser/download/download_request_model.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..98f5b76b112085e142e33f5cb58848f6a3f4f97a |
--- /dev/null |
+++ b/content/browser/download/download_request_model.cc |
@@ -0,0 +1,443 @@ |
+// Copyright 2013 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
+#include "content/browser/download/download_request_model.h" |
+ |
+#include "base/bind.h" |
+#include "base/callback.h" |
+#include "base/format_macros.h" |
+#include "base/logging.h" |
+#include "base/message_loop/message_loop_proxy.h" |
+#include "base/metrics/histogram.h" |
+#include "base/strings/stringprintf.h" |
+#include "base/strings/stringprintf.h" |
+#include "content/browser/byte_stream.h" |
+#include "content/browser/child_process_security_policy_impl.h" |
+#include "content/browser/download/download_create_info.h" |
+#include "content/browser/download/download_interrupt_reasons_impl.h" |
+#include "content/browser/download/download_resource_handler.h" |
+#include "content/browser/download/download_stats.h" |
+#include "content/browser/fileapi/chrome_blob_storage_context.h" |
+#include "content/browser/net/referrer.h" |
+#include "content/browser/resource_context_impl.h" |
+#include "content/public/browser/browser_thread.h" |
+#include "content/public/browser/download_url_parameters.h" |
+#include "content/public/browser/resource_context.h" |
+#include "content/public/common/url_constants.h" |
+#include "net/base/io_buffer.h" |
+#include "net/base/load_flags.h" |
+#include "net/base/upload_bytes_element_reader.h" |
+#include "net/base/upload_data_stream.h" |
+#include "net/http/http_response_headers.h" |
+#include "net/http/http_status_code.h" |
+#include "net/url_request/url_request.h" |
+#include "net/url_request/url_request_context.h" |
+#include "net/url_request/url_request_job_factory.h" |
+#include "webkit/browser/blob/blob_storage_context.h" |
+#include "webkit/browser/blob/blob_url_request_job_factory.h" |
+ |
+namespace content { |
+ |
+// Capacity (bytes) of the ByteStream buffering data between the network read
+// side (IO thread) and the download file writer (FILE thread).
+const int DownloadRequestModel::kDownloadByteStreamSize = 100 * 1024;
+ |
+// Wraps |request| (borrowed, not owned — assumed to outlive this object;
+// ownership is not visible here) and takes ownership of |save_info|, which is
+// later moved into the DownloadCreateInfo built by OnResponseStarted().
+DownloadRequestModel::DownloadRequestModel(
+    net::URLRequest* request,
+    scoped_ptr<DownloadSaveInfo> save_info)
+    : request_(request),
+      save_info_(save_info.Pass()),
+      last_buffer_size_(0),
+      bytes_read_(0),
+      pause_count_(0),
+      last_interrupt_reason_(DOWNLOAD_INTERRUPT_REASON_NONE) {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  // Both are required parameters.
+  DCHECK(request_);
+  DCHECK(save_info_);
+  RecordDownloadCount(UNTHROTTLED_COUNT);
+}
+ |
+// Nothing to do explicitly: smart-pointer members (save_info_, stream_writer_,
+// read_buffer_) release their resources automatically.
+DownloadRequestModel::~DownloadRequestModel() {}
+ |
+// static
+// Creates and configures a download URLRequest from |parameters|. Returns an
+// empty scoped_ptr when the originating child process may not request the URL
+// or when no request context / protocol handler can service it.
+// |request_delegate| becomes the request's network-event delegate.
+scoped_ptr<net::URLRequest> DownloadRequestModel::CreateRequest(
+    const DownloadUrlParameters& parameters,
+    net::URLRequest::Delegate* request_delegate) {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+
+  scoped_ptr<net::URLRequest> request;
+
+  if (!IsRequestAllowed(parameters.url(), parameters.render_process_host_id()))
+    return request.Pass();
+
+  // TODO(asanka): This method of getting the URLRequestContext is deprecated.
+  // Fix this to use the correct StoragePartition.
+  net::URLRequestContext* request_context =
+      parameters.resource_context()->GetRequestContext();
+  if (!request_context ||
+      !request_context->job_factory()->IsHandledURL(parameters.url()))
+    return request.Pass();
+
+  request.reset(
+      request_context->CreateRequest(parameters.url(), request_delegate));
+
+  SetReferrerForRequest(request.get(), parameters.referrer());
+  // blob: URLs need their blob data handle attached to the request up front so
+  // the blob protocol handler can resolve it.
+  if (parameters.url().SchemeIs(chrome::kBlobScheme)) {
+    ChromeBlobStorageContext* blob_context =
+        GetChromeBlobStorageContextForResourceContext(
+            parameters.resource_context());
+    webkit_blob::BlobProtocolHandler::SetRequestedBlobDataHandle(
+        request.get(),
+        blob_context->context()->GetBlobDataFromPublicURL(request->url()));
+  }
+
+  int extra_load_flags = net::LOAD_IS_DOWNLOAD;
+  // Cache policy: with prefer_cache, a request carrying an upload body may
+  // only be served from the cache (a POST is never silently re-sent over the
+  // network; see the post_id block below); otherwise the cache is merely
+  // preferred. Without prefer_cache, bypass the cache entirely so the download
+  // reflects fresh server content.
+  if (parameters.prefer_cache()) {
+    if (request->get_upload() != NULL)
+      extra_load_flags |= net::LOAD_ONLY_FROM_CACHE;
+    else
+      extra_load_flags |= net::LOAD_PREFERRING_CACHE;
+  } else {
+    extra_load_flags |= net::LOAD_DISABLE_CACHE;
+  }
+  request->set_load_flags(request->load_flags() | parameters.load_flags() |
+                          extra_load_flags);
+  request->set_method(parameters.method());
+  if (!parameters.post_body().empty()) {
+    const std::string& body = parameters.post_body();
+    scoped_ptr<net::UploadElementReader> reader(
+        net::UploadOwnedBytesElementReader::CreateWithString(body));
+    request->set_upload(make_scoped_ptr(
+        net::UploadDataStream::CreateWithReader(reader.Pass(), 0)));
+  }
+  if (parameters.post_id() >= 0) {
+    // The POST in this case does not have an actual body, and only works when
+    // retrieving data from cache. This is done because we don't want to do a
+    // re-POST without user consent, and currently don't have a good plan on how
+    // to display the UI for that.
+    DCHECK(parameters.prefer_cache());
+    DCHECK(parameters.method() == "POST");
+    ScopedVector<net::UploadElementReader> element_readers;
+    request->set_upload(make_scoped_ptr(
+        new net::UploadDataStream(&element_readers, parameters.post_id())));
+  }
+
+  if (parameters.offset() > 0) {
+    // If we've asked for a byte range, we want to make sure that we only get
+    // that range if our current copy of the information is good.
+    std::string range =
+        base::StringPrintf("bytes=%" PRId64 "-", parameters.offset());
+    request->SetExtraRequestHeaderByName("Range", range, true);
+
+    // We shouldn't be asked to continue if we don't have a verifier.
+    DCHECK(!parameters.last_modified().empty() || !parameters.etag().empty());
+    if (!parameters.last_modified().empty())
+      request->SetExtraRequestHeaderByName(
+          "If-Unmodified-Since", parameters.last_modified(), true);
+    if (!parameters.etag().empty())
+      request->SetExtraRequestHeaderByName("If-Match", parameters.etag(), true);
+  }
+
+  // Caller-supplied headers never overwrite the headers set above.
+  for (DownloadUrlParameters::RequestHeadersType::const_iterator iter =
+           parameters.request_headers_begin();
+       iter != parameters.request_headers_end();
+       ++iter) {
+    request->SetExtraRequestHeaderByName(
+        iter->first, iter->second, false /*overwrite*/);
+  }
+  return request.Pass();
+}
+ |
+// static
+// Answers whether child process |child_process_id| is permitted to request
+// |url|, per the browser-side child process security policy.
+bool DownloadRequestModel::IsRequestAllowed(const GURL& url,
+                                            int child_process_id) {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  ChildProcessSecurityPolicyImpl* security_policy =
+      ChildProcessSecurityPolicyImpl::GetInstance();
+  return security_policy->CanRequestURL(child_process_id, url);
+}
+ |
+// Called when the underlying request is redirected to |url|.
+void DownloadRequestModel::OnRequestRedirected(const GURL& url) {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  // We treat a download as a main frame load, and thus update the policy URL
+  // on redirects.
+  request_->set_first_party_for_cookies(url);
+}
+ |
+// Called once response headers are available. Builds the DownloadCreateInfo
+// handed to the download subsystem (consuming save_info_) and creates the
+// IO->FILE ByteStream pair that will carry the response body;
+// |resume_callback| is registered on the write end, presumably so the request
+// can be unpaused once stream space frees up (see OnReadCompleted()).
+// |sniffed_mime_type| takes precedence over the header Content-Type when
+// non-empty.
+scoped_ptr<DownloadCreateInfo> DownloadRequestModel::OnResponseStarted(
+    const std::string& sniffed_mime_type,
+    bool has_user_gesture,
+    PageTransition page_transition,
+    const ResumeRequestCallback& resume_callback) {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  download_start_time_ = base::TimeTicks::Now();
+  DVLOG(20) << DebugString() << " OnResponseStarted";
+
+  // If it's a download, we don't want to poison the cache with it.
+  request_->StopCaching();
+
+  // Lower priority as well, so downloads don't contend for resources
+  // with main frames.
+  request_->SetPriority(net::IDLE);
+
+  // If the content-length header is not present (or contains something other
+  // than numbers), the incoming content_length is -1 (unknown size).
+  // Set the content length to 0 to indicate unknown size to DownloadManager.
+  int64 content_length = request_->GetExpectedContentSize();
+  if (content_length < 0)
+    content_length = 0;
+
+  // Deleted in DownloadManager.
+  scoped_ptr<DownloadCreateInfo> info(
+      new DownloadCreateInfo(base::Time::Now(),
+                             content_length,
+                             request_->net_log(),
+                             has_user_gesture,
+                             page_transition,
+                             save_info_.Pass()));
+
+  // Create the ByteStream for sending data to the download sink.
+  CreateByteStream(
+      base::MessageLoopProxy::current(),
+      BrowserThread::GetMessageLoopProxyForThread(BrowserThread::FILE),
+      kDownloadByteStreamSize,
+      &stream_writer_,
+      &info->stream_reader);
+  stream_writer_->RegisterCallback(resume_callback);
+
+  info->url_chain = request_->url_chain();
+  info->referrer_url = GURL(request_->referrer());
+  info->mime_type = sniffed_mime_type;
+  info->remote_address = request_->GetSocketAddress().host();
+  request_->GetResponseHeaderByName("content-disposition",
+                                    &info->content_disposition);
+  RecordDownloadMimeType(info->mime_type);
+  RecordDownloadContentDisposition(info->content_disposition);
+
+  const net::HttpResponseHeaders* headers = request_->response_headers();
+  if (headers) {
+    if (headers->HasStrongValidators()) {
+      // Per RFC 2616 section 13.3.3 only strong validators may be used for
+      // range requests, so Last-Modified/ETag are recorded only when strong;
+      // weak validators are neither stored nor used.
+      headers->EnumerateHeader(NULL, "Last-Modified", &info->last_modified);
+      headers->EnumerateHeader(NULL, "ETag", &info->etag);
+    }
+
+    int status = headers->response_code();
+    if (2 == status / 100 && status != net::HTTP_PARTIAL_CONTENT) {
+      // Success & not range response; if we asked for a range, we didn't
+      // get it--reset the file pointers to reflect that.
+      info->save_info->offset = 0;
+      info->save_info->hash_state = "";
+    }
+
+    headers->GetMimeType(&info->original_mime_type);
+    // Fall back to the header mime type when content sniffing produced none.
+    if (info->mime_type.empty())
+      info->mime_type = info->original_mime_type;
+  }
+
+  return info.Pass();
+}
+ |
+// Supplies the buffer for the next network read. A negative |min_size| means
+// "no minimum", in which case the default kReadBufSize is used. Only one read
+// buffer may be outstanding at a time.
+void DownloadRequestModel::OnWillRead(net::IOBuffer** buf,
+                                      int* buf_size,
+                                      int min_size) {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  DCHECK(buf && buf_size);
+  DCHECK(!read_buffer_.get());
+
+  int size = (min_size < 0) ? kReadBufSize : min_size;
+  read_buffer_ = new net::IOBuffer(size);
+  last_buffer_size_ = size;
+  *buf_size = size;
+  *buf = read_buffer_.get();
+}
+ |
+// Forwards |bytes_read| bytes from the read buffer into the ByteStream and
+// records bandwidth stats. If the stream is full, bumps the pause count so
+// read_state() reports WAIT_FOR_RESUME; the callback registered on the stream
+// in OnResponseStarted() is expected to resume the request later.
+DownloadRequestModel::ReadState DownloadRequestModel::OnReadCompleted(
+    int bytes_read) {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  DCHECK(read_buffer_.get());
+  DCHECK(last_stream_pause_time_.is_null());
+  DVLOG(20) << DebugString() << " OnReadCompleted bytes_read=" << bytes_read;
+
+  base::TimeTicks now(base::TimeTicks::Now());
+  if (!last_read_time_.is_null()) {
+    double seconds_since_last_read = (now - last_read_time_).InSecondsF();
+    if (now == last_read_time_)
+      // Use 10 microseconds as a "very small number" so that we avoid a
+      // divide-by-zero error and still record a very high potential bandwidth.
+      seconds_since_last_read = 0.00001;
+
+    double actual_bandwidth = (bytes_read) / seconds_since_last_read;
+    double potential_bandwidth = last_buffer_size_ / seconds_since_last_read;
+    RecordBandwidth(actual_bandwidth, potential_bandwidth);
+  }
+  last_read_time_ = now;
+
+  // A zero-byte read means end-of-stream; nothing to forward.
+  if (!bytes_read)
+    return read_state();
+
+  bytes_read_ += bytes_read;
+  DCHECK(read_buffer_.get());
+
+  // Take the data and ship it down the stream. If the stream is full, pause
+  // the request; the stream callback will resume it.
+  if (!stream_writer_->Write(read_buffer_, bytes_read)) {
+    OnPauseRequest();
+    last_stream_pause_time_ = now;
+  }
+
+  read_buffer_ = NULL;  // Drop our reference.
+  return read_state();
+}
+ |
+// Maps the final URLRequestStatus and HTTP response code to a
+// DownloadInterruptReason, records UMA stats, and closes the ByteStream with
+// that reason. Returns DOWNLOAD_INTERRUPT_REASON_NONE for a successful
+// download. An interrupt reason previously set via OnDownloadInterrupted()
+// takes precedence over the network-derived one.
+DownloadInterruptReason DownloadRequestModel::OnResponseCompleted() {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  const net::URLRequestStatus& status = request_->status();
+  int response_code = status.is_success() ? request_->GetResponseCode() : 0;
+  net::Error error_code = net::OK;
+  if (status.status() == net::URLRequestStatus::FAILED ||
+      // Note cancels as failures too.
+      status.status() == net::URLRequestStatus::CANCELED) {
+    error_code = static_cast<net::Error>(status.error());  // Normal case.
+    // Make sure that at least the fact of failure comes through.
+    if (error_code == net::OK)
+      error_code = net::ERR_FAILED;
+  }
+
+  // ERR_CONTENT_LENGTH_MISMATCH and ERR_INCOMPLETE_CHUNKED_ENCODING are
+  // allowed since a number of servers in the wild close the connection too
+  // early by mistake. Other browsers - IE9, Firefox 11.0, and Safari 5.1.4 -
+  // treat downloads as complete in both cases, so we follow their lead.
+  if (error_code == net::ERR_CONTENT_LENGTH_MISMATCH ||
+      error_code == net::ERR_INCOMPLETE_CHUNKED_ENCODING) {
+    error_code = net::OK;
+  }
+  DownloadInterruptReason reason = ConvertNetErrorToInterruptReason(
+      error_code, DOWNLOAD_INTERRUPT_FROM_NETWORK);
+
+  if (status.status() == net::URLRequestStatus::CANCELED &&
+      status.error() == net::ERR_ABORTED) {
+    // CANCELED + ERR_ABORTED == something outside of the network
+    // stack cancelled the request.  There aren't that many things that
+    // could do this to a download request (whose lifetime is separated from
+    // the tab from which it came).  We map this to USER_CANCELLED as the
+    // case we know about (system suspend because of laptop close) corresponds
+    // to a user action.
+    // TODO(ahendrickson) -- Find a better set of codes to use here, as
+    // CANCELED/ERR_ABORTED can occur for reasons other than user cancel.
+    reason = DOWNLOAD_INTERRUPT_REASON_USER_CANCELED;
+  }
+
+  // If an interrupt reason was set for this request by OnDownloadInterrupted,
+  // then use that instead.
+  if (last_interrupt_reason_ != DOWNLOAD_INTERRUPT_REASON_NONE)
+    reason = last_interrupt_reason_;
+
+  if (status.is_success() && reason == DOWNLOAD_INTERRUPT_REASON_NONE &&
+      request_->response_headers()) {
+    // Handle server's response codes.
+    switch (response_code) {
+      case -1:  // Non-HTTP request.
+      case net::HTTP_OK:
+      case net::HTTP_CREATED:
+      case net::HTTP_ACCEPTED:
+      case net::HTTP_NON_AUTHORITATIVE_INFORMATION:
+      case net::HTTP_RESET_CONTENT:
+      case net::HTTP_PARTIAL_CONTENT:
+        // Expected successful codes.
+        break;
+      case net::HTTP_NO_CONTENT:
+      case net::HTTP_NOT_FOUND:
+        reason = DOWNLOAD_INTERRUPT_REASON_SERVER_BAD_CONTENT;
+        break;
+      case net::HTTP_PRECONDITION_FAILED:
+        // Failed our 'If-Unmodified-Since' or 'If-Match'; see
+        // download_manager_impl.cc BeginDownload()
+        reason = DOWNLOAD_INTERRUPT_REASON_SERVER_PRECONDITION;
+        break;
+      case net::HTTP_REQUESTED_RANGE_NOT_SATISFIABLE:
+        // Retry by downloading from the start automatically:
+        // If we haven't received data when we get this error, we won't.
+        reason = DOWNLOAD_INTERRUPT_REASON_SERVER_NO_RANGE;
+        break;
+      default:  // All other errors.
+        // Redirection and informational codes should have been handled earlier
+        // in the stack.
+        DCHECK_NE(3, response_code / 100);
+        DCHECK_NE(1, response_code / 100);
+        reason = DOWNLOAD_INTERRUPT_REASON_SERVER_FAILED;
+        break;
+    }
+  }
+
+  std::string accept_ranges;
+  bool has_strong_validators = false;
+  if (request_->response_headers()) {
+    request_->response_headers()->EnumerateHeader(
+        NULL, "Accept-Ranges", &accept_ranges);
+    has_strong_validators = request_->response_headers()->HasStrongValidators();
+  }
+  RecordAcceptsRanges(accept_ranges, bytes_read_, has_strong_validators);
+  RecordNetworkBlockage(base::TimeTicks::Now() - download_start_time_,
+                        total_pause_time_);
+
+  // Send the info down the stream.  Conditional is in case we get
+  // OnResponseCompleted without OnResponseStarted.
+  if (stream_writer_)
+    stream_writer_->Close(reason);
+
+  // If the error mapped to something unknown, record it so that
+  // we can drill down.
+  if (reason == DOWNLOAD_INTERRUPT_REASON_NETWORK_FAILED)
+    RecordNetErrorForNetworkFailed(status.error());
+
+  stream_writer_.reset();  // We no longer need the stream.
+  read_buffer_ = NULL;
+
+  // TODO(asanka): Does this UMA make sense at all? Remove if not or fix it.
+  // This UMA used to measure the lifetime of a DownloadResourceHandler object.
+  if (reason == DOWNLOAD_INTERRUPT_REASON_NONE) {
+    UMA_HISTOGRAM_TIMES("SB2.DownloadDuration",
+                        base::TimeTicks::Now() - download_start_time_);
+  }
+  DVLOG(20) << DebugString() << " OnResponseCompleted reason="
+            << DownloadInterruptReasonToString(reason);
+  return reason;
+}
+ |
+// Registers one pause; read_state() reports WAIT_FOR_RESUME while any pauses
+// are outstanding. Each call must be balanced by an OnResumeRequest().
+void DownloadRequestModel::OnPauseRequest() {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  ++pause_count_;
+}
+ |
+// Releases one pause and returns the resulting read state. When the final
+// pause is released after a stream-full pause (last_stream_pause_time_ set in
+// OnReadCompleted()), the paused interval is added to total_pause_time_ for
+// the blockage histogram recorded in OnResponseCompleted().
+DownloadRequestModel::ReadState DownloadRequestModel::OnResumeRequest() {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  DCHECK_LT(0, pause_count_);
+
+  --pause_count_;
+
+  if (pause_count_ == 0 && !last_stream_pause_time_.is_null()) {
+    total_pause_time_ += (base::TimeTicks::Now() - last_stream_pause_time_);
+    last_stream_pause_time_ = base::TimeTicks();
+  }
+  return read_state();
+}
+ |
+// Records an externally-imposed interrupt reason — it overrides the
+// network-derived reason in OnResponseCompleted() — and closes the stream
+// with it if one is open.
+void DownloadRequestModel::OnDownloadInterrupted(
+    DownloadInterruptReason interrupt_reason) {
+  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  last_interrupt_reason_ = interrupt_reason;
+  if (stream_writer_) {
+    stream_writer_->Close(interrupt_reason);
+    stream_writer_.reset();
+  }
+}
+ |
+// Short human-readable description of this request, for logging.
+std::string DownloadRequestModel::DebugString() const {
+  std::string description("{ url = ");
+  description += request_->url().spec();
+  description += " }";
+  return description;
+}
+ |
+// WAIT_FOR_RESUME while any pause is outstanding, READY_TO_READ otherwise.
+DownloadRequestModel::ReadState DownloadRequestModel::read_state() const {
+  if (pause_count_ > 0)
+    return WAIT_FOR_RESUME;
+  return READY_TO_READ;
+}
+ |
+} // namespace content |