Index: chrome/renderer/page_load_histograms.cc
diff --git a/chrome/renderer/page_load_histograms.cc b/chrome/renderer/page_load_histograms.cc
index e78661819647d05ffb3791894bcdb2139ada5c3d..8adb91cf998c73e1f0d97af19d614f67718b7e0f 100644
--- a/chrome/renderer/page_load_histograms.cc
+++ b/chrome/renderer/page_load_histograms.cc
@@ -50,15 +50,15 @@ static URLPattern::SchemeMasks GetSupportedSchemeType(const GURL& url) {
 static void DumpWebTiming(const Time& navigation_start,
                           const Time& load_event_start,
                           const Time& load_event_end,
-                          content::NavigationState* navigation_state) {
+                          NavigationState::LoadTimes* load_times) {
   if (navigation_start.is_null() ||
       load_event_start.is_null() ||
       load_event_end.is_null())
     return;
 
-  if (navigation_state->web_timing_histograms_recorded())
+  if (load_times->web_timing_histograms_recorded())
     return;
-  navigation_state->set_web_timing_histograms_recorded(true);
+  load_times->set_web_timing_histograms_recorded(true);
 
   // TODO(tonyg): There are many new details we can record, add them after the
   // basic metrics are evaluated.
@@ -110,6 +110,7 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
 
   NavigationState* navigation_state =
       NavigationState::FromDataSource(frame->dataSource());
+  NavigationState::LoadTimes* load_times = navigation_state->load_times();
 
   // Times based on the Web Timing metrics.
   // http://www.w3.org/TR/navigation-timing/
@@ -120,17 +121,16 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
   Time navigation_start = Time::FromDoubleT(performance.navigationStart());
   Time load_event_start = Time::FromDoubleT(performance.loadEventStart());
   Time load_event_end = Time::FromDoubleT(performance.loadEventEnd());
-  DumpWebTiming(navigation_start, load_event_start, load_event_end,
-                navigation_state);
+  DumpWebTiming(navigation_start, load_event_start, load_event_end, load_times);
 
   // If we've already dumped, do nothing.
   // This simple bool works because we only dump for the main frame.
-  if (navigation_state->load_histograms_recorded())
+  if (load_times->load_histograms_recorded())
     return;
 
   // Collect measurement times.
-  Time start = navigation_state->start_load_time();
-  Time commit = navigation_state->commit_load_time();
+  Time start = load_times->start_load_time();
+  Time commit = load_times->commit_load_time();
 
   // TODO(tonyg, jar): Start can be missing after an in-document navigation and
   // possibly other cases like a very premature abandonment of the page.
@@ -146,11 +146,11 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
     return;
 
   // We properly handle null values for the next 3 variables.
-  Time request = navigation_state->request_time();
-  Time first_paint = navigation_state->first_paint_time();
-  Time first_paint_after_load = navigation_state->first_paint_after_load_time();
-  Time finish_doc = navigation_state->finish_document_load_time();
-  Time finish_all_loads = navigation_state->finish_load_time();
+  Time request = load_times->request_time();
+  Time first_paint = load_times->first_paint_time();
+  Time first_paint_after_load = load_times->first_paint_after_load_time();
+  Time finish_doc = load_times->finish_document_load_time();
+  Time finish_all_loads = load_times->finish_load_time();
 
   // TODO(tonyg, jar): We suspect a bug in abandonment counting, this temporary
   // histogram should help us to troubleshoot.
@@ -165,7 +165,7 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
   bool abandoned_page = finish_doc.is_null();
   if (abandoned_page) {
     finish_doc = Time::Now();
-    navigation_state->set_finish_document_load_time(finish_doc);
+    load_times->set_finish_document_load_time(finish_doc);
   }
 
   // TODO(jar): We should really discriminate the definition of "abandon" more
@@ -175,14 +175,14 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
 
   if (finish_all_loads.is_null()) {
     finish_all_loads = Time::Now();
-    navigation_state->set_finish_load_time(finish_all_loads);
+    load_times->set_finish_load_time(finish_all_loads);
   } else {
     DCHECK(!abandoned_page);  // How can the doc have finished but not the page?
    if (abandoned_page)
       return;  // Don't try to record a stat which is broken.
   }
 
-  navigation_state->set_load_histograms_recorded(true);
+  load_times->set_load_histograms_recorded(true);
 
   // Note: Client side redirects will have no request time.
   Time begin = request.is_null() ? start : request;
@@ -757,14 +757,14 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
   static bool in_http_trial = base::FieldTrialList::Find(
       "SpdyImpact")->group_name() == "npn_with_http";
 
-  bool spdy_trial_success = navigation_state->was_fetched_via_spdy() ?
+  bool spdy_trial_success = load_times->was_fetched_via_spdy() ?
       in_spdy_trial : in_http_trial;
   if (spdy_trial_success) {
     // Histograms to determine if SPDY has an impact for https traffic.
     // TODO(mbelshe): After we've seen the difference between BeginToFinish
     // and StartToFinish, consider removing one or the other.
     if (scheme_type == URLPattern::SCHEME_HTTPS &&
-        navigation_state->was_npn_negotiated()) {
+        load_times->was_npn_negotiated()) {
       UMA_HISTOGRAM_ENUMERATION(
           base::FieldTrial::MakeName("PLT.Abandoned", "SpdyImpact"),
           abandoned_page ? 1 : 0, 2);
@@ -799,8 +799,8 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
   // Histograms to compare the impact of alternate protocol over http
   // traffic: when spdy is used vs. when http is used.
   if (scheme_type == URLPattern::SCHEME_HTTP &&
-      navigation_state->was_alternate_protocol_available()) {
-    if (!navigation_state->was_npn_negotiated()) {
+      load_times->was_alternate_protocol_available()) {
+    if (!load_times->was_npn_negotiated()) {
       // This means that even if there are alternate protocols for npn_http or
       // npn_spdy, they are not taken (due to the base::FieldTrial).
       switch (load_type) {
@@ -823,7 +823,7 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
         default:
           break;
       }
-    } else if (navigation_state->was_fetched_via_spdy()) {
+    } else if (load_times->was_fetched_via_spdy()) {
       switch (load_type) {
         case NavigationState::LINK_LOAD_NORMAL:
           PLT_HISTOGRAM(
@@ -850,7 +850,7 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
   }
 
   // Record SpdyCwnd field trial results.
-  if (navigation_state->was_fetched_via_spdy()) {
+  if (load_times->was_fetched_via_spdy()) {
     switch (load_type) {
       case NavigationState::LINK_LOAD_NORMAL:
         PLT_HISTOGRAM(base::FieldTrial::MakeName(
@@ -880,7 +880,7 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
   }
 
   // Record page load time and abandonment rates for proxy cases.
-  if (navigation_state->was_fetched_via_proxy()) {
+  if (load_times->was_fetched_via_proxy()) {
     if (scheme_type == URLPattern::SCHEME_HTTPS) {
       PLT_HISTOGRAM("PLT.StartToFinish.Proxy.https", start_to_finish_all_loads);
       UMA_HISTOGRAM_ENUMERATION("PLT.Abandoned.Proxy.https",
@@ -913,7 +913,7 @@ void PageLoadHistograms::Dump(WebFrame* frame) {
                        same_origin_access_count_);
 
   // Log the PLT to the info log.
-  LogPageLoadTime(navigation_state, frame->dataSource());
+  LogPageLoadTime(load_times, frame->dataSource());
 
   // Record prerendering histograms.
   prerender::PrerenderHelper::RecordHistograms(render_view(),
@@ -949,18 +949,19 @@ void PageLoadHistograms::ClosePage() {
   ResetCrossFramePropertyAccess();
 }
 
-void PageLoadHistograms::LogPageLoadTime(const NavigationState* state,
-                                         const WebDataSource* ds) const {
+void PageLoadHistograms::LogPageLoadTime(
+    const NavigationState::LoadTimes* load_times,
+    const WebDataSource* ds) const {
   // Because this function gets called on every page load,
   // take extra care to optimize it away if logging is turned off.
   if (logging::LOG_INFO < logging::GetMinLogLevel())
     return;
 
-  DCHECK(state);
+  DCHECK(load_times);
   DCHECK(ds);
   GURL url(ds->request().url());
-  Time start = state->start_load_time();
-  Time finish = state->finish_load_time();
+  Time start = load_times->start_load_time();
+  Time finish = load_times->finish_load_time();
   // TODO(mbelshe): should we log more stats?
   VLOG(1) << "PLT: " << (finish - start).InMilliseconds() << "ms "
           << url.spec();
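
Reviewer note (not part of the patch): this change assumes a companion edit to the NavigationState header that introduces the nested LoadTimes type and the load_times() accessor; that header is not included in this excerpt. As a reading aid only, here is a minimal sketch of the LoadTimes interface implied by the call sites above. The names, include path, and exact shape are assumptions, not the actual declaration; note in particular that the patch also routes connection attributes such as was_fetched_via_spdy() through LoadTimes, so the sketch lists them alongside the timing accessors.

// Sketch only. Inferred from the call sites in page_load_histograms.cc; the
// real declaration would live in the NavigationState header and may differ.
#include "base/time.h"  // for base::Time (assumed include path)

class NavigationState {
 public:
  class LoadTimes {
   public:
    // One-shot guards so each group of histograms is recorded at most once.
    bool web_timing_histograms_recorded() const;
    void set_web_timing_histograms_recorded(bool recorded);
    bool load_histograms_recorded() const;
    void set_load_histograms_recorded(bool recorded);

    // Load timeline markers; a null Time means the event has not happened.
    base::Time start_load_time() const;
    base::Time commit_load_time() const;
    base::Time request_time() const;
    base::Time first_paint_time() const;
    base::Time first_paint_after_load_time() const;
    base::Time finish_document_load_time() const;
    void set_finish_document_load_time(const base::Time& time);
    base::Time finish_load_time() const;
    void set_finish_load_time(const base::Time& time);

    // Connection attributes that the SPDY, alternate-protocol, and proxy
    // histogram blocks above read through LoadTimes in this patch.
    bool was_fetched_via_spdy() const;
    bool was_npn_negotiated() const;
    bool was_alternate_protocol_available() const;
    bool was_fetched_via_proxy() const;
  };

  // Accessor used at the top of PageLoadHistograms::Dump().
  LoadTimes* load_times();
};

The const qualifiers follow the call sites: LogPageLoadTime() now takes a const LoadTimes* and only reads start_load_time() and finish_load_time(), while DumpWebTiming() and Dump() need the non-const pointer for the set_* calls.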