Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(1436)

Side by Side Diff: chrome/browser/extensions/api/tab_capture/tab_capture_performancetest.cc

Issue 2866943002: Give performance_browser_tests lots of love and attention. (Closed)
Patch Set: rebase Created 3 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2013 The Chromium Authors. All rights reserved. 1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include <cmath>
6
5 #include "base/command_line.h" 7 #include "base/command_line.h"
6 #include "base/strings/stringprintf.h" 8 #include "base/strings/stringprintf.h"
7 #include "base/test/trace_event_analyzer.h" 9 #include "base/test/trace_event_analyzer.h"
8 #include "chrome/browser/extensions/extension_apitest.h" 10 #include "chrome/browser/extensions/extension_apitest.h"
9 #include "chrome/browser/extensions/extension_service.h" 11 #include "chrome/browser/extensions/extension_service.h"
10 #include "chrome/browser/extensions/tab_helper.h" 12 #include "chrome/browser/extensions/tab_helper.h"
11 #include "chrome/browser/profiles/profile.h" 13 #include "chrome/browser/profiles/profile.h"
12 #include "chrome/browser/ui/exclusive_access/fullscreen_controller.h" 14 #include "chrome/browser/ui/exclusive_access/fullscreen_controller.h"
13 #include "chrome/common/chrome_switches.h" 15 #include "chrome/common/chrome_switches.h"
14 #include "chrome/test/base/test_launcher_utils.h" 16 #include "chrome/test/base/test_launcher_utils.h"
15 #include "chrome/test/base/test_switches.h" 17 #include "chrome/test/base/test_switches.h"
16 #include "chrome/test/base/tracing.h" 18 #include "chrome/test/base/tracing.h"
17 #include "content/public/common/content_switches.h" 19 #include "content/public/common/content_switches.h"
18 #include "extensions/common/switches.h" 20 #include "extensions/common/switches.h"
19 #include "extensions/test/extension_test_message_listener.h" 21 #include "extensions/test/extension_test_message_listener.h"
20 #include "testing/gtest/include/gtest/gtest.h" 22 #include "testing/gtest/include/gtest/gtest.h"
21 #include "testing/perf/perf_test.h" 23 #include "testing/perf/perf_test.h"
22 #include "ui/compositor/compositor_switches.h" 24 #include "ui/compositor/compositor_switches.h"
23 #include "ui/gl/gl_switches.h" 25 #include "ui/gl/gl_switches.h"
24 26
25 #if defined(OS_WIN) 27 #if defined(OS_WIN)
26 #include "base/win/windows_version.h" 28 #include "base/win/windows_version.h"
27 #endif 29 #endif
28 30
29 namespace { 31 namespace {
30 32
31 const char kExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf"; 33 constexpr char kExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf";
34
 35 // Number of events to trim from the beginning and end. These events don't
36 // contribute anything toward stable measurements: A brief moment of startup
37 // "jank" is acceptable, and shutdown may result in missing events (since
38 // render widget draws may stop before capture stops).
39 constexpr size_t kTrimEvents = 24; // 1 sec at 24fps, or 0.4 sec at 60 fps.
40
41 // Minimum number of events required for a reasonable analysis.
42 constexpr size_t kMinDataPoints = 100; // ~5 sec at 24fps.
32 43
33 enum TestFlags { 44 enum TestFlags {
34 kUseGpu = 1 << 0, // Only execute test if --enable-gpu was given 45 // TODO(miu): Remove kUseGpu (since the GPU is required), kForceGpuComposited
 46 // (because there's no longer such a thing as Chrome w/o a compositor), and
47 // maybe kDisableVsync. http://crbug.com/567848
48 kUseGpu = 1 << 0, // Only execute test if --enable-gpu was given
35 // on the command line. This is required for 49 // on the command line. This is required for
36 // tests that run on GPU. 50 // tests that run on GPU.
37 kForceGpuComposited = 1 << 1, // Force the test to use the compositor. 51 kForceGpuComposited = 1 << 1, // Force the test to use the compositor.
38 kDisableVsync = 1 << 2, // Do not limit framerate to vertical refresh. 52 kDisableVsync = 1 << 2, // Do not limit framerate to vertical refresh.
39 // when on GPU, nor to 60hz when not on GPU. 53 // when on GPU, nor to 60hz when not on GPU.
40 kTestThroughWebRTC = 1 << 3, // Send captured frames through webrtc 54 kTestThroughWebRTC = 1 << 3, // Send video through a webrtc loopback.
41 kSmallWindow = 1 << 4, // 1 = 800x600, 0 = 2000x1000 55 kSmallWindow = 1 << 4, // Window size: 1 = 800x600, 0 = 2000x1000
42 }; 56 };
43 57
44 class TabCapturePerformanceTest 58 class TabCapturePerformanceTest
45 : public ExtensionApiTest, 59 : public ExtensionApiTest,
46 public testing::WithParamInterface<int> { 60 public testing::WithParamInterface<int> {
47 public: 61 public:
48 TabCapturePerformanceTest() {} 62 TabCapturePerformanceTest() {}
49 63
50 bool HasFlag(TestFlags flag) const { 64 bool HasFlag(TestFlags flag) const {
51 return (GetParam() & flag) == flag; 65 return (GetParam() & flag) == flag;
(...skipping 41 matching lines...) Expand 10 before | Expand all | Expand 10 after
93 107
94 if (HasFlag(kDisableVsync)) 108 if (HasFlag(kDisableVsync))
95 command_line->AppendSwitch(switches::kDisableGpuVsync); 109 command_line->AppendSwitch(switches::kDisableGpuVsync);
96 110
97 command_line->AppendSwitchASCII( 111 command_line->AppendSwitchASCII(
98 extensions::switches::kWhitelistedExtensionID, 112 extensions::switches::kWhitelistedExtensionID,
99 kExtensionId); 113 kExtensionId);
100 ExtensionApiTest::SetUpCommandLine(command_line); 114 ExtensionApiTest::SetUpCommandLine(command_line);
101 } 115 }
102 116
103 bool PrintResults(trace_analyzer::TraceAnalyzer *analyzer, 117 void FindEvents(trace_analyzer::TraceAnalyzer* analyzer,
104 const std::string& test_name, 118 const std::string& event_name,
105 const std::string& event_name, 119 trace_analyzer::TraceEventVector* events) {
106 const std::string& unit) {
107 trace_analyzer::TraceEventVector events;
108 trace_analyzer::Query query = 120 trace_analyzer::Query query =
109 trace_analyzer::Query::EventNameIs(event_name) && 121 trace_analyzer::Query::EventNameIs(event_name) &&
110 (trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) || 122 (trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) ||
111 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE) || 123 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE) ||
112 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) || 124 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) ||
113 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) || 125 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) ||
114 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT)); 126 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT));
115 analyzer->FindEvents(query, &events); 127 analyzer->FindEvents(query, events);
116 if (events.size() < 20) { 128 }
129
130 // Analyze and print the mean and stddev of how often events having the name
131 // |event_name| occur.
132 bool PrintRateResults(trace_analyzer::TraceAnalyzer* analyzer,
133 const std::string& test_name,
134 const std::string& event_name) {
135 trace_analyzer::TraceEventVector events;
136 FindEvents(analyzer, event_name, &events);
137 if (events.size() < (2 * kTrimEvents + kMinDataPoints)) {
117 LOG(ERROR) << "Not enough events of type " << event_name << " found (" 138 LOG(ERROR) << "Not enough events of type " << event_name << " found ("
118 << events.size() << ")."; 139 << events.size() << ") for rate analysis.";
119 return false; 140 return false;
120 } 141 }
121 142
122 // Ignore some events for startup/setup/caching. 143 // Ignore some events for startup/setup/caching/teardown.
123 trace_analyzer::TraceEventVector rate_events(events.begin() + 3, 144 trace_analyzer::TraceEventVector rate_events(events.begin() + kTrimEvents,
124 events.end() - 3); 145 events.end() - kTrimEvents);
125 trace_analyzer::RateStats stats; 146 trace_analyzer::RateStats stats;
126 if (!GetRateStats(rate_events, &stats, NULL)) { 147 if (!GetRateStats(rate_events, &stats, NULL)) {
127 LOG(ERROR) << "GetRateStats failed"; 148 LOG(ERROR) << "GetRateStats failed";
128 return false; 149 return false;
129 } 150 }
130 double mean_ms = stats.mean_us / 1000.0; 151 double mean_ms = stats.mean_us / 1000.0;
131 double std_dev_ms = stats.standard_deviation_us / 1000.0; 152 double std_dev_ms = stats.standard_deviation_us / 1000.0;
132 std::string mean_and_error = base::StringPrintf("%f,%f", mean_ms, 153 std::string mean_and_error = base::StringPrintf("%f,%f", mean_ms,
133 std_dev_ms); 154 std_dev_ms);
134 perf_test::PrintResultMeanAndError(test_name, 155 perf_test::PrintResultMeanAndError(test_name, GetSuffixForTestFlags(),
135 GetSuffixForTestFlags(), 156 event_name, mean_and_error, "ms", true);
136 event_name,
137 mean_and_error,
138 unit,
139 true);
140 return true; 157 return true;
141 } 158 }
142 159
160 // Analyze and print the mean and stddev of the amount of time between the
161 // begin and end timestamps of each event having the name |event_name|.
162 bool PrintLatencyResults(trace_analyzer::TraceAnalyzer* analyzer,
163 const std::string& test_name,
164 const std::string& event_name) {
165 trace_analyzer::TraceEventVector events;
166 FindEvents(analyzer, event_name, &events);
167 if (events.size() < (2 * kTrimEvents + kMinDataPoints)) {
168 LOG(ERROR) << "Not enough events of type " << event_name << " found ("
169 << events.size() << ") for latency analysis.";
170 return false;
171 }
172
173 // Ignore some events for startup/setup/caching/teardown.
174 trace_analyzer::TraceEventVector events_to_analyze(
175 events.begin() + kTrimEvents, events.end() - kTrimEvents);
176
177 // Compute mean and standard deviation of all capture latencies.
178 double sum = 0.0;
179 double sqr_sum = 0.0;
180 int count = 0;
181 for (const auto* begin_event : events_to_analyze) {
182 const auto* end_event = begin_event->other_event;
183 if (!end_event)
184 continue;
185 const double latency = end_event->timestamp - begin_event->timestamp;
186 sum += latency;
187 sqr_sum += latency * latency;
188 ++count;
189 }
xjz 2017/05/23 20:51:01 nit: DCHECK_GE(count, kMinDataPoints)?
miu 2017/05/23 21:51:05 Done.
190 const double mean_us = sum / count;
191 const double std_dev_us =
192 sqrt(std::max(0.0, count * sqr_sum - sum * sum)) / count;
193 perf_test::PrintResultMeanAndError(
194 test_name, GetSuffixForTestFlags(), event_name + "Latency",
195 base::StringPrintf("%f,%f", mean_us / 1000.0, std_dev_us / 1000.0),
196 "ms", true);
197 return true;
198 }
199
200 // Analyze and print the mean and stddev of how often events having the name
201 // |event_name| are missing the success=true flag.
202 bool PrintFailRateResults(trace_analyzer::TraceAnalyzer* analyzer,
203 const std::string& test_name,
204 const std::string& event_name) {
205 trace_analyzer::TraceEventVector events;
206 FindEvents(analyzer, event_name, &events);
207 if (events.size() < (2 * kTrimEvents + kMinDataPoints)) {
208 LOG(ERROR) << "Not enough events of type " << event_name << " found ("
209 << events.size() << ") for fail rate analysis.";
210 return false;
211 }
212
213 // Ignore some events for startup/setup/caching/teardown.
214 trace_analyzer::TraceEventVector events_to_analyze(
215 events.begin() + kTrimEvents, events.end() - kTrimEvents);
216
 217 // Compute percentage of begin->end events missing a success=true flag.
218 double fail_percent = 100.0;
219 if (events_to_analyze.empty()) {
220 // If there are no events to analyze, then the failure rate is 100%.
221 } else {
222 int fail_count = 0;
223 for (const auto* begin_event : events_to_analyze) {
224 const auto* end_event = begin_event->other_event;
225 if (!end_event) {
226 // This indicates the operation never completed, and so is counted as
227 // a failure.
228 ++fail_count;
229 continue;
230 }
231 const auto it = end_event->arg_numbers.find("success");
232 if (it == end_event->arg_numbers.end()) {
233 LOG(ERROR) << "Missing 'success' value in Capture end event.";
234 return false;
235 }
236 if (it->second == 0.0) {
237 ++fail_count;
238 }
239 }
240 fail_percent *= fail_count / events_to_analyze.size();
241 }
242 perf_test::PrintResult(
243 test_name, GetSuffixForTestFlags(), event_name + "FailRate",
244 base::StringPrintf("%f", fail_percent), "percent", true);
245 return true;
246 }
247
143 void RunTest(const std::string& test_name) { 248 void RunTest(const std::string& test_name) {
144 if (HasFlag(kUseGpu) && !IsGpuAvailable()) { 249 if (HasFlag(kUseGpu) && !IsGpuAvailable()) {
145 LOG(WARNING) << 250 LOG(WARNING) <<
146 "Test skipped: requires gpu. Pass --enable-gpu on the command " 251 "Test skipped: requires gpu. Pass --enable-gpu on the command "
147 "line if use of GPU is desired."; 252 "line if use of GPU is desired.";
148 return; 253 return;
149 } 254 }
150 255
151 std::string json_events; 256 std::string json_events;
152 ASSERT_TRUE(tracing::BeginTracing("gpu,gpu.capture")); 257 ASSERT_TRUE(tracing::BeginTracing("gpu,gpu.capture"));
153 std::string page = "performance.html"; 258 std::string page = "performance.html";
154 page += HasFlag(kTestThroughWebRTC) ? "?WebRTC=1" : "?WebRTC=0"; 259 page += HasFlag(kTestThroughWebRTC) ? "?WebRTC=1" : "?WebRTC=0";
155 // Ideally we'd like to run a higher capture rate when vsync is disabled, 260 // Ideally we'd like to run a higher capture rate when vsync is disabled,
156 // but libjingle currently doesn't allow that. 261 // but libjingle currently doesn't allow that.
157 // page += HasFlag(kDisableVsync) ? "&fps=300" : "&fps=30"; 262 // page += HasFlag(kDisableVsync) ? "&fps=300" : "&fps=30";
158 page += "&fps=60"; 263 page += "&fps=60";
159 ASSERT_TRUE(RunExtensionSubtest("tab_capture", page)) << message_; 264 ASSERT_TRUE(RunExtensionSubtest("tab_capture", page)) << message_;
160 ASSERT_TRUE(tracing::EndTracing(&json_events)); 265 ASSERT_TRUE(tracing::EndTracing(&json_events));
161 std::unique_ptr<trace_analyzer::TraceAnalyzer> analyzer; 266 std::unique_ptr<trace_analyzer::TraceAnalyzer> analyzer;
162 analyzer.reset(trace_analyzer::TraceAnalyzer::Create(json_events)); 267 analyzer.reset(trace_analyzer::TraceAnalyzer::Create(json_events));
268 analyzer->AssociateAsyncBeginEndEvents();
163 269
164 // The printed result will be the average time between frames in the 270 // The printed result will be the average time between composites in the
165 // browser window. 271 // renderer of the page being captured. This may not reach the full frame
166 bool gpu_frames = PrintResults( 272 // rate if the renderer cannot draw as fast as is desired.
167 analyzer.get(), 273 //
168 test_name, 274 // Note that any changes to drawing or compositing in the renderer,
169 "RenderWidget::DidCommitAndDrawCompositorFrame", 275 // including changes to Blink (e.g., Canvas drawing), layout, etc.; will
170 "ms"); 276 // have an impact on this result.
171 EXPECT_TRUE(gpu_frames); 277 EXPECT_TRUE(
278 PrintRateResults(analyzer.get(), test_name,
279 "RenderWidget::DidCommitAndDrawCompositorFrame"));
172 280
173 // This prints out the average time between capture events. 281 // This prints out the average time between capture events in the browser
174 // As the capture frame rate is capped at 30fps, this score 282 // process. This should roughly match the renderer's draw+composite rate.
175 // cannot get any better than (lower) 33.33 ms. 283 EXPECT_TRUE(PrintRateResults(analyzer.get(), test_name, "Capture"));
176 // TODO(ericrk): Remove the "Capture" result once we are confident that
177 // "CaptureSucceeded" is giving the coverage we want. crbug.com/489817
178 EXPECT_TRUE(PrintResults(analyzer.get(),
179 test_name,
180 "Capture",
181 "ms"));
182 284
183 // Also track the CaptureSucceeded event. Capture only indicates that a 285 // Analyze mean/stddev of the capture latency. This is a measure of how long
184 // capture was requested, but this capture may later be aborted without 286 // each capture took, from initiation until read-back from the GPU into a
185 // running. CaptureSucceeded tracks successful frame captures. 287 // media::VideoFrame was complete. Lower is better.
186 EXPECT_TRUE( 288 EXPECT_TRUE(PrintLatencyResults(analyzer.get(), test_name, "Capture"));
187 PrintResults(analyzer.get(), test_name, "CaptureSucceeded", "ms")); 289
290 // Analyze percentage of failed captures. This measures how often captures
291 // were initiated, but not completed successfully. Lower is better, and zero
292 // is ideal.
293 EXPECT_TRUE(PrintFailRateResults(analyzer.get(), test_name, "Capture"));
188 } 294 }
189 }; 295 };
190 296
191 } // namespace 297 } // namespace
192 298
193 IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) { 299 IN_PROC_BROWSER_TEST_P(TabCapturePerformanceTest, Performance) {
194 RunTest("TabCapturePerformance"); 300 RunTest("TabCapturePerformance");
195 } 301 }
196 302
197 // Note: First argument is optional and intentionally left blank. 303 // Note: First argument is optional and intentionally left blank.
198 // (it's a prefix for the generated test cases) 304 // (it's a prefix for the generated test cases)
199 INSTANTIATE_TEST_CASE_P( 305 INSTANTIATE_TEST_CASE_P(
200 , 306 ,
201 TabCapturePerformanceTest, 307 TabCapturePerformanceTest,
202 testing::Values( 308 testing::Values(
203 0, 309 0,
204 kUseGpu | kForceGpuComposited, 310 kUseGpu | kForceGpuComposited,
205 kDisableVsync, 311 kDisableVsync,
206 kDisableVsync | kUseGpu | kForceGpuComposited, 312 kDisableVsync | kUseGpu | kForceGpuComposited,
207 kTestThroughWebRTC, 313 kTestThroughWebRTC,
208 kTestThroughWebRTC | kUseGpu | kForceGpuComposited, 314 kTestThroughWebRTC | kUseGpu | kForceGpuComposited,
209 kTestThroughWebRTC | kDisableVsync, 315 kTestThroughWebRTC | kDisableVsync,
210 kTestThroughWebRTC | kDisableVsync | kUseGpu | kForceGpuComposited)); 316 kTestThroughWebRTC | kDisableVsync | kUseGpu | kForceGpuComposited));
211
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698