Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include <stddef.h> | 5 #include <stddef.h> |
| 6 #include <stdint.h> | 6 #include <stdint.h> |
| 7 | 7 |
| 8 #include <algorithm> | |
| 9 #include <cmath> | |
| 10 #include <iterator> | |
| 8 #include <map> | 11 #include <map> |
| 9 #include <vector> | 12 #include <vector> |
| 10 | 13 |
| 11 #include "base/command_line.h" | 14 #include "base/command_line.h" |
| 12 #include "base/macros.h" | 15 #include "base/macros.h" |
| 16 #include "base/memory/ptr_util.h" | |
| 17 #include "base/strings/string_number_conversions.h" | |
| 13 #include "base/strings/stringprintf.h" | 18 #include "base/strings/stringprintf.h" |
| 14 #include "base/test/trace_event_analyzer.h" | 19 #include "base/test/trace_event_analyzer.h" |
| 15 #include "base/time/default_tick_clock.h" | 20 #include "base/time/default_tick_clock.h" |
| 16 #include "chrome/browser/extensions/extension_apitest.h" | 21 #include "chrome/browser/extensions/extension_apitest.h" |
| 17 #include "chrome/browser/extensions/extension_service.h" | 22 #include "chrome/browser/extensions/extension_service.h" |
| 18 #include "chrome/browser/extensions/tab_helper.h" | 23 #include "chrome/browser/extensions/tab_helper.h" |
| 19 #include "chrome/browser/profiles/profile.h" | |
| 20 #include "chrome/browser/ui/exclusive_access/fullscreen_controller.h" | |
| 21 #include "chrome/common/chrome_switches.h" | 24 #include "chrome/common/chrome_switches.h" |
| 22 #include "chrome/test/base/test_launcher_utils.h" | 25 #include "chrome/test/base/test_launcher_utils.h" |
| 23 #include "chrome/test/base/test_switches.h" | 26 #include "chrome/test/base/test_switches.h" |
| 24 #include "chrome/test/base/tracing.h" | 27 #include "chrome/test/base/tracing.h" |
| 25 #include "content/public/browser/render_process_host.h" | |
| 26 #include "content/public/browser/render_view_host.h" | |
| 27 #include "content/public/common/content_switches.h" | 28 #include "content/public/common/content_switches.h" |
| 28 #include "extensions/common/switches.h" | 29 #include "extensions/common/switches.h" |
| 29 #include "extensions/test/extension_test_message_listener.h" | 30 #include "extensions/test/extension_test_message_listener.h" |
| 30 #include "media/base/audio_bus.h" | 31 #include "media/base/audio_bus.h" |
| 31 #include "media/base/video_frame.h" | 32 #include "media/base/video_frame.h" |
| 32 #include "media/cast/cast_config.h" | 33 #include "media/cast/test/skewed_tick_clock.h" |
| 33 #include "media/cast/cast_environment.h" | |
| 34 #include "media/cast/test/utility/audio_utility.h" | 34 #include "media/cast/test/utility/audio_utility.h" |
| 35 #include "media/cast/test/utility/barcode.h" | 35 #include "media/cast/test/utility/barcode.h" |
| 36 #include "media/cast/test/utility/default_config.h" | 36 #include "media/cast/test/utility/default_config.h" |
| 37 #include "media/cast/test/utility/in_process_receiver.h" | 37 #include "media/cast/test/utility/in_process_receiver.h" |
| 38 #include "media/cast/test/utility/net_utility.h" | |
| 38 #include "media/cast/test/utility/standalone_cast_environment.h" | 39 #include "media/cast/test/utility/standalone_cast_environment.h" |
| 39 #include "media/cast/test/utility/udp_proxy.h" | 40 #include "media/cast/test/utility/udp_proxy.h" |
| 40 #include "net/base/ip_address.h" | 41 #include "net/base/ip_address.h" |
| 41 #include "net/base/ip_endpoint.h" | 42 #include "net/base/ip_endpoint.h" |
| 42 #include "net/base/net_errors.h" | 43 #include "net/base/net_errors.h" |
| 43 #include "net/base/rand_callback.h" | 44 #include "net/base/rand_callback.h" |
| 44 #include "net/log/net_log_source.h" | 45 #include "net/log/net_log_source.h" |
| 45 #include "net/socket/udp_server_socket.h" | 46 #include "net/socket/udp_server_socket.h" |
| 46 #include "testing/gtest/include/gtest/gtest.h" | 47 #include "testing/gtest/include/gtest/gtest.h" |
| 47 #include "testing/perf/perf_test.h" | 48 #include "testing/perf/perf_test.h" |
| 48 #include "ui/compositor/compositor_switches.h" | 49 #include "ui/compositor/compositor_switches.h" |
| 49 #include "ui/gl/gl_switches.h" | 50 #include "ui/gl/gl_switches.h" |
| 50 | 51 |
| 51 #if defined(OS_WIN) | 52 #if defined(OS_WIN) |
| 52 #include "base/win/windows_version.h" | 53 #include "base/win/windows_version.h" |
| 53 #endif | 54 #endif |
| 54 | 55 |
| 55 namespace { | 56 namespace { |
| 56 | 57 |
| 57 const char kExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf"; | 58 constexpr char kExtensionId[] = "ddchlicdkolnonkihahngkmmmjnjlkkf"; |
| 58 | 59 |
| 59 // Skip a few events from the beginning. | 60 // Number of events to trim from the beginning. These events don't |
| 60 static const size_t kSkipEvents = 3; | 61 // contribute anything toward stable measurements: A brief moment of startup |
| 62 // "jank" is acceptable, and shutdown may result in missing events (e.g., if | |
| 63 // streaming stops a few frames before capture stops). | |
| 64 constexpr size_t kTrimEvents = 24; // 1 sec at 24fps, or 0.4 sec at 60 fps. | |
| 65 | |
| 66 // Minimum number of events required for a reasonable analysis. | |
| 67 constexpr size_t kMinDataPoints = 100; // 1 sec of audio, or ~5 sec at 24fps. | |
| 61 | 68 |
| 62 enum TestFlags { | 69 enum TestFlags { |
| 63 kUseGpu = 1 << 0, // Only execute test if --enable-gpu was given | 70 // TODO(miu): Remove kUseGpu (since the GPU is required), and maybe |
| 64 // on the command line. This is required for | 71 // kDisableVsync. http://crbug.com/567848 |
| 65 // tests that run on GPU. | 72 kUseGpu = 1 << 0, // Only execute test if --enable-gpu was given |
| 66 kDisableVsync = 1 << 1, // Do not limit framerate to vertical refresh. | 73 // on the command line. This is required for |
| 67 // when on GPU, nor to 60hz when not on GPU. | 74 // tests that run on GPU. |
| 68 kSmallWindow = 1 << 2, // 1 = 800x600, 0 = 2000x1000 | 75 kDisableVsync = 1 << 1, // Do not limit framerate to vertical refresh. |
| 69 k24fps = 1 << 3, // use 24 fps video | 76 // when on GPU, nor to 60hz when not on GPU. |
| 70 k30fps = 1 << 4, // use 30 fps video | 77 kSmallWindow = 1 << 2, // Window size: 1 = 800x600, 0 = 2000x1000 |
| 71 k60fps = 1 << 5, // use 60 fps video | 78 k24fps = 1 << 3, // Use 24 fps video. |
| 72 kProxyWifi = 1 << 6, // Run UDP through UDPProxy wifi profile | 79 k30fps = 1 << 4, // Use 30 fps video. |
| 73 kProxyBad = 1 << 7, // Run UDP through UDPProxy bad profile | 80 k60fps = 1 << 5, // Use 60 fps video (captured at 30 fps). |
| 74 kSlowClock = 1 << 8, // Receiver clock is 10 seconds slow | 81 kProxyWifi = 1 << 6, // Run UDP through UDPProxy wifi profile. |
| 75 kFastClock = 1 << 9, // Receiver clock is 10 seconds fast | 82 kProxySlow = 1 << 7, // Run UDP through UDPProxy slow profile. |
| 83 kProxyBad = 1 << 8, // Run UDP through UDPProxy bad profile. | |
| 84 kSlowClock = 1 << 9, // Receiver clock is 10 seconds slow. | |
| 85 kFastClock = 1 << 10, // Receiver clock is 10 seconds fast. | |
| 86 kAutoThrottling = 1 << 11, // Use auto-resolution/framerate throttling. | |
| 76 }; | 87 }; |
| 77 | 88 |
| 78 class SkewedTickClock : public base::DefaultTickClock { | 89 // These are just for testing! Use cryptographically-secure random keys in |
| 79 public: | 90 // production code! |
| 80 explicit SkewedTickClock(const base::TimeDelta& delta) : delta_(delta) { | 91 static constexpr char kAesKey[16] = {0, 1, 2, 3, 4, 5, 6, 7, |
| 81 } | 92 8, 9, 10, 11, 12, 13, 14, 15}; |
| 82 base::TimeTicks NowTicks() override { | 93 static constexpr char kAesIvMask[16] = {15, 14, 13, 12, 11, 10, 9, 8, |
| 83 return DefaultTickClock::NowTicks() + delta_; | 94 7, 6, 5, 4, 3, 2, 1, 0}; |
| 84 } | 95 |
| 85 private: | 96 media::cast::FrameReceiverConfig WithAesKeyAndIvSet( |
| 86 base::TimeDelta delta_; | 97 const media::cast::FrameReceiverConfig& config) { |
| 87 }; | 98 media::cast::FrameReceiverConfig result = config; |
| 99 result.aes_key = std::string(kAesKey, kAesKey + sizeof(kAesKey)); | |
| 100 result.aes_iv_mask = std::string(kAesIvMask, kAesIvMask + sizeof(kAesIvMask)); | |
| 101 return result; | |
| 102 } | |
| 88 | 103 |
| 89 class SkewedCastEnvironment : public media::cast::StandaloneCastEnvironment { | 104 class SkewedCastEnvironment : public media::cast::StandaloneCastEnvironment { |
| 90 public: | 105 public: |
| 91 explicit SkewedCastEnvironment(const base::TimeDelta& delta) : | 106 explicit SkewedCastEnvironment(const base::TimeDelta& delta) : |
| 92 StandaloneCastEnvironment() { | 107 StandaloneCastEnvironment() { |
| 93 clock_.reset(new SkewedTickClock(delta)); | 108 auto skewed_clock = |
| 109 base::MakeUnique<media::cast::test::SkewedTickClock>(&default_clock_); | |
| 110 // If testing with a receiver clock that is ahead or behind the sender | |
|
hubbe
2017/05/23 21:26:28
I don't think this really works, since PostDelayTa
miu
2017/05/23 22:09:22
It sort of does: Code will use Now() to determine
| |
| 111 // clock, fake a clock that is offset and also ticks at a rate of 50 parts | |
| 112 // per million faster or slower than the local sender's clock. This is the | |
|
hubbe
2017/05/23 21:26:28
50 ppm might be the worst we encounter in RL, but
miu
2017/05/23 22:09:22
Guidance from the perf folks is not to test things
hubbe
2017/05/23 22:47:08
Probably better to drop it.
I think we have some o
| |
| 113 // worst-case scenario for skew in-the-wild. | |
| 114 if (!delta.is_zero()) { | |
| 115 const double skew = delta < base::TimeDelta() ? 0.999950 : 1.000050; | |
| 116 skewed_clock->SetSkew(skew, delta); | |
| 117 } | |
| 118 clock_ = std::move(skewed_clock); | |
| 94 } | 119 } |
| 95 | 120 |
| 96 protected: | 121 protected: |
| 97 ~SkewedCastEnvironment() override {} | 122 ~SkewedCastEnvironment() override {} |
| 123 | |
| 124 private: | |
| 125 base::DefaultTickClock default_clock_; | |
| 98 }; | 126 }; |
| 99 | 127 |
| 100 // We log one of these for each call to OnAudioFrame/OnVideoFrame. | 128 // We log one of these for each call to OnAudioFrame/OnVideoFrame. |
| 101 struct TimeData { | 129 struct TimeData { |
| 102 TimeData(uint16_t frame_no_, base::TimeTicks render_time_) | 130 TimeData(uint16_t frame_no_, base::TimeTicks playout_time_) |
| 103 : frame_no(frame_no_), render_time(render_time_) {} | 131 : frame_no(frame_no_), playout_time(playout_time_) {} |
| 104 // The unit here is video frames, for audio data there can be duplicates. | 132 // The unit here is video frames, for audio data there can be duplicates. |
| 105 // This was decoded from the actual audio/video data. | 133 // This was decoded from the actual audio/video data. |
| 106 uint16_t frame_no; | 134 uint16_t frame_no; |
| 107 // This is when we should play this data, according to the sender. | 135 // This is when we should play this data, according to the sender. |
| 108 base::TimeTicks render_time; | 136 base::TimeTicks playout_time; |
| 109 }; | 137 }; |
| 110 | 138 |
| 111 // TODO(hubbe): Move to media/cast to use for offline log analysis. | 139 // TODO(hubbe): Move to media/cast to use for offline log analysis. |
| 112 class MeanAndError { | 140 class MeanAndError { |
| 113 public: | 141 public: |
| 114 MeanAndError() {} | |
| 115 explicit MeanAndError(const std::vector<double>& values) { | 142 explicit MeanAndError(const std::vector<double>& values) { |
| 116 double sum = 0.0; | 143 double sum = 0.0; |
| 117 double sqr_sum = 0.0; | 144 double sqr_sum = 0.0; |
| 118 num_values = values.size(); | 145 num_values_ = values.size(); |
| 119 if (num_values) { | 146 if (num_values_ > 0) { |
| 120 for (size_t i = 0; i < num_values; i++) { | 147 for (size_t i = 0; i < num_values_; i++) { |
| 121 sum += values[i]; | 148 sum += values[i]; |
| 122 sqr_sum += values[i] * values[i]; | 149 sqr_sum += values[i] * values[i]; |
| 123 } | 150 } |
| 124 mean = sum / num_values; | 151 mean_ = sum / num_values_; |
| 125 std_dev = sqrt(std::max(0.0, num_values * sqr_sum - sum * sum)) / | 152 std_dev_ = |
| 126 num_values; | 153 sqrt(std::max(0.0, num_values_ * sqr_sum - sum * sum)) / num_values_; |
| 154 } else { | |
| 155 mean_ = NAN; | |
| 156 std_dev_ = NAN; | |
| 127 } | 157 } |
| 128 } | 158 } |
| 159 | |
| 129 std::string AsString() const { | 160 std::string AsString() const { |
| 130 return base::StringPrintf("%f,%f", mean, std_dev); | 161 return base::StringPrintf("%f,%f", mean_, std_dev_); |
| 131 } | 162 } |
| 132 | 163 |
| 133 void Print(const std::string& measurement, | 164 void Print(const std::string& measurement, |
| 134 const std::string& modifier, | 165 const std::string& modifier, |
| 135 const std::string& trace, | 166 const std::string& trace, |
| 136 const std::string& unit) { | 167 const std::string& unit) { |
| 137 if (num_values >= 20) { | 168 if (num_values_ >= 20) { |
| 138 perf_test::PrintResultMeanAndError(measurement, | 169 perf_test::PrintResultMeanAndError(measurement, |
| 139 modifier, | 170 modifier, |
| 140 trace, | 171 trace, |
| 141 AsString(), | 172 AsString(), |
| 142 unit, | 173 unit, |
| 143 true); | 174 true); |
| 144 } else { | 175 } else { |
| 145 LOG(ERROR) << "Not enough events for " | 176 LOG(ERROR) << "Not enough events (" << num_values_ << ") for " |
| 146 << measurement << modifier << " " << trace; | 177 << measurement << modifier << " " << trace; |
| 147 } | 178 } |
| 148 } | 179 } |
| 149 | 180 |
| 150 size_t num_values; | 181 private: |
| 151 double mean; | 182 size_t num_values_; |
| 152 double std_dev; | 183 double mean_; |
| 184 double std_dev_; | |
| 153 }; | 185 }; |
| 154 | 186 |
| 155 // This function checks how smooth the data in |data| is. | 187 // This function checks how smooth the data in |data| is. |
| 156 // It computes the average error of deltas and the average delta. | 188 // It computes the average error of deltas and the average delta. |
| 157 // If data[x] == x * A + B, then this function returns zero. | 189 // If data[x] == x * A + B, then this function returns zero. |
| 158 // The unit is milliseconds. | 190 // The unit is milliseconds. |
| 159 static MeanAndError AnalyzeJitter(const std::vector<TimeData>& data) { | 191 static MeanAndError AnalyzeJitter(const std::vector<TimeData>& data) { |
| 160 CHECK_GT(data.size(), 1UL); | 192 CHECK_GT(data.size(), 1UL); |
| 161 VLOG(0) << "Jitter analyzis on " << data.size() << " values."; | 193 VLOG(0) << "Jitter analysis on " << data.size() << " values."; |
| 162 std::vector<double> deltas; | 194 std::vector<double> deltas; |
| 163 double sum = 0.0; | 195 double sum = 0.0; |
| 164 for (size_t i = 1; i < data.size(); i++) { | 196 for (size_t i = 1; i < data.size(); i++) { |
| 165 double delta = (data[i].render_time - | 197 double delta = |
| 166 data[i - 1].render_time).InMillisecondsF(); | 198 (data[i].playout_time - data[i - 1].playout_time).InMillisecondsF(); |
| 167 deltas.push_back(delta); | 199 deltas.push_back(delta); |
| 168 sum += delta; | 200 sum += delta; |
| 169 } | 201 } |
| 170 double mean = sum / deltas.size(); | 202 double mean = sum / deltas.size(); |
| 171 for (size_t i = 0; i < deltas.size(); i++) { | 203 for (size_t i = 0; i < deltas.size(); i++) { |
| 172 deltas[i] = fabs(mean - deltas[i]); | 204 deltas[i] = fabs(mean - deltas[i]); |
| 173 } | 205 } |
| 174 | 206 |
| 175 return MeanAndError(deltas); | 207 return MeanAndError(deltas); |
| 176 } | 208 } |
| 177 | 209 |
| 178 // An in-process Cast receiver that examines the audio/video frames being | 210 // An in-process Cast receiver that examines the audio/video frames being |
| 179 // received and logs some data about each received audio/video frame. | 211 // received and logs some data about each received audio/video frame. |
| 180 class TestPatternReceiver : public media::cast::InProcessReceiver { | 212 class TestPatternReceiver : public media::cast::InProcessReceiver { |
| 181 public: | 213 public: |
| 182 explicit TestPatternReceiver( | 214 explicit TestPatternReceiver( |
| 183 const scoped_refptr<media::cast::CastEnvironment>& cast_environment, | 215 const scoped_refptr<media::cast::CastEnvironment>& cast_environment, |
| 184 const net::IPEndPoint& local_end_point) | 216 const net::IPEndPoint& local_end_point) |
| 185 : InProcessReceiver(cast_environment, | 217 : InProcessReceiver( |
| 186 local_end_point, | 218 cast_environment, |
| 187 net::IPEndPoint(), | 219 local_end_point, |
| 188 media::cast::GetDefaultAudioReceiverConfig(), | 220 net::IPEndPoint(), |
| 189 media::cast::GetDefaultVideoReceiverConfig()) { | 221 WithAesKeyAndIvSet(media::cast::GetDefaultAudioReceiverConfig()), |
| 190 } | 222 WithAesKeyAndIvSet(media::cast::GetDefaultVideoReceiverConfig())) {} |
| 191 | 223 |
| 192 typedef std::map<uint16_t, base::TimeTicks> TimeMap; | 224 typedef std::map<uint16_t, base::TimeTicks> TimeMap; |
| 193 | 225 |
| 194 // Build a map from frame ID (as encoded in the audio and video data) | 226 // Build a map from frame ID (as encoded in the audio and video data) |
| 195 // to the rtp timestamp for that frame. Note that there will be multiple | 227 // to the rtp timestamp for that frame. Note that there will be multiple |
| 196 // audio frames which all have the same frame ID. When that happens we | 228 // audio frames which all have the same frame ID. When that happens we |
| 197 // want the minimum rtp timestamp, because that audio frame is supposed | 229 // want the minimum rtp timestamp, because that audio frame is supposed |
| 198 // to play at the same time that the corresponding image is presented. | 230 // to play at the same time that the corresponding image is presented. |
| 199 void MapFrameTimes(const std::vector<TimeData>& events, TimeMap* map) { | 231 void MapFrameTimes(const std::vector<TimeData>& events, TimeMap* map) { |
| 200 for (size_t i = kSkipEvents; i < events.size(); i++) { | 232 for (size_t i = kTrimEvents; i < events.size(); i++) { |
|
xjz
2017/05/23 20:51:01
nit: Do we want to trim the events from the end to
miu
2017/05/23 21:51:05
Done.
| |
| 201 base::TimeTicks& frame_tick = (*map)[events[i].frame_no]; | 233 base::TimeTicks& frame_tick = (*map)[events[i].frame_no]; |
| 202 if (frame_tick.is_null()) { | 234 if (frame_tick.is_null()) { |
| 203 frame_tick = events[i].render_time; | 235 frame_tick = events[i].playout_time; |
| 204 } else { | 236 } else { |
| 205 frame_tick = std::min(events[i].render_time, frame_tick); | 237 frame_tick = std::min(events[i].playout_time, frame_tick); |
| 206 } | 238 } |
| 207 } | 239 } |
| 208 } | 240 } |
| 209 | 241 |
| 210 void Analyze(const std::string& name, const std::string& modifier) { | 242 void Analyze(const std::string& name, const std::string& modifier) { |
| 211 // First, find the minimum rtp timestamp for each audio and video frame. | 243 // First, find the minimum rtp timestamp for each audio and video frame. |
| 212 // Note that the data encoded in the audio stream contains video frame | 244 // Note that the data encoded in the audio stream contains video frame |
| 213 // numbers. So in a 30-fps video stream, there will be 1/30s of "1", then | 245 // numbers. So in a 30-fps video stream, there will be 1/30s of "1", then |
| 214 // 1/30s of "2", etc. | 246 // 1/30s of "2", etc. |
| 215 TimeMap audio_frame_times, video_frame_times; | 247 TimeMap audio_frame_times, video_frame_times; |
| 216 MapFrameTimes(audio_events_, &audio_frame_times); | 248 MapFrameTimes(audio_events_, &audio_frame_times); |
| 249 EXPECT_GE(audio_frame_times.size(), kMinDataPoints); | |
| 217 MapFrameTimes(video_events_, &video_frame_times); | 250 MapFrameTimes(video_events_, &video_frame_times); |
| 251 EXPECT_GE(video_frame_times.size(), kMinDataPoints); | |
| 218 std::vector<double> deltas; | 252 std::vector<double> deltas; |
| 219 for (TimeMap::const_iterator i = audio_frame_times.begin(); | 253 for (TimeMap::const_iterator i = audio_frame_times.begin(); |
| 220 i != audio_frame_times.end(); | 254 i != audio_frame_times.end(); |
| 221 ++i) { | 255 ++i) { |
| 222 TimeMap::const_iterator j = video_frame_times.find(i->first); | 256 TimeMap::const_iterator j = video_frame_times.find(i->first); |
| 223 if (j != video_frame_times.end()) { | 257 if (j != video_frame_times.end()) { |
| 224 deltas.push_back((i->second - j->second).InMillisecondsF()); | 258 deltas.push_back((i->second - j->second).InMillisecondsF()); |
| 225 } | 259 } |
| 226 } | 260 } |
| 261 EXPECT_GE(deltas.size(), kMinDataPoints); | |
| 227 | 262 |
| 228 // Close to zero is better. (can be negative) | 263 // Close to zero is better. (can be negative) |
| 229 MeanAndError(deltas).Print(name, modifier, "av_sync", "ms"); | 264 MeanAndError(deltas).Print(name, modifier, "av_sync", "ms"); |
| 230 // lower is better. | 265 // lower is better. |
| 231 AnalyzeJitter(audio_events_).Print(name, modifier, "audio_jitter", "ms"); | 266 AnalyzeJitter(audio_events_).Print(name, modifier, "audio_jitter", "ms"); |
| 232 // lower is better. | 267 // lower is better. |
| 233 AnalyzeJitter(video_events_).Print(name, modifier, "video_jitter", "ms"); | 268 AnalyzeJitter(video_events_).Print(name, modifier, "video_jitter", "ms"); |
| 269 | |
| 270 // Mean resolution of video at receiver. Lower stddev is better, while the | |
| 271 // mean should be something reasonable given the network constraints | |
| 272 // (usually 480 lines or more). Note that this is the video resolution at | |
| 273 // the receiver, but changes originate on the sender side. | |
| 274 std::vector<double> slice_for_analysis; | |
| 275 if (video_frame_lines_.size() > kTrimEvents * 2) { | |
| 276 slice_for_analysis.reserve(video_frame_lines_.size() - kTrimEvents * 2); | |
| 277 EXPECT_GE(slice_for_analysis.capacity(), kMinDataPoints); | |
| 278 std::transform(video_frame_lines_.begin() + kTrimEvents, | |
| 279 video_frame_lines_.end() - kTrimEvents, | |
| 280 std::back_inserter(slice_for_analysis), | |
| 281 [](int lines) { return static_cast<double>(lines); }); | |
| 282 } | |
| 283 MeanAndError(slice_for_analysis) | |
| 284 .Print(name, modifier, "playout_resolution", "lines"); | |
| 285 | |
| 286 // Number of resolution changes. Lower is better (and 1 is ideal). Zero | |
|
hubbe
2017/05/23 21:26:28
It's probably better to print out nothing if there
miu
2017/05/23 22:09:22
The EXPECT_GT() a few lines down should fail in th
hubbe
2017/05/23 22:47:08
EXPECT_GT should be fine.
| |
| 287 // indicates a lack of data. | |
| 288 int last_lines = -1; | |
| 289 int change_count = 0; | |
| 290 for (size_t i = kTrimEvents; i < video_frame_lines_.size() - kTrimEvents; | |
| 291 ++i) { | |
| 292 if (video_frame_lines_[i] != last_lines) { | |
| 293 ++change_count; | |
| 294 last_lines = video_frame_lines_[i]; | |
| 295 } | |
| 296 } | |
| 297 EXPECT_GT(change_count, 0); | |
| 298 perf_test::PrintResult(name, modifier, "resolution_changes", | |
| 299 base::IntToString(change_count), "count", true); | |
| 234 } | 300 } |
| 235 | 301 |
| 236 private: | 302 private: |
| 237 // Invoked by InProcessReceiver for each received audio frame. | 303 // Invoked by InProcessReceiver for each received audio frame. |
| 238 void OnAudioFrame(std::unique_ptr<media::AudioBus> audio_frame, | 304 void OnAudioFrame(std::unique_ptr<media::AudioBus> audio_frame, |
| 239 const base::TimeTicks& playout_time, | 305 const base::TimeTicks& playout_time, |
| 240 bool is_continuous) override { | 306 bool is_continuous) override { |
| 241 CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN)); | 307 CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN)); |
| 242 | 308 |
| 243 if (audio_frame->frames() <= 0) { | 309 if (audio_frame->frames() <= 0) { |
| 244 NOTREACHED() << "OnAudioFrame called with no samples?!?"; | 310 NOTREACHED() << "OnAudioFrame called with no samples?!?"; |
| 245 return; | 311 return; |
| 246 } | 312 } |
| 247 | 313 |
| 248 // Note: This is the number of the video frame that this audio belongs to. | 314 // Note: This is the number of the video frame that this audio belongs to. |
| 249 uint16_t frame_no; | 315 uint16_t frame_no; |
| 250 if (media::cast::DecodeTimestamp(audio_frame->channel(0), | 316 if (media::cast::DecodeTimestamp(audio_frame->channel(0), |
| 251 audio_frame->frames(), | 317 audio_frame->frames(), |
| 252 &frame_no)) { | 318 &frame_no)) { |
| 253 audio_events_.push_back(TimeData(frame_no, playout_time)); | 319 audio_events_.push_back(TimeData(frame_no, playout_time)); |
| 254 } else { | 320 } else { |
| 255 VLOG(0) << "Failed to decode audio timestamp!"; | 321 DVLOG(2) << "Failed to decode audio timestamp!"; |
| 256 } | 322 } |
| 257 } | 323 } |
| 258 | 324 |
| 259 void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame, | 325 void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame, |
| 260 const base::TimeTicks& render_time, | 326 const base::TimeTicks& playout_time, |
| 261 bool is_continuous) override { | 327 bool is_continuous) override { |
| 262 CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN)); | 328 CHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN)); |
| 263 | 329 |
| 264 TRACE_EVENT_INSTANT1( | 330 TRACE_EVENT_INSTANT1("cast_perf_test", "VideoFramePlayout", |
| 265 "mirroring", "TestPatternReceiver::OnVideoFrame", | 331 TRACE_EVENT_SCOPE_THREAD, "playout_time", |
| 266 TRACE_EVENT_SCOPE_THREAD, | 332 (playout_time - base::TimeTicks()).InMicroseconds()); |
| 267 "render_time", render_time.ToInternalValue()); | |
| 268 | 333 |
| 269 uint16_t frame_no; | 334 uint16_t frame_no; |
| 270 if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) { | 335 if (media::cast::test::DecodeBarcode(video_frame, &frame_no)) { |
| 271 video_events_.push_back(TimeData(frame_no, render_time)); | 336 video_events_.push_back(TimeData(frame_no, playout_time)); |
| 272 } else { | 337 } else { |
| 273 VLOG(0) << "Failed to decode barcode!"; | 338 DVLOG(2) << "Failed to decode barcode!"; |
| 274 } | 339 } |
| 340 | |
| 341 video_frame_lines_.push_back(video_frame->visible_rect().height()); | |
| 275 } | 342 } |
| 276 | 343 |
| 277 std::vector<TimeData> audio_events_; | 344 std::vector<TimeData> audio_events_; |
| 278 std::vector<TimeData> video_events_; | 345 std::vector<TimeData> video_events_; |
| 279 | 346 |
| 347 // The height (number of lines) of each video frame received. | |
| 348 std::vector<int> video_frame_lines_; | |
| 349 | |
| 280 DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver); | 350 DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver); |
| 281 }; | 351 }; |
| 282 | 352 |
| 283 class CastV2PerformanceTest | 353 class CastV2PerformanceTest |
| 284 : public ExtensionApiTest, | 354 : public ExtensionApiTest, |
| 285 public testing::WithParamInterface<int> { | 355 public testing::WithParamInterface<int> { |
| 286 public: | 356 public: |
| 287 CastV2PerformanceTest() {} | 357 CastV2PerformanceTest() {} |
| 288 | 358 |
| 289 bool HasFlag(TestFlags flag) const { | 359 bool HasFlag(TestFlags flag) const { |
| (...skipping 13 matching lines...) Expand all Loading... | |
| 303 if (HasFlag(kSmallWindow)) | 373 if (HasFlag(kSmallWindow)) |
| 304 suffix += "_small"; | 374 suffix += "_small"; |
| 305 if (HasFlag(k24fps)) | 375 if (HasFlag(k24fps)) |
| 306 suffix += "_24fps"; | 376 suffix += "_24fps"; |
| 307 if (HasFlag(k30fps)) | 377 if (HasFlag(k30fps)) |
| 308 suffix += "_30fps"; | 378 suffix += "_30fps"; |
| 309 if (HasFlag(k60fps)) | 379 if (HasFlag(k60fps)) |
| 310 suffix += "_60fps"; | 380 suffix += "_60fps"; |
| 311 if (HasFlag(kProxyWifi)) | 381 if (HasFlag(kProxyWifi)) |
| 312 suffix += "_wifi"; | 382 suffix += "_wifi"; |
| 383 if (HasFlag(kProxySlow)) | |
| 384 suffix += "_slowwifi"; | |
| 313 if (HasFlag(kProxyBad)) | 385 if (HasFlag(kProxyBad)) |
| 314 suffix += "_bad"; | 386 suffix += "_bad"; |
| 315 if (HasFlag(kSlowClock)) | 387 if (HasFlag(kSlowClock)) |
| 316 suffix += "_slow"; | 388 suffix += "_slow"; |
| 317 if (HasFlag(kFastClock)) | 389 if (HasFlag(kFastClock)) |
| 318 suffix += "_fast"; | 390 suffix += "_fast"; |
| 391 if (HasFlag(kAutoThrottling)) | |
| 392 suffix += "_autothrottling"; | |
| 319 return suffix; | 393 return suffix; |
| 320 } | 394 } |
| 321 | 395 |
| 322 int getfps() { | 396 int getfps() { |
| 323 if (HasFlag(k24fps)) | 397 if (HasFlag(k24fps)) |
| 324 return 24; | 398 return 24; |
| 325 if (HasFlag(k30fps)) | 399 if (HasFlag(k30fps)) |
| 326 return 30; | 400 return 30; |
| 327 if (HasFlag(k60fps)) | 401 if (HasFlag(k60fps)) |
| 328 return 60; | 402 return 60; |
| 329 NOTREACHED(); | 403 NOTREACHED(); |
| 330 return 0; | 404 return 0; |
| 331 } | 405 } |
| 332 | 406 |
| 333 net::IPEndPoint GetFreeLocalPort() { | |
| 334 // Determine a unused UDP port for the in-process receiver to listen on. | |
| 335 // Method: Bind a UDP socket on port 0, and then check which port the | |
| 336 // operating system assigned to it. | |
| 337 std::unique_ptr<net::UDPServerSocket> receive_socket( | |
| 338 new net::UDPServerSocket(NULL, net::NetLogSource())); | |
| 339 receive_socket->AllowAddressReuse(); | |
| 340 CHECK_EQ(net::OK, receive_socket->Listen( | |
| 341 net::IPEndPoint(net::IPAddress::IPv4Localhost(), 0))); | |
| 342 net::IPEndPoint endpoint; | |
| 343 CHECK_EQ(net::OK, receive_socket->GetLocalAddress(&endpoint)); | |
| 344 return endpoint; | |
| 345 } | |
| 346 | |
| 347 void SetUp() override { | 407 void SetUp() override { |
| 348 EnablePixelOutput(); | 408 EnablePixelOutput(); |
| 349 ExtensionApiTest::SetUp(); | 409 ExtensionApiTest::SetUp(); |
| 350 } | 410 } |
| 351 | 411 |
| 352 void SetUpCommandLine(base::CommandLine* command_line) override { | 412 void SetUpCommandLine(base::CommandLine* command_line) override { |
| 353 // Some of the tests may launch http requests through JSON or AJAX | 413 // Some of the tests may launch http requests through JSON or AJAX |
| 354 // which causes a security error (cross domain request) when the page | 414 // which causes a security error (cross domain request) when the page |
| 355 // is loaded from the local file system ( file:// ). The following switch | 415 // is loaded from the local file system ( file:// ). The following switch |
| 356 // fixes that error. | 416 // fixes that error. |
| (...skipping 21 matching lines...) Expand all Loading... | |
| 378 const std::string& event_name, | 438 const std::string& event_name, |
| 379 trace_analyzer::TraceEventVector* events) { | 439 trace_analyzer::TraceEventVector* events) { |
| 380 trace_analyzer::Query query = | 440 trace_analyzer::Query query = |
| 381 trace_analyzer::Query::EventNameIs(event_name) && | 441 trace_analyzer::Query::EventNameIs(event_name) && |
| 382 (trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) || | 442 (trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_BEGIN) || |
| 383 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) || | 443 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_ASYNC_BEGIN) || |
| 384 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) || | 444 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_FLOW_BEGIN) || |
| 385 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT) || | 445 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_INSTANT) || |
| 386 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE)); | 446 trace_analyzer::Query::EventPhaseIs(TRACE_EVENT_PHASE_COMPLETE)); |
| 387 analyzer->FindEvents(query, events); | 447 analyzer->FindEvents(query, events); |
| 448 VLOG(0) << "Retrieved " << events->size() << " events for: " << event_name; | |
| 388 } | 449 } |
| 389 | 450 |
| 390 // The key contains the name of the argument and the argument. | 451 // The key contains the name of the argument and the argument. |
| 391 typedef std::pair<std::string, double> EventMapKey; | 452 typedef std::pair<std::string, double> EventMapKey; |
| 392 typedef std::map<EventMapKey, const trace_analyzer::TraceEvent*> EventMap; | 453 typedef std::map<EventMapKey, const trace_analyzer::TraceEvent*> EventMap; |
| 393 | 454 |
| 394 // Make events findable by their arguments, for instance, if an | 455 // Make events findable by their arguments, for instance, if an |
| 395 // event has a "timestamp": 238724 argument, the map will contain | 456 // event has a "timestamp": 238724 argument, the map will contain |
| 396 // pair<"timestamp", 238724> -> &event. All arguments are indexed. | 457 // pair<"timestamp", 238724> -> &event. All arguments are indexed. |
| 397 void IndexEvents(trace_analyzer::TraceAnalyzer* analyzer, | 458 void IndexEvents(trace_analyzer::TraceAnalyzer* analyzer, |
| 398 const std::string& event_name, | 459 const std::string& event_name, |
| 399 EventMap* event_map) { | 460 EventMap* event_map) { |
| 400 trace_analyzer::TraceEventVector events; | 461 trace_analyzer::TraceEventVector events; |
| 401 GetTraceEvents(analyzer, event_name, &events); | 462 GetTraceEvents(analyzer, event_name, &events); |
| 402 for (size_t i = 0; i < events.size(); i++) { | 463 for (size_t i = 0; i < events.size(); i++) { |
| 403 std::map<std::string, double>::const_iterator j; | 464 std::map<std::string, double>::const_iterator j; |
| 404 for (j = events[i]->arg_numbers.begin(); | 465 for (j = events[i]->arg_numbers.begin(); |
| 405 j != events[i]->arg_numbers.end(); | 466 j != events[i]->arg_numbers.end(); |
| 406 ++j) { | 467 ++j) { |
| 407 (*event_map)[*j] = events[i]; | 468 (*event_map)[*j] = events[i]; |
| 408 } | 469 } |
| 409 } | 470 } |
| 410 } | 471 } |
| 411 | 472 |
| 412 // Look up an event in |event_map|. The returned event will have the same | 473 // Look up an event in |event_map|. The returned event will have the same |
| 413 // value for the argument |key_name| as |prev_event|. Note that if | 474 // value for the argument |key_name| as |prev_event|. |
| 414 // the |key_name| is "time_delta", then we allow some fuzzy logic since | |
| 415 // the time deltas are truncated to milliseconds in the code. | |
| 416 const trace_analyzer::TraceEvent* FindNextEvent( | 475 const trace_analyzer::TraceEvent* FindNextEvent( |
| 417 const EventMap& event_map, | 476 const EventMap& event_map, |
| 418 std::vector<const trace_analyzer::TraceEvent*> prev_events, | 477 const trace_analyzer::TraceEvent* prev_event, |
| 419 std::string key_name) { | 478 std::string key_name) { |
| 420 EventMapKey key; | 479 const auto arg_it = prev_event->arg_numbers.find(key_name); |
| 421 for (size_t i = prev_events.size(); i;) { | 480 if (arg_it == prev_event->arg_numbers.end()) |
| 422 --i; | 481 return nullptr; |
| 423 std::map<std::string, double>::const_iterator j = | 482 const EventMapKey& key = *arg_it; |
| 424 prev_events[i]->arg_numbers.find(key_name); | 483 const auto event_it = event_map.find(key); |
| 425 if (j != prev_events[i]->arg_numbers.end()) { | 484 if (event_it == event_map.end()) |
| 426 key = *j; | 485 return nullptr; |
| 427 break; | 486 return event_it->second; |
| 428 } | |
| 429 } | |
| 430 EventMap::const_iterator i = event_map.lower_bound(key); | |
| 431 if (i == event_map.end()) | |
| 432 return NULL; | |
| 433 if (i->first.second == key.second) | |
| 434 return i->second; | |
| 435 if (key_name != "time_delta") | |
| 436 return NULL; | |
| 437 if (fabs(i->first.second - key.second) < 1000) | |
| 438 return i->second; | |
| 439 if (i == event_map.begin()) | |
| 440 return NULL; | |
| 441 i--; | |
| 442 if (fabs(i->first.second - key.second) < 1000) | |
| 443 return i->second; | |
| 444 return NULL; | |
| 445 } | 487 } |
| 446 | 488 |
| 447 // Given a vector of vector of data, extract the difference between | 489 // Given a vector of vector of data, extract the difference between |
| 448 // two columns (|col_a| and |col_b|) and output the result as a | 490 // two columns (|col_a| and |col_b|) and output the result as a |
| 449 // performance metric. | 491 // performance metric. |
| 450 void OutputMeasurement(const std::string& test_name, | 492 void OutputMeasurement(const std::string& test_name, |
| 451 const std::vector<std::vector<double> > data, | 493 const std::vector<std::vector<double>>& data, |
| 452 const std::string& measurement_name, | 494 const std::string& measurement_name, |
| 453 int col_a, | 495 int col_a, |
| 454 int col_b) { | 496 int col_b) { |
| 455 std::vector<double> tmp; | 497 std::vector<double> tmp; |
| 456 for (size_t i = 0; i < data.size(); i++) { | 498 for (size_t i = 0; i < data.size(); i++) { |
| 457 tmp.push_back((data[i][col_b] - data[i][col_a]) / 1000.0); | 499 tmp.push_back((data[i][col_b] - data[i][col_a]) / 1000.0); |
| 458 } | 500 } |
| 459 return MeanAndError(tmp).Print(test_name, | 501 return MeanAndError(tmp).Print(test_name, |
| 460 GetSuffixForTestFlags(), | 502 GetSuffixForTestFlags(), |
| 461 measurement_name, | 503 measurement_name, |
| 462 "ms"); | 504 "ms"); |
| 463 } | 505 } |
| 464 | 506 |
| 465 // Analyzing latency is hard, because there is no unifying identifier for | 507 // Analyze the latency of each frame as it goes from capture to playout. The |
| 466 // frames throughout the code. At first, we have a capture timestamp, which | 508 // event tracing system is used to track the frames. |
| 467 // gets converted to a time delta, then back to a timestamp. Once it enters | |
| 468 // the cast library it gets converted to an rtp_timestamp, and when it leaves | |
| 469 // the cast library, all we have is the render_time. | |
| 470 // | |
| 471 // To be able to follow the frame throughout all this, we insert TRACE | |
| 472 // calls that tracks each conversion as it happens. Then we extract all | |
| 473 // these events and link them together. | |
| 474 void AnalyzeLatency(const std::string& test_name, | 509 void AnalyzeLatency(const std::string& test_name, |
| 475 trace_analyzer::TraceAnalyzer* analyzer) { | 510 trace_analyzer::TraceAnalyzer* analyzer) { |
| 511 // Retrieve and index all "checkpoint" events related to frames progressing | |
| 512 // from start to finish. | |
| 513 trace_analyzer::TraceEventVector capture_events; | |
| 514 // Sender side: | |
| 515 GetTraceEvents(analyzer, "Capture", &capture_events); | |
| 476 EventMap onbuffer, sink, inserted, encoded, transmitted, decoded, done; | 516 EventMap onbuffer, sink, inserted, encoded, transmitted, decoded, done; |
| 477 IndexEvents(analyzer, "OnBufferReceived", &onbuffer); | 517 IndexEvents(analyzer, "OnBufferReceived", &onbuffer); |
| 478 IndexEvents(analyzer, "MediaStreamVideoSink::OnVideoFrame", &sink); | 518 IndexEvents(analyzer, "ConsumeVideoFrame", &sink); |
| 479 IndexEvents(analyzer, "InsertRawVideoFrame", &inserted); | 519 IndexEvents(analyzer, "InsertRawVideoFrame", &inserted); |
| 480 IndexEvents(analyzer, "VideoFrameEncoded", &encoded); | 520 IndexEvents(analyzer, "VideoFrameEncoded", &encoded); |
| 521 // Receiver side: | |
| 481 IndexEvents(analyzer, "PullEncodedVideoFrame", &transmitted); | 522 IndexEvents(analyzer, "PullEncodedVideoFrame", &transmitted); |
| 482 IndexEvents(analyzer, "FrameDecoded", &decoded); | 523 IndexEvents(analyzer, "VideoFrameDecoded", &decoded); |
| 483 IndexEvents(analyzer, "TestPatternReceiver::OnVideoFrame", &done); | 524 IndexEvents(analyzer, "VideoFramePlayout", &done); |
| 484 std::vector<std::pair<EventMap*, std::string> > event_maps; | 525 |
| 526 // Analyzing latency is non-trivial, because only the frame timestamps | |
| 527 // uniquely identify frames AND the timestamps take varying forms throughout | |
| 528 // the pipeline (TimeTicks, TimeDelta, RtpTimestamp, etc.). Luckily, each | |
| 529 // neighboring stage in the pipeline knows about the timestamp from the | |
| 530 // prior stage, in whatever form it had, and so it's possible to track | |
| 531 // specific frames all the way from capture until playout at the receiver. | |
| 532 std::vector<std::pair<EventMap*, std::string>> event_maps; | |
| 485 event_maps.push_back(std::make_pair(&onbuffer, "timestamp")); | 533 event_maps.push_back(std::make_pair(&onbuffer, "timestamp")); |
| 486 event_maps.push_back(std::make_pair(&sink, "time_delta")); | 534 event_maps.push_back(std::make_pair(&sink, "time_delta")); |
| 487 event_maps.push_back(std::make_pair(&inserted, "timestamp")); | 535 event_maps.push_back(std::make_pair(&inserted, "timestamp")); |
| 488 event_maps.push_back(std::make_pair(&encoded, "rtp_timestamp")); | 536 event_maps.push_back(std::make_pair(&encoded, "rtp_timestamp")); |
| 489 event_maps.push_back(std::make_pair(&transmitted, "rtp_timestamp")); | 537 event_maps.push_back(std::make_pair(&transmitted, "rtp_timestamp")); |
| 490 event_maps.push_back(std::make_pair(&decoded, "rtp_timestamp")); | 538 event_maps.push_back(std::make_pair(&decoded, "rtp_timestamp")); |
| 491 event_maps.push_back(std::make_pair(&done, "render_time")); | 539 event_maps.push_back(std::make_pair(&done, "playout_time")); |
| 492 | 540 |
| 493 trace_analyzer::TraceEventVector capture_events; | 541 // For each "begin capture" event, search for all the events following it, |
| 494 GetTraceEvents(analyzer, "Capture" , &capture_events); | 542 // producing a matrix of when each frame reached each pipeline checkpoint. |
| 495 EXPECT_GT(capture_events.size(), 0UL); | 543 // See the "cheat sheet" below for a description of each pipeline |
| 496 std::vector<std::vector<double> > traced_frames; | 544 // checkpoint. |
| 497 for (size_t i = kSkipEvents; i < capture_events.size(); i++) { | 545 ASSERT_GT(capture_events.size(), 2 * kTrimEvents); |
| 546 std::vector<std::vector<double>> traced_frames; | |
| 547 for (size_t i = kTrimEvents; i < capture_events.size() - kTrimEvents; i++) { | |
| 498 std::vector<double> times; | 548 std::vector<double> times; |
| 499 const trace_analyzer::TraceEvent *event = capture_events[i]; | 549 const trace_analyzer::TraceEvent* event = capture_events[i]; |
| 550 if (!event->other_event) | |
| 551 continue; // Begin capture event without a corresponding end event. | |
| 500 times.push_back(event->timestamp); // begin capture | 552 times.push_back(event->timestamp); // begin capture |
| 501 event = event->other_event; | 553 event = event->other_event; |
| 502 if (!event) { | 554 times.push_back(event->timestamp); // end capture |
| 503 continue; | 555 const trace_analyzer::TraceEvent* prev_event = event; |
| 504 } | |
| 505 times.push_back(event->timestamp); // end capture (with timestamp) | |
| 506 std::vector<const trace_analyzer::TraceEvent*> prev_events; | |
| 507 prev_events.push_back(event); | |
| 508 for (size_t j = 0; j < event_maps.size(); j++) { | 556 for (size_t j = 0; j < event_maps.size(); j++) { |
| 509 event = FindNextEvent(*event_maps[j].first, | 557 event = FindNextEvent(*event_maps[j].first, prev_event, |
| 510 prev_events, | |
| 511 event_maps[j].second); | 558 event_maps[j].second); |
| 512 if (!event) { | 559 if (!event) |
| 513 break; | 560 break; // Missing an event: The frame was dropped along the way. |
| 514 } | 561 prev_event = event; |
| 515 prev_events.push_back(event); | |
| 516 times.push_back(event->timestamp); | 562 times.push_back(event->timestamp); |
| 517 } | 563 } |
| 518 if (event) { | 564 if (event) { |
| 519 // Successfully traced frame from beginning to end | 565 // Successfully traced frame from beginning to end. |
| 520 traced_frames.push_back(times); | 566 traced_frames.push_back(std::move(times)); |
| 521 } | 567 } |
| 522 } | 568 } |
| 523 | 569 |
| 524 // 0 = capture begin | 570 // Report the fraction of captured frames that were dropped somewhere along |
| 525 // 1 = capture end | 571 // the way (i.e., before playout at the receiver). |
| 526 // 2 = onbuffer | 572 const size_t capture_event_count = capture_events.size() - 2 * kTrimEvents; |
| 527 // 3 = sink | 573 EXPECT_GE(capture_event_count, kMinDataPoints); |
| 528 // 4 = inserted | 574 const double success_percent = |
| 529 // 5 = encoded | 575 100.0 * traced_frames.size() / capture_event_count; |
| 530 // 6 = transmitted | 576 perf_test::PrintResult( |
| 531 // 7 = decoded | 577 test_name, GetSuffixForTestFlags(), "frame_drop_rate", |
| 532 // 8 = done | 578 base::StringPrintf("%f", 100 - success_percent), "percent", true); |
| 533 | 579 |
| 534 // Lower is better for all of these. | 580 // Report the latency between various pairs of checkpoints in the pipeline. |
| 581 // Lower latency is better for all of these measurements. | |
| 582 // | |
| 583 // Cheat sheet: | |
| 584 // 0 = Sender: capture begin | |
| 585 // 1 = Sender: capture end | |
| 586 // 2 = Sender: memory buffer reached the render process | |
| 587 // 3 = Sender: frame routed to Cast RTP consumer | |
| 588 // 4 = Sender: frame reached VideoSender::InsertRawVideoFrame() | |
| 589 // 5 = Sender: frame encoding complete, queueing for transmission | |
| 590 // 6 = Receiver: frame fully received from network | |
| 591 // 7 = Receiver: frame decoded | |
| 592 // 8 = Receiver: frame played out | |
| 535 OutputMeasurement(test_name, traced_frames, "total_latency", 0, 8); | 593 OutputMeasurement(test_name, traced_frames, "total_latency", 0, 8); |
| 536 OutputMeasurement(test_name, traced_frames, "capture_duration", 0, 1); | 594 OutputMeasurement(test_name, traced_frames, "capture_duration", 0, 1); |
| 537 OutputMeasurement(test_name, traced_frames, "send_to_renderer", 1, 3); | 595 OutputMeasurement(test_name, traced_frames, "send_to_renderer", 1, 3); |
| 538 OutputMeasurement(test_name, traced_frames, "encode", 3, 5); | 596 OutputMeasurement(test_name, traced_frames, "encode", 3, 5); |
| 539 OutputMeasurement(test_name, traced_frames, "transmit", 5, 6); | 597 OutputMeasurement(test_name, traced_frames, "transmit", 5, 6); |
| 540 OutputMeasurement(test_name, traced_frames, "decode", 6, 7); | 598 OutputMeasurement(test_name, traced_frames, "decode", 6, 7); |
| 541 OutputMeasurement(test_name, traced_frames, "cast_latency", 3, 8); | 599 OutputMeasurement(test_name, traced_frames, "cast_latency", 3, 8); |
| 542 } | 600 } |
| 543 | 601 |
| 544 MeanAndError AnalyzeTraceDistance(trace_analyzer::TraceAnalyzer* analyzer, | 602 MeanAndError AnalyzeTraceDistance(trace_analyzer::TraceAnalyzer* analyzer, |
| 545 const std::string& event_name) { | 603 const std::string& event_name) { |
| 546 trace_analyzer::TraceEventVector events; | 604 trace_analyzer::TraceEventVector events; |
| 547 GetTraceEvents(analyzer, event_name, &events); | 605 GetTraceEvents(analyzer, event_name, &events); |
| 548 | 606 |
| 549 std::vector<double> deltas; | 607 std::vector<double> deltas; |
| 550 for (size_t i = kSkipEvents + 1; i < events.size(); ++i) { | 608 for (size_t i = kTrimEvents + 1; i < events.size() - kTrimEvents; ++i) { |
| 551 double delta_micros = events[i]->timestamp - events[i - 1]->timestamp; | 609 double delta_micros = events[i]->timestamp - events[i - 1]->timestamp; |
| 552 deltas.push_back(delta_micros / 1000.0); | 610 deltas.push_back(delta_micros / 1000.0); |
| 553 } | 611 } |
| 554 return MeanAndError(deltas); | 612 return MeanAndError(deltas); |
| 555 } | 613 } |
| 556 | 614 |
| 557 void RunTest(const std::string& test_name) { | 615 void RunTest(const std::string& test_name) { |
| 558 if (HasFlag(kUseGpu) && !IsGpuAvailable()) { | 616 if (HasFlag(kUseGpu) && !IsGpuAvailable()) { |
| 559 LOG(WARNING) << | 617 LOG(WARNING) << |
| 560 "Test skipped: requires gpu. Pass --enable-gpu on the command " | 618 "Test skipped: requires gpu. Pass --enable-gpu on the command " |
| 561 "line if use of GPU is desired."; | 619 "line if use of GPU is desired."; |
| 562 return; | 620 return; |
| 563 } | 621 } |
| 564 | 622 |
| 565 ASSERT_EQ(1, | 623 ASSERT_EQ(1, |
| 566 (HasFlag(k24fps) ? 1 : 0) + | 624 (HasFlag(k24fps) ? 1 : 0) + |
| 567 (HasFlag(k30fps) ? 1 : 0) + | 625 (HasFlag(k30fps) ? 1 : 0) + |
| 568 (HasFlag(k60fps) ? 1 : 0)); | 626 (HasFlag(k60fps) ? 1 : 0)); |
| 569 | 627 |
| 570 net::IPEndPoint receiver_end_point = GetFreeLocalPort(); | 628 net::IPEndPoint receiver_end_point = media::cast::test::GetFreeLocalPort(); |
| 571 | 629 |
| 572 // Start the in-process receiver that examines audio/video for the expected | 630 // Start the in-process receiver that examines audio/video for the expected |
| 573 // test patterns. | 631 // test patterns. |
| 574 base::TimeDelta delta = base::TimeDelta::FromSeconds(0); | 632 base::TimeDelta delta = base::TimeDelta::FromSeconds(0); |
| 575 if (HasFlag(kFastClock)) { | 633 if (HasFlag(kFastClock)) { |
| 576 delta = base::TimeDelta::FromSeconds(10); | 634 delta = base::TimeDelta::FromSeconds(10); |
| 577 } | 635 } |
| 578 if (HasFlag(kSlowClock)) { | 636 if (HasFlag(kSlowClock)) { |
| 579 delta = base::TimeDelta::FromSeconds(-10); | 637 delta = base::TimeDelta::FromSeconds(-10); |
| 580 } | 638 } |
| 581 scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment( | 639 scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment( |
| 582 new SkewedCastEnvironment(delta)); | 640 new SkewedCastEnvironment(delta)); |
| 583 TestPatternReceiver* const receiver = | 641 TestPatternReceiver* const receiver = |
| 584 new TestPatternReceiver(cast_environment, receiver_end_point); | 642 new TestPatternReceiver(cast_environment, receiver_end_point); |
| 585 receiver->Start(); | 643 receiver->Start(); |
| 586 | 644 |
| 587 std::unique_ptr<media::cast::test::UDPProxy> udp_proxy; | 645 std::unique_ptr<media::cast::test::UDPProxy> udp_proxy; |
| 588 if (HasFlag(kProxyWifi) || HasFlag(kProxyBad)) { | 646 if (HasFlag(kProxyWifi) || HasFlag(kProxySlow) || HasFlag(kProxyBad)) { |
| 589 net::IPEndPoint proxy_end_point = GetFreeLocalPort(); | 647 net::IPEndPoint proxy_end_point = media::cast::test::GetFreeLocalPort(); |
| 590 if (HasFlag(kProxyWifi)) { | 648 if (HasFlag(kProxyWifi)) { |
| 591 udp_proxy = media::cast::test::UDPProxy::Create( | 649 udp_proxy = media::cast::test::UDPProxy::Create( |
| 592 proxy_end_point, receiver_end_point, | 650 proxy_end_point, receiver_end_point, |
| 593 media::cast::test::WifiNetwork(), media::cast::test::WifiNetwork(), | 651 media::cast::test::WifiNetwork(), media::cast::test::WifiNetwork(), |
| 594 NULL); | 652 nullptr); |
| 653 } else if (HasFlag(kProxySlow)) { | |
| 654 udp_proxy = media::cast::test::UDPProxy::Create( | |
| 655 proxy_end_point, receiver_end_point, | |
| 656 media::cast::test::SlowNetwork(), media::cast::test::SlowNetwork(), | |
| 657 nullptr); | |
| 595 } else if (HasFlag(kProxyBad)) { | 658 } else if (HasFlag(kProxyBad)) { |
| 596 udp_proxy = media::cast::test::UDPProxy::Create( | 659 udp_proxy = media::cast::test::UDPProxy::Create( |
| 597 proxy_end_point, receiver_end_point, | 660 proxy_end_point, receiver_end_point, |
| 598 media::cast::test::BadNetwork(), media::cast::test::BadNetwork(), | 661 media::cast::test::BadNetwork(), media::cast::test::BadNetwork(), |
| 599 NULL); | 662 nullptr); |
| 600 } | 663 } |
| 601 receiver_end_point = proxy_end_point; | 664 receiver_end_point = proxy_end_point; |
| 602 } | 665 } |
| 603 | 666 |
| 604 std::string json_events; | 667 std::string json_events; |
| 605 ASSERT_TRUE(tracing::BeginTracing( | 668 ASSERT_TRUE(tracing::BeginTracing("gpu.capture,cast_perf_test")); |
| 606 "test_fps,mirroring,gpu.capture,cast_perf_test")); | |
| 607 const std::string page_url = base::StringPrintf( | 669 const std::string page_url = base::StringPrintf( |
| 608 "performance%d.html?port=%d", | 670 "performance%d.html?port=%d&autoThrottling=%s&aesKey=%s&aesIvMask=%s", |
| 609 getfps(), | 671 getfps(), receiver_end_point.port(), |
| 610 receiver_end_point.port()); | 672 HasFlag(kAutoThrottling) ? "true" : "false", |
| 673 base::HexEncode(kAesKey, sizeof(kAesKey)).c_str(), | |
| 674 base::HexEncode(kAesIvMask, sizeof(kAesIvMask)).c_str()); | |
| 611 ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_; | 675 ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_; |
| 612 ASSERT_TRUE(tracing::EndTracing(&json_events)); | 676 ASSERT_TRUE(tracing::EndTracing(&json_events)); |
| 613 receiver->Stop(); | 677 receiver->Stop(); |
| 614 | 678 |
| 615 // Stop all threads, which removes the need for synchronization when analyzing | 679 // Stop all threads, which removes the need for synchronization when analyzing |
| 616 // the data. | 680 // the data. |
| 617 cast_environment->Shutdown(); | 681 cast_environment->Shutdown(); |
| 618 std::unique_ptr<trace_analyzer::TraceAnalyzer> analyzer; | 682 std::unique_ptr<trace_analyzer::TraceAnalyzer> analyzer; |
| 619 analyzer.reset(trace_analyzer::TraceAnalyzer::Create(json_events)); | 683 analyzer.reset(trace_analyzer::TraceAnalyzer::Create(json_events)); |
| 620 analyzer->AssociateAsyncBeginEndEvents(); | 684 analyzer->AssociateAsyncBeginEndEvents(); |
| 621 | 685 |
| 622 MeanAndError frame_data = AnalyzeTraceDistance( | |
| 623 analyzer.get(), | |
| 624 "OnSwapCompositorFrame"); | |
| 625 if (frame_data.num_values > 0) { | |
| 626 // Lower is better. | |
| 627 frame_data.Print(test_name, GetSuffixForTestFlags(), | |
| 628 "time_between_frames", "ms"); | |
| 629 } else { | |
| 630 // TODO(miu): Fix is currently WIP. http://crbug.com/709247 | |
| 631 LOG(WARNING) << "No frame_data values, so no time_between_frames result."; | |
| 632 } | |
| 633 | |
| 634 // This prints out the average time between capture events. | 686 // This prints out the average time between capture events. |
| 635 // As the capture frame rate is capped at 30fps, this score | 687 // Depending on the test, the capture frame rate is capped (e.g., at 30fps, |
| 636 // cannot get any better than (lower) 33.33 ms. | 688 // this score cannot get any better than 33.33 ms). However, the measurement |
| 689 // is important since it provides a valuable check that capture can keep up | |
| 690 // with the content's framerate. | |
| 637 MeanAndError capture_data = AnalyzeTraceDistance(analyzer.get(), "Capture"); | 691 MeanAndError capture_data = AnalyzeTraceDistance(analyzer.get(), "Capture"); |
| 638 // Lower is better. | 692 // Lower is better. |
| 639 capture_data.Print(test_name, | 693 capture_data.Print(test_name, |
| 640 GetSuffixForTestFlags(), | 694 GetSuffixForTestFlags(), |
| 641 "time_between_captures", | 695 "time_between_captures", |
| 642 "ms"); | 696 "ms"); |
| 643 | 697 |
| 644 receiver->Analyze(test_name, GetSuffixForTestFlags()); | 698 receiver->Analyze(test_name, GetSuffixForTestFlags()); |
| 645 | 699 |
| 646 AnalyzeLatency(test_name, analyzer.get()); | 700 AnalyzeLatency(test_name, analyzer.get()); |
| 647 } | 701 } |
| 648 }; | 702 }; |
| 649 | 703 |
| 650 } // namespace | 704 } // namespace |
| 651 | 705 |
| 652 IN_PROC_BROWSER_TEST_P(CastV2PerformanceTest, Performance) { | 706 IN_PROC_BROWSER_TEST_P(CastV2PerformanceTest, Performance) { |
| 653 RunTest("CastV2Performance"); | 707 RunTest("CastV2Performance"); |
| 654 } | 708 } |
| 655 | 709 |
| 656 // Note: First argument is optional and intentionally left blank. | 710 // Note: First argument is optional and intentionally left blank. |
| 657 // (it's a prefix for the generated test cases) | 711 // (it's a prefix for the generated test cases) |
| 658 INSTANTIATE_TEST_CASE_P( | 712 INSTANTIATE_TEST_CASE_P( |
| 659 , | 713 , |
| 660 CastV2PerformanceTest, | 714 CastV2PerformanceTest, |
| 661 testing::Values( | 715 testing::Values(kUseGpu | k24fps, |
| 662 kUseGpu | k24fps, | 716 kUseGpu | k30fps, |
| 663 kUseGpu | k30fps, | 717 kUseGpu | k60fps, |
| 664 kUseGpu | k60fps, | 718 kUseGpu | k24fps | kDisableVsync, |
| 665 kUseGpu | k24fps | kDisableVsync, | 719 kUseGpu | k30fps | kProxyWifi, |
| 666 kUseGpu | k30fps | kProxyWifi, | 720 kUseGpu | k30fps | kProxyBad, |
| 667 kUseGpu | k30fps | kProxyBad, | 721 kUseGpu | k30fps | kSlowClock, |
| 668 kUseGpu | k30fps | kSlowClock, | 722 kUseGpu | k30fps | kFastClock, |
| 669 kUseGpu | k30fps | kFastClock)); | 723 kUseGpu | k30fps | kProxyWifi | kAutoThrottling, |
| 724 kUseGpu | k30fps | kProxySlow | kAutoThrottling, | |
| 725 kUseGpu | k30fps | kProxyBad | kAutoThrottling)); | |
| OLD | NEW |