Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 <!DOCTYPE html> | 1 <!DOCTYPE html> |
| 2 <!-- | 2 <!-- |
| 3 Copyright 2017 The Chromium Authors. All rights reserved. | 3 Copyright 2017 The Chromium Authors. All rights reserved. |
| 4 Use of this source code is governed by a BSD-style license that can be | 4 Use of this source code is governed by a BSD-style license that can be |
| 5 found in the LICENSE file. | 5 found in the LICENSE file. |
| 6 --> | 6 --> |
| 7 | 7 |
| 8 <!-- | 8 <!-- |
| 9 media_metrics uses Chrome trace events to calculate metrics about video | 9 media_metrics uses Chrome trace events to calculate metrics about video |
| 10 and audio playback. It is meant to be used for pages with a <video> or | 10 and audio playback. It is meant to be used for pages with a <video> or |
| 11 <audio> element. It is used by videostack-eng@google.com team for | 11 <audio> element. It is used by videostack-eng@google.com team for |
| 12 regression testing. | 12 regression testing. |
| 13 | 13 |
| 14 This metric currently supports the following measurement: | 14 This metric currently supports the following measurement: |
| 15 * time_to_video_play calculates how long after a video is requested to | 15 * time_to_video_play calculates how long after a video is requested to |
| 16 start playing before the video actually starts. If time_to_video_play | 16 start playing before the video actually starts. If time_to_video_play |
| 17 regresses, then users will click to play videos and then have | 17 regresses, then users will click to play videos and then have |
| 18 to wait longer before the videos start actually playing. | 18 to wait longer before the videos start actually playing. |
| 19 * time_to_audio_play is similar to time_to_video_play, but measures the | 19 * time_to_audio_play is similar to time_to_video_play, but measures the |
| 20 time delay before audio starts playing. | 20 time delay before audio starts playing. |
| 21 | 21 * buffering_time calculates the difference between the actual play time of |
| 22 More measurements are expected to be added in the near future, such as: | 22 media vs its expected play time. Ideally the two should be the same. |
| 23 * buffering_time | 23 If actual play time is significantly longer than expected play time, |
| 24 * seek_time | 24 it indicates that there were stalls during the play for buffering or |
| 25 * dropped_frame_count | 25 some other reasons. |
| 26 * dropped_frame_count reports the number of video frames that were dropped. | |
| 27 Ideally this should be 0. If a large number of frames are dropped, the | |
| 28 video play will not be smooth. | |
| 29 * seek_time calculates how long after a user requests a seek operation | |
| 30 before the seek completes and the media starts playing at the new | |
| 31 location. | |
| 26 | 32 |
| 27 Please inform crouleau@chromium.org and johnchen@chromium.org about | 33 Please inform crouleau@chromium.org and johnchen@chromium.org about |
| 28 changes to this file. | 34 changes to this file. |
| 29 --> | 35 --> |
| 30 | 36 |
| 31 <link rel="import" href="/tracing/metrics/metric_registry.html"> | 37 <link rel="import" href="/tracing/metrics/metric_registry.html"> |
| 32 <link rel="import" href="/tracing/model/helpers/chrome_model_helper.html"> | 38 <link rel="import" href="/tracing/model/helpers/chrome_model_helper.html"> |
| 33 <link rel="import" href="/tracing/value/histogram.html"> | 39 <link rel="import" href="/tracing/value/histogram.html"> |
| 34 | 40 |
| 35 <script> | 41 <script> |
| 36 'use strict'; | 42 'use strict'; |
| 37 | 43 |
| 38 tr.exportTo('tr.metrics', function() { | 44 tr.exportTo('tr.metrics', function() { |
| 39 function mediaMetric(histograms, model) { | 45 function mediaMetric(histograms, model) { |
| 40 let playStart; | |
| 41 let timeToAudioPlay; | |
| 42 let timeToVideoPlay; | |
| 43 | |
| 44 const chromeHelper = model.getOrCreateHelper( | 46 const chromeHelper = model.getOrCreateHelper( |
| 45 tr.model.helpers.ChromeModelHelper); | 47 tr.model.helpers.ChromeModelHelper); |
| 46 if (chromeHelper === undefined) return; | 48 if (chromeHelper === undefined) return; |
| 47 | 49 |
| 48 for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) { | 50 for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) { |
| 49 // Find the threads we're interested in, and if a needed thread | 51 // Find the threads we're interested in, and if a needed thread |
| 50 // is missing, no need to look further in this process. | 52 // is missing, no need to look further in this process. |
| 51 const mainThread = rendererHelper.mainThread; | 53 const mainThread = rendererHelper.mainThread; |
| 52 if (mainThread === undefined) continue; | 54 if (mainThread === undefined) continue; |
| 53 | 55 |
| 54 const compositorThread = rendererHelper.compositorThread; | 56 const compositorThread = rendererHelper.compositorThread; |
| 55 const audioThread = | 57 const audioThread = |
| 56 rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice'); | 58 rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice'); |
| 57 if (compositorThread === undefined && audioThread === undefined) continue; | 59 if (compositorThread === undefined && audioThread === undefined) continue; |
| 58 | 60 |
| 59 // Look for the media player DoLoad event on main thread. | 61 const playStart = getPlayStart(mainThread); |
| 60 for (const event of mainThread.getDescendantEvents()) { | |
| 61 if (event.title === 'WebMediaPlayerImpl::DoLoad') { | |
| 62 // TODO(johnchen@chromium.org): Support multiple audio/video | |
| 63 // elements per page. Currently, we only support a single | |
| 64 // audio or video element, so we can store the start time in | |
| 65 // a simple variable, and exit the loop. | |
| 66 if (playStart !== undefined) { | |
| 67 throw new Error( | |
| 68 'Loading multiple audio/video elements not yet supported'); | |
| 69 } | |
| 70 playStart = event.start; | |
| 71 break; | |
| 72 } | |
| 73 } | |
| 74 if (playStart === undefined) continue; | 62 if (playStart === undefined) continue; |
| 75 | 63 |
| 76 // Look for video render event. | 64 const timeToVideoPlay = getTimeToVideoPlay( |
| 77 if (compositorThread !== undefined) { | 65 compositorThread, playStart, histograms); |
| 78 for (const event of compositorThread.getDescendantEvents()) { | 66 const timeToAudioPlay = getTimeToAudioPlay( |
| 79 if (event.title === 'VideoRendererImpl::Render') { | 67 audioThread, playStart, histograms); |
| 80 timeToVideoPlay = event.start - playStart; | 68 |
| 81 break; | 69 if (timeToVideoPlay === undefined && timeToAudioPlay === undefined) { |
| 82 } | 70 continue; |
| 83 } | |
| 84 } | 71 } |
| 85 | 72 |
| 86 // Look for audio render event. | 73 const droppedFrameCount = getDroppedFrameCount(compositorThread, |
| 87 if (audioThread !== undefined) { | 74 timeToVideoPlay, histograms); |
| 88 for (const event of audioThread.getDescendantEvents()) { | 75 const seekTimes = getSeekTimes(mainThread, timeToVideoPlay, histograms); |
| 89 if (event.title === 'AudioRendererImpl::Render') { | 76 const bufferingTime = getBufferingTime(mainThread, playStart, |
| 90 timeToAudioPlay = event.start - playStart; | 77 timeToVideoPlay, timeToAudioPlay, seekTimes, histograms); |
| 91 break; | |
| 92 } | |
| 93 } | |
| 94 } | |
| 95 if (timeToVideoPlay !== undefined) break; | |
| 96 if (timeToAudioPlay !== undefined) break; | |
| 97 } | |
| 98 | |
| 99 if (timeToVideoPlay !== undefined) { | |
| 100 histograms.createHistogram('time_to_video_play', | |
| 101 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay); | |
| 102 } | |
| 103 if (timeToAudioPlay !== undefined) { | |
| 104 histograms.createHistogram('time_to_audio_play', | |
| 105 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay); | |
| 106 } | 78 } |
| 107 } | 79 } |
| 108 | 80 |
| 81 function getPlayStart(mainThread) { | |
| 82 let playStart; | |
| 83 for (const event of mainThread.getDescendantEvents()) { | |
| 84 if (event.title === 'WebMediaPlayerImpl::DoLoad') { | |
| 85 // TODO(johnchen@chromium.org): Support multiple audio/video | |
| 86 // elements per page. Currently, we only support a single | |
| 87 // audio or video element, so we can store the start time in | |
| 88 // a simple variable. | |
| 89 if (playStart !== undefined) { | |
| 90 throw new Error( | |
| 91 'Loading multiple audio/video elements not yet supported'); | |
| 92 } | |
| 93 playStart = event.start; | |
| 94 } | |
| 95 } | |
| 96 return playStart; | |
| 97 } | |
| 98 | |
| 99 function getTimeToVideoPlay(compositorThread, playStart, histograms) { | |
| 100 if (compositorThread !== undefined) { | |
| 101 for (const event of compositorThread.getDescendantEvents()) { | |
| 102 if (event.title === 'VideoRendererImpl::Render') { | |
| 103 const timeToVideoPlay = event.start - playStart; | |
| 104 histograms.createHistogram('time_to_video_play', | |
| 105 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, | |
| 106 timeToVideoPlay); | |
| 107 return timeToVideoPlay; | |
| 108 } | |
| 109 } | |
| 110 } | |
| 111 return undefined; | |
| 112 } | |
| 113 | |
| 114 function getTimeToAudioPlay(audioThread, playStart, histograms) { | |
| 115 if (audioThread !== undefined) { | |
| 116 for (const event of audioThread.getDescendantEvents()) { | |
|
benjhayden
2017/09/21 05:57:30
Can you iterate over only one of the thread's events?
johnchen
2017/09/22 16:25:39
Done. Now getting events from sliceGroup.
| |
| 117 if (event.title === 'AudioRendererImpl::Render') { | |
| 118 const timeToAudioPlay = event.start - playStart; | |
| 119 histograms.createHistogram('time_to_audio_play', | |
| 120 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, | |
| 121 timeToAudioPlay); | |
| 122 return timeToAudioPlay; | |
| 123 } | |
| 124 } | |
| 125 } | |
| 126 return undefined; | |
| 127 } | |
| 128 | |
| 129 function getSeekTimes(mainThread, timeToVideoPlay, histograms) { | |
| 130 if (timeToVideoPlay === undefined) return new Map(); | |
| 131 | |
| 132 const seekStartTimes = new Map(); | |
| 133 const seekTimes = new Map(); | |
| 134 for (const event of mainThread.getDescendantEvents()) { | |
| 135 if (event.title === 'WebMediaPlayerImpl::DoSeek') { | |
| 136 seekStartTimes.set(event.args.target, event.start); | |
|
benjhayden
2017/09/21 05:57:30
We're a bit leery of copying strings from the trace.
johnchen
2017/09/22 16:25:39
Targets are numerical values, not strings. It equals the value of the pointer to the WebMediaPlayerImpl object.
| |
| 137 } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') { | |
| 138 const startTime = seekStartTimes.get(event.args.target); | |
| 139 if (startTime !== undefined) { | |
| 140 seekTimes.set(event.args.target, event.start - startTime); | |
| 141 seekStartTimes.delete(event.args.target); | |
| 142 } | |
| 143 } | |
| 144 } | |
| 145 for (const [key, value] of seekTimes.entries()) { | |
| 146 histograms.createHistogram('seek_time_' + key, | |
| 147 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, value); | |
| 148 } | |
| 149 return seekTimes; | |
| 150 } | |
| 151 | |
| 152 function getBufferingTime(mainThread, playStart, timeToVideoPlay, | |
| 153 timeToAudioPlay, seekTimes, histograms) { | |
| 154 // We can calculate buffering time only if the media is played from | |
| 155 // beginning to end. Thus if any seek operations had occurred, we skip | |
| 156 // calculating this metric. | |
| 157 if (seekTimes.size !== 0) return undefined; | |
| 158 | |
| 159 let playEnd; | |
| 160 let duration; | |
| 161 for (const event of mainThread.getDescendantEvents()) { | |
| 162 if (event.title === 'WebMediaPlayerImpl::OnEnded') { | |
| 163 // TODO(johnchen@chromium.org): Support multiple audio/video | |
| 164 // elements per page. Currently, we only support a single | |
| 165 // audio or video element, so we can store the end time in | |
| 166 // a simple variable. | |
| 167 if (playEnd !== undefined) { | |
| 168 throw new Error( | |
| 169 'Multiple media ended events not yet supported'); | |
| 170 } | |
| 171 playEnd = event.start; | |
| 172 duration = 1000 * event.args.duration; // seconds to milliseconds | |
| 173 } | |
| 174 } | |
| 175 let bufferingTime = playEnd - playStart - duration; | |
| 176 if (timeToVideoPlay !== undefined) { | |
| 177 bufferingTime -= timeToVideoPlay; | |
| 178 } else { | |
| 179 bufferingTime -= timeToAudioPlay; | |
| 180 } | |
| 181 histograms.createHistogram('buffering_time', | |
| 182 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, bufferingTime); | |
| 183 return bufferingTime; | |
| 184 } | |
| 185 | |
| 186 function getDroppedFrameCount(compositorThread, timeToVideoPlay, | |
| 187 histograms) { | |
| 188 if (timeToVideoPlay === undefined) return undefined; | |
|
benjhayden
2017/09/21 05:57:30
If this function only needs timeToVideoPlay in order to tell whether there is video, consider passing a boolean instead.
johnchen
2017/09/22 16:25:39
Done
| |
| 189 | |
| 190 let droppedFrameCount = 0; | |
| 191 for (const event of compositorThread.getDescendantEvents()) { | |
| 192 if (event.title === 'VideoFramesDropped') { | |
| 193 droppedFrameCount += event.args.count; | |
| 194 } | |
| 195 } | |
| 196 histograms.createHistogram('dropped_frame_count', | |
| 197 tr.b.Unit.byName.count_smallerIsBetter, droppedFrameCount); | |
| 198 return droppedFrameCount; | |
| 199 } | |
| 200 | |
| 109 tr.metrics.MetricRegistry.register(mediaMetric); | 201 tr.metrics.MetricRegistry.register(mediaMetric); |
| 110 | 202 |
| 111 return { | 203 return { |
| 112 mediaMetric, | 204 mediaMetric, |
| 113 }; | 205 }; |
| 114 }); | 206 }); |
| 115 </script> | 207 </script> |
| OLD | NEW |