Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 <!DOCTYPE html> | 1 <!DOCTYPE html> |
| 2 <!-- | 2 <!-- |
| 3 Copyright 2017 The Chromium Authors. All rights reserved. | 3 Copyright 2017 The Chromium Authors. All rights reserved. |
| 4 Use of this source code is governed by a BSD-style license that can be | 4 Use of this source code is governed by a BSD-style license that can be |
| 5 found in the LICENSE file. | 5 found in the LICENSE file. |
| 6 --> | 6 --> |
| 7 | 7 |
| 8 <!-- | 8 <!-- |
| 9 media_metrics uses Chrome trace events to calculate metrics about video | 9 media_metrics uses Chrome trace events to calculate metrics about video |
| 10 and audio playback. It is meant to be used for pages with a <video> or | 10 and audio playback. It is meant to be used for pages with a <video> or |
| 11 <audio> element. It is used by the videostack-eng@google.com team for | 11 <audio> element. It is used by the videostack-eng@google.com team for |
| 12 regression testing. | 12 regression testing. |
| 13 | 13 |
| 14 This metric currently supports the following measurements: | 14 This metric currently supports the following measurements: |
| 15 * time_to_video_play calculates how long it takes, after a video is | 15 * time_to_video_play calculates how long it takes, after a video is |
| 16 requested to start playing, for the video to actually start. If | 16 requested to start playing, for the video to actually start. If |
| 17 time_to_video_play regresses, users will click to play videos and | 17 time_to_video_play regresses, users will click to play videos and |
| 18 then have to wait longer before the videos actually start playing. | 18 then have to wait longer before the videos actually start playing. |
| 19 * time_to_audio_play is similar to time_to_video_play, but measures the | 19 * time_to_audio_play is similar to time_to_video_play, but measures the |
| 20 time delay before audio starts playing. | 20 time delay before audio starts playing. |
| 21 | 21 * buffering_time calculates the difference between the actual play time |
| 22 More measurements are expected to be added in the near future, such as: | 22 of media and its expected play time. Ideally the two should be the same. |
| 23 * buffering_time | 23 If the actual play time is significantly longer than the expected play |
| 24 * seek_time | 24 time, it indicates that playback stalled for buffering or other |
| 25 * dropped_frame_count | 25 reasons. |
| 26 * dropped_frame_count reports the number of video frames that were dropped. | |
| 27 Ideally this should be 0. If a large number of frames are dropped, | |
| 28 video playback will not be smooth. | |
| 29 * seek_time calculates how long it takes, after a user requests a seek, | |
| 30 for the seek to complete and the media to start playing at the new | |
| 31 location. | |
| 26 | 32 |
| 27 Please inform crouleau@chromium.org and johnchen@chromium.org about | 33 Please inform crouleau@chromium.org and johnchen@chromium.org about |
| 28 changes to this file. | 34 changes to this file. |
| 29 --> | 35 --> |
| 30 | 36 |
| 31 <link rel="import" href="/tracing/metrics/metric_registry.html"> | 37 <link rel="import" href="/tracing/metrics/metric_registry.html"> |
| 32 <link rel="import" href="/tracing/model/helpers/chrome_model_helper.html"> | 38 <link rel="import" href="/tracing/model/helpers/chrome_model_helper.html"> |
| 33 <link rel="import" href="/tracing/value/histogram.html"> | 39 <link rel="import" href="/tracing/value/histogram.html"> |
| 34 | 40 |
| 35 <script> | 41 <script> |
| 36 'use strict'; | 42 'use strict'; |
| 37 | 43 |
| 38 tr.exportTo('tr.metrics', function() { | 44 tr.exportTo('tr.metrics', function() { |
| 39 function mediaMetric(histograms, model) { | 45 function mediaMetric(histograms, model) { |
| 40 let playStart; | |
| 41 let timeToAudioPlay; | |
| 42 let timeToVideoPlay; | |
| 43 | |
| 44 const chromeHelper = model.getOrCreateHelper( | 46 const chromeHelper = model.getOrCreateHelper( |
| 45 tr.model.helpers.ChromeModelHelper); | 47 tr.model.helpers.ChromeModelHelper); |
| 46 if (chromeHelper === undefined) return; | 48 if (chromeHelper === undefined) return; |
| 47 | 49 |
| 48 for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) { | 50 for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) { |
| 49 // Find the threads we're interested in, and if a needed thread | 51 // Find the threads we're interested in, and if a needed thread |
| 50 // is missing, no need to look further in this process. | 52 // is missing, no need to look further in this process. |
| 51 const mainThread = rendererHelper.mainThread; | 53 const mainThread = rendererHelper.mainThread; |
| 52 if (mainThread === undefined) continue; | 54 if (mainThread === undefined) continue; |
| 53 | 55 |
| 54 const compositorThread = rendererHelper.compositorThread; | 56 const compositorThread = rendererHelper.compositorThread; |
| 55 const audioThread = | 57 const audioThread = |
| 56 rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice'); | 58 rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice'); |
| 57 if (compositorThread === undefined && audioThread === undefined) continue; | 59 if (compositorThread === undefined && audioThread === undefined) continue; |
| 58 | 60 |
| 59 // Look for the media player DoLoad event on main thread. | 61 const playStart = getPlayStart(mainThread); |
| 60 for (const event of mainThread.getDescendantEvents()) { | 62 if (playStart === undefined) continue; |
| 61 if (event.title === 'WebMediaPlayerImpl::DoLoad') { | 63 |
| 62 // TODO(johnchen@chromium.org): Support multiple audio/video | 64 const timeToVideoPlay = compositorThread === undefined ? undefined : |
| 63 // elements per page. Currently, we only support a single | 65 getTimeToVideoPlay(compositorThread, playStart); |
| 64 // audio or video element, so we can store the start time in | 66 const timeToAudioPlay = audioThread === undefined ? undefined : |
| 65 // a simple variable, and exit the loop. | 67 getTimeToAudioPlay(audioThread, playStart); |
| 66 if (playStart !== undefined) { | 68 |
| 67 throw new Error( | 69 if (timeToVideoPlay === undefined && timeToAudioPlay === undefined) { |
| 68 'Loading multiple audio/video elements not yet supported'); | 70 continue; |
| 69 } | 71 } |
| 70 playStart = event.start; | 72 |
| 71 break; | 73 const droppedFrameCount = timeToVideoPlay === undefined ? undefined : |
| 74 getDroppedFrameCount(compositorThread); | |
| 75 const seekTimes = timeToVideoPlay === undefined ? new Map() : | |
| 76 getSeekTimes(mainThread); | |
| 77 const bufferingTime = seekTimes.size !== 0 ? undefined : | |
| 78 getBufferingTime(mainThread, playStart, timeToVideoPlay, | |
| 79 timeToAudioPlay); | |
| 80 | |
| 81 if (timeToVideoPlay !== undefined) { | |
| 82 histograms.createHistogram('time_to_video_play', | |
| 83 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, | |
| 84 timeToVideoPlay); | |
| 85 } | |
| 86 if (timeToAudioPlay !== undefined) { | |
| 87 histograms.createHistogram('time_to_audio_play', | |
| 88 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, | |
| 89 timeToAudioPlay); | |
| 90 } | |
| 91 if (droppedFrameCount !== undefined) { | |
| 92 histograms.createHistogram('dropped_frame_count', | |
| 93 tr.b.Unit.byName.count_smallerIsBetter, droppedFrameCount); | |
| 94 } | |
| 95 if (seekTimes.size !== 0) { | |

CalebRouleau (2017/09/26 01:26:35): You can drop this "if"
johnchen (2017/09/26 02:22:13): Done.
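The suggestion works because iterating an empty Map executes the loop body zero times, so the surrounding size check is redundant. A minimal standalone JavaScript sketch of that behavior (illustrative only, not part of the patch):

```js
// A for...of loop over an empty Map runs its body zero times, so
// guarding it with a size check changes nothing.
const seekTimes = new Map();
for (const [target, seekTime] of seekTimes.entries()) {
  // Never reached while seekTimes is empty.
  console.log(target, seekTime);
}
```
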
| 96 for (const [key, value] of seekTimes.entries()) { | |
| 97 histograms.createHistogram('seek_time_' + key, | |
| 98 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, value); | |
| 72 } | 99 } |
| 73 } | 100 } |
| 74 if (playStart === undefined) continue; | 101 if (bufferingTime !== undefined) { |
| 75 | 102 histograms.createHistogram('buffering_time', |
| 76 // Look for video render event. | 103 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, bufferingTime); |
| 77 if (compositorThread !== undefined) { | |
| 78 for (const event of compositorThread.getDescendantEvents()) { | |
| 79 if (event.title === 'VideoRendererImpl::Render') { | |
| 80 timeToVideoPlay = event.start - playStart; | |
| 81 break; | |
| 82 } | |
| 83 } | |
| 84 } | 104 } |
| 85 | |
| 86 // Look for audio render event. | |
| 87 if (audioThread !== undefined) { | |
| 88 for (const event of audioThread.getDescendantEvents()) { | |
| 89 if (event.title === 'AudioRendererImpl::Render') { | |
| 90 timeToAudioPlay = event.start - playStart; | |
| 91 break; | |
| 92 } | |
| 93 } | |
| 94 } | |
| 95 if (timeToVideoPlay !== undefined) break; | |
| 96 if (timeToAudioPlay !== undefined) break; | |
| 97 } | |
| 98 | |
| 99 if (timeToVideoPlay !== undefined) { | |
| 100 histograms.createHistogram('time_to_video_play', | |
| 101 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay); | |
| 102 } | |
| 103 if (timeToAudioPlay !== undefined) { | |
| 104 histograms.createHistogram('time_to_audio_play', | |
| 105 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay); | |
| 106 } | 105 } |
| 107 } | 106 } |
| 108 | 107 |
| 108 function getPlayStart(mainThread) { | |
| 109 let playStart; | |
| 110 for (const event of mainThread.sliceGroup.getDescendantEvents()) { | |
| 111 if (event.title === 'WebMediaPlayerImpl::DoLoad') { | |
| 112 // TODO(johnchen@chromium.org): Support multiple audio/video | |
| 113 // elements per page. Currently, we only support a single | |
| 114 // audio or video element, so we can store the start time in | |
| 115 // a simple variable. | |
| 116 if (playStart !== undefined) { | |
| 117 throw new Error( | |
| 118 'Loading multiple audio/video elements not yet supported'); | |
| 119 } | |
| 120 playStart = event.start; | |
| 121 } | |
| 122 } | |
| 123 return playStart; | |
| 124 } | |
| 125 | |
| 126 function getTimeToVideoPlay(compositorThread, playStart) { | |
| 127 for (const event of compositorThread.sliceGroup.getDescendantEvents()) { | |
| 128 if (event.title === 'VideoRendererImpl::Render') { | |
| 129 return event.start - playStart; | |
| 130 } | |
| 131 } | |
| 132 return undefined; | |
| 133 } | |
| 134 | |
| 135 function getTimeToAudioPlay(audioThread, playStart) { | |
| 136 for (const event of audioThread.sliceGroup.getDescendantEvents()) { | |
| 137 if (event.title === 'AudioRendererImpl::Render') { | |
| 138 return event.start - playStart; | |
| 139 } | |
| 140 } | |
| 141 return undefined; | |
| 142 } | |
| 143 | |
| 144 function getSeekTimes(mainThread) { | |
| 145 // We support multiple seeks per page, as long as they seek to different | |
| 146 // target times. Thus the following two variables are maps instead of simple | |
| 147 // variables. The key of the maps is event.args.target, which is a numerical | |
| 148 // value indicating the target location of the seek, in units of seconds. | |
| 149 // For example, with a seek to the 5 second mark, event.args.target === 5. | |
| 150 const seekStartTimes = new Map(); | |
| 151 const seekTimes = new Map(); | |
| 152 for (const event of mainThread.sliceGroup.getDescendantEvents()) { | |
| 153 if (event.title === 'WebMediaPlayerImpl::DoSeek') { | |
| 154 seekStartTimes.set(event.args.target, event.start); | |
| 155 } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') { | |
| 156 const startTime = seekStartTimes.get(event.args.target); | |
| 157 if (startTime !== undefined) { | |
| 158 seekTimes.set(event.args.target, event.start - startTime); | |
| 159 seekStartTimes.delete(event.args.target); | |
| 160 } | |
| 161 } | |
| 162 } | |
| 163 return seekTimes; | |
| 164 } | |
| 165 | |
| 166 function getBufferingTime(mainThread, playStart, timeToVideoPlay, | |
| 167 timeToAudioPlay) { | |
| 168 let playEnd; | |
| 169 let duration; | |
| 170 for (const event of mainThread.sliceGroup.getDescendantEvents()) { | |
| 171 if (event.title === 'WebMediaPlayerImpl::OnEnded') { | |
| 172 // TODO(johnchen@chromium.org): Support multiple audio/video | |
| 173 // elements per page. Currently, we only support a single | |
| 174 // audio or video element, so we can store the end time in | |
| 175 // a simple variable. | |
| 176 if (playEnd !== undefined) { | |
| 177 throw new Error( | |
| 178 'Multiple media ended events not yet supported'); | |
| 179 } | |
| 180 playEnd = event.start; | |
| 181 duration = 1000 * event.args.duration; // seconds to milliseconds | |
| 182 } | |
| 183 } | |
| 184 if (playEnd === undefined) return undefined; | |
| 185 let bufferingTime = playEnd - playStart - duration; | |
| 186 if (timeToVideoPlay !== undefined) { | |
| 187 bufferingTime -= timeToVideoPlay; | |
| 188 } else { | |
| 189 bufferingTime -= timeToAudioPlay; | |
| 190 } | |
| 191 return bufferingTime; | |
| 192 } | |
| 193 | |
| 194 function getDroppedFrameCount(compositorThread) { | |
| 195 let droppedFrameCount = 0; | |
| 196 for (const event of compositorThread.sliceGroup.getDescendantEvents()) { | |
| 197 if (event.title === 'VideoFramesDropped') { | |
| 198 droppedFrameCount += event.args.count; | |
| 199 } | |
| 200 } | |
| 201 return droppedFrameCount; | |
| 202 } | |
| 203 | |
| 109 tr.metrics.MetricRegistry.register(mediaMetric); | 204 tr.metrics.MetricRegistry.register(mediaMetric); |
| 110 | 205 |
| 111 return { | 206 return { |
| 112 mediaMetric, | 207 mediaMetric, |
| 113 }; | 208 }; |
| 114 }); | 209 }); |
| 115 </script> | 210 </script> |
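
For reference, here is a hedged, standalone sketch of the arithmetic the new helpers perform. Every timestamp and the seek target below are hypothetical; the calculations mirror getTimeToVideoPlay(), getBufferingTime(), and getSeekTimes() from the patch above.

```js
// Hypothetical trace timestamps, in milliseconds (not from a real trace).
const doLoadStart = 10;       // WebMediaPlayerImpl::DoLoad on the main thread
const firstVideoRender = 260; // first VideoRendererImpl::Render on compositor
const onEndedStart = 10520;   // WebMediaPlayerImpl::OnEnded on the main thread
const mediaDurationSec = 10;  // event.args.duration on OnEnded, in seconds

// time_to_video_play: first render minus load start (getTimeToVideoPlay).
const timeToVideoPlay = firstVideoRender - doLoadStart; // 250 ms

// buffering_time: actual play time minus expected play time, with startup
// latency excluded (getBufferingTime). 10520 - 10 - 10000 - 250 === 260 ms.
const bufferingTime =
    onEndedStart - doLoadStart - 1000 * mediaDurationSec - timeToVideoPlay;

// seek_time: measured per seek target from WebMediaPlayerImpl::DoSeek to
// WebMediaPlayerImpl::OnPipelineSeeked (getSeekTimes). A seek to the
// 5 second mark has event.args.target === 5 and is reported under the
// histogram name 'seek_time_5'.
console.log(timeToVideoPlay, bufferingTime); // 250 260
```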