OLD | NEW |
---|---|
1 <!DOCTYPE html> | 1 <!DOCTYPE html> |
2 <!-- | 2 <!-- |
3 Copyright 2017 The Chromium Authors. All rights reserved. | 3 Copyright 2017 The Chromium Authors. All rights reserved. |
4 Use of this source code is governed by a BSD-style license that can be | 4 Use of this source code is governed by a BSD-style license that can be |
5 found in the LICENSE file. | 5 found in the LICENSE file. |
6 --> | 6 --> |
7 | 7 |
8 <!-- | 8 <!-- |
9 media_metrics uses Chrome trace events to calculate metrics about video | 9 media_metrics uses Chrome trace events to calculate metrics about video |
10 and audio playback. It is meant to be used for pages with a <video> or | 10 and audio playback. It is meant to be used for pages with a <video> or |
11 <audio> element. It is used by the videostack-eng@google.com team for | 11 <audio> element. It is used by the videostack-eng@google.com team for |
12 regression testing. | 12 regression testing. |
13 | 13 |
14 This metric currently supports the following measurements: | 14 This metric currently supports the following measurements: |
15 * time_to_video_play calculates how long after a video is requested to | 15 * time_to_video_play calculates how long after a video is requested to |
16 start playing before the video actually starts. If time_to_video_play | 16 start playing before the video actually starts. If time_to_video_play |
17 regresses, then users will click to play videos and then have | 17 regresses, then users will click to play videos and then have |
18 to wait longer before the videos start actually playing. | 18 to wait longer before the videos start actually playing. |
19 * time_to_audio_play is similar to time_to_video_play, but measures the | 19 * time_to_audio_play is similar to time_to_video_play, but measures the |
20 time delay before audio starts playing. | 20 time delay before audio starts playing. |
21 | 21 * buffering_time calculates the difference between the actual play time of |
22 More measurements are expected to be added in the near future, such as: | 22 media vs its expected play time. Ideally the two should be the same. |
23 * buffering_time | 23 If actual play time is significantly longer than expected play time, |
24 * seek_time | 24 it indicates that there were stalls during playback for buffering or |
25 * dropped_frame_count | 25 other reasons. |
26 * dropped_frame_count reports the number of video frames that were dropped. | |
27 Ideally this should be 0. If a large number of frames are dropped, the | |
28 video playback will not be smooth. | |
29 * seek_time calculates how long after a user requests a seek operation | |
30 before the seek completes and the media starts playing at the new | |
31 location. | |
26 | 32 |
27 Please inform crouleau@chromium.org and johnchen@chromium.org about | 33 Please inform crouleau@chromium.org and johnchen@chromium.org about |
28 changes to this file. | 34 changes to this file. |
29 --> | 35 --> |
30 | 36 |
31 <link rel="import" href="/tracing/metrics/metric_registry.html"> | 37 <link rel="import" href="/tracing/metrics/metric_registry.html"> |
32 <link rel="import" href="/tracing/model/helpers/chrome_model_helper.html"> | 38 <link rel="import" href="/tracing/model/helpers/chrome_model_helper.html"> |
33 <link rel="import" href="/tracing/value/histogram.html"> | 39 <link rel="import" href="/tracing/value/histogram.html"> |
34 | 40 |
35 <script> | 41 <script> |
36 'use strict'; | 42 'use strict'; |
37 | 43 |
38 tr.exportTo('tr.metrics', function() { | 44 tr.exportTo('tr.metrics', function() { |
39 function mediaMetric(histograms, model) { | 45 function mediaMetric(histograms, model) { |
40 let playStart; | 46 let playStart; |
41 let timeToAudioPlay; | 47 let timeToAudioPlay; |
42 let timeToVideoPlay; | 48 let timeToVideoPlay; |
49 let playEnd; | |
50 let duration; | |
51 let framesDropped = 0; | |
52 const seekStartTimes = new Map(); | |
53 const seekTimes = new Map(); | |
43 | 54 |
44 const chromeHelper = model.getOrCreateHelper( | 55 const chromeHelper = model.getOrCreateHelper( |
45 tr.model.helpers.ChromeModelHelper); | 56 tr.model.helpers.ChromeModelHelper); |
46 if (chromeHelper === undefined) return; | 57 if (chromeHelper === undefined) return; |
47 | 58 |
48 for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) { | 59 for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) { |
49 // Find the threads we're interested in, and if a needed thread | 60 // Find the threads we're interested in, and if a needed thread |
50 // is missing, no need to look further in this process. | 61 // is missing, no need to look further in this process. |
51 const mainThread = rendererHelper.mainThread; | 62 const mainThread = rendererHelper.mainThread; |
52 if (mainThread === undefined) continue; | 63 if (mainThread === undefined) continue; |
53 | 64 |
54 const compositorThread = rendererHelper.compositorThread; | 65 const compositorThread = rendererHelper.compositorThread; |
55 const audioThread = | 66 const audioThread = |
56 rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice'); | 67 rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice'); |
57 if (compositorThread === undefined && audioThread === undefined) continue; | 68 if (compositorThread === undefined && audioThread === undefined) continue; |
58 | 69 |
59 // Look for the media player DoLoad event on main thread. | 70 processMainThread(mainThread); |
60 for (const event of mainThread.getDescendantEvents()) { | 71 if (playStart === undefined) continue; |
72 | |
73 processCompositorThread(compositorThread); | |
74 processAudioThread(audioThread); | |
75 | |
76 if (timeToVideoPlay !== undefined) break; | |
77 if (timeToAudioPlay !== undefined) break; | |
78 } | |
79 | |
80 calculateMetrics(); | |
81 | |
82 // Look for events on main thread. | |
83 function processMainThread(thread) { | |
84 for (const event of thread.getDescendantEvents()) { | |
61 if (event.title === 'WebMediaPlayerImpl::DoLoad') { | 85 if (event.title === 'WebMediaPlayerImpl::DoLoad') { |
62 // TODO(johnchen@chromium.org): Support multiple audio/video | 86 // TODO(johnchen@chromium.org): Support multiple audio/video |
63 // elements per page. Currently, we only support a single | 87 // elements per page. Currently, we only support a single |
64 // audio or video element, so we can store the start time in | 88 // audio or video element, so we can store the start time in |
65 // a simple variable, and exit the loop. | 89 // a simple variable. |
66 if (playStart !== undefined) { | 90 if (playStart !== undefined) { |
67 throw new Error( | 91 throw new Error( |
68 'Loading multiple audio/video elements not yet supported'); | 92 'Loading multiple audio/video elements not yet supported'); |
69 } | 93 } |
70 playStart = event.start; | 94 playStart = event.start; |
95 } else if (event.title === 'WebMediaPlayerImpl::DoSeek') { | |
96 seekStartTimes.set(event.args.target, event.start); | |
97 } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') { | |
98 const startTime = seekStartTimes.get(event.args.target); | |
99 if (startTime !== undefined) { | |
100 seekTimes.set(event.args.target, event.start - startTime); | |
101 seekStartTimes.delete(event.args.target); | |
102 } | |
103 } else if (event.title === 'WebMediaPlayerImpl::OnEnded') { | |
104 if (playEnd === undefined) { | |
105 playEnd = event.start; | |
106 duration = 1000 * event.args.duration; // seconds to milliseconds | |
107 } | |
107 } | |
108 } | |
109 } | |
110 } | |
111 | |
112 // Look for events on compositor thread. | |
113 function processCompositorThread(thread) { | |
114 if (thread === undefined) { | |
115 return; | |
116 } | |
117 for (const event of thread.getDescendantEvents()) { | |
118 if (event.title === 'VideoRendererImpl::Render') { | |
119 if (timeToVideoPlay === undefined) { | |
120 timeToVideoPlay = event.start - playStart; | |
121 } | |
122 } else if (event.title === 'FramesDropped') { | |
123 framesDropped += event.args.count; | |
124 } | |
125 } | |
126 } | |
127 | |
128 // Look for audio render event on audio thread. | |
129 function processAudioThread(thread) { | |
130 if (thread === undefined) { | |
131 return; | |
132 } | |
133 for (const event of thread.getDescendantEvents()) { | |
134 if (event.title === 'AudioRendererImpl::Render') { | |
135 timeToAudioPlay = event.start - playStart; | |
71 break; | 136 break; |
72 } | 137 } |
73 } | 138 } |
74 if (playStart === undefined) continue; | 139 } |
75 | 140 |
76 // Look for video render event. | 141 // Calculate all metrics after events have been collected. |
77 if (compositorThread !== undefined) { | 142 function calculateMetrics() { |
78 for (const event of compositorThread.getDescendantEvents()) { | 143 if (timeToVideoPlay !== undefined) { |
79 if (event.title === 'VideoRendererImpl::Render') { | 144 histograms.createHistogram('time_to_video_play', |
80 timeToVideoPlay = event.start - playStart; | 145 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay); |
81 break; | 146 // dropped_frame_count is meaningful only if we have video. |
82 } | 147 histograms.createHistogram('dropped_frame_count', |
148 tr.b.Unit.byName.count_smallerIsBetter, framesDropped); | |
149 } | |
150 if (timeToAudioPlay !== undefined) { | |
151 histograms.createHistogram('time_to_audio_play', | |
152 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay); | |
153 } | |
154 if (playStart !== undefined && playEnd !== undefined && | |
155 duration !== undefined && seekTimes.size === 0 && | |
156 (timeToVideoPlay !== undefined || timeToAudioPlay !== undefined)) { | |
157 // We can calculate buffering time only if the media is played from | |
158 // beginning to end. Thus if any seek operations had occurred, we skip | |
159 // calculating this metric. | |
160 let bufferingTime = playEnd - playStart - duration; | |
161 if (timeToVideoPlay !== undefined) { | |
162 bufferingTime -= timeToVideoPlay; | |
163 } else { | |
164 bufferingTime -= timeToAudioPlay; | |
165 } | |
166 histograms.createHistogram('buffering_time', | |
167 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, bufferingTime); | |
168 } | |
169 if (seekTimes.size > 0) { | |
170 for (const [key, value] of seekTimes.entries()) { | |
171 histograms.createHistogram('seek_time_' + key, | |
172 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, value); | |
83 } | 173 } |
84 } | 174 } |
85 | |
86 // Look for audio render event. | |
87 if (audioThread !== undefined) { | |
88 for (const event of audioThread.getDescendantEvents()) { | |
89 if (event.title === 'AudioRendererImpl::Render') { | |
90 timeToAudioPlay = event.start - playStart; | |
91 break; | |
92 } | |
93 } | |
94 } | |
95 if (timeToVideoPlay !== undefined) break; | |
96 if (timeToAudioPlay !== undefined) break; | |
97 } | |
98 | |
99 if (timeToVideoPlay !== undefined) { | |
100 histograms.createHistogram('time_to_video_play', | |
101 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay); | |
102 } | |
103 if (timeToAudioPlay !== undefined) { | |
104 histograms.createHistogram('time_to_audio_play', | |
105 tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay); | |
106 } | 175 } |
107 } | 176 } |
108 | 177 |
109 tr.metrics.MetricRegistry.register(mediaMetric); | 178 tr.metrics.MetricRegistry.register(mediaMetric); |
110 | 179 |
111 return { | 180 return { |
112 mediaMetric, | 181 mediaMetric, |
113 }; | 182 }; |
114 }); | 183 }); |
115 </script> | 184 </script> |
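
For readers following the metric logic, here is a small worked example of the arithmetic the new calculateMetrics() and seek-pairing code performs. All timestamps below are hypothetical and chosen only for illustration; the event titles match the ones the metric listens for.

```js
// Hypothetical trace timestamps, in milliseconds, for one 10-second video.
const playStart = 2000;         // WebMediaPlayerImpl::DoLoad
const firstVideoRender = 2150;  // first VideoRendererImpl::Render
const playEnd = 12650;          // WebMediaPlayerImpl::OnEnded
const duration = 10 * 1000;     // event.args.duration (seconds) converted to ms

// time_to_video_play: startup latency before the first frame is rendered.
const timeToVideoPlay = firstVideoRender - playStart;                   // 150

// buffering_time: wall-clock play time minus startup latency and the media's
// own duration; whatever remains was spent stalled (e.g. rebuffering).
const bufferingTime = playEnd - playStart - duration - timeToVideoPlay; // 500

// seek_time_<target>: DoSeek and OnPipelineSeeked events are paired by
// event.args.target, so each seek destination gets its own histogram.
const doSeekStart = 20000;          // WebMediaPlayerImpl::DoSeek, target 5
const pipelineSeekedStart = 20420;  // WebMediaPlayerImpl::OnPipelineSeeked, target 5
const seekTime5 = pipelineSeekedStart - doSeekStart;                    // 420
```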
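
And a minimal sketch of how the registered metric could be exercised from a catapult-style unit test. The process/thread layout, IDs, and timestamps here are assumptions made for illustration (the renderer is recognized by its CrRendererMain thread); the real test fixture may differ.

```js
// Sketch only (assumed setup): build a model with one renderer main thread
// and a compositor thread, feed it the two events the metric needs, then
// run the metric over it.
const histograms = new tr.v.HistogramSet();
const model = tr.c.TestUtils.newModel(function(model) {
  const rendererProcess = model.getOrCreateProcess(1);
  const mainThread = rendererProcess.getOrCreateThread(2);
  mainThread.name = 'CrRendererMain';  // assumed: lets ChromeModelHelper treat this as a renderer
  mainThread.sliceGroup.pushSlice(tr.c.TestUtils.newSliceEx({
    type: tr.model.ThreadSlice,
    title: 'WebMediaPlayerImpl::DoLoad', start: 2000, duration: 10,
  }));
  const compositorThread = rendererProcess.getOrCreateThread(3);
  compositorThread.name = 'Compositor';
  compositorThread.sliceGroup.pushSlice(tr.c.TestUtils.newSliceEx({
    type: tr.model.ThreadSlice,
    title: 'VideoRendererImpl::Render', start: 2150, duration: 5,
  }));
});

tr.metrics.mediaMetric(histograms, model);

// With these assumed timestamps, time_to_video_play is 2150 - 2000 = 150 ms,
// and dropped_frame_count is reported as 0.
const hist = histograms.getHistogramNamed('time_to_video_play');
```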