Chromium Code Reviews

Unified diff: tracing/tracing/metrics/media_metric.html

Issue 3020433002: Finish migrating media metrics to TBMv2 (Closed)
Patch Set: Fix typo (created 3 years, 3 months ago)
--- a/tracing/tracing/metrics/media_metric.html
+++ b/tracing/tracing/metrics/media_metric.html
 <!DOCTYPE html>
 <!--
 Copyright 2017 The Chromium Authors. All rights reserved.
 Use of this source code is governed by a BSD-style license that can be
 found in the LICENSE file.
 -->
 
 <!--
 media_metrics uses Chrome trace events to calculate metrics about video
 and audio playback. It is meant to be used for pages with a <video> or
 <audio> element. It is used by videostack-eng@google.com team for
 regression testing.
 
 This metric currently supports the following measurement:
 * time_to_video_play calculates how long after a video is requested to
   start playing before the video actually starts. If time_to_video_play
   regresses, then users will click to play videos and then have
   to wait longer before the videos start actually playing.
 * time_to_audio_play is similar to time_to_video_play, but measures the
   time delay before audio starts playing.
-
-More measurements are expected to be added in the near future, such as:
-* buffering_time
-* seek_time
-* dropped_frame_count
+* buffering_time calculates the difference between the actual play time of
+  media vs its expected play time. Ideally the two should be the same.
+  If actual play time is significantly longer than expected play time,
+  it indicates that there were stalls during the play for buffering or
+  some other reasons.
+* dropped_frame_count reports the number of video frames that were dropped.
+  Ideally this should be 0. If a large number of frames are dropped, the
+  video play will not be smooth.
+* seek_time calculates how long after a user requests a seek operation
+  before the seek completes and the media starts playing at the new
+  location.
 
 Please inform crouleau@chromium.org and johnchen@chromium.org about
 changes to this file.
 -->
 
 <link rel="import" href="/tracing/metrics/metric_registry.html">
 <link rel="import" href="/tracing/model/helpers/chrome_model_helper.html">
 <link rel="import" href="/tracing/value/histogram.html">
 
 <script>
 'use strict';
 
 tr.exportTo('tr.metrics', function() {
   function mediaMetric(histograms, model) {
-    let playStart;
-    let timeToAudioPlay;
-    let timeToVideoPlay;
-
     const chromeHelper = model.getOrCreateHelper(
         tr.model.helpers.ChromeModelHelper);
     if (chromeHelper === undefined) return;
 
     for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) {
       // Find the threads we're interested in, and if a needed thread
       // is missing, no need to look further in this process.
       const mainThread = rendererHelper.mainThread;
       if (mainThread === undefined) continue;
 
       const compositorThread = rendererHelper.compositorThread;
       const audioThread =
           rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice');
       if (compositorThread === undefined && audioThread === undefined) continue;
 
-      // Look for the media player DoLoad event on main thread.
-      for (const event of mainThread.getDescendantEvents()) {
-        if (event.title === 'WebMediaPlayerImpl::DoLoad') {
-          // TODO(johnchen@chromium.org): Support multiple audio/video
-          // elements per page. Currently, we only support a single
-          // audio or video element, so we can store the start time in
-          // a simple variable, and exit the loop.
-          if (playStart !== undefined) {
-            throw new Error(
-                'Loading multiple audio/video elements not yet supported');
-          }
-          playStart = event.start;
-          break;
-        }
-      }
+      const playStart = getPlayStart(mainThread);
       if (playStart === undefined) continue;
 
-      // Look for video render event.
-      if (compositorThread !== undefined) {
-        for (const event of compositorThread.getDescendantEvents()) {
-          if (event.title === 'VideoRendererImpl::Render') {
-            timeToVideoPlay = event.start - playStart;
-            break;
-          }
-        }
-      }
-
-      // Look for audio render event.
-      if (audioThread !== undefined) {
-        for (const event of audioThread.getDescendantEvents()) {
-          if (event.title === 'AudioRendererImpl::Render') {
-            timeToAudioPlay = event.start - playStart;
-            break;
-          }
-        }
-      }
-      if (timeToVideoPlay !== undefined) break;
-      if (timeToAudioPlay !== undefined) break;
-    }
-
-    if (timeToVideoPlay !== undefined) {
-      histograms.createHistogram('time_to_video_play',
-          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay);
-    }
-    if (timeToAudioPlay !== undefined) {
-      histograms.createHistogram('time_to_audio_play',
-          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay);
-    }
+      const timeToVideoPlay = compositorThread === undefined ? undefined :
+          getTimeToVideoPlay(compositorThread, playStart, histograms);
+      const timeToAudioPlay = audioThread === undefined ? undefined :
+          getTimeToAudioPlay(audioThread, playStart, histograms);
+
+      if (timeToVideoPlay === undefined && timeToAudioPlay === undefined) {
+        continue;
+      }
+
+      const droppedFrameCount = timeToVideoPlay === undefined ? undefined :
+          getDroppedFrameCount(compositorThread, histograms);
    >> CalebRouleau 2017/09/22 17:44:21: I don't know javascript very well, but I'm wonderi
    >> johnchen 2017/09/23 04:43:02: "const droppedFrameCount = timeToVideoPlay && get.
    >> CalebRouleau 2017/09/25 23:48:10: No, you're right. Good catch. :)
+      const seekTimes = timeToVideoPlay === undefined ? undefined :
+          getSeekTimes(mainThread, histograms);
+      const bufferingTime = seekTimes !== undefined ? undefined :
+          getBufferingTime(mainThread, playStart, timeToVideoPlay,
+              timeToAudioPlay, histograms);
    >> CalebRouleau 2017/09/22 17:44:21: getX functions should be used for getting a value,
    >> johnchen 2017/09/23 04:43:02: Done.
     }
   }
 
+  function getPlayStart(mainThread) {
+    let playStart;
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::DoLoad') {
+        // TODO(johnchen@chromium.org): Support multiple audio/video
+        // elements per page. Currently, we only support a single
+        // audio or video element, so we can store the start time in
+        // a simple variable.
+        if (playStart !== undefined) {
+          throw new Error(
+              'Loading multiple audio/video elements not yet supported');
+        }
+        playStart = event.start;
+      }
+    }
+    return playStart;
+  }
+
+  function getTimeToVideoPlay(compositorThread, playStart, histograms) {
+    for (const event of compositorThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'VideoRendererImpl::Render') {
+        const timeToVideoPlay = event.start - playStart;
+        histograms.createHistogram('time_to_video_play',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter,
+            timeToVideoPlay);
+        return timeToVideoPlay;
+      }
+    }
+    return undefined;
+  }
+
+  function getTimeToAudioPlay(audioThread, playStart, histograms) {
+    for (const event of audioThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'AudioRendererImpl::Render') {
+        const timeToAudioPlay = event.start - playStart;
+        histograms.createHistogram('time_to_audio_play',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter,
+            timeToAudioPlay);
+        return timeToAudioPlay;
+      }
+    }
+    return undefined;
+  }
+
+  function getSeekTimes(mainThread, histograms) {
+    // We support multiple seeks per page, as long as they seek to different
+    // target time. Thus the following two variables are maps instead of simple
+    // variables. The key of the maps is event.args.target, which is a numerical
+    // value indicating the target location of the seek, in unit of seconds.
+    // For example, with a seek to 5 seconds mark, event.args.target === 5.
+    const seekStartTimes = new Map();
+    const seekTimes = new Map();
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::DoSeek') {
+        seekStartTimes.set(event.args.target, event.start);
+      } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') {
+        const startTime = seekStartTimes.get(event.args.target);
+        if (startTime !== undefined) {
+          seekTimes.set(event.args.target, event.start - startTime);
+          seekStartTimes.delete(event.args.target);
+        }
+      }
+    }
+    if (seekTimes.size === 0) return undefined;
+    for (const [key, value] of seekTimes.entries()) {
+      histograms.createHistogram('seek_time_' + key,
+          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, value);
+    }
+    return seekTimes;
+  }
+
+  function getBufferingTime(mainThread, playStart, timeToVideoPlay,
+      timeToAudioPlay, histograms) {
+    let playEnd;
+    let duration;
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::OnEnded') {
+        // TODO(johnchen@chromium.org): Support multiple audio/video
+        // elements per page. Currently, we only support a single
+        // audio or video element, so we can store the end time in
+        // a simple variable.
+        if (playEnd !== undefined) {
+          throw new Error(
+              'Multiple media ended events not yet supported');
+        }
+        playEnd = event.start;
+        duration = 1000 * event.args.duration; // seconds to milliseconds
+      }
+    }
+    if (playEnd === undefined) return undefined;
+    let bufferingTime = playEnd - playStart - duration;
+    if (timeToVideoPlay !== undefined) {
+      bufferingTime -= timeToVideoPlay;
+    } else {
+      bufferingTime -= timeToAudioPlay;
+    }
+    histograms.createHistogram('buffering_time',
+        tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, bufferingTime);
+    return bufferingTime;
+  }
+
+  function getDroppedFrameCount(compositorThread, histograms) {
+    let droppedFrameCount = 0;
+    for (const event of compositorThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'VideoFramesDropped') {
+        droppedFrameCount += event.args.count;
+      }
+    }
+    histograms.createHistogram('dropped_frame_count',
+        tr.b.Unit.byName.count_smallerIsBetter, droppedFrameCount);
+    return droppedFrameCount;
+  }
+
   tr.metrics.MetricRegistry.register(mediaMetric);
 
   return {
     mediaMetric,
   };
 });
 </script>
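
The first review thread above, attached to the droppedFrameCount lines, is truncated in this capture, but it appears to weigh a short-circuit form such as `timeToVideoPlay && getDroppedFrameCount(...)` against the explicit `=== undefined` ternary that the patch keeps. A minimal sketch of the difference, using made-up values and a stub helper rather than anything from the CL:

    // Illustration only: stub value and helper, not taken from the patch.
    const getDroppedFrameCount = () => 3;
    const timeToVideoPlay = 0;  // A falsy but valid duration (0 ms).

    // Short-circuit form: when timeToVideoPlay is 0 or undefined, the helper
    // never runs and droppedFrameCount becomes 0 or undefined.
    const viaAnd = timeToVideoPlay && getDroppedFrameCount();

    // Form kept in the patch: only undefined suppresses the call, so a
    // legitimate 0 ms time_to_video_play still produces a frame count.
    const viaTernary = timeToVideoPlay === undefined ? undefined :
        getDroppedFrameCount();

    console.log(viaAnd);      // 0
    console.log(viaTernary);  // 3

Whatever the truncated comments actually said, the committed side of the diff uses the explicit check.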
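The second thread ("getX functions should be used for getting a value,") is also cut off, so it is unclear what exactly changed in response; the patch as shown still creates histograms inside the helpers. Purely as an illustration of that naming convention, a getter could stay side-effect free and let mediaMetric record the histogram. The helper name getTimeToVideoPlayValue below is mine, and the snippet reuses the CL's own variables (compositorThread, playStart, histograms); it is not the code under review:

    // Illustration of a side-effect-free getter; not the reviewed code.
    function getTimeToVideoPlayValue(compositorThread, playStart) {
      for (const event of compositorThread.sliceGroup.getDescendantEvents()) {
        if (event.title === 'VideoRendererImpl::Render') {
          return event.start - playStart;  // Compute only; no histogram here.
        }
      }
      return undefined;
    }

    // The caller then owns the side effect of recording the value.
    const timeToVideoPlay = getTimeToVideoPlayValue(compositorThread, playStart);
    if (timeToVideoPlay !== undefined) {
      histograms.createHistogram('time_to_video_play',
          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay);
    }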
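To make the arithmetic in getBufferingTime and getSeekTimes concrete, here is a small worked example with invented timestamps. Trace timestamps and the derived metrics are in milliseconds; event.args.duration on the OnEnded event is in seconds, which is why the code multiplies by 1000:

    // Invented timestamps, for illustration only.
    const playStart = 1000;         // WebMediaPlayerImpl::DoLoad
    const videoRenderStart = 1300;  // first VideoRendererImpl::Render
    const playEnd = 12000;          // WebMediaPlayerImpl::OnEnded
    const mediaDurationSec = 10;    // event.args.duration, in seconds

    const timeToVideoPlay = videoRenderStart - playStart;  // 300 ms
    const bufferingTime = playEnd - playStart -
        1000 * mediaDurationSec - timeToVideoPlay;          // 700 ms

    // Seek pairing: a DoSeek with args.target === 5 at t = 20000 followed by
    // an OnPipelineSeeked with the same target at t = 20350 would yield a
    // 'seek_time_5' histogram sample of 350 ms.

Note that, as wired up in mediaMetric, buffering_time is only reported when no seeks were detected: a non-undefined seekTimes map suppresses the getBufferingTime call.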

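Finally, a rough sketch of how the registered metric might be exercised. The tr.c.TestUtils.newModel, newSliceEx, thread-naming, and tr.v.HistogramSet usage below are assumptions based on how other catapult metric tests are written, not something taken from this CL; the authoritative coverage is in tracing/tracing/metrics/media_metric_test.html, the other file in this patch set.

    // Sketch only; the test-helper calls are assumptions, not part of this CL.
    const histograms = new tr.v.HistogramSet();
    const model = tr.c.TestUtils.newModel(model => {
      const process = model.getOrCreateProcess(1);

      const mainThread = process.getOrCreateThread(2);
      mainThread.name = 'CrRendererMain';
      mainThread.sliceGroup.pushSlice(tr.c.TestUtils.newSliceEx(
          {title: 'WebMediaPlayerImpl::DoLoad', start: 100, duration: 2}));

      const compositorThread = process.getOrCreateThread(3);
      compositorThread.name = 'Compositor';
      compositorThread.sliceGroup.pushSlice(tr.c.TestUtils.newSliceEx(
          {title: 'VideoRendererImpl::Render', start: 300, duration: 1}));
    });

    tr.metrics.mediaMetric(histograms, model);

    // If the Chrome model helpers recognize the process as a renderer, this
    // should produce a 'time_to_video_play' sample of 200 ms (300 - 100) and
    // a 'dropped_frame_count' of 0.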