Index: tracing/tracing/metrics/media_metric.html
diff --git a/tracing/tracing/metrics/media_metric.html b/tracing/tracing/metrics/media_metric.html
index 3e86e7d26a9025fd0ad49bd7c485c5bdc9a1e564..d7fca0d34f35cc44275a1b36c42d56356d7d1fb8 100644
--- a/tracing/tracing/metrics/media_metric.html
+++ b/tracing/tracing/metrics/media_metric.html
@@ -18,11 +18,17 @@ This metric currently supports the following measurement:
   to wait longer before the videos start actually playing.
 * time_to_audio_play is similar to time_to_video_play, but measures the
   time delay before audio starts playing.
-
-More measurements are expected to be added in the near future, such as:
-* buffering_time
-* seek_time
-* dropped_frame_count
+* buffering_time calculates the difference between the actual play time of
+  the media and its expected play time. Ideally the two should be the same.
+  If the actual play time is significantly longer than the expected play
+  time, it indicates that playback stalled, for buffering or other
+  reasons.
+* dropped_frame_count reports the number of video frames that were dropped.
+  Ideally this should be 0. If a large number of frames are dropped,
+  video playback will not be smooth.
+* seek_time measures how long it takes, after a user requests a seek
+  operation, for the seek to complete and the media to start playing at
+  the new location.
 Please inform crouleau@chromium.org and johnchen@chromium.org about
 changes to this file.
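To make the buffering_time definition above concrete: the computation the patch performs below reduces to simple timestamp arithmetic. A minimal sketch in plain JavaScript, reusing the patch's variable names but with hypothetical numbers for a 10-second clip:

  // Hypothetical trace timestamps in ms; not taken from a real trace.
  const playStart = 2000;       // WebMediaPlayerImpl::DoLoad
  const playEnd = 13500;        // WebMediaPlayerImpl::OnEnded
  const duration = 1000 * 10;   // media duration, seconds -> ms
  const timeToVideoPlay = 250;  // first VideoRendererImpl::Render - playStart

  // Actual wall-clock play time, minus the expected duration, minus the
  // startup latency: whatever remains was spent stalled.
  const bufferingTime = playEnd - playStart - duration - timeToVideoPlay;
  // => 13500 - 2000 - 10000 - 250 = 1250 ms of stalls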
@@ -40,6 +46,11 @@ tr.exportTo('tr.metrics', function() {
     let playStart;
     let timeToAudioPlay;
     let timeToVideoPlay;
+    let playEnd;
+    let duration;
+    let framesDropped = 0;
+    const seekStartTimes = new Map();
+    const seekTimes = new Map();
     const chromeHelper = model.getOrCreateHelper(
         tr.model.helpers.ChromeModelHelper);
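The two Maps added above drive the seek_time measurement in the next hunk: DoSeek records a start timestamp keyed by the seek target, and OnPipelineSeeked looks that key up to compute the elapsed time. A standalone sketch of that pairing pattern, with fabricated event objects standing in for real trace events:

  const seekStartTimes = new Map();
  const seekTimes = new Map();

  function onDoSeek(event) {
    // Key by seek target so seeks to different targets don't clobber
    // each other.
    seekStartTimes.set(event.args.target, event.start);
  }

  function onPipelineSeeked(event) {
    const startTime = seekStartTimes.get(event.args.target);
    if (startTime !== undefined) {
      seekTimes.set(event.args.target, event.start - startTime);
      seekStartTimes.delete(event.args.target);
    }
  }

  onDoSeek({start: 5000, args: {target: 0.5}});
  onPipelineSeeked({start: 5320, args: {target: 0.5}});
  // seekTimes now maps 0.5 -> 320 (ms), later reported by
  // calculateMetrics() as a seek_time_0.5 histogram.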
@@ -56,53 +67,111 @@ tr.exportTo('tr.metrics', function() {
           rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice');
       if (compositorThread === undefined && audioThread === undefined) continue;
-      // Look for the media player DoLoad event on main thread.
-      for (const event of mainThread.getDescendantEvents()) {
+      processMainThread(mainThread);
+      if (playStart === undefined) continue;
+
+      processCompositorThread(compositorThread);
+      processAudioThread(audioThread);
+
+      if (timeToVideoPlay !== undefined) break;
+      if (timeToAudioPlay !== undefined) break;
+    }
+
+    calculateMetrics();
+
+    // Look for events on main thread.
+    function processMainThread(thread) {
CalebRouleau
2017/09/20 22:53:20
Even though the concept of what you're doing here
+      for (const event of thread.getDescendantEvents()) {
         if (event.title === 'WebMediaPlayerImpl::DoLoad') {
           // TODO(johnchen@chromium.org): Support multiple audio/video
           // elements per page. Currently, we only support a single
           // audio or video element, so we can store the start time in
-          // a simple variable, and exit the loop.
+          // a simple variable.
           if (playStart !== undefined) {
             throw new Error(
                 'Loading multiple audio/video elements not yet supported');
           }
           playStart = event.start;
-          break;
+        } else if (event.title === 'WebMediaPlayerImpl::DoSeek') {
+          seekStartTimes.set(event.args.target, event.start);
+        } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') {
+          const startTime = seekStartTimes.get(event.args.target);
+          if (startTime !== undefined) {
+            seekTimes.set(event.args.target, event.start - startTime);
+            seekStartTimes.delete(event.args.target);
+          }
+        } else if (event.title === 'WebMediaPlayerImpl::OnEnded') {
+          if (playEnd === undefined) {
+            playEnd = event.start;
+            duration = 1000 * event.args.duration; // seconds to milliseconds
+          }
CalebRouleau
2017/09/20 22:53:20
We throw an error for multiple playStart events. W
         }
       }
-      if (playStart === undefined) continue;
+    }
-      // Look for video render event.
-      if (compositorThread !== undefined) {
-        for (const event of compositorThread.getDescendantEvents()) {
-          if (event.title === 'VideoRendererImpl::Render') {
+    // Look for events on compositor thread.
+    function processCompositorThread(thread) {
+      if (thread === undefined) {
+        return;
+      }
+      for (const event of thread.getDescendantEvents()) {
+        if (event.title === 'VideoRendererImpl::Render') {
+          if (timeToVideoPlay === undefined) {
             timeToVideoPlay = event.start - playStart;
-            break;
          }
+        } else if (event.title === 'FramesDropped') {
+          framesDropped += event.args.count;
        }
      }
+    }
-      // Look for audio render event.
-      if (audioThread !== undefined) {
-        for (const event of audioThread.getDescendantEvents()) {
-          if (event.title === 'AudioRendererImpl::Render') {
-            timeToAudioPlay = event.start - playStart;
-            break;
-          }
+    // Look for audio render event on audio thread.
+    function processAudioThread(thread) {
+      if (thread === undefined) {
+        return;
+      }
+      for (const event of thread.getDescendantEvents()) {
+        if (event.title === 'AudioRendererImpl::Render') {
+          timeToAudioPlay = event.start - playStart;
+          break;
        }
      }
-      if (timeToVideoPlay !== undefined) break;
-      if (timeToAudioPlay !== undefined) break;
    }
-    if (timeToVideoPlay !== undefined) {
-      histograms.createHistogram('time_to_video_play',
-          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay);
-    }
-    if (timeToAudioPlay !== undefined) {
-      histograms.createHistogram('time_to_audio_play',
-          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay);
+    // Calculate all metrics after events have been collected.
+    function calculateMetrics() {
+      if (timeToVideoPlay !== undefined) {
+        histograms.createHistogram('time_to_video_play',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay);
+        // dropped_frame_count is meaningful only if we have video.
+        histograms.createHistogram('dropped_frame_count',
+            tr.b.Unit.byName.count_smallerIsBetter, framesDropped);
+      }
+      if (timeToAudioPlay !== undefined) {
+        histograms.createHistogram('time_to_audio_play',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay);
+      }
+      if (playStart !== undefined && playEnd !== undefined &&
+          duration !== undefined && seekTimes.size === 0 &&
+          (timeToVideoPlay !== undefined || timeToAudioPlay !== undefined)) {
+        // We can calculate buffering time only if the media was played from
+        // beginning to end. Thus, if any seek occurred, we skip this
+        // metric.
+        let bufferingTime = playEnd - playStart - duration;
+        if (timeToVideoPlay !== undefined) {
+          bufferingTime -= timeToVideoPlay;
+        } else {
+          bufferingTime -= timeToAudioPlay;
+        }
+        histograms.createHistogram('buffering_time',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, bufferingTime);
+      }
+      if (seekTimes.size > 0) {
+        for (const [key, value] of seekTimes.entries()) {
+          histograms.createHistogram('seek_time_' + key,
+              tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, value);
+        }
+      }
    }
  }
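One subtlety in calculateMetrics() worth spelling out: buffering_time is emitted only when the clip ran start to finish (no seeks, and a recorded OnEnded with its duration) and at least one of the two startup metrics exists to subtract. The same guard, pulled out as a standalone predicate purely for illustration (not part of the CL):

  // Mirrors the buffering_time condition in calculateMetrics() above.
  function canReportBufferingTime(
      playStart, playEnd, duration, seekTimes,
      timeToVideoPlay, timeToAudioPlay) {
    return playStart !== undefined &&
        playEnd !== undefined &&
        duration !== undefined &&
        seekTimes.size === 0 &&  // any seek invalidates the comparison
        (timeToVideoPlay !== undefined || timeToAudioPlay !== undefined);
  }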