Chromium Code Reviews

Index: tracing/tracing/metrics/media_metric.html
diff --git a/tracing/tracing/metrics/media_metric.html b/tracing/tracing/metrics/media_metric.html
index 3e86e7d26a9025fd0ad49bd7c485c5bdc9a1e564..8e761e4ba2eda0e5c17cff82568a865a57130113 100644
--- a/tracing/tracing/metrics/media_metric.html
+++ b/tracing/tracing/metrics/media_metric.html
@@ -18,11 +18,17 @@ This metric currently supports the following measurement:
   to wait longer before the videos start actually playing.
 * time_to_audio_play is similar to time_to_video_play, but measures the
   time delay before audio starts playing.
-
-More measurements are expected to be added in the near future, such as:
-* buffering_time
-* seek_time
-* dropped_frame_count
+* buffering_time calculates the difference between the actual play time
+  of the media and its expected play time. Ideally the two should be the
+  same. If the actual play time is significantly longer than the expected
+  play time, it indicates that playback stalled, due to buffering or
+  other reasons.
+* dropped_frame_count reports the number of video frames that were
+  dropped. Ideally this should be 0. If a large number of frames are
+  dropped, video playback will not be smooth.
+* seek_time measures how long it takes, after a user requests a seek
+  operation, for the seek to complete and the media to start playing at
+  the new location.
 Please inform crouleau@chromium.org and johnchen@chromium.org about
 changes to this file.
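[Editor's sketch] A quick arithmetic illustration of the buffering_time definition in the hunk above, using made-up numbers (none of these values come from the patch):

// Invented trace timestamps, in milliseconds, for a 10-second clip.
const playStart = 2000;        // WebMediaPlayerImpl::DoLoad fires
const firstFrame = 2500;       // VideoRendererImpl::Render first fires
const playEnd = 13100;         // WebMediaPlayerImpl::OnEnded fires
const duration = 10 * 1000;    // media duration, seconds -> ms

const timeToVideoPlay = firstFrame - playStart;  // 500 ms
// Actual play time minus expected play time: whatever is left over was
// spent stalled (buffering or otherwise).
const bufferingTime =
    playEnd - playStart - duration - timeToVideoPlay;  // 600 ms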
@@ -37,10 +43,6 @@ changes to this file.
 tr.exportTo('tr.metrics', function() {
   function mediaMetric(histograms, model) {
-    let playStart;
-    let timeToAudioPlay;
-    let timeToVideoPlay;
-
     const chromeHelper = model.getOrCreateHelper(
         tr.model.helpers.ChromeModelHelper);
     if (chromeHelper === undefined) return;
@@ -56,54 +58,139 @@ tr.exportTo('tr.metrics', function() {
         rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice');
     if (compositorThread === undefined && audioThread === undefined) continue;
-    // Look for the media player DoLoad event on main thread.
-    for (const event of mainThread.getDescendantEvents()) {
-      if (event.title === 'WebMediaPlayerImpl::DoLoad') {
-        // TODO(johnchen@chromium.org): Support multiple audio/video
-        // elements per page. Currently, we only support a single
-        // audio or video element, so we can store the start time in
-        // a simple variable, and exit the loop.
-        if (playStart !== undefined) {
-          throw new Error(
-              'Loading multiple audio/video elements not yet supported');
-        }
-        playStart = event.start;
-        break;
-      }
-    }
+    const playStart = getPlayStart(mainThread);
     if (playStart === undefined) continue;
-    // Look for video render event.
-    if (compositorThread !== undefined) {
-      for (const event of compositorThread.getDescendantEvents()) {
-        if (event.title === 'VideoRendererImpl::Render') {
-          timeToVideoPlay = event.start - playStart;
-          break;
-        }
-      }
+    const timeToVideoPlay = compositorThread === undefined ? undefined :
+        getTimeToVideoPlay(compositorThread, playStart, histograms);
+    const timeToAudioPlay = audioThread === undefined ? undefined :
+        getTimeToAudioPlay(audioThread, playStart, histograms);
+
+    if (timeToVideoPlay === undefined && timeToAudioPlay === undefined) {
+      continue;
+    }
+
+    const droppedFrameCount = timeToVideoPlay === undefined ? undefined :
CalebRouleau (2017/09/22 17:44:21):
I don't know javascript very well, but I'm wondering…

johnchen (2017/09/23 04:43:02):
"const droppedFrameCount = timeToVideoPlay && getDroppedFrameCount(…)" …

CalebRouleau (2017/09/25 23:48:10):
No, you're right. Good catch. :)
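[Editor's sketch] Both comments above are cut off. A plausible reading, given Caleb's reply, is that the shorter `&&` form misbehaves when timeToVideoPlay is 0, because 0 is falsy in JavaScript. A minimal illustration of that pitfall (the literal 42 stands in for the getter's return value and is made up):

// Hypothetical: video rendered in the same millisecond as DoLoad.
const timeToVideoPlay = 0;

// Ternary form from the patch: only undefined skips the computation.
const viaTernary =
    timeToVideoPlay === undefined ? undefined : 42;  // -> 42

// Shorter && form: short-circuits on the falsy 0, so the dropped-frame
// count would silently come out as 0 instead of 42.
const viaAnd = timeToVideoPlay && 42;  // -> 0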
+        getDroppedFrameCount(compositorThread, histograms);
+    const seekTimes = timeToVideoPlay === undefined ? undefined :
+        getSeekTimes(mainThread, histograms);
+    const bufferingTime = seekTimes !== undefined ? undefined :
+        getBufferingTime(mainThread, playStart, timeToVideoPlay,
CalebRouleau (2017/09/22 17:44:21):
getX functions should be used for getting a value, …

johnchen (2017/09/23 04:43:02):
Done.
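[Editor's sketch] Caleb's comment is truncated; presumably the point is that the getX helpers below both compute a value and write a histogram as a side effect. A guess at the split being asked for (the two-step shape is an assumption; the names mirror the patch and assume the same surrounding scope):

// Pure getter: returns the measurement, records nothing.
function getTimeToVideoPlay(compositorThread, playStart) {
  for (const event of compositorThread.sliceGroup.getDescendantEvents()) {
    if (event.title === 'VideoRendererImpl::Render') {
      return event.start - playStart;
    }
  }
  return undefined;
}

// The caller owns the histogram side effect.
const timeToVideoPlay = getTimeToVideoPlay(compositorThread, playStart);
if (timeToVideoPlay !== undefined) {
  histograms.createHistogram('time_to_video_play',
      tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay);
}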
+        timeToAudioPlay, histograms);
+    }
+  }
+
+  function getPlayStart(mainThread) {
+    let playStart;
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::DoLoad') {
+        // TODO(johnchen@chromium.org): Support multiple audio/video
+        // elements per page. Currently, we only support a single
+        // audio or video element, so we can store the start time in
+        // a simple variable.
+        if (playStart !== undefined) {
+          throw new Error(
+              'Loading multiple audio/video elements not yet supported');
         }
+        playStart = event.start;
+      }
+    }
+    return playStart;
+  }
+
+  function getTimeToVideoPlay(compositorThread, playStart, histograms) {
+    for (const event of compositorThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'VideoRendererImpl::Render') {
+        const timeToVideoPlay = event.start - playStart;
+        histograms.createHistogram('time_to_video_play',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter,
+            timeToVideoPlay);
+        return timeToVideoPlay;
+      }
+    }
+    return undefined;
+  }
+
+  function getTimeToAudioPlay(audioThread, playStart, histograms) {
+    for (const event of audioThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'AudioRendererImpl::Render') {
+        const timeToAudioPlay = event.start - playStart;
+        histograms.createHistogram('time_to_audio_play',
+            tr.b.Unit.byName.timeDurationInMs_smallerIsBetter,
+            timeToAudioPlay);
+        return timeToAudioPlay;
       }
+    }
+    return undefined;
+  }
-    // Look for audio render event.
-    if (audioThread !== undefined) {
-      for (const event of audioThread.getDescendantEvents()) {
-        if (event.title === 'AudioRendererImpl::Render') {
-          timeToAudioPlay = event.start - playStart;
-          break;
-        }
+  function getSeekTimes(mainThread, histograms) {
+    // We support multiple seeks per page, as long as they seek to
+    // different target times. Thus the following two variables are maps
+    // rather than simple variables. The key of each map is
+    // event.args.target, a numerical value giving the target location of
+    // the seek, in units of seconds. For example, for a seek to the
+    // 5-second mark, event.args.target === 5.
+    const seekStartTimes = new Map();
+    const seekTimes = new Map();
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::DoSeek') {
+        seekStartTimes.set(event.args.target, event.start);
+      } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') {
+        const startTime = seekStartTimes.get(event.args.target);
+        if (startTime !== undefined) {
+          seekTimes.set(event.args.target, event.start - startTime);
+          seekStartTimes.delete(event.args.target);
         }
       }
-    if (timeToVideoPlay !== undefined) break;
-    if (timeToAudioPlay !== undefined) break;
     }
+    if (seekTimes.size === 0) return undefined;
+    for (const [key, value] of seekTimes.entries()) {
+      histograms.createHistogram('seek_time_' + key,
+          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, value);
+    }
+    return seekTimes;
+  }
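[Editor's sketch] To make the map bookkeeping above concrete, a small self-contained example with invented timestamps, mirroring the pairing logic in getSeekTimes:

// Invented events: a seek to the 5-second mark issued at t=1000 ms that
// completes at t=1250 ms.
const events = [
  {title: 'WebMediaPlayerImpl::DoSeek', start: 1000, args: {target: 5}},
  {title: 'WebMediaPlayerImpl::OnPipelineSeeked', start: 1250,
   args: {target: 5}},
];
const seekStartTimes = new Map();
const seekTimes = new Map();
for (const event of events) {
  if (event.title === 'WebMediaPlayerImpl::DoSeek') {
    seekStartTimes.set(event.args.target, event.start);
  } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') {
    const startTime = seekStartTimes.get(event.args.target);
    if (startTime !== undefined) {
      seekTimes.set(event.args.target, event.start - startTime);
    }
  }
}
console.log(seekTimes);  // Map { 5 => 250 }, i.e. seek_time_5 = 250 ms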
+  function getBufferingTime(mainThread, playStart, timeToVideoPlay,
+      timeToAudioPlay, histograms) {
+    let playEnd;
+    let duration;
+    for (const event of mainThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'WebMediaPlayerImpl::OnEnded') {
+        // TODO(johnchen@chromium.org): Support multiple audio/video
+        // elements per page. Currently, we only support a single
+        // audio or video element, so we can store the end time in
+        // a simple variable.
+        if (playEnd !== undefined) {
+          throw new Error(
+              'Multiple media ended events not yet supported');
+        }
+        playEnd = event.start;
+        duration = 1000 * event.args.duration;  // seconds to milliseconds
+      }
+    }
+    if (playEnd === undefined) return undefined;
+    let bufferingTime = playEnd - playStart - duration;
     if (timeToVideoPlay !== undefined) {
-      histograms.createHistogram('time_to_video_play',
-          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay);
+      bufferingTime -= timeToVideoPlay;
+    } else {
+      bufferingTime -= timeToAudioPlay;
     }
-    if (timeToAudioPlay !== undefined) {
-      histograms.createHistogram('time_to_audio_play',
-          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay);
+    histograms.createHistogram('buffering_time',
+        tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, bufferingTime);
+    return bufferingTime;
+  }
+
+  function getDroppedFrameCount(compositorThread, histograms) {
+    let droppedFrameCount = 0;
+    for (const event of compositorThread.sliceGroup.getDescendantEvents()) {
+      if (event.title === 'VideoFramesDropped') {
+        droppedFrameCount += event.args.count;
+      }
     }
+    histograms.createHistogram('dropped_frame_count',
+        tr.b.Unit.byName.count_smallerIsBetter, droppedFrameCount);
+    return droppedFrameCount;
   }
   tr.metrics.MetricRegistry.register(mediaMetric);