Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(265)

Unified Diff: tracing/tracing/metrics/media_metric.html

Issue 3020433002: Finish migrating media metrics to TBMv2 (Closed)
Patch Set: Refactoring Created 3 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « no previous file | tracing/tracing/metrics/media_metric_test.html » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show Comments Hide Comments ('s')
Index: tracing/tracing/metrics/media_metric.html
diff --git a/tracing/tracing/metrics/media_metric.html b/tracing/tracing/metrics/media_metric.html
index 3e86e7d26a9025fd0ad49bd7c485c5bdc9a1e564..a0bfdb9d32ee7bf147457b10c04d8fc4845fc5e4 100644
--- a/tracing/tracing/metrics/media_metric.html
+++ b/tracing/tracing/metrics/media_metric.html
@@ -18,11 +18,17 @@ This metric currently supports the following measurement:
to wait longer before the videos start actually playing.
* time_to_audio_play is similar to time_to_video_play, but measures the
time delay before audio starts playing.
-
-More measurements are expected to be added in the near future, such as:
-* buffering_time
-* seek_time
-* dropped_frame_count
+* buffering_time calculates the difference between the actual play time of
+ media vs its expected play time. Ideally the two should be the same.
+ If actual play time is significantly longer than expected play time,
+ it indicates that there were stalls during the play for buffering or
+ some other reasons.
+* dropped_frame_count reports the number of video frames that were dropped.
+ Ideally this should be 0. If a large number of frames are dropped, the
+ video play will not be smooth.
+* seek_time calculates how long after a user requests a seek operation
+ before the seek completes and the media starts playing at the new
+ location.
Please inform crouleau@chromium.org and johnchen@chromium.org about
changes to this file.
@@ -37,10 +43,6 @@ changes to this file.
tr.exportTo('tr.metrics', function() {
function mediaMetric(histograms, model) {
- let playStart;
- let timeToAudioPlay;
- let timeToVideoPlay;
-
const chromeHelper = model.getOrCreateHelper(
tr.model.helpers.ChromeModelHelper);
if (chromeHelper === undefined) return;
@@ -56,54 +58,144 @@ tr.exportTo('tr.metrics', function() {
rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice');
if (compositorThread === undefined && audioThread === undefined) continue;
- // Look for the media player DoLoad event on main thread.
- for (const event of mainThread.getDescendantEvents()) {
- if (event.title === 'WebMediaPlayerImpl::DoLoad') {
- // TODO(johnchen@chromium.org): Support multiple audio/video
- // elements per page. Currently, we only support a single
- // audio or video element, so we can store the start time in
- // a simple variable, and exit the loop.
- if (playStart !== undefined) {
- throw new Error(
- 'Loading multiple audio/video elements not yet supported');
- }
- playStart = event.start;
- break;
+ const playStart = getPlayStart(mainThread);
+ if (playStart === undefined) continue;
+
+ const timeToVideoPlay = getTimeToVideoPlay(
+ compositorThread, playStart, histograms);
+ const timeToAudioPlay = getTimeToAudioPlay(
+ audioThread, playStart, histograms);
+
+ if (timeToVideoPlay === undefined && timeToAudioPlay === undefined) {
+ continue;
+ }
+
+ const droppedFrameCount = getDroppedFrameCount(compositorThread,
+ timeToVideoPlay, histograms);
+ const seekTimes = getSeekTimes(mainThread, timeToVideoPlay, histograms);
+ const bufferingTime = getBufferingTime(mainThread, playStart,
+ timeToVideoPlay, timeToAudioPlay, seekTimes, histograms);
+ }
+ }
+
+ function getPlayStart(mainThread) {
+ let playStart;
+ for (const event of mainThread.getDescendantEvents()) {
+ if (event.title === 'WebMediaPlayerImpl::DoLoad') {
+ // TODO(johnchen@chromium.org): Support multiple audio/video
+ // elements per page. Currently, we only support a single
+ // audio or video element, so we can store the start time in
+ // a simple variable.
+ if (playStart !== undefined) {
+ throw new Error(
+ 'Loading multiple audio/video elements not yet supported');
}
+ playStart = event.start;
}
- if (playStart === undefined) continue;
+ }
+ return playStart;
+ }
- // Look for video render event.
- if (compositorThread !== undefined) {
- for (const event of compositorThread.getDescendantEvents()) {
- if (event.title === 'VideoRendererImpl::Render') {
- timeToVideoPlay = event.start - playStart;
- break;
- }
+ function getTimeToVideoPlay(compositorThread, playStart, histograms) {
+ if (compositorThread !== undefined) {
+ for (const event of compositorThread.getDescendantEvents()) {
+ if (event.title === 'VideoRendererImpl::Render') {
+ const timeToVideoPlay = event.start - playStart;
+ histograms.createHistogram('time_to_video_play',
+ tr.b.Unit.byName.timeDurationInMs_smallerIsBetter,
+ timeToVideoPlay);
+ return timeToVideoPlay;
}
}
+ }
+ return undefined;
+ }
+
+ function getTimeToAudioPlay(audioThread, playStart, histograms) {
+ if (audioThread !== undefined) {
+ for (const event of audioThread.getDescendantEvents()) {
+ benjhayden 2017/09/21 05:57:30 Can you iterate over only one of the thread's events?
johnchen 2017/09/22 16:25:39 Done. Now getting events from sliceGroup.
+ if (event.title === 'AudioRendererImpl::Render') {
+ const timeToAudioPlay = event.start - playStart;
+ histograms.createHistogram('time_to_audio_play',
+ tr.b.Unit.byName.timeDurationInMs_smallerIsBetter,
+ timeToAudioPlay);
+ return timeToAudioPlay;
+ }
+ }
+ }
+ return undefined;
+ }
- // Look for audio render event.
- if (audioThread !== undefined) {
- for (const event of audioThread.getDescendantEvents()) {
- if (event.title === 'AudioRendererImpl::Render') {
- timeToAudioPlay = event.start - playStart;
- break;
- }
+ function getSeekTimes(mainThread, timeToVideoPlay, histograms) {
+ if (timeToVideoPlay === undefined) return new Map();
+
+ const seekStartTimes = new Map();
+ const seekTimes = new Map();
+ for (const event of mainThread.getDescendantEvents()) {
+ if (event.title === 'WebMediaPlayerImpl::DoSeek') {
+ seekStartTimes.set(event.args.target, event.start);
+ benjhayden 2017/09/21 05:57:30 We're a bit leery of copying strings from the trace.
+ johnchen 2017/09/22 16:25:39 Targets are numerical values, not strings. It equals
+ } else if (event.title === 'WebMediaPlayerImpl::OnPipelineSeeked') {
+ const startTime = seekStartTimes.get(event.args.target);
+ if (startTime !== undefined) {
+ seekTimes.set(event.args.target, event.start - startTime);
+ seekStartTimes.delete(event.args.target);
}
}
- if (timeToVideoPlay !== undefined) break;
- if (timeToAudioPlay !== undefined) break;
}
+ for (const [key, value] of seekTimes.entries()) {
+ histograms.createHistogram('seek_time_' + key,
+ tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, value);
+ }
+ return seekTimes;
+ }
+ function getBufferingTime(mainThread, playStart, timeToVideoPlay,
+ timeToAudioPlay, seekTimes, histograms) {
+ // We can calculate buffering time only if the media is played from
+ // beginning to end. Thus if any seek operations had occurred, we skip
+ // calculating this metric.
+ if (seekTimes.size !== 0) return undefined;
+
+ let playEnd;
+ let duration;
+ for (const event of mainThread.getDescendantEvents()) {
+ if (event.title === 'WebMediaPlayerImpl::OnEnded') {
+ // TODO(johnchen@chromium.org): Support multiple audio/video
+ // elements per page. Currently, we only support a single
+ // audio or video element, so we can store the end time in
+ // a simple variable.
+ if (playEnd !== undefined) {
+ throw new Error(
+ 'Multiple media ended events not yet supported');
+ }
+ playEnd = event.start;
+ duration = 1000 * event.args.duration; // seconds to milliseconds
+ }
+ }
+ let bufferingTime = playEnd - playStart - duration;
if (timeToVideoPlay !== undefined) {
- histograms.createHistogram('time_to_video_play',
- tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToVideoPlay);
+ bufferingTime -= timeToVideoPlay;
+ } else {
+ bufferingTime -= timeToAudioPlay;
}
- if (timeToAudioPlay !== undefined) {
- histograms.createHistogram('time_to_audio_play',
- tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToAudioPlay);
+ histograms.createHistogram('buffering_time',
+ tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, bufferingTime);
+ return bufferingTime;
+ }
+
+ function getDroppedFrameCount(compositorThread, timeToVideoPlay,
+ histograms) {
+ if (timeToVideoPlay === undefined) return undefined;
+ benjhayden 2017/09/21 05:57:30 If this function only needs timeToVideoPlay in order
johnchen 2017/09/22 16:25:39 Done
+
+ let droppedFrameCount = 0;
+ for (const event of compositorThread.getDescendantEvents()) {
+ if (event.title === 'VideoFramesDropped') {
+ droppedFrameCount += event.args.count;
+ }
}
+ histograms.createHistogram('dropped_frame_count',
+ tr.b.Unit.byName.count_smallerIsBetter, droppedFrameCount);
+ return droppedFrameCount;
}
tr.metrics.MetricRegistry.register(mediaMetric);
« no previous file with comments | « no previous file | tracing/tracing/metrics/media_metric_test.html » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698