Index: tracing/tracing/metrics/media_metric.html |
diff --git a/tracing/tracing/metrics/media_metric.html b/tracing/tracing/metrics/media_metric.html |
new file mode 100644 |
index 0000000000000000000000000000000000000000..a27cdbe6c1c58ffd9c0add641dab8be6fd776be3 |
--- /dev/null |
+++ b/tracing/tracing/metrics/media_metric.html |
@@ -0,0 +1,96 @@ |
+<!DOCTYPE html> |
+<!-- |
+Copyright 2017 The Chromium Authors. All rights reserved. |
+Use of this source code is governed by a BSD-style license that can be |
+found in the LICENSE file. |
+--> |
+ |
+<!-- |
+media_metric uses Chrome trace events to calculate metrics about video
+and audio playback. It is meant to be used for pages with a <video> or |
+<audio> element. It is used by videostack-eng@google.com team for |
+regression testing. |
+ |
+This metric currently supports the following measurement: |
+* time_to_play calculates how long after a video is requested to |
+ start playing before the video actually starts. If time_to_play |
+ regresses, then users will click to play videos and then have |
+ to wait longer before the videos start actually playing. |
+ |
+More measurements are expected to be added in the near future, such as: |
+* buffering_time |
+* seek_time |
+* dropped_frame_count |
+ |
+Please inform crouleau@chromium.org and johnchen@chromium.org about |
+changes to this file. |
+--> |
+ |
+<link rel="import" href="/tracing/metrics/metric_registry.html"> |
+<link rel="import" href="/tracing/model/helpers/chrome_model_helper.html"> |
+<link rel="import" href="/tracing/value/histogram.html"> |
+ |
+<script> |
+'use strict'; |
+ |
tr.exportTo('tr.metrics', function() {
  // Trace event titles emitted by Chrome's media stack that this metric
  // keys off of (see content/renderer/media and media/renderers in Chromium).
  const DO_LOAD_EVENT = 'WebMediaPlayerImpl::DoLoad';
  const VIDEO_RENDER_EVENT = 'VideoRendererImpl::Render';
  const AUDIO_RENDER_EVENT = 'AudioRendererImpl::Render';

  /**
   * Returns the start timestamp (ms) of the first descendant event on
   * |thread| whose title equals |title|, or undefined when |thread| is
   * undefined or has no such event.
   *
   * @param {!tr.model.Thread|undefined} thread
   * @param {string} title
   * @returns {number|undefined}
   */
  function findFirstEventStart(thread, title) {
    if (thread === undefined) return undefined;
    for (const event of thread.getDescendantEvents()) {
      if (event.title === title) return event.start;
    }
    return undefined;
  }

  /**
   * Computes media playback metrics from Chrome trace events.
   *
   * Currently produces a single histogram, time_to_play: the delay between
   * the media player's DoLoad request on the renderer main thread and the
   * first render event (video render on the compositor thread, falling back
   * to audio render on the AudioOutputDevice thread for audio-only pages).
   *
   * @param {!tr.v.HistogramSet} histograms
   * @param {!tr.Model} model
   */
  function mediaMetric(histograms, model) {
    const chromeHelper = model.getOrCreateHelper(
        tr.model.helpers.ChromeModelHelper);
    if (chromeHelper === undefined) return;

    let timeToPlay;
    for (const rendererHelper of Object.values(chromeHelper.rendererHelpers)) {
      const mainThread = rendererHelper.mainThread;
      const compositorThread = rendererHelper.compositorThread;
      const audioThread =
          rendererHelper.process.findAtMostOneThreadNamed('AudioOutputDevice');

      // Skip renderers that cannot possibly yield a measurement.
      if (mainThread === undefined) continue;
      if (compositorThread === undefined && audioThread === undefined) {
        continue;
      }

      // playStart is scoped to this renderer: previously it was declared
      // outside the loop, so a DoLoad found in one renderer process could
      // leak into the computation for a different renderer's render events.
      const playStart = findFirstEventStart(mainThread, DO_LOAD_EVENT);
      if (playStart === undefined) continue;

      // Prefer the video render event; fall back to the audio render event
      // so audio-only playback is still measured.
      let renderStart = findFirstEventStart(compositorThread,
          VIDEO_RENDER_EVENT);
      if (renderStart === undefined) {
        renderStart = findFirstEventStart(audioThread, AUDIO_RENDER_EVENT);
      }
      if (renderStart !== undefined) {
        timeToPlay = renderStart - playStart;
        break;
      }
    }

    if (timeToPlay !== undefined) {
      histograms.createHistogram('time_to_play',
          tr.b.Unit.byName.timeDurationInMs_smallerIsBetter, timeToPlay);
    }
  }

  tr.metrics.MetricRegistry.register(mediaMetric);

  return {
    mediaMetric,
  };
});
+</script> |