| Index: content/browser/media/capture/video_capture_oracle.cc
|
| diff --git a/content/browser/media/capture/video_capture_oracle.cc b/content/browser/media/capture/video_capture_oracle.cc
|
| index c41e5e7e0b121770578d4285e613249c513227c4..ce836becf1ecc9dc3700c2b523c1d64903d995ee 100644
|
| --- a/content/browser/media/capture/video_capture_oracle.cc
|
| +++ b/content/browser/media/capture/video_capture_oracle.cc
|
| @@ -4,7 +4,11 @@
|
|
|
| #include "content/browser/media/capture/video_capture_oracle.h"
|
|
|
| +#include <algorithm>
|
| +
|
| #include "base/debug/trace_event.h"
|
| +#include "base/format_macros.h"
|
| +#include "base/strings/stringprintf.h"
|
|
|
| namespace content {
|
|
|
| @@ -21,59 +25,150 @@ namespace {
|
| // further into the WebRTC encoding stack.
|
| const int kNumRedundantCapturesOfStaticContent = 200;
|
|
|
| +// These specify the minimum/maximum amount of recent event history to examine
|
| +// to detect animated content. If the values are too low, there is a greater
|
| +// risk of false-positive detections and low accuracy. If they are too high,
|
| +// the implementation will be slow to lock-in/out, and also will not react
|
| +// well to mildly-variable frame rate content (e.g., 25 +/- 1 FPS).
|
| +//
|
| +// These values were established by experimenting with a wide variety of
|
| +// scenarios, including 24/25/30 FPS videos, 60 FPS WebGL demos, and the
|
| +// transitions between static and animated content.
|
| +const int kMinObservationWindowMillis = 1000;
|
| +const int kMaxObservationWindowMillis = 2000;
|
| +
|
| +// The maximum amount of time that can elapse before declaring two subsequent
|
| +// events as "not animating." This is the same value found in
|
| +// cc::FrameRateCounter.
|
| +const int kNonAnimatingThresholdMillis = 250; // 4 FPS
|
| +
|
| +// The slowest that content can be animating in order for AnimatedContentSampler
|
| +// to lock-in. This is the threshold at which the "smoothness" problem is no
|
| +// longer relevant.
|
| +const int kMaxLockInPeriodMicros = 83333; // 12 FPS
|
| +
|
| +// The amount of time over which to fully correct clock drift, when computing
|
| +// the timestamp of each successive frame. The lower the value, the higher the
|
| +// variance in frame timestamps.
|
| +const int kDriftCorrectionMillis = 6000;
|
| +
|
| +// Given the amount of time between frames, compare to the expected amount of
|
| +// time between frames at |frame_rate| and return the fractional difference.
|
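| +// For example, a measured delta of 40 ms against an expected 30 FPS frame
|
| +// time of ~33.3 ms returns roughly +0.2, i.e. the frame arrived about 20%
|
| +// later (slower) than ideal.
|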
| +double FractionFromExpectedFrameRate(base::TimeDelta delta, int frame_rate) {
|
| + DCHECK_GT(frame_rate, 0);
|
| + const base::TimeDelta expected_delta =
|
| + base::TimeDelta::FromSeconds(1) / frame_rate;
|
| + return (delta - expected_delta).InMillisecondsF() /
|
| + expected_delta.InMillisecondsF();
|
| +}
|
| +
|
| } // anonymous namespace
|
|
|
| -VideoCaptureOracle::VideoCaptureOracle(base::TimeDelta capture_period,
|
| +VideoCaptureOracle::VideoCaptureOracle(base::TimeDelta min_capture_period,
|
| bool events_are_reliable)
|
| - : capture_period_(capture_period),
|
| + : min_capture_period_(min_capture_period),
|
| frame_number_(0),
|
| - last_delivered_frame_number_(0),
|
| - sampler_(capture_period_,
|
| - events_are_reliable,
|
| - kNumRedundantCapturesOfStaticContent) {}
|
| + last_delivered_frame_number_(-1),
|
| + smoothing_sampler_(min_capture_period_,
|
| + events_are_reliable,
|
| + kNumRedundantCapturesOfStaticContent),
|
| + content_sampler_(min_capture_period_) {
|
| +}
|
| +
|
| +VideoCaptureOracle::~VideoCaptureOracle() {}
|
|
|
| bool VideoCaptureOracle::ObserveEventAndDecideCapture(
|
| Event event,
|
| + const gfx::Rect& damage_rect,
|
| base::TimeTicks event_time) {
|
| - // Record |event| and decide whether it's a good time to capture.
|
| - const bool content_is_dirty = (event == kCompositorUpdate ||
|
| - event == kSoftwarePaint);
|
| + DCHECK_GE(event, 0);
|
| + DCHECK_LT(event, kNumEvents);
|
| + if (event_time < last_event_time_[event]) {
|
| + LOG(WARNING) << "Event time is not monotonically non-decreasing. "
|
| + << "Deciding not to capture this frame.";
|
| + return false;
|
| + }
|
| + last_event_time_[event] = event_time;
|
| +
|
| bool should_sample;
|
| - if (content_is_dirty) {
|
| - frame_number_++;
|
| - should_sample = sampler_.AddEventAndConsiderSampling(event_time);
|
| - } else {
|
| - should_sample = sampler_.IsOverdueForSamplingAt(event_time);
|
| + switch (event) {
|
| + case kCompositorUpdate:
|
| + case kSoftwarePaint:
|
| + should_sample =
|
| + smoothing_sampler_.AddEventAndConsiderSampling(event_time);
|
| + if (content_sampler_.ConsiderPresentationEvent(damage_rect, event_time)) {
|
| + event_time = content_sampler_.next_frame_timestamp();
|
| + should_sample = !event_time.is_null();
|
| + }
|
| + break;
|
| + default:
|
| + should_sample = smoothing_sampler_.IsOverdueForSamplingAt(event_time);
|
| + break;
|
| }
|
| +
|
| + SetFrameTimestamp(frame_number_, event_time);
|
| return should_sample;
|
| }
|
|
|
| int VideoCaptureOracle::RecordCapture() {
|
| - sampler_.RecordSample();
|
| - return frame_number_;
|
| + smoothing_sampler_.RecordSample();
|
| + content_sampler_.RecordSample(GetFrameTimestamp(frame_number_));
|
| + return frame_number_++;
|
| }
|
|
|
| bool VideoCaptureOracle::CompleteCapture(int frame_number,
|
| - base::TimeTicks timestamp) {
|
| - // Drop frame if previous frame number is higher or we're trying to deliver
|
| - // a frame with the same timestamp.
|
| - if (last_delivered_frame_number_ > frame_number ||
|
| - last_delivered_frame_timestamp_ == timestamp) {
|
| - LOG(ERROR) << "Frame with same timestamp or out of order delivery. "
|
| - << "Dropping frame.";
|
| + base::TimeTicks* frame_timestamp) {
|
| + // Drop frame if previous frame number is higher.
|
| + if (last_delivered_frame_number_ > frame_number) {
|
| + LOG(WARNING) << "Out of order frame delivery detected. Dropping frame ";
|
| return false;
|
| }
|
| + last_delivered_frame_number_ = frame_number;
|
|
|
| - if (last_delivered_frame_timestamp_ > timestamp) {
|
| - // We should not get here unless time was adjusted backwards.
|
| - LOG(ERROR) << "Frame with past timestamp (" << timestamp.ToInternalValue()
|
| - << ") was delivered";
|
| + *frame_timestamp = GetFrameTimestamp(frame_number);
|
| +
|
| + // If enabled, log a measurement of how this frame timestamp has incremented
|
| + // in relation to an ideal increment.
|
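| + // A typical line, assuming locked-in 30 FPS content, would look something
|
| + // like: "Captured #123: delta=33367 usec, now locked into {0,0 1280x720},
|
| + // +0.1% slower than 30 FPS".
|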
| + if (VLOG_IS_ON(2) && frame_number > 0) {
|
| + const base::TimeDelta delta =
|
| + *frame_timestamp - GetFrameTimestamp(frame_number - 1);
|
| + if (content_sampler_.detected_period() > base::TimeDelta()) {
|
| + const double estimated_frame_rate =
|
| + 1000000.0 / content_sampler_.detected_period().InMicroseconds();
|
| + const int rounded_frame_rate =
|
| + static_cast<int>(estimated_frame_rate + 0.5);
|
| + VLOG(2) << base::StringPrintf(
|
| + "Captured #%d: delta=%" PRId64 " usec"
|
| + ", now locked into {%s}, %+0.1f%% slower than %d FPS",
|
| + frame_number,
|
| + delta.InMicroseconds(),
|
| + content_sampler_.detected_region().ToString().c_str(),
|
| + 100.0 * FractionFromExpectedFrameRate(delta, rounded_frame_rate),
|
| + rounded_frame_rate);
|
| + } else {
|
| + VLOG(2) << base::StringPrintf(
|
| + "Captured #%d: delta=%" PRId64 " usec"
|
| + ", d/30fps=%+0.1f%%, d/25fps=%+0.1f%%, d/24fps=%+0.1f%%",
|
| + frame_number,
|
| + delta.InMicroseconds(),
|
| + 100.0 * FractionFromExpectedFrameRate(delta, 30),
|
| + 100.0 * FractionFromExpectedFrameRate(delta, 25),
|
| + 100.0 * FractionFromExpectedFrameRate(delta, 24));
|
| + }
|
| }
|
|
|
| - last_delivered_frame_number_ = frame_number;
|
| - last_delivered_frame_timestamp_ = timestamp;
|
| + return !frame_timestamp->is_null();
|
| +}
|
|
|
| - return true;
|
| +base::TimeTicks VideoCaptureOracle::GetFrameTimestamp(int frame_number) const {
|
| + DCHECK_LE(frame_number, frame_number_);
|
| + DCHECK_LT(frame_number_ - frame_number, kMaxFrameTimestamps);
|
| + return frame_timestamps_[frame_number % kMaxFrameTimestamps];
|
| +}
|
| +
|
| +void VideoCaptureOracle::SetFrameTimestamp(int frame_number,
|
| + base::TimeTicks timestamp) {
|
| + frame_timestamps_[frame_number % kMaxFrameTimestamps] = timestamp;
|
| }
|
|
|
| SmoothEventSampler::SmoothEventSampler(base::TimeDelta capture_period,
|
| @@ -159,14 +254,172 @@ bool SmoothEventSampler::IsOverdueForSamplingAt(base::TimeTicks event_time)
|
| // If we're dirty but not yet old, then we've recently gotten updates, so we
|
| // won't request a sample just yet.
|
| base::TimeDelta dirty_interval = event_time - last_sample_;
|
| - if (dirty_interval < capture_period_ * 4)
|
| - return false;
|
| - else
|
| - return true;
|
| + return dirty_interval >=
|
| + base::TimeDelta::FromMilliseconds(kNonAnimatingThresholdMillis);
|
| }
|
|
|
| bool SmoothEventSampler::HasUnrecordedEvent() const {
|
| return !current_event_.is_null() && current_event_ != last_sample_;
|
| }
|
|
|
| +AnimatedContentSampler::AnimatedContentSampler(
|
| + base::TimeDelta min_capture_period)
|
| + : min_capture_period_(min_capture_period) {}
|
| +
|
| +AnimatedContentSampler::~AnimatedContentSampler() {}
|
| +
|
| +bool AnimatedContentSampler::ConsiderPresentationEvent(
|
| + const gfx::Rect& damage_rect, base::TimeTicks event_time) {
|
| + AddObservation(damage_rect, event_time);
|
| +
|
| + if (AnalyzeObservations(event_time, &detected_region_, &detected_period_) &&
|
| + detected_period_ > base::TimeDelta() &&
|
| + detected_period_ <=
|
| + base::TimeDelta::FromMicroseconds(kMaxLockInPeriodMicros)) {
|
| + if (damage_rect == detected_region_)
|
| + UpdateNextFrameTimestamp(event_time);
|
| + else
|
| + next_frame_timestamp_ = base::TimeTicks();
|
| +
|
| + return true;
|
| + }
|
| +
|
| + detected_region_ = gfx::Rect();
|
| + detected_period_ = base::TimeDelta();
|
| + next_frame_timestamp_ = base::TimeTicks();
|
| + return false;
|
| +}
|
| +
|
| +void AnimatedContentSampler::RecordSample(base::TimeTicks frame_timestamp) {
|
| + recorded_frame_timestamp_ = frame_timestamp;
|
| + sequence_offset_ = base::TimeDelta();
|
| +}
|
| +
|
| +void AnimatedContentSampler::AddObservation(const gfx::Rect& damage_rect,
|
| + base::TimeTicks event_time) {
|
| + if (damage_rect.IsEmpty())
|
| + return; // Useless observation.
|
| +
|
| + // Add the observation to the FIFO queue.
|
| + if (!observations_.empty() && observations_.back().second > event_time)
|
| + return; // The implementation assumes chronological order.
|
| + observations_.push_back(Observation(damage_rect, event_time));
|
| +
|
| + // Prune-out old observations.
|
| + const base::TimeDelta threshold =
|
| + base::TimeDelta::FromMilliseconds(kMaxObservationWindowMillis);
|
| + while ((event_time - observations_.front().second) > threshold)
|
| + observations_.pop_front();
|
| +}
|
| +
|
| +bool AnimatedContentSampler::AnalyzeObservations(
|
| + base::TimeTicks event_time,
|
| + gfx::Rect* rect,
|
| + base::TimeDelta* period) const {
|
| + // There must be at least three observations, or else it's possible to divide
|
| + // by zero at the end of this method, where |*period| is assigned the result.
|
| + if (observations_.size() < 3)
|
| + return false;
|
| +
|
| + // Find the candidate damage Rect that *would* be the majority value, if a
|
| + // majority value exists. This is an implementation of the Boyer-Moore
|
| + // Majority Vote Algorithm.
|
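| + // For instance, given the damage Rect sequence {A, A, B, A}, the running
|
| + // count goes 1, 2, 1, 2 and A survives as the candidate; the second pass
|
| + // below then verifies that the candidate is a true majority.
|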
| + const gfx::Rect* candidate = NULL;
|
| + size_t count = 0;
|
| + for (ObservationFifo::const_iterator i = observations_.begin();
|
| + i != observations_.end(); ++i) {
|
| + if (count == 0) {
|
| + candidate = &(i->first);
|
| + count = 1;
|
| + } else if (i->first == *candidate) {
|
| + ++count;
|
| + } else {
|
| + --count;
|
| + }
|
| + }
|
| +
|
| + // Accomplish two goals by making a second pass over |observations_|. First,
|
| + // confirm that |candidate| in fact points to the majority damage Rect, and
|
| + // didn't just win the "voting" phase. Second, sum up the durations between
|
| + // the frames having the candidate damage Rect, and track the event time of
|
| + // the first and last of those frames.
|
| + count = 0;
|
| + base::TimeDelta sum_frame_durations;
|
| + base::TimeTicks first_event_time;
|
| + base::TimeTicks last_event_time;
|
| + for (ObservationFifo::const_iterator i = observations_.begin();
|
| + i != observations_.end(); ++i) {
|
| + if (i->first != *candidate)
|
| + continue;
|
| + ++count;
|
| +
|
| + if (first_event_time.is_null()) {
|
| + first_event_time = i->second;
|
| + } else {
|
| + const base::TimeDelta frame_duration = i->second - last_event_time;
|
| + if (frame_duration >=
|
| + base::TimeDelta::FromMilliseconds(kNonAnimatingThresholdMillis)) {
|
| + return false; // Content has not animated continuously.
|
| + }
|
| + sum_frame_durations += frame_duration;
|
| + }
|
| + last_event_time = i->second;
|
| + }
|
| +
|
| + if (count <= observations_.size() / 2)
|
| + return false; // |candidate| was not a majority value.
|
| + if ((last_event_time - first_event_time) <
|
| + base::TimeDelta::FromMilliseconds(kMinObservationWindowMillis)) {
|
| + return false; // Content has not animated for long enough.
|
| + }
|
| + if ((event_time - last_event_time) >=
|
| + base::TimeDelta::FromMilliseconds(kNonAnimatingThresholdMillis)) {
|
| + return false; // Content animation has recently ended.
|
| + }
|
| +
|
| + *rect = *candidate;
|
| + *period = sum_frame_durations / (count - 1);
|
| + return true;
|
| +}
|
| +
|
| +void AnimatedContentSampler::UpdateNextFrameTimestamp(
|
| + base::TimeTicks event_time) {
|
| + // This is how much time to advance from the last frame timestamp. Never
|
| + // advance by less than |min_capture_period_| because the downstream consumer
|
| + // cannot handle the higher frame rate. If |detected_period_| is less than
|
| + // |min_capture_period_|, excess frames should be dropped.
|
| + const base::TimeDelta advancement =
|
| + std::max(detected_period_, min_capture_period_);
|
| +
|
| + // Compute the |timebase| upon which to determine the |next_frame_timestamp_|.
|
| + // Ideally, this would always equal the timestamp of the last recorded frame
|
| + // sampling, but no clock is perfect. Determine how much drift from the ideal
|
| + // is present, then adjust the timebase by a small amount to spread out the
|
| + // entire correction over many frame timestamps.
|
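| + // For example, with 30 FPS content (detected_period_ of ~33 ms) the drift
|
| + // is spread over roughly 180 frames (6000 ms / 33 ms), so each frame's
|
| + // timebase is nudged by only a tiny fraction of the total drift.
|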
| + base::TimeTicks timebase = event_time - sequence_offset_ - advancement;
|
| + if (!recorded_frame_timestamp_.is_null()) {
|
| + const base::TimeDelta drift = recorded_frame_timestamp_ - timebase;
|
| + const int64 correct_over_num_frames =
|
| + base::TimeDelta::FromMilliseconds(kDriftCorrectionMillis) /
|
| + detected_period_;
|
| + DCHECK_GT(correct_over_num_frames, 0);
|
| + timebase = recorded_frame_timestamp_ - (drift / correct_over_num_frames);
|
| + }
|
| +
|
| + // Compute the |next_frame_timestamp_|. Whenever |detected_period_| is less
|
| + // than |min_capture_period_|, some extra time is "borrowed" to be able to
|
| + // advance by the full |min_capture_period_|. Then, whenever the total amount
|
| + // of borrowed time reaches a full |min_capture_period_|, drop a frame. Note
|
| + // that when |detected_period_| is greater or equal to |min_capture_period_|,
|
| + // this logic is effectively disabled.
|
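| + // For example, 60 FPS content (detected_period_ of ~16.7 ms) captured with
|
| + // a min_capture_period_ of ~33.3 ms (30 FPS) borrows ~16.7 ms per frame, so
|
| + // every other frame is dropped and the output settles at ~30 FPS.
|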
| + borrowed_time_ += advancement - detected_period_;
|
| + if (borrowed_time_ >= min_capture_period_) {
|
| + borrowed_time_ -= min_capture_period_;
|
| + next_frame_timestamp_ = base::TimeTicks();
|
| + } else {
|
| + sequence_offset_ += advancement;
|
| + next_frame_timestamp_ = timebase + sequence_offset_;
|
| + }
|
| +}
|
| +
|
| } // namespace content
|
|
|