Chromium Code Reviews

Unified Diff: chrome/browser/extensions/cast_streaming_apitest.cc

Issue 184813009: Cast Streaming API end-to-end browser_test. (Closed) Base URL: svn://svn.chromium.org/chrome/trunk/src
Patch Set: Created 6 years, 10 months ago
Index: chrome/browser/extensions/cast_streaming_apitest.cc
diff --git a/chrome/browser/extensions/cast_streaming_apitest.cc b/chrome/browser/extensions/cast_streaming_apitest.cc
index 04ef6770c2a9914603dcc63736e85be3f68a5003..7ae769b654b3305750707d1b192cc48b558b9527 100644
--- a/chrome/browser/extensions/cast_streaming_apitest.cc
+++ b/chrome/browser/extensions/cast_streaming_apitest.cc
@@ -2,10 +2,24 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <algorithm>
+#include <cmath>
+
#include "base/command_line.h"
+#include "base/float_util.h"
+#include "base/strings/stringprintf.h"
+#include "base/synchronization/condition_variable.h"
+#include "base/synchronization/lock.h"
+#include "base/time/time.h"
#include "chrome/browser/extensions/extension_apitest.h"
#include "chrome/common/chrome_switches.h"
#include "content/public/common/content_switches.h"
+#include "media/base/video_frame.h"
+#include "media/cast/cast_config.h"
+#include "media/cast/cast_environment.h"
+#include "media/cast/test/utility/audio_utility.h"
+#include "media/cast/test/utility/in_process_receiver.h"
+#include "net/base/net_util.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace extensions {
@@ -21,11 +35,284 @@ class CastStreamingApiTest : public ExtensionApiTest {
// Test running the test extension for Cast Mirroring API.
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Basics) {
- ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "basics.html"));
+ ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "basics.html")) << message_;
}
IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, BadLogging) {
- ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "bad_logging.html"));
+ ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "bad_logging.html"))
+ << message_;
+}
+
+namespace {
+
+// An in-process Cast receiver that examines the audio/video frames being
+// received for expected colors and tones. Used in
+// CastStreamingApiTest.EndToEnd, below.
+class TestPatternReceiver : public media::cast::InProcessReceiver {
+ public:
+ explicit TestPatternReceiver(const net::IPEndPoint& local_end_point)
+ : InProcessReceiver(
+ make_scoped_refptr(new media::cast::CastEnvironment(
+ media::cast::CastLoggingConfig())),
+ local_end_point,
+ net::IPEndPoint(),
+ media::cast::InProcessReceiver::GetDefaultAudioConfig(),
+ media::cast::InProcessReceiver::GetDefaultVideoConfig()),
+ cond_(&lock_),
+ target_color_y_(0),
+ target_color_u_(0),
+ target_color_v_(0),
+ target_tone_frequency_(0),
+ current_color_y_(0.0f),
+ current_color_u_(0.0f),
+ current_color_v_(0.0f),
+ current_tone_frequency_(0.0f) {}
+ virtual ~TestPatternReceiver() {}
+
+ // Blocks the caller until this receiver has seen both |yuv_color| and
+ // |tone_frequency| consistently for the given |duration|. Returns true on
+ // success, or false if the expected pattern is not observed before an
+ // overall timeout expires.
+ bool WaitForColorAndTone(const uint8 yuv_color[3],
+ int tone_frequency,
+ base::TimeDelta duration) {
+ DCHECK(!cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
+
+ DVLOG(1) << "Waiting for test pattern: color=yuv("
+ << static_cast<int>(yuv_color[0]) << ", "
+ << static_cast<int>(yuv_color[1]) << ", "
+ << static_cast<int>(yuv_color[2])
+ << "), tone_frequency=" << tone_frequency << " Hz";
+
+ const base::TimeTicks start_time = cast_env()->Clock()->NowTicks();
+
+ // Reset target values and counters.
+ base::AutoLock auto_lock(lock_); // Released by |cond_| in the loop below.
+ target_color_y_ = yuv_color[0];
+ target_color_u_ = yuv_color[1];
+ target_color_v_ = yuv_color[2];
+ target_tone_frequency_ = tone_frequency;
+ first_time_near_target_color_ = base::TimeTicks();
+ first_time_near_target_tone_ = base::TimeTicks();
+
+ // Wait until both the color and tone have matched, subject to a timeout.
+ const int kMaxTotalWaitSeconds = 20;
+ do {
+ const base::TimeDelta remaining_wait_time =
+ base::TimeDelta::FromSeconds(kMaxTotalWaitSeconds) -
+ (cast_env()->Clock()->NowTicks() - start_time);
+ if (remaining_wait_time <= base::TimeDelta())
+ return false; // Failed to match test pattern within total wait time.
+ cond_.TimedWait(remaining_wait_time);
+
+ if (!first_time_near_target_color_.is_null() &&
+ !first_time_near_target_tone_.is_null()) {
+ const base::TimeTicks now = cast_env()->Clock()->NowTicks();
+ if ((now - first_time_near_target_color_) >= duration &&
+ (now - first_time_near_target_tone_) >= duration) {
+ return true; // Successfully matched for sufficient duration.
+ }
+ }
+ } while (true);
+ }
+
+ private:
+ // Invoked by InProcessReceiver for each received audio frame.
+ virtual void OnAudioFrame(scoped_ptr<media::cast::PcmAudioFrame> audio_frame,
+ const base::TimeTicks& playout_time) OVERRIDE {
+ DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
+
+ if (audio_frame->samples.empty()) {
+ NOTREACHED() << "OnAudioFrame called with no samples?!?";
+ return;
+ }
+
+ // Assume the audio signal is a single sine wave (it can have some
+ // low-amplitude noise). Count zero crossings, and extrapolate the
+ // frequency of the sine wave in |audio_frame|.
+ const int crossings = media::cast::CountZeroCrossings(audio_frame->samples);
+ const float seconds_per_frame = audio_frame->samples.size() /
+ static_cast<float>(audio_frame->frequency);
+ const float frequency_in_frame = crossings / seconds_per_frame;
+
+ const float kAveragingWeight = 0.1f;
+ UpdateExponentialMovingAverage(
+ kAveragingWeight, frequency_in_frame, &current_tone_frequency_);
+ DVLOG(1) << "Current audio tone frequency: " << current_tone_frequency_;
+
+ {
+ base::AutoLock auto_lock(lock_);
 hubbe 2014/03/04 22:42:26 What is this lock for? Don't all of these functions run on the same thread?
 miu 2014/03/06 06:09:15 No. All CastEnvironment threads are different from the thread that calls WaitForColorAndTone().
 hubbe 2014/03/06 19:54:43 I missed the exclamation mark in the DCHECK in WaitForColorAndTone().
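
For context on the exchange above: the receiver callbacks run on CastEnvironment threads and publish their measurements under |lock_|, while the test's main thread blocks inside WaitForColorAndTone() until |cond_| is signaled. A minimal standalone sketch of that producer/consumer shape, using std::mutex and std::condition_variable in place of base::Lock and base::ConditionVariable (the class and method names below are invented for illustration):

#include <chrono>
#include <cmath>
#include <condition_variable>
#include <mutex>

// Illustrative only: one thread publishes a measurement under a lock and
// notifies; another thread blocks until the measurement is within |window|
// of |target|, or the timeout expires.
class MeasurementGate {
 public:
  void Update(float value) {
    std::lock_guard<std::mutex> guard(mutex_);
    current_ = value;
    cond_.notify_all();  // Wake any thread blocked in WaitUntilNear().
  }

  bool WaitUntilNear(float target, float window,
                     std::chrono::milliseconds timeout) {
    std::unique_lock<std::mutex> lock(mutex_);
    return cond_.wait_for(lock, timeout, [&] {
      return std::fabs(current_ - target) < window;
    });
  }

 private:
  std::mutex mutex_;
  std::condition_variable cond_;
  float current_ = 0.0f;
};

The patch additionally requires the match to hold for a sustained |duration|, which is what the first_time_near_target_* timestamps track.
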
+ const float kTargetWindowHz = 20;
+ // Trigger the waiting thread while the current tone is within
+ // kTargetWindowHz of the target tone.
+ if (fabsf(current_tone_frequency_ - target_tone_frequency_) <
+ kTargetWindowHz) {
+ if (first_time_near_target_tone_.is_null())
+ first_time_near_target_tone_ = cast_env()->Clock()->NowTicks();
+ cond_.Broadcast();
+ } else {
+ first_time_near_target_tone_ = base::TimeTicks();
+ }
+ }
+ }
+
+ virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
+ const base::TimeTicks& render_time) OVERRIDE {
+ DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
+
+ CHECK(video_frame->format() == media::VideoFrame::YV12 ||
+ video_frame->format() == media::VideoFrame::I420 ||
+ video_frame->format() == media::VideoFrame::YV12A);
+
+ // Note: We take the median value of each plane because the test image will
+ // contain mostly a solid color plus some "cruft" (the "Testing..." text in
+ // the upper-left corner of the video frame). In other words, we want to
+ // read the dominant color while ignoring the small text overlay.
+ const float kAveragingWeight = 0.2f;
+#define UPDATE_FOR_PLANE(which, kXPlane) \
+ const uint8 median_##which = ComputeMedianIntensityInPlane( \
+ video_frame->row_bytes(media::VideoFrame::kXPlane), \
+ video_frame->rows(media::VideoFrame::kXPlane), \
+ video_frame->stride(media::VideoFrame::kXPlane), \
+ video_frame->data(media::VideoFrame::kXPlane)); \
+ UpdateExponentialMovingAverage( \
 hubbe 2014/03/04 22:42:26 Why do we need a moving average? A moving average just slows down convergence on the target value.
miu 2014/03/06 06:09:15 Good point. Done.
+ kAveragingWeight, median_##which, &current_color_##which##_)
+
+ UPDATE_FOR_PLANE(y, kYPlane);
 hubbe 2014/03/04 22:42:26 I think a uint8 medians[3], a const int planes[] array, and a loop would be cleaner than this macro.
miu 2014/03/06 06:09:15 Done.
+ UPDATE_FOR_PLANE(u, kUPlane);
+ UPDATE_FOR_PLANE(v, kVPlane);
+
+#undef UPDATE_FOR_PLANE
+
+ DVLOG(1) << "Current video color: yuv(" << current_color_y_ << ", "
+ << current_color_u_ << ", " << current_color_v_ << ')';
+
+ {
+ base::AutoLock auto_lock(lock_);
+ const float kTargetWindow = 10.0f;
+ // Trigger the waiting thread while all color channels are within
+ // kTargetWindow of the target.
+ if (fabsf(current_color_y_ - target_color_y_) < kTargetWindow &&
+ fabsf(current_color_u_ - target_color_u_) < kTargetWindow &&
+ fabsf(current_color_v_ - target_color_v_) < kTargetWindow) {
+ if (first_time_near_target_color_.is_null())
+ first_time_near_target_color_ = cast_env()->Clock()->NowTicks();
+ cond_.Broadcast();
+ } else {
+ first_time_near_target_color_ = base::TimeTicks();
+ }
+ }
+ }
+
+ static void UpdateExponentialMovingAverage(float weight,
+ float sample_value,
+ float* average) {
+ *average += weight * sample_value - weight * (*average);
 hubbe 2014/03/04 22:42:26 Shouldn't this be: *average += weight * (sample_value - *average); ?
 miu 2014/03/06 06:09:15 Note: I'm using "*average += ..." and not "*average = ..." deliberately; the two forms compute the same thing.
hubbe 2014/03/06 19:54:43 Ah tricky. Seems harder to read that way though.
miu 2014/03/07 22:40:29 Changed.
+ CHECK(base::IsFinite(*average));
+ }
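
On the formula discussion above: the increment form used in the patch and the conventional blend form are the same recurrence, average <- (1 - weight) * average + weight * sample, just rearranged. A throwaway equivalence check (illustrative, not part of the patch):

#include <cassert>
#include <cmath>

int main() {
  const float kWeight = 0.1f;
  float a = 0.0f;  // Updated as: a += w * x - w * a   (the patch's form).
  float b = 0.0f;  // Updated as: b  = (1 - w) * b + w * x   (the blend form).
  const float samples[] = {200.0f, 210.0f, 195.0f, 205.0f};
  for (float x : samples) {
    a += kWeight * x - kWeight * a;
    b = (1.0f - kWeight) * b + kWeight * x;
    assert(std::fabs(a - b) < 1e-4f);  // Both forms track the same average.
  }
  return 0;
}
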
+
+ static uint8 ComputeMedianIntensityInPlane(int width,
+ int height,
+ int stride,
+ uint8* data) {
+ const int num_pixels = width * height;
+ if (num_pixels <= 0)
+ return 0;
+ // If necessary, re-pack the pixels such that the stride is equal to the
+ // width.
+ if (width < stride) {
+ for (int y = 1; y < height; ++y) {
+ uint8* const src = data + y * stride;
+ uint8* const dest = data + y * width;
+ memmove(dest, src, width);
 hubbe 2014/03/04 22:42:26 Are you sure we're actually allowed to modify the frame data here?
miu 2014/03/06 06:09:15 Yes. There is no other reader of the frame.
 hubbe 2014/03/06 19:54:43 I was more concerned about the vp8 library using the frame buffer after it has been handed off.
+ }
+ }
+ const size_t middle_idx = num_pixels / 2;
+ std::nth_element(data, data + middle_idx, data + num_pixels);
+ return data[middle_idx];
+ }
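
A note on ComputeMedianIntensityInPlane(): std::nth_element only partially sorts the range, guaranteeing that the element at the middle index is the one a full sort would place there, which is all a median needs and runs in O(n) on average. Below is a standalone variant that copies the plane row by row instead of re-packing it in place, thereby leaving the frame data untouched (the concern raised above); the function name is illustrative:

#include <algorithm>
#include <cstdint>
#include <vector>

// Illustrative: median intensity of a pixel plane via partial sorting. The
// plane is copied row by row (dropping the stride padding), so the original
// frame buffer is left unmodified.
uint8_t MedianIntensity(const uint8_t* data, int width, int height, int stride) {
  std::vector<uint8_t> pixels;
  pixels.reserve(static_cast<size_t>(width) * height);
  for (int y = 0; y < height; ++y) {
    const uint8_t* row = data + y * stride;
    pixels.insert(pixels.end(), row, row + width);
  }
  if (pixels.empty())
    return 0;
  const size_t middle = pixels.size() / 2;
  std::nth_element(pixels.begin(), pixels.begin() + middle, pixels.end());
  return pixels[middle];
}
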
+
+ base::Lock lock_;
+ base::ConditionVariable cond_;
+
+ float target_color_y_;
+ float target_color_u_;
+ float target_color_v_;
+ float target_tone_frequency_;
+
+ float current_color_y_;
+ float current_color_u_;
+ float current_color_v_;
+ base::TimeTicks first_time_near_target_color_;
+ float current_tone_frequency_;
+ base::TimeTicks first_time_near_target_tone_;
+
+ DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
+};
+
+} // namespace
+
+class CastStreamingApiTestWithPixelOutput : public CastStreamingApiTest {
+ virtual void SetUp() OVERRIDE {
+ if (!UsingOSMesa())
+ EnablePixelOutput();
+ CastStreamingApiTest::SetUp();
+ }
+};
+
+// Tests the Cast streaming API and its basic functionality end-to-end. An
+// extension subtest is run to generate test content, capture that content, and
+// use the API to send it out. At the same time, this test launches an
+// in-process Cast receiver, listening on a localhost UDP socket, to receive the
+// content and check whether it matches expectations.
+//
+// Note: This test is disabled until outstanding bugs are fixed and the
+// media/cast library has achieved sufficient stability.
hubbe 2014/03/04 22:42:26 Maybe file a bug too?
miu 2014/03/06 06:09:15 Done.
+IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, DISABLED_EndToEnd) {
+ // This test is too slow to succeed with OSMesa on the bots.
+ if (UsingOSMesa()) {
+ LOG(WARNING) << "Skipping this test since OSMesa is too slow on the bots.";
+ return;
+ }
+
+ // Determine an unused UDP port for the in-process receiver to listen on, in
+ // the range [2300,2345]. We utilize the hero super-power known as "crossing
+ // our fingers" to find an unused port. Note: As of this writing, the cast
+ // sender runs on port 2346.
 hubbe 2014/03/04 22:42:26 This is not a good way to do it. We should create (or reuse) a helper that picks an unused port.
 miu 2014/03/06 06:09:15 Done. I had looked several weeks ago for the helper but couldn't find it at the time.
+ net::IPAddressNumber localhost;
+ localhost.push_back(127);
+ localhost.push_back(0);
+ localhost.push_back(0);
+ localhost.push_back(1);
+ const int64 random_temporal_offset =
+ (base::TimeTicks::Now() - base::TimeTicks::UnixEpoch()).InMilliseconds() /
+ 10;
+ const int hopefully_unused_receiver_port = 2300 + random_temporal_offset % 46;
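
On the port-selection thread above: a common alternative to deriving a port from the clock is to bind a UDP socket to port 0, let the kernel assign a free port, and read the assignment back. A POSIX-only sketch of that idea (illustrative; this is not the helper adopted in later patch sets, and it is still slightly racy because the port is released before the receiver re-binds it):

#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <unistd.h>

// Illustrative: ask the kernel for a currently-unused UDP port on localhost.
int PickUnusedUdpPort() {
  const int fd = socket(AF_INET, SOCK_DGRAM, 0);
  if (fd < 0)
    return -1;
  sockaddr_in addr = {};
  addr.sin_family = AF_INET;
  addr.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
  addr.sin_port = 0;  // 0 means "let the kernel choose".
  int port = -1;
  socklen_t len = sizeof(addr);
  if (bind(fd, reinterpret_cast<const sockaddr*>(&addr), sizeof(addr)) == 0 &&
      getsockname(fd, reinterpret_cast<sockaddr*>(&addr), &len) == 0) {
    port = ntohs(addr.sin_port);
  }
  close(fd);  // Another process could grab the port after this point.
  return port;
}
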
+
+ // Start the in-process receiver that examines audio/video for the expected
+ // test patterns.
+ TestPatternReceiver receiver(
+ net::IPEndPoint(localhost, hopefully_unused_receiver_port));
+ receiver.Start();
+
+ // Launch the page that: 1) renders the source content; 2) uses the
+ // chrome.tabCapture and chrome.cast.streaming APIs to capture its content and
+ // stream using Cast; and 3) calls chrome.test.succeed() once it is
+ // operational.
+ const std::string& page_url = base::StringPrintf(
+ "end_to_end_sender.html?port=%d", hopefully_unused_receiver_port);
+ ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;
+
+ // Examine the Cast receiver for expected audio/video test patterns. The
+ // colors and tones specified here must match those in end_to_end_sender.js.
+ const uint8 kRedInYUV[3] = {82, 90, 240}; // rgb(255, 0, 0)
+ const uint8 kGreenInYUV[3] = {145, 54, 34}; // rgb(0, 255, 0)
+ const uint8 kBlueInYUV[3] = {41, 240, 110}; // rgb(0, 0, 255)
+ const base::TimeDelta kOneHalfSecond = base::TimeDelta::FromMilliseconds(500);
+ EXPECT_TRUE(
+ receiver.WaitForColorAndTone(kRedInYUV, 200 /* Hz */, kOneHalfSecond));
+ EXPECT_TRUE(
+ receiver.WaitForColorAndTone(kGreenInYUV, 500 /* Hz */, kOneHalfSecond));
+ EXPECT_TRUE(
+ receiver.WaitForColorAndTone(kBlueInYUV, 1800 /* Hz */, kOneHalfSecond));
}
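
For reference, the kRedInYUV, kGreenInYUV, and kBlueInYUV triplets above are the BT.601 "studio swing" encodings of pure red, green, and blue. They can be reproduced with the commonly cited conversion coefficients (an illustrative check, not part of the test):

#include <cmath>
#include <cstdint>
#include <cstdio>

// Illustrative: BT.601 studio-swing RGB -> YUV, reproducing the constants the
// test expects, e.g. rgb(255, 0, 0) -> yuv(82, 90, 240).
void RgbToYuv601(int r, int g, int b, uint8_t yuv[3]) {
  yuv[0] = static_cast<uint8_t>(std::lround( 0.257 * r + 0.504 * g + 0.098 * b + 16));
  yuv[1] = static_cast<uint8_t>(std::lround(-0.148 * r - 0.291 * g + 0.439 * b + 128));
  yuv[2] = static_cast<uint8_t>(std::lround( 0.439 * r - 0.368 * g - 0.071 * b + 128));
}

int main() {
  uint8_t yuv[3];
  RgbToYuv601(255, 0, 0, yuv);   // -> {82, 90, 240}
  std::printf("red:   %d %d %d\n", yuv[0], yuv[1], yuv[2]);
  RgbToYuv601(0, 255, 0, yuv);   // -> {145, 54, 34}
  std::printf("green: %d %d %d\n", yuv[0], yuv[1], yuv[2]);
  RgbToYuv601(0, 0, 255, yuv);   // -> {41, 240, 110}
  std::printf("blue:  %d %d %d\n", yuv[0], yuv[1], yuv[2]);
  return 0;
}
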
} // namespace extensions
