Index: chrome/browser/extensions/api/cast_streaming/cast_streaming_apitest.cc
diff --git a/chrome/browser/extensions/api/cast_streaming/cast_streaming_apitest.cc b/chrome/browser/extensions/api/cast_streaming/cast_streaming_apitest.cc
new file mode 100644
index 0000000000000000000000000000000000000000..2360ecbe424f5728aa31e83747e3186f09007128
--- /dev/null
+++ b/chrome/browser/extensions/api/cast_streaming/cast_streaming_apitest.cc
@@ -0,0 +1,330 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+#include <cmath>
+
+#include "base/command_line.h"
+#include "base/float_util.h"
+#include "base/run_loop.h"
+#include "base/strings/stringprintf.h"
+#include "base/synchronization/lock.h"
+#include "base/time/time.h"
+#include "chrome/browser/extensions/extension_apitest.h"
+#include "chrome/common/chrome_switches.h"
+#include "content/public/common/content_switches.h"
+#include "media/base/bind_to_current_loop.h"
+#include "media/base/video_frame.h"
+#include "media/cast/cast_config.h"
+#include "media/cast/cast_environment.h"
+#include "media/cast/test/utility/audio_utility.h"
+#include "media/cast/test/utility/default_config.h"
+#include "media/cast/test/utility/in_process_receiver.h"
+#include "media/cast/test/utility/standalone_cast_environment.h"
+#include "net/base/net_errors.h"
+#include "net/base/net_util.h"
+#include "net/base/rand_callback.h"
+#include "net/udp/udp_socket.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace extensions {
+
+class CastStreamingApiTest : public ExtensionApiTest {
+ public:
+  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
+    ExtensionApiTest::SetUpCommandLine(command_line);
+    command_line->AppendSwitchASCII(switches::kWhitelistedExtensionID,
+                                    "ddchlicdkolnonkihahngkmmmjnjlkkf");
+  }
+};
+
+// Test running the test extension for the Cast Streaming API.
+IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, Basics) {
+  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "basics.html")) << message_;
+}
+
+IN_PROC_BROWSER_TEST_F(CastStreamingApiTest, BadLogging) {
+  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", "bad_logging.html"))
+      << message_;
+}
+
+namespace {
+
+// An in-process Cast receiver that examines the audio/video frames being
+// received for expected colors and tones. Used in
+// CastStreamingApiTest.EndToEnd, below.
+class TestPatternReceiver : public media::cast::InProcessReceiver {
+ public:
+  explicit TestPatternReceiver(
+      const scoped_refptr<media::cast::CastEnvironment>& cast_environment,
+      const net::IPEndPoint& local_end_point)
+      : InProcessReceiver(cast_environment,
+                          local_end_point,
+                          net::IPEndPoint(),
+                          media::cast::GetDefaultAudioReceiverConfig(),
+                          media::cast::GetDefaultVideoReceiverConfig()),
+        target_tone_frequency_(0),
+        current_tone_frequency_(0.0f) {
+    memset(&target_color_, 0, sizeof(target_color_));
+    memset(&current_color_, 0, sizeof(current_color_));
+  }
+
+  // Blocks the caller until this receiver has seen both |yuv_color| and
+  // |tone_frequency| consistently for the given |duration|.
+  void WaitForColorAndTone(const uint8 yuv_color[3],
+                           int tone_frequency,
+                           base::TimeDelta duration) {
+    DCHECK(!cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
+
+    LOG(INFO) << "Waiting for test pattern: color=yuv("
+              << static_cast<int>(yuv_color[0]) << ", "
+              << static_cast<int>(yuv_color[1]) << ", "
+              << static_cast<int>(yuv_color[2])
+              << "), tone_frequency=" << tone_frequency << " Hz";
+
+    // Reset target values and counters.
+    base::AutoLock auto_lock(lock_);
+    target_color_[0] = yuv_color[0];
+    target_color_[1] = yuv_color[1];
+    target_color_[2] = yuv_color[2];
+    target_tone_frequency_ = tone_frequency;
+    first_time_near_target_color_ = base::TimeTicks();
+    first_time_near_target_tone_ = base::TimeTicks();
+
+    // Wait until both the color and tone have matched, subject to a timeout.
+    while (true) {
+      base::RunLoop run_loop;
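+      // BindToCurrentLoop() wraps the quit closure so that, when a frame
+      // callback invokes it from the CastEnvironment's MAIN thread, the
+      // invocation is posted back to this thread's message loop.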
+      notify_callback_ = media::BindToCurrentLoop(run_loop.QuitClosure());
+      {
+        base::AutoUnlock auto_unlock(lock_);
+        run_loop.Run();
+      }
+      notify_callback_.Reset();
+
+      // TODO(miu): Check audio tone too, once audio is fixed in the library.
+      if (!first_time_near_target_color_.is_null() &&
+          /*!first_time_near_target_tone_.is_null()*/ true) {
+        const base::TimeTicks now = cast_env()->Clock()->NowTicks();
+        if ((now - first_time_near_target_color_) >= duration &&
+            /*(now - first_time_near_target_tone_) >= duration*/ true) {
+          return; // Successfully matched for sufficient duration.
+        }
+      }
+    }
+  }
+
+ private:
+  virtual ~TestPatternReceiver() {}
+
+  // Invoked by InProcessReceiver for each received audio frame.
+  virtual void OnAudioFrame(scoped_ptr<media::cast::PcmAudioFrame> audio_frame,
+                            const base::TimeTicks& playout_time) OVERRIDE {
+    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
+
+    if (audio_frame->samples.empty()) {
+      NOTREACHED() << "OnAudioFrame called with no samples?!?";
+      return;
+    }
+
+    // Assume the audio signal is a single sine wave (it can have some
+    // low-amplitude noise). Count zero crossings, and extrapolate the
+    // frequency of the sine wave in |audio_frame|.
+    const int crossings = media::cast::CountZeroCrossings(audio_frame->samples);
+    const float seconds_per_frame = audio_frame->samples.size() /
+                                    static_cast<float>(audio_frame->frequency);
+    const float frequency_in_frame = crossings / seconds_per_frame;
+
+    const float kAveragingWeight = 0.1f;
+    UpdateExponentialMovingAverage(
+        kAveragingWeight, frequency_in_frame, &current_tone_frequency_);
+    VLOG(1) << "Current audio tone frequency: " << current_tone_frequency_;
+
+    {
+      base::AutoLock auto_lock(lock_);
+      const float kTargetWindowHz = 20;
+      // Trigger the waiting thread while the current tone is within
+      // kTargetWindowHz of the target tone.
+      if (fabsf(current_tone_frequency_ - target_tone_frequency_) <
+          kTargetWindowHz) {
+        if (first_time_near_target_tone_.is_null())
+          first_time_near_target_tone_ = cast_env()->Clock()->NowTicks();
+        if (!notify_callback_.is_null())
+          notify_callback_.Run();
+      } else {
+        first_time_near_target_tone_ = base::TimeTicks();
+      }
+    }
+  }
+
+  virtual void OnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
+                            const base::TimeTicks& render_time) OVERRIDE {
+    DCHECK(cast_env()->CurrentlyOn(media::cast::CastEnvironment::MAIN));
+
+    CHECK(video_frame->format() == media::VideoFrame::YV12 ||
+          video_frame->format() == media::VideoFrame::I420 ||
+          video_frame->format() == media::VideoFrame::YV12A);
+
+    // Note: We take the median value of each plane because the test image will
+    // contain mostly a solid color plus some "cruft" which is the "Testing..."
+    // text in the upper-left corner of the video frame. In other words, we
+    // want to read "the most common color."
+    const int kPlanes[] = {media::VideoFrame::kYPlane,
+                           media::VideoFrame::kUPlane,
+                           media::VideoFrame::kVPlane};
+    for (size_t i = 0; i < arraysize(kPlanes); ++i) {
+      current_color_[i] =
+          ComputeMedianIntensityInPlane(video_frame->row_bytes(kPlanes[i]),
+                                        video_frame->rows(kPlanes[i]),
+                                        video_frame->stride(kPlanes[i]),
+                                        video_frame->data(kPlanes[i]));
+    }
+
+    VLOG(1) << "Current video color: yuv(" << current_color_[0] << ", "
+            << current_color_[1] << ", " << current_color_[2] << ')';
+
+    {
+      base::AutoLock auto_lock(lock_);
+      const float kTargetWindow = 10.0f;
+      // Trigger the waiting thread while all color channels are within
+      // kTargetWindow of the target.
+      if (fabsf(current_color_[0] - target_color_[0]) < kTargetWindow &&
+          fabsf(current_color_[1] - target_color_[1]) < kTargetWindow &&
+          fabsf(current_color_[2] - target_color_[2]) < kTargetWindow) {
+        if (first_time_near_target_color_.is_null())
+          first_time_near_target_color_ = cast_env()->Clock()->NowTicks();
+        if (!notify_callback_.is_null())
+          notify_callback_.Run();
+      } else {
+        first_time_near_target_color_ = base::TimeTicks();
+      }
+    }
+  }
+
+  static void UpdateExponentialMovingAverage(float weight,
+                                             float sample_value,
+                                             float* average) {
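+    // Equivalent to the standard exponential moving average update:
+    //   *average = weight * sample_value + (1 - weight) * *average.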
+    *average += weight * sample_value - weight * (*average);
+    CHECK(base::IsFinite(*average));
+  }
+
+  static uint8 ComputeMedianIntensityInPlane(int width,
+                                             int height,
+                                             int stride,
+                                             uint8* data) {
+    const int num_pixels = width * height;
+    if (num_pixels <= 0)
+      return 0;
+    // If necessary, re-pack the pixels such that the stride is equal to the
+    // width.
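+    // This lets std::nth_element below treat the plane as one contiguous run
+    // of width * height bytes, with no padding between rows.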
+    if (width < stride) {
+      for (int y = 1; y < height; ++y) {
+        uint8* const src = data + y * stride;
+        uint8* const dest = data + y * width;
+        memmove(dest, src, width);
+      }
+    }
+    const size_t middle_idx = num_pixels / 2;
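+    // Partially sort so that |data[middle_idx]| holds the value it would have
+    // in a fully sorted plane, i.e. the median intensity.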
+    std::nth_element(data, data + middle_idx, data + num_pixels);
+    return data[middle_idx];
+  }
+
+  base::Lock lock_;
+  base::Closure notify_callback_;
+
+  float target_color_[3]; // Y, U, V
+  float target_tone_frequency_;
+
+  float current_color_[3]; // Y, U, V
+  base::TimeTicks first_time_near_target_color_;
+  float current_tone_frequency_;
+  base::TimeTicks first_time_near_target_tone_;
+
+  DISALLOW_COPY_AND_ASSIGN(TestPatternReceiver);
+};
+
+} // namespace
+
+class CastStreamingApiTestWithPixelOutput : public CastStreamingApiTest {
+  virtual void SetUp() OVERRIDE {
+    if (!UsingOSMesa())
+      EnablePixelOutput();
+    CastStreamingApiTest::SetUp();
+  }
+
+  virtual void SetUpCommandLine(CommandLine* command_line) OVERRIDE {
+    command_line->AppendSwitchASCII(switches::kWindowSize, "128,128");
+    CastStreamingApiTest::SetUpCommandLine(command_line);
+  }
+};
+
+// Tests the Cast streaming API and its basic functionality end-to-end. An
+// extension subtest is run to generate test content, capture that content, and
+// use the API to send it out. At the same time, this test launches an
+// in-process Cast receiver, listening on a localhost UDP socket, to receive the
+// content and check whether it matches expectations.
+//
+// Note: This test is disabled until outstanding bugs are fixed and the
+// media/cast library has achieved sufficient stability.
+// http://crbug.com/349599
+IN_PROC_BROWSER_TEST_F(CastStreamingApiTestWithPixelOutput, DISABLED_EndToEnd) {
+  // This test is too slow to succeed with OSMesa on the bots.
+  if (UsingOSMesa()) {
+ LOG(WARNING) << "Skipping this test since OSMesa is too slow on the bots. " |
+                    "Try --enable-gpu.";
+    return;
+  }
+
+  // Determine an unused UDP port for the in-process receiver to listen on.
+  // Method: Bind a UDP socket on port 0, and then check which port the
+  // operating system assigned to it.
+  net::IPAddressNumber localhost;
+  localhost.push_back(127);
+  localhost.push_back(0);
+  localhost.push_back(0);
+  localhost.push_back(1);
+  scoped_ptr<net::UDPSocket> receive_socket(
+      new net::UDPSocket(net::DatagramSocket::DEFAULT_BIND,
+                         net::RandIntCallback(),
+                         NULL,
+                         net::NetLog::Source()));
+  receive_socket->AllowAddressReuse();
+  ASSERT_EQ(net::OK, receive_socket->Bind(net::IPEndPoint(localhost, 0)));
+  net::IPEndPoint receiver_end_point;
+  ASSERT_EQ(net::OK, receive_socket->GetLocalAddress(&receiver_end_point));
+  receive_socket.reset();
+
+  // Start the in-process receiver that examines audio/video for the expected
+  // test patterns.
+  const scoped_refptr<media::cast::StandaloneCastEnvironment> cast_environment(
+      new media::cast::StandaloneCastEnvironment(
+          media::cast::CastLoggingConfig()));
+  const scoped_refptr<TestPatternReceiver> receiver(
+      new TestPatternReceiver(cast_environment, receiver_end_point));
+  receiver->Start();
+
+  // Launch the page that: 1) renders the source content; 2) uses the
+  // chrome.tabCapture and chrome.cast.streaming APIs to capture and stream its
+  // content using Cast; and 3) calls chrome.test.succeed() once it is
+  // operational.
+  const std::string page_url = base::StringPrintf(
+      "end_to_end_sender.html?port=%d", receiver_end_point.port());
+  ASSERT_TRUE(RunExtensionSubtest("cast_streaming", page_url)) << message_;
+
+  // Examine the Cast receiver for expected audio/video test patterns. The
+  // colors and tones specified here must match those in end_to_end_sender.js.
+  const uint8 kRedInYUV[3] = {82, 90, 240}; // rgb(255, 0, 0)
+  const uint8 kGreenInYUV[3] = {145, 54, 34}; // rgb(0, 255, 0)
+  const uint8 kBlueInYUV[3] = {41, 240, 110}; // rgb(0, 0, 255)
+  const base::TimeDelta kOneHalfSecond = base::TimeDelta::FromMilliseconds(500);
+  receiver->WaitForColorAndTone(kRedInYUV, 200 /* Hz */, kOneHalfSecond);
+  receiver->WaitForColorAndTone(kGreenInYUV, 500 /* Hz */, kOneHalfSecond);
+  receiver->WaitForColorAndTone(kBlueInYUV, 1800 /* Hz */, kOneHalfSecond);
+
+  // TODO(miu): Uncomment once GetWeakPtr() NULL crash in PacedSender is fixed
+  // (see http://crbug.com/349786):
+  // receiver->StopSoon();
+  cast_environment->Shutdown();
+}
+
+} // namespace extensions