Index: remoting/test/test_video_renderer_unittest.cc |
diff --git a/remoting/test/test_video_renderer_unittest.cc b/remoting/test/test_video_renderer_unittest.cc |
index 8c1d92f7fd04d30fefa57345ad3d167a0172310b..334d833232a6d05336ef8dea341a91ec0f0f3807 100644 |
--- a/remoting/test/test_video_renderer_unittest.cc |
+++ b/remoting/test/test_video_renderer_unittest.cc |
@@ -9,6 +9,7 @@ |
#include "base/memory/scoped_vector.h" |
#include "base/message_loop/message_loop.h" |
#include "base/run_loop.h" |
+#include "base/thread_task_runner_handle.h" |
#include "base/timer/timer.h" |
#include "media/base/video_frame.h" |
#include "remoting/codec/video_encoder.h" |
@@ -20,12 +21,25 @@ |
#include "third_party/webrtc/modules/desktop_capture/desktop_region.h" |
namespace { |
-const int kBytesPerPixel = 4; |
-const int kDefaultScreenWidth = 1024; |
-const int kDefaultScreenHeight = 768; |
-const double kDefaultErrorLimit = 0.02; |
+ |
+// Used to verify if image pattern is matched. |
+void SecondPacketDoneHandler(const base::Closure& done_closure, |
joedow
2015/07/13 19:30:03
I would remove the 'Second' string from the name o
liaoyuke
2015/07/13 20:43:19
Yes, this should be more general.
|
+ bool* second_packet_done_is_called) { |
+ *second_packet_done_is_called = true; |
+ done_closure.Run(); |
} |
+const int kDefaultScreenWidthPx = 1024; |
+const int kDefaultScreenHeightPx = 768; |
+ |
+// Default max error for encoding and decoding, measured in percent. |
+const double kDefaultErrorLimit = 0.02; |
+ |
+// Default expected rect for image pattern, measured in pixels. |
+const webrtc::DesktopRect kDefaultExpectedRect = |
+ webrtc::DesktopRect::MakeLTRB(100, 100, 200, 200); |
+} // namespace |
+ |
namespace remoting { |
namespace test { |
@@ -36,15 +50,17 @@ class TestVideoRendererTest : public testing::Test { |
TestVideoRendererTest(); |
~TestVideoRendererTest() override; |
- // Generate a frame containing a gradient and test decoding of |
- // TestVideoRenderer. The original frame is compared to the one obtained from |
- // decoding the video packet, and the error at each pixel is the root mean |
- // square of the errors in the R, G and B components, each normalized to |
- // [0, 1]. This routine checks that the mean error over all pixels do not |
- // exceed a given limit. |
+ // Handles creating a frame and sending to TestVideoRenderer for processing. |
void TestVideoPacketProcessing(int screen_width, int screen_height, |
double error_limit); |
+ // Handles setting an image pattern and sending a frame to TestVideoRenderer. |
+ // |expect_to_match| indicates if the image pattern is expected to match. |
+ void TestImagePatternMatch(int screen_width, |
+ int screen_height, |
+ const webrtc::DesktopRect& expected_rect, |
+ bool expect_to_match); |
+ |
// Generate a basic desktop frame containing a gradient. |
scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient( |
int screen_width, int screen_height) const; |
@@ -66,7 +82,21 @@ class TestVideoRendererTest : public testing::Test { |
// testing::Test interface. |
void SetUp() override; |
- // return the mean error of two frames. |
+ // Sets the image pattern, sends a video packet, and returns whether the |
+ // expected pattern was matched. |
+ bool SendPacketAndWaitForMatch(scoped_ptr<VideoPacket> packet, |
+ const webrtc::DesktopRect& expected_rect, |
+ uint32_t expected_color); |
+ |
+ // Returns the average color value of the pixels that fall within |rect|. |
+ // NOTE: Does not take ownership of |frame|; callers must not release it. |
+ uint32_t CalculateAverageColorValueForFrame( |
+ const webrtc::DesktopFrame* frame, |
+ const webrtc::DesktopRect& rect) const; |
+ |
+ // Return the mean error of two frames over all pixels, where error at each |
+ // pixel is the root mean square of the errors in the R, G and B components, |
+ // each normalized to [0, 1]. |
double CalculateError(const webrtc::DesktopFrame* original_frame, |
const webrtc::DesktopFrame* decoded_frame) const; |
@@ -99,25 +129,149 @@ void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, |
DCHECK(encoder_); |
DCHECK(test_video_renderer_); |
+ // Generate a frame containing a gradient. |
scoped_ptr<webrtc::DesktopFrame> original_frame = |
CreateDesktopFrameWithGradient(screen_width, screen_height); |
EXPECT_TRUE(original_frame); |
+ |
scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get()); |
+ |
DCHECK(!run_loop_ || !run_loop_->running()); |
+ DCHECK(!timer_->IsRunning()); |
run_loop_.reset(new base::RunLoop()); |
+ // Use a generous 10 min timeout: long enough for slow bots, yet a hung |
joedow
2015/07/13 19:30:03
nit: Can you describe why it isn't shorter, in the
liaoyuke
2015/07/13 20:43:19
Done.
|
+ timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10), |
+ run_loop_->QuitClosure()); |
+ |
// Wait for the video packet to be processed and rendered to buffer. |
test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
run_loop_->QuitClosure()); |
+ |
run_loop_->Run(); |
+ EXPECT_TRUE(timer_->IsRunning()); |
+ timer_->Stop(); |
+ run_loop_.reset(); |
scoped_ptr<webrtc::DesktopFrame> buffer_copy = |
- test_video_renderer_->GetBufferForTest(); |
+ test_video_renderer_->GetCurrentFrameForTest(); |
EXPECT_NE(buffer_copy, nullptr); |
+ |
+ // The original frame is compared to the decoded video frame to check that |
+ // the mean error over all pixels does not exceed a given limit. |
double error = CalculateError(original_frame.get(), buffer_copy.get()); |
EXPECT_LT(error, error_limit); |
} |
+bool TestVideoRendererTest::SendPacketAndWaitForMatch( |
+ scoped_ptr<VideoPacket> packet, |
+ const webrtc::DesktopRect& expected_rect, |
+ uint32_t expected_color) { |
+ DCHECK(!run_loop_ || !run_loop_->running()); |
+ DCHECK(!timer_->IsRunning()); |
+ run_loop_.reset(new base::RunLoop()); |
+ |
+ // Use a generous 10 min timeout: long enough for slow bots, yet a hung |
joedow
2015/07/13 19:30:03
same here, a comment would be good to help prevent
liaoyuke
2015/07/13 20:43:19
Done.
|
+ timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10), |
+ run_loop_->QuitClosure()); |
+ |
+ // Set expected image pattern. |
+ test_video_renderer_->SetImagePatternAndMatchedCallback( |
+ expected_rect, expected_color, run_loop_->QuitClosure()); |
+ |
+ // Used to verify if the expected image pattern will be matched by |packet|. |
+ scoped_ptr<VideoPacket> packet_copy(new VideoPacket(*packet.get())); |
+ |
+ // Post first test packet: |packet|. |
+ test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
+ base::Bind(&base::DoNothing)); |
+ |
+ // Second packet: |packet_copy| is posted, and |second_packet_done_callback| |
+ // will always be posted back to main thread, however, whether it will be |
+ // called depends on whether the expected pattern is matched or not. |
+ bool second_packet_done_is_called = false; |
+ base::Closure second_packet_done_callback = |
+ base::Bind(&SecondPacketDoneHandler, run_loop_->QuitClosure(), |
+ &second_packet_done_is_called); |
+ |
+ test_video_renderer_->ProcessVideoPacket(packet_copy.Pass(), |
+ second_packet_done_callback); |
+ |
+ run_loop_->Run(); |
+ EXPECT_TRUE(timer_->IsRunning()); |
+ timer_->Stop(); |
+ run_loop_.reset(); |
+ |
+ // If the expected image pattern is matched, the QuitClosure of |run_loop_| |
+ // runs before |second_packet_done_callback|, leaving |
+ // |second_packet_done_is_called| set to false. |
+ bool image_pattern_is_matched = !second_packet_done_is_called; |
+ |
+ return image_pattern_is_matched; |
+} |
+ |
+void TestVideoRendererTest::TestImagePatternMatch( |
+ int screen_width, |
+ int screen_height, |
+ const webrtc::DesktopRect& expected_rect, |
+ bool expect_to_match) { |
+ DCHECK(encoder_); |
+ DCHECK(test_video_renderer_); |
+ |
+ scoped_ptr<webrtc::DesktopFrame> frame = |
+ CreateDesktopFrameWithGradient(screen_width, screen_height); |
+ uint32_t expected_color = |
+ CalculateAverageColorValueForFrame(frame.get(), expected_rect); |
+ scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); |
+ |
+ if (expect_to_match) { |
+ EXPECT_TRUE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect, |
+ expected_color)); |
+ } else { |
+ // Shift each channel by 128. |
+ // e.g. (10, 127, 200) -> (138, 0, 73), given the "% 255" wrap-around. |
+ // In this way, the error between expected color and true value is always |
+ // around 0.5. |
+ int red_shift = (((expected_color >> 16) & 0xFF) + 128) % 255; |
+ int green_shift = (((expected_color >> 8) & 0xFF) + 128) % 255; |
+ int blue_shift = ((expected_color & 0xFF) + 128) % 255; |
+ |
+ int expected_color_shift = |
+ 0xFF000000 | (red_shift << 16) | (green_shift << 8) | blue_shift; |
+ |
+ EXPECT_FALSE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect, |
+ expected_color_shift)); |
+ } |
+} |
+ |
+uint32_t TestVideoRendererTest::CalculateAverageColorValueForFrame( |
+ const webrtc::DesktopFrame* frame, |
+ const webrtc::DesktopRect& rect) const { |
+ int red_sum = 0; |
+ int green_sum = 0; |
+ int blue_sum = 0; |
+ |
+ // Loop through the pixels that fall within |rect| to obtain the |
+ // average color value. |
+ for (int y = rect.top(); y < rect.bottom(); ++y) { |
+ uint8_t* frame_pos = |
+ frame->data() + (y * frame->stride() + |
+ rect.left() * webrtc::DesktopFrame::kBytesPerPixel); |
+ |
+ // Pixels of decoded video frame are presented in ARGB format. |
+ for (int x = 0; x < rect.width(); ++x) { |
+ red_sum += frame_pos[2]; |
+ green_sum += frame_pos[1]; |
+ blue_sum += frame_pos[0]; |
+ frame_pos += 4; |
+ } |
+ } |
+ |
+ int area = rect.width() * rect.height(); |
+ return 0xFF000000 | ((red_sum / area) << 16) | ((green_sum / area) << 8) | |
+ (blue_sum / area); |
+} |
+ |
double TestVideoRendererTest::CalculateError( |
const webrtc::DesktopFrame* original_frame, |
const webrtc::DesktopFrame* decoded_frame) const { |
@@ -156,7 +310,7 @@ double TestVideoRendererTest::CalculateError( |
for (int width = 0; width < screen_width; ++width) { |
// Errors are calculated in the R, G, B components. |
for (int j = 0; j < 3; ++j) { |
- int offset = kBytesPerPixel * width + j; |
+ int offset = webrtc::DesktopFrame::kBytesPerPixel * width + j; |
double original_value = static_cast<double>(*(original_ptr + offset)); |
double decoded_value = static_cast<double>(*(decoded_ptr + offset)); |
double error = original_value - decoded_value; |
@@ -201,7 +355,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) { |
encoder_ = VideoEncoderVpx::CreateForVP8(); |
test_video_renderer_->SetCodecForDecoding( |
protocol::ChannelConfig::CODEC_VP8); |
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
kDefaultErrorLimit); |
} |
@@ -210,7 +364,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) { |
encoder_ = VideoEncoderVpx::CreateForVP9(); |
test_video_renderer_->SetCodecForDecoding( |
protocol::ChannelConfig::CODEC_VP9); |
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
kDefaultErrorLimit); |
} |
@@ -220,7 +374,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) { |
encoder_.reset(new VideoEncoderVerbatim()); |
test_video_renderer_->SetCodecForDecoding( |
protocol::ChannelConfig::CODEC_VERBATIM); |
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
kDefaultErrorLimit); |
} |
@@ -238,8 +392,8 @@ TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) { |
ScopedVector<VideoPacket> video_packets; |
for (int i = 0; i < task_num; ++i) { |
scoped_ptr<webrtc::DesktopFrame> original_frame = |
- CreateDesktopFrameWithGradient(kDefaultScreenWidth, |
- kDefaultScreenHeight); |
+ CreateDesktopFrameWithGradient(kDefaultScreenWidthPx, |
+ kDefaultScreenHeightPx); |
video_packets.push_back(encoder_->Encode(*original_frame.get())); |
} |
@@ -258,14 +412,94 @@ TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) { |
test_video_renderer_->SetCodecForDecoding( |
protocol::ChannelConfig::Codec::CODEC_VP8); |
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
kDefaultErrorLimit); |
- TestVideoPacketProcessing(2 * kDefaultScreenWidth, 2 * kDefaultScreenHeight, |
- kDefaultErrorLimit); |
+ TestVideoPacketProcessing(2 * kDefaultScreenWidthPx, |
+ 2 * kDefaultScreenHeightPx, kDefaultErrorLimit); |
- TestVideoPacketProcessing(kDefaultScreenWidth / 2, kDefaultScreenHeight / 2, |
- kDefaultErrorLimit); |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx / 2, |
+ kDefaultScreenHeightPx / 2, kDefaultErrorLimit); |
+} |
+ |
+// Verify setting expected image pattern doesn't break video packet processing. |
+TEST_F(TestVideoRendererTest, VerifySetExpectedImagePattern) { |
+ encoder_ = VideoEncoderVpx::CreateForVP8(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP8); |
+ |
+ DCHECK(encoder_); |
+ DCHECK(test_video_renderer_); |
+ |
+ scoped_ptr<webrtc::DesktopFrame> frame = CreateDesktopFrameWithGradient( |
+ kDefaultScreenWidthPx, kDefaultScreenHeightPx); |
+ |
+ // Since we don't care whether expected image pattern is matched or not in |
+ // this case, an expected color is chosen arbitrarily. |
+ uint32_t black_color = 0xFF000000; |
+ |
+ // Set expected image pattern. |
+ test_video_renderer_->SetImagePatternAndMatchedCallback( |
+ kDefaultExpectedRect, black_color, base::Bind(&base::DoNothing)); |
+ |
+ // Post test video packet. |
+ scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); |
+ test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
+ base::Bind(&base::DoNothing)); |
+} |
+ |
+// Verify correct image pattern can be matched for VP8. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP8) { |
+ encoder_ = VideoEncoderVpx::CreateForVP8(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP8); |
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect, true); |
+} |
+ |
+// Verify expected image pattern can be matched for VP9. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP9) { |
+ encoder_ = VideoEncoderVpx::CreateForVP9(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP9); |
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect, true); |
+} |
+ |
+// Verify expected image pattern can be matched for VERBATIM. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVERBATIM) { |
+ encoder_.reset(new VideoEncoderVerbatim()); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VERBATIM); |
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect, true); |
+} |
+ |
+// Verify incorrect image pattern shouldn't be matched for VP8. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP8) { |
+ encoder_ = VideoEncoderVpx::CreateForVP8(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP8); |
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect, false); |
+} |
+ |
+// Verify incorrect image pattern shouldn't be matched for VP9. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP9) { |
+ encoder_ = VideoEncoderVpx::CreateForVP9(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP9); |
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect, false); |
+} |
+ |
+// Verify incorrect image pattern shouldn't be matched for VERBATIM. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVERBATIM) { |
+ encoder_.reset(new VideoEncoderVerbatim()); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VERBATIM); |
+ TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect, false); |
} |
} // namespace test |