Index: remoting/test/test_video_renderer_unittest.cc |
diff --git a/remoting/test/test_video_renderer_unittest.cc b/remoting/test/test_video_renderer_unittest.cc |
index 8c1d92f7fd04d30fefa57345ad3d167a0172310b..e08edd8205dd3973b91c95dbca15af1807160d64 100644 |
--- a/remoting/test/test_video_renderer_unittest.cc |
+++ b/remoting/test/test_video_renderer_unittest.cc |
@@ -9,6 +9,7 @@ |
#include "base/memory/scoped_vector.h" |
#include "base/message_loop/message_loop.h" |
#include "base/run_loop.h" |
+#include "base/thread_task_runner_handle.h" |
#include "base/timer/timer.h" |
#include "media/base/video_frame.h" |
#include "remoting/codec/video_encoder.h" |
@@ -20,12 +21,25 @@ |
#include "third_party/webrtc/modules/desktop_capture/desktop_region.h" |
namespace { |
-const int kBytesPerPixel = 4; |
-const int kDefaultScreenWidth = 1024; |
-const int kDefaultScreenHeight = 768; |
-const double kDefaultErrorLimit = 0.02; |
+ |
+// Used to verify whether the image pattern is matched. |
+void PatternMatchedVerifyPacketDoneHandler(const base::Closure& done_closure, |
+ bool* image_pattern_is_matched) { |
+ *image_pattern_is_matched = false; |
+ done_closure.Run(); |
} |
+const int kDefaultScreenWidthPx = 1024; |
+const int kDefaultScreenHeightPx = 768; |
+ |
+// Default max error for encoding and decoding, expressed as a fraction |
+// (e.g. 0.02 = 2%). |
+const double kDefaultErrorLimit = 0.02; |
+ |
+// Default expected rect for image pattern, measured in pixels. |
+const webrtc::DesktopRect kDefaultExpectedRect = |
+ webrtc::DesktopRect::MakeLTRB(100, 100, 200, 200); |
+} // namespace |
+ |
namespace remoting { |
namespace test { |
@@ -36,15 +50,23 @@ class TestVideoRendererTest : public testing::Test { |
TestVideoRendererTest(); |
~TestVideoRendererTest() override; |
- // Generate a frame containing a gradient and test decoding of |
- // TestVideoRenderer. The original frame is compared to the one obtained from |
- // decoding the video packet, and the error at each pixel is the root mean |
- // square of the errors in the R, G and B components, each normalized to |
- // [0, 1]. This routine checks that the mean error over all pixels do not |
- // exceed a given limit. |
+ // Handles creating a frame and sending it to the TestVideoRenderer for |
+ // processing. |
void TestVideoPacketProcessing(int screen_width, int screen_height, |
double error_limit); |
+ // Handles setting an image pattern and sending a frame which is expected to |
+ // be matched, causing the TestVideoRenderer to run the matched callback. |
+ void TestImagePatternMatchAndCallback( |
+ int screen_width, |
+ int screen_height, |
+ const webrtc::DesktopRect& expected_rect); |
+ |
+ // Handles setting an image pattern and sending a frame which is not expected |
+ // to be matched by the TestVideoRenderer. |
+ void TestImagePatternNotMatch(int screen_width, |
+ int screen_height, |
+ const webrtc::DesktopRect& expected_rect); |
joedow
2015/07/13 16:27:51
I think the TestImagePattern*Match functions arte
liaoyuke
2015/07/13 18:12:59
Done.
+ |
// Generate a basic desktop frame containing a gradient. |
scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient( |
int screen_width, int screen_height) const; |
@@ -66,7 +88,21 @@ class TestVideoRendererTest : public testing::Test { |
// testing::Test interface. |
void SetUp() override; |
- // return the mean error of two frames. |
+ // Sets the image pattern, sends the video packet, and returns whether the |
+ // expected pattern was matched. |
+ bool SendPacketAndWaitForMatch(scoped_ptr<VideoPacket> packet, |
+ const webrtc::DesktopRect& expected_rect, |
+ RGBA32 expected_color); |
+ |
+ // Returns the average color value of the pixels that fall within |rect|. |
+ // NOTE: Callers should not release the objects. |
+ RGBA32 CalculateAverageColorValueForFrame( |
+ const webrtc::DesktopFrame* frame, |
+ const webrtc::DesktopRect& rect) const; |
+ |
+ // return the mean error of two frames over all pixels, where error at each |
joedow
2015/07/13 16:27:51
nit: capitalize 'r' in return.
liaoyuke
2015/07/13 18:12:59
Done.
+ // pixel is the root mean square of the errors in the R, G and B components, |
+ // each normalized to [0, 1]. |
double CalculateError(const webrtc::DesktopFrame* original_frame, |
const webrtc::DesktopFrame* decoded_frame) const; |
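As a rough illustration of the metric described in the comment above (the per-pixel error is the root mean square of the normalized R, G and B differences, and the returned value is the mean of that over all pixels), here is a minimal stand-alone sketch for a single pixel, using made-up channel values:

    #include <cmath>
    #include <cstdio>

    // Per-pixel error as described above: RMS of the normalized R, G and B
    // differences. CalculateError averages this quantity over every pixel.
    int main() {
      // One pixel: original (200, 100, 50) vs. decoded (198, 103, 50).
      double dr = (200 - 198) / 255.0;
      double dg = (100 - 103) / 255.0;
      double db = (50 - 50) / 255.0;
      double pixel_error = std::sqrt((dr * dr + dg * dg + db * db) / 3.0);
      std::printf("per-pixel error = %f\n", pixel_error);  // ~0.0082
      return 0;
    }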
@@ -99,9 +135,11 @@ void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, |
DCHECK(encoder_); |
DCHECK(test_video_renderer_); |
+ // Generate a frame containing a gradient |
joedow
2015/07/13 16:27:51
nit: add period to end of comment.
liaoyuke
2015/07/13 18:12:59
Done.
scoped_ptr<webrtc::DesktopFrame> original_frame = |
CreateDesktopFrameWithGradient(screen_width, screen_height); |
EXPECT_TRUE(original_frame); |
+ |
scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get()); |
DCHECK(!run_loop_ || !run_loop_->running()); |
run_loop_.reset(new base::RunLoop()); |
@@ -109,15 +147,131 @@ void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, |
// Wait for the video packet to be processed and rendered to buffer. |
test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
run_loop_->QuitClosure()); |
+ |
run_loop_->Run(); |
scoped_ptr<webrtc::DesktopFrame> buffer_copy = |
- test_video_renderer_->GetBufferForTest(); |
+ test_video_renderer_->GetCurrentFrameForTest(); |
EXPECT_NE(buffer_copy, nullptr); |
+ |
+ // The original frame is compared to the decoded video frame to checks that |
joedow
2015/07/13 16:27:51
nit: s/checks/check
liaoyuke
2015/07/13 18:12:59
Done.
+ // the mean error over all pixels do not exceed a given limit. |
joedow
2015/07/13 16:27:51
nit: s/do/does
liaoyuke
2015/07/13 18:13:00
Done.
double error = CalculateError(original_frame.get(), buffer_copy.get()); |
EXPECT_LT(error, error_limit); |
} |
+bool TestVideoRendererTest::SendPacketAndWaitForMatch( |
+ scoped_ptr<VideoPacket> packet, |
+ const webrtc::DesktopRect& expected_rect, |
+ RGBA32 expected_color) { |
+ DCHECK(!run_loop_ || !run_loop_->running()); |
+ run_loop_.reset(new base::RunLoop()); |
+ |
+ // Set expected image pattern. |
+ test_video_renderer_->SetImagePatternAndMatchedCallback( |
+ expected_rect, expected_color, run_loop_->QuitClosure()); |
+ |
+ // Used to verify if the expected image pattern will be matched by |packet|. |
+ scoped_ptr<VideoPacket> packet_copy(new VideoPacket(*packet.get())); |
+ |
+ // Post test video packet. |
+ test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
+ base::Bind(&base::DoNothing)); |
+ |
+ // A copy of the packet is sent with PatternMatchedVerifyPacketDoneHandler |
+ // as its done callback, to detect whether the matched callback has already |
+ // fired. |
+ bool image_pattern_is_matched = true; |
joedow
2015/07/13 16:27:51
This is really confusing, you are setting the imag
liaoyuke
2015/07/13 18:13:00
Done.
+ base::Closure pattern_matched_verify_packet_done_callback = |
joedow
2015/07/13 16:27:51
This is a pretty long name, I think you can just c
liaoyuke
2015/07/13 18:13:00
Done.
+ base::Bind(&PatternMatchedVerifyPacketDoneHandler, |
+ run_loop_->QuitClosure(), &image_pattern_is_matched); |
+ |
+ test_video_renderer_->ProcessVideoPacket( |
+ packet_copy.Pass(), pattern_matched_verify_packet_done_callback); |
+ |
+ run_loop_->Run(); |
+ run_loop_.reset(); |
+ |
+ return image_pattern_is_matched; |
+} |
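The return value above is decided by a race between two callbacks, which is easy to miss on first read. A minimal stand-alone model of the intended behavior (no Chromium dependencies, and assuming the renderer processes the two packets in FIFO order):

    #include <cstdio>
    #include <functional>
    #include <queue>

    int main() {
      bool image_pattern_is_matched = true;  // Stays true unless the copy's
                                             // done handler runs first.
      bool quit = false;                     // Models the RunLoop quitting.
      std::queue<std::function<void()>> tasks;

      bool frame_matches_pattern = true;  // Flip to model the mismatch case.

      // Packet #1 (the real packet): the matched callback, i.e. the quit
      // closure, runs only if the decoded frame matches the expected pattern.
      tasks.push([&] {
        if (frame_matches_pattern)
          quit = true;
      });

      // Packet #2 (the copy): its done handler marks the pattern as not
      // matched and quits; in the real test this only matters when packet #1
      // has not already quit the loop.
      tasks.push([&] {
        image_pattern_is_matched = false;
        quit = true;
      });

      while (!quit && !tasks.empty()) {
        tasks.front()();
        tasks.pop();
      }
      std::printf("%s\n", image_pattern_is_matched ? "matched" : "not matched");
      return 0;
    }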
+ |
+void TestVideoRendererTest::TestImagePatternMatchAndCallback( |
+ int screen_width, |
+ int screen_height, |
+ const webrtc::DesktopRect& expected_rect) { |
+ DCHECK(encoder_); |
+ DCHECK(test_video_renderer_); |
+ |
+ scoped_ptr<webrtc::DesktopFrame> frame = |
+ CreateDesktopFrameWithGradient(screen_width, screen_height); |
+ RGBA32 expected_color = |
+ CalculateAverageColorValueForFrame(frame.get(), expected_rect); |
+ |
+ scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); |
+ bool image_pattern_is_matched = |
+ SendPacketAndWaitForMatch(packet.Pass(), expected_rect, expected_color); |
+ |
+ EXPECT_TRUE(image_pattern_is_matched); |
+} |
+ |
+void TestVideoRendererTest::TestImagePatternNotMatch( |
+ int screen_width, |
+ int screen_height, |
+ const webrtc::DesktopRect& expected_rect) { |
+ DCHECK(encoder_); |
+ DCHECK(test_video_renderer_); |
+ |
+ scoped_ptr<webrtc::DesktopFrame> frame = |
+ CreateDesktopFrameWithGradient(screen_width, screen_height); |
+ RGBA32 expected_color = |
+ CalculateAverageColorValueForFrame(frame.get(), expected_rect); |
+ |
+ // Shift each channel by 128. |
+ // e.g. (10, 127, 200) -> (138, 0, 73). |
+ // In this way, the error between expected color and true value is always |
+ // around 0.5. |
+ int red_shift = (((expected_color >> 16) & 0xFF) + 128) % 255; |
+ int green_shift = (((expected_color >> 8) & 0xFF) + 128) % 255; |
+ int blue_shift = ((expected_color & 0xFF) + 128) % 255; |
+ |
+ int expected_color_shift = |
+ 0xFF000000 | (red_shift << 16) | (green_shift << 8) | blue_shift; |
+ |
+ scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); |
+ |
+ bool image_pattern_is_matched = SendPacketAndWaitForMatch( |
+ packet.Pass(), expected_rect, expected_color_shift); |
+ |
+ EXPECT_FALSE(image_pattern_is_matched); |
+} |
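A quick stand-alone check of the channel shift above, plugging in the example values from the comment (this assumes the same "(value + 128) % 255" arithmetic as the test code):

    #include <cstdint>
    #include <cstdio>

    // Each R/G/B channel moves roughly half the range away from its original
    // value, so the normalized per-channel error stays close to 0.5.
    int main() {
      uint32_t expected_color = 0xFF0A7FC8;  // ARGB: R = 10, G = 127, B = 200.
      int red_shift = (((expected_color >> 16) & 0xFF) + 128) % 255;   // 138
      int green_shift = (((expected_color >> 8) & 0xFF) + 128) % 255;  // 0
      int blue_shift = ((expected_color & 0xFF) + 128) % 255;          // 73
      uint32_t shifted =
          0xFF000000u | (red_shift << 16) | (green_shift << 8) | blue_shift;
      std::printf("0x%08X -> 0x%08X\n", (unsigned)expected_color,
                  (unsigned)shifted);
      return 0;
    }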
+ |
+RGBA32 TestVideoRendererTest::CalculateAverageColorValueForFrame( |
+ const webrtc::DesktopFrame* frame, |
+ const webrtc::DesktopRect& rect) const { |
+ int red_sum = 0; |
+ int green_sum = 0; |
+ int blue_sum = 0; |
+ |
+ // Loop through pixels that fall within |rect| to obtain the |
+ // average color value. |
+ for (int y = rect.top(); y < rect.bottom(); ++y) { |
+ uint8_t* frame_pos = |
+ frame->data() + (y * frame->stride() + |
+ rect.left() * webrtc::DesktopFrame::kBytesPerPixel); |
+ |
+ // Pixels of decoded video frame are presented in ARGB format. |
+ for (int x = 0; x < rect.width(); ++x) { |
+ red_sum += frame_pos[2]; |
+ green_sum += frame_pos[1]; |
+ blue_sum += frame_pos[0]; |
+ frame_pos += 4; |
+ } |
+ } |
+ |
+ int area = rect.width() * rect.height(); |
+ return 0xFF000000 | ((red_sum / area) << 16) | ((green_sum / area) << 8) | |
+ (blue_sum / area); |
+} |
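The indices used above ([2] for red, [1] for green, [0] for blue) follow from the little-endian memory layout that the decoded ARGB frames are assumed to use here; a minimal stand-alone check:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // On little-endian hardware a 32-bit ARGB pixel is laid out in memory as
    // B, G, R, A, which is why red is read at offset 2, green at 1, blue at 0.
    int main() {
      uint32_t argb = 0xFF112233;  // A = 0xFF, R = 0x11, G = 0x22, B = 0x33.
      uint8_t bytes[4];
      std::memcpy(bytes, &argb, sizeof(argb));
      std::printf("B=0x%02X G=0x%02X R=0x%02X A=0x%02X\n",
                  bytes[0], bytes[1], bytes[2], bytes[3]);
      return 0;
    }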
+ |
double TestVideoRendererTest::CalculateError( |
const webrtc::DesktopFrame* original_frame, |
const webrtc::DesktopFrame* decoded_frame) const { |
@@ -156,7 +310,7 @@ double TestVideoRendererTest::CalculateError( |
for (int width = 0; width < screen_width; ++width) { |
// Errors are calculated in the R, G, B components. |
for (int j = 0; j < 3; ++j) { |
- int offset = kBytesPerPixel * width + j; |
+ int offset = webrtc::DesktopFrame::kBytesPerPixel * width + j; |
double original_value = static_cast<double>(*(original_ptr + offset)); |
double decoded_value = static_cast<double>(*(decoded_ptr + offset)); |
double error = original_value - decoded_value; |
@@ -201,7 +355,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) { |
encoder_ = VideoEncoderVpx::CreateForVP8(); |
test_video_renderer_->SetCodecForDecoding( |
protocol::ChannelConfig::CODEC_VP8); |
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
kDefaultErrorLimit); |
} |
@@ -210,7 +364,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) { |
encoder_ = VideoEncoderVpx::CreateForVP9(); |
test_video_renderer_->SetCodecForDecoding( |
protocol::ChannelConfig::CODEC_VP9); |
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
kDefaultErrorLimit); |
} |
@@ -220,7 +374,7 @@ TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) { |
encoder_.reset(new VideoEncoderVerbatim()); |
test_video_renderer_->SetCodecForDecoding( |
protocol::ChannelConfig::CODEC_VERBATIM); |
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
kDefaultErrorLimit); |
} |
@@ -238,8 +392,8 @@ TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) { |
ScopedVector<VideoPacket> video_packets; |
for (int i = 0; i < task_num; ++i) { |
scoped_ptr<webrtc::DesktopFrame> original_frame = |
- CreateDesktopFrameWithGradient(kDefaultScreenWidth, |
- kDefaultScreenHeight); |
+ CreateDesktopFrameWithGradient(kDefaultScreenWidthPx, |
+ kDefaultScreenHeightPx); |
video_packets.push_back(encoder_->Encode(*original_frame.get())); |
} |
@@ -258,14 +412,94 @@ TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) { |
test_video_renderer_->SetCodecForDecoding( |
protocol::ChannelConfig::Codec::CODEC_VP8); |
- TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
kDefaultErrorLimit); |
- TestVideoPacketProcessing(2 * kDefaultScreenWidth, 2 * kDefaultScreenHeight, |
- kDefaultErrorLimit); |
+ TestVideoPacketProcessing(2 * kDefaultScreenWidthPx, |
+ 2 * kDefaultScreenHeightPx, kDefaultErrorLimit); |
- TestVideoPacketProcessing(kDefaultScreenWidth / 2, kDefaultScreenHeight / 2, |
- kDefaultErrorLimit); |
+ TestVideoPacketProcessing(kDefaultScreenWidthPx / 2, |
+ kDefaultScreenHeightPx / 2, kDefaultErrorLimit); |
+} |
+ |
+// Verify setting expected image pattern doesn't break video packet processing. |
+TEST_F(TestVideoRendererTest, VerifySetExpectedImagePattern) { |
+ encoder_ = VideoEncoderVpx::CreateForVP8(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP8); |
+ |
+ DCHECK(encoder_); |
+ DCHECK(test_video_renderer_); |
+ |
+ scoped_ptr<webrtc::DesktopFrame> frame = CreateDesktopFrameWithGradient( |
+ kDefaultScreenWidthPx, kDefaultScreenHeightPx); |
+ |
+ // Since we don't care whether the expected image pattern is matched in this |
+ // case, the expected color is chosen arbitrarily. |
+ RGBA32 black_color = 0xFF000000; |
+ |
+ // Set expected image pattern. |
+ test_video_renderer_->SetImagePatternAndMatchedCallback( |
+ kDefaultExpectedRect, black_color, base::Bind(&base::DoNothing)); |
+ |
+ // Post test video packet. |
+ scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); |
+ test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
+ base::Bind(&base::DoNothing)); |
+} |
+ |
+// Verify correct image pattern can be matched for VP8. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP8) { |
+ encoder_ = VideoEncoderVpx::CreateForVP8(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP8); |
+ TestImagePatternMatchAndCallback( |
+ kDefaultScreenWidthPx, kDefaultScreenHeightPx, kDefaultExpectedRect); |
+} |
+ |
+// Verify expected image pattern can be matched for VP9. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP9) { |
+ encoder_ = VideoEncoderVpx::CreateForVP9(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP9); |
+ TestImagePatternMatchAndCallback( |
+ kDefaultScreenWidthPx, kDefaultScreenHeightPx, kDefaultExpectedRect); |
+} |
+ |
+// Verify expected image pattern can be matched for VERBATIM. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVERBATIM) { |
+ encoder_.reset(new VideoEncoderVerbatim()); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VERBATIM); |
+ TestImagePatternMatchAndCallback( |
+ kDefaultScreenWidthPx, kDefaultScreenHeightPx, kDefaultExpectedRect); |
+} |
+ |
+// Verify incorrect image pattern shouldn't be matched for VP8. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP8) { |
+ encoder_ = VideoEncoderVpx::CreateForVP8(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP8); |
+ TestImagePatternNotMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect); |
+} |
+ |
+// Verify incorrect image pattern shouldn't be matched for VP9. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP9) { |
+ encoder_ = VideoEncoderVpx::CreateForVP9(); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VP9); |
+ TestImagePatternNotMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect); |
+} |
+ |
+// Verify incorrect image pattern shouldn't be matched for VERBATIM. |
+TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVERBATIM) { |
+ encoder_.reset(new VideoEncoderVerbatim()); |
+ test_video_renderer_->SetCodecForDecoding( |
+ protocol::ChannelConfig::Codec::CODEC_VERBATIM); |
+ TestImagePatternNotMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
+ kDefaultExpectedRect); |
} |
} // namespace test |