OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/test/test_video_renderer.h" | 5 #include "remoting/test/test_video_renderer.h" |
6 | 6 |
7 #include <cmath> | 7 #include <cmath> |
8 | 8 |
9 #include "base/memory/scoped_vector.h" | 9 #include "base/memory/scoped_vector.h" |
10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
11 #include "base/run_loop.h" | 11 #include "base/run_loop.h" |
12 #include "base/thread_task_runner_handle.h" | |
12 #include "base/timer/timer.h" | 13 #include "base/timer/timer.h" |
13 #include "media/base/video_frame.h" | 14 #include "media/base/video_frame.h" |
14 #include "remoting/codec/video_encoder.h" | 15 #include "remoting/codec/video_encoder.h" |
15 #include "remoting/codec/video_encoder_verbatim.h" | 16 #include "remoting/codec/video_encoder_verbatim.h" |
16 #include "remoting/codec/video_encoder_vpx.h" | 17 #include "remoting/codec/video_encoder_vpx.h" |
17 #include "remoting/proto/video.pb.h" | 18 #include "remoting/proto/video.pb.h" |
18 #include "testing/gtest/include/gtest/gtest.h" | 19 #include "testing/gtest/include/gtest/gtest.h" |
19 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" | 20 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" |
20 #include "third_party/webrtc/modules/desktop_capture/desktop_region.h" | 21 #include "third_party/webrtc/modules/desktop_capture/desktop_region.h" |
21 | 22 |
22 namespace { | 23 namespace { |
23 const int kBytesPerPixel = 4; | 24 |
24 const int kDefaultScreenWidth = 1024; | 25 // Records that the image pattern was not matched, then signals completion. |
25 const int kDefaultScreenHeight = 768; | 26 void PatternMatchedVerifyPacketDoneHandler(const base::Closure& done_closure, |
27 bool* image_pattern_is_matched) { | |
28 *image_pattern_is_matched = false; | |
29 done_closure.Run(); | |
30 } | |
31 | |
32 const int kDefaultScreenWidthPx = 1024; | |
33 const int kDefaultScreenHeightPx = 768; | |
34 | |
35 // Default max error for encoding and decoding, expressed as a fraction (0.02 == 2%). | |
26 const double kDefaultErrorLimit = 0.02; | 36 const double kDefaultErrorLimit = 0.02; |
27 } | 37 |
38 // Default expected rect for image pattern, measured in pixels. | |
39 const webrtc::DesktopRect kDefaultExpectedRect = | |
40 webrtc::DesktopRect::MakeLTRB(100, 100, 200, 200); | |
41 } // namespace | |
28 | 42 |
29 namespace remoting { | 43 namespace remoting { |
30 namespace test { | 44 namespace test { |
31 | 45 |
32 // Provides basic functionality for the TestVideoRenderer tests below. | 46 // Provides basic functionality for the TestVideoRenderer tests below. |
33 // This fixture also creates a MessageLoop to test decoding video packets. | 47 // This fixture also creates a MessageLoop to test decoding video packets. |
34 class TestVideoRendererTest : public testing::Test { | 48 class TestVideoRendererTest : public testing::Test { |
35 public: | 49 public: |
36 TestVideoRendererTest(); | 50 TestVideoRendererTest(); |
37 ~TestVideoRendererTest() override; | 51 ~TestVideoRendererTest() override; |
38 | 52 |
39 // Generate a frame containing a gradient and test decoding of | 53 // Handles creating a frame and sending it to TestVideoRenderer for processing. |
40 // TestVideoRenderer. The original frame is compared to the one obtained from | |
41 // decoding the video packet, and the error at each pixel is the root mean | |
42 // square of the errors in the R, G and B components, each normalized to | |
43 // [0, 1]. This routine checks that the mean error over all pixels do not | |
44 // exceed a given limit. | |
45 void TestVideoPacketProcessing(int screen_width, int screen_height, | 54 void TestVideoPacketProcessing(int screen_width, int screen_height, |
46 double error_limit); | 55 double error_limit); |
47 | 56 |
57 // Handles setting an image pattern and sending a frame which the | |
58 // TestVideoRenderer is expected to match and acknowledge via its callback. | |
59 void TestImagePatternMatchAndCallback( | |
60 int screen_width, | |
61 int screen_height, | |
62 const webrtc::DesktopRect& expected_rect); | |
63 | |
64 // Handles setting an image pattern and sending a frame which is not expected | |
65 // to be matched by the TestVideoRenderer. | |
66 void TestImagePatternNotMatch(int screen_width, | |
67 int screen_height, | |
68 const webrtc::DesktopRect& expected_rect); | |
joedow 2015/07/13 16:27:51: I think the TestImagePattern*Match functions are …
liaoyuke 2015/07/13 18:12:59: Done.
| |
69 | |
48 // Generate a basic desktop frame containing a gradient. | 70 // Generate a basic desktop frame containing a gradient. |
49 scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient( | 71 scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient( |
50 int screen_width, int screen_height) const; | 72 int screen_width, int screen_height) const; |
51 | 73 |
52 protected: | 74 protected: |
53 // Used to post tasks to the message loop. | 75 // Used to post tasks to the message loop. |
54 scoped_ptr<base::RunLoop> run_loop_; | 76 scoped_ptr<base::RunLoop> run_loop_; |
55 | 77 |
56 // Used to set timeouts and delays. | 78 // Used to set timeouts and delays. |
57 scoped_ptr<base::Timer> timer_; | 79 scoped_ptr<base::Timer> timer_; |
58 | 80 |
59 // Manages the decoder and process generated video packets. | 81 // Manages the decoder and process generated video packets. |
60 scoped_ptr<TestVideoRenderer> test_video_renderer_; | 82 scoped_ptr<TestVideoRenderer> test_video_renderer_; |
61 | 83 |
62 // Used to encode desktop frames to generate video packets. | 84 // Used to encode desktop frames to generate video packets. |
63 scoped_ptr<VideoEncoder> encoder_; | 85 scoped_ptr<VideoEncoder> encoder_; |
64 | 86 |
65 private: | 87 private: |
66 // testing::Test interface. | 88 // testing::Test interface. |
67 void SetUp() override; | 89 void SetUp() override; |
68 | 90 |
69 // return the mean error of two frames. | 91 // Sets the image pattern, sends the video packet and returns whether the |
92 // expected pattern is matched. | |
93 bool SendPacketAndWaitForMatch(scoped_ptr<VideoPacket> packet, | |
94 const webrtc::DesktopRect& expected_rect, | |
95 RGBA32 expected_color); | |
96 | |
97 // Returns the average color value of pixels that fall within |rect|. | |
98 // NOTE: Callers should not release the objects. | |
99 RGBA32 CalculateAverageColorValueForFrame( | |
100 const webrtc::DesktopFrame* frame, | |
101 const webrtc::DesktopRect& rect) const; | |
102 | |
103 // Returns the mean error of two frames over all pixels, where the error at each | |
joedow 2015/07/13 16:27:51: nit: capitalize 'r' in return.
liaoyuke 2015/07/13 18:12:59: Done.
| |
104 // pixel is the root mean square of the errors in the R, G and B components, | |
105 // each normalized to [0, 1]. | |
70 double CalculateError(const webrtc::DesktopFrame* original_frame, | 106 double CalculateError(const webrtc::DesktopFrame* original_frame, |
71 const webrtc::DesktopFrame* decoded_frame) const; | 107 const webrtc::DesktopFrame* decoded_frame) const; |
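The lines of CalculateError() that fold the accumulated squares into the final return value are elided later in this diff, so the exact expression cannot be read off here. A sketch of one completion that is consistent with the comment above and with the visible accumulation loop (which sums the squared, 255-normalized per-channel differences over every pixel) is:

E \approx \sqrt{\frac{1}{3 \cdot W \cdot H} \sum_{p} \sum_{c \in \{R,G,B\}} \left(\frac{\Delta c_p}{255}\right)^{2}}

with W and H the screen width and height; TestVideoPacketProcessing() then asserts that this value stays below the supplied error limit (kDefaultErrorLimit, i.e. 0.02, by default).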
72 | 108 |
73 // Fill a desktop frame with a gradient. | 109 // Fill a desktop frame with a gradient. |
74 void FillFrameWithGradient(webrtc::DesktopFrame* frame) const; | 110 void FillFrameWithGradient(webrtc::DesktopFrame* frame) const; |
75 | 111 |
76 // The thread's message loop. Valid only when the thread is alive. | 112 // The thread's message loop. Valid only when the thread is alive. |
77 scoped_ptr<base::MessageLoop> message_loop_; | 113 scoped_ptr<base::MessageLoop> message_loop_; |
78 | 114 |
79 DISALLOW_COPY_AND_ASSIGN(TestVideoRendererTest); | 115 DISALLOW_COPY_AND_ASSIGN(TestVideoRendererTest); |
(...skipping 12 matching lines...) | |
92 } | 128 } |
93 test_video_renderer_.reset(new TestVideoRenderer()); | 129 test_video_renderer_.reset(new TestVideoRenderer()); |
94 } | 130 } |
95 | 131 |
96 void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, | 132 void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, |
97 int screen_height, | 133 int screen_height, |
98 double error_limit) { | 134 double error_limit) { |
99 DCHECK(encoder_); | 135 DCHECK(encoder_); |
100 DCHECK(test_video_renderer_); | 136 DCHECK(test_video_renderer_); |
101 | 137 |
138 // Generate a frame containing a gradient. | |
joedow 2015/07/13 16:27:51: nit: add period to end of comment.
liaoyuke 2015/07/13 18:12:59: Done.
| |
102 scoped_ptr<webrtc::DesktopFrame> original_frame = | 139 scoped_ptr<webrtc::DesktopFrame> original_frame = |
103 CreateDesktopFrameWithGradient(screen_width, screen_height); | 140 CreateDesktopFrameWithGradient(screen_width, screen_height); |
104 EXPECT_TRUE(original_frame); | 141 EXPECT_TRUE(original_frame); |
142 | |
105 scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get()); | 143 scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get()); |
106 DCHECK(!run_loop_ || !run_loop_->running()); | 144 DCHECK(!run_loop_ || !run_loop_->running()); |
107 run_loop_.reset(new base::RunLoop()); | 145 run_loop_.reset(new base::RunLoop()); |
108 | 146 |
109 // Wait for the video packet to be processed and rendered to buffer. | 147 // Wait for the video packet to be processed and rendered to buffer. |
110 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | 148 test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
111 run_loop_->QuitClosure()); | 149 run_loop_->QuitClosure()); |
150 | |
112 run_loop_->Run(); | 151 run_loop_->Run(); |
113 | 152 |
114 scoped_ptr<webrtc::DesktopFrame> buffer_copy = | 153 scoped_ptr<webrtc::DesktopFrame> buffer_copy = |
115 test_video_renderer_->GetBufferForTest(); | 154 test_video_renderer_->GetCurrentFrameForTest(); |
116 EXPECT_NE(buffer_copy, nullptr); | 155 EXPECT_NE(buffer_copy, nullptr); |
156 | |
157 // The original frame is compared to the decoded video frame to check that |
joedow 2015/07/13 16:27:51: nit: s/checks/check
liaoyuke 2015/07/13 18:12:59: Done.
| |
158 // the mean error over all pixels does not exceed a given limit. | |
joedow 2015/07/13 16:27:51: nit: s/do/does
liaoyuke 2015/07/13 18:13:00: Done.
| |
117 double error = CalculateError(original_frame.get(), buffer_copy.get()); | 159 double error = CalculateError(original_frame.get(), buffer_copy.get()); |
118 EXPECT_LT(error, error_limit); | 160 EXPECT_LT(error, error_limit); |
119 } | 161 } |
120 | 162 |
163 bool TestVideoRendererTest::SendPacketAndWaitForMatch( | |
164 scoped_ptr<VideoPacket> packet, | |
165 const webrtc::DesktopRect& expected_rect, | |
166 RGBA32 expected_color) { | |
167 DCHECK(!run_loop_ || !run_loop_->running()); | |
168 run_loop_.reset(new base::RunLoop()); | |
169 | |
170 // Set expected image pattern. | |
171 test_video_renderer_->SetImagePatternAndMatchedCallback( | |
172 expected_rect, expected_color, run_loop_->QuitClosure()); | |
173 | |
174 // Used to verify if the expected image pattern will be matched by |packet|. | |
175 scoped_ptr<VideoPacket> packet_copy(new VideoPacket(*packet.get())); | |
176 | |
177 // Post test video packet. | |
178 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | |
179 base::Bind(&base::DoNothing)); | |
180 | |
181 // A copy of the packet is sent with a PatternMatchedVerifyPacketDoneHandler |
182 // to detect the case where the matched callback was never fired. | |
183 bool image_pattern_is_matched = true; | |
joedow 2015/07/13 16:27:51: This is really confusing, you are setting the imag…
liaoyuke 2015/07/13 18:13:00: Done.
| |
184 base::Closure pattern_matched_verify_packet_done_callback = | |
joedow 2015/07/13 16:27:51: This is a pretty long name, I think you can just c…
liaoyuke 2015/07/13 18:13:00: Done.
| |
185 base::Bind(&PatternMatchedVerifyPacketDoneHandler, | |
186 run_loop_->QuitClosure(), &image_pattern_is_matched); | |
187 | |
188 test_video_renderer_->ProcessVideoPacket( | |
189 packet_copy.Pass(), pattern_matched_verify_packet_done_callback); | |
190 | |
191 run_loop_->Run(); | |
192 run_loop_.reset(); | |
193 | |
194 return image_pattern_is_matched; | |
195 } | |
196 | |
197 void TestVideoRendererTest::TestImagePatternMatchAndCallback( | |
198 int screen_width, | |
199 int screen_height, | |
200 const webrtc::DesktopRect& expected_rect) { | |
201 DCHECK(encoder_); | |
202 DCHECK(test_video_renderer_); | |
203 | |
204 scoped_ptr<webrtc::DesktopFrame> frame = | |
205 CreateDesktopFrameWithGradient(screen_width, screen_height); | |
206 RGBA32 expected_color = | |
207 CalculateAverageColorValueForFrame(frame.get(), expected_rect); | |
208 | |
209 scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); | |
210 bool image_pattern_is_matched = | |
211 SendPacketAndWaitForMatch(packet.Pass(), expected_rect, expected_color); | |
212 | |
213 EXPECT_TRUE(image_pattern_is_matched); | |
214 } | |
215 | |
216 void TestVideoRendererTest::TestImagePatternNotMatch( | |
217 int screen_width, | |
218 int screen_height, | |
219 const webrtc::DesktopRect& expected_rect) { | |
220 DCHECK(encoder_); | |
221 DCHECK(test_video_renderer_); | |
222 | |
223 scoped_ptr<webrtc::DesktopFrame> frame = | |
224 CreateDesktopFrameWithGradient(screen_width, screen_height); | |
225 RGBA32 expected_color = | |
226 CalculateAverageColorValueForFrame(frame.get(), expected_rect); | |
227 | |
228 // Shift each channel by 128. | |
229 // e.g. (10, 127, 200) -> (138, 255, 73). | |
230 // In this way, the error between expected color and true value is always | |
231 // around 0.5. | |
232 int red_shift = (((expected_color >> 16) & 0xFF) + 128) % 255; | |
233 int green_shift = (((expected_color >> 8) & 0xFF) + 128) % 255; | |
234 int blue_shift = ((expected_color & 0xFF) + 128) % 255; | |
235 | |
236 int expected_color_shift = | |
237 0xFF000000 | (red_shift << 16) | (green_shift << 8) | blue_shift; | |
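A quick worked check of the shift above, using illustrative channel values that are not part of the CL: offsetting a channel by 128 modulo 255 leaves the shifted value roughly 127 or 128 levels away from the original, which is about 0.5 once normalized by 255, so the renderer should not report a match against |expected_color_shift|.

  //   c = 10  -> (10  + 128) % 255 = 138, |138 - 10 | = 128, 128 / 255 ~= 0.50
  //   c = 200 -> (200 + 128) % 255 =  73, |73  - 200| = 127, 127 / 255 ~= 0.50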
238 | |
239 scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); | |
240 | |
241 bool image_pattern_is_matched = SendPacketAndWaitForMatch( | |
242 packet.Pass(), expected_rect, expected_color_shift); | |
243 | |
244 EXPECT_FALSE(image_pattern_is_matched); | |
245 } | |
246 | |
247 RGBA32 TestVideoRendererTest::CalculateAverageColorValueForFrame( | |
248 const webrtc::DesktopFrame* frame, | |
249 const webrtc::DesktopRect& rect) const { | |
250 int red_sum = 0; | |
251 int green_sum = 0; | |
252 int blue_sum = 0; | |
253 | |
254 // Loop through pixels that fall within |rect| to obtain the | |
255 // average color value. | |
256 for (int y = rect.top(); y < rect.bottom(); ++y) { | |
257 uint8_t* frame_pos = | |
258 frame->data() + (y * frame->stride() + | |
259 rect.left() * webrtc::DesktopFrame::kBytesPerPixel); | |
260 | |
261 // Pixels of decoded video frame are presented in ARGB format. | |
262 for (int x = 0; x < rect.width(); ++x) { | |
263 red_sum += frame_pos[2]; | |
264 green_sum += frame_pos[1]; | |
265 blue_sum += frame_pos[0]; | |
266 frame_pos += 4; | |
267 } | |
268 } | |
269 | |
270 int area = rect.width() * rect.height(); | |
271 return 0xFF000000 | ((red_sum / area) << 16) | ((green_sum / area) << 8) | | |
272 (blue_sum / area); | |
273 } | |
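CalculateAverageColorValueForFrame() returns the averages packed in the same 0xAARRGGBB layout used throughout the test. The struct and helper below are hypothetical and purely illustrative (they are not part of the CL); they only show how such a packed value maps back to individual channels:

#include <cstdint>

// Hypothetical helper, illustration only: unpack a 0xAARRGGBB value such as
// the one returned by CalculateAverageColorValueForFrame().
struct Rgb {
  int r;
  int g;
  int b;
};

Rgb UnpackArgb(uint32_t argb) {
  Rgb result;
  result.r = (argb >> 16) & 0xFF;  // Red lives in bits 16-23.
  result.g = (argb >> 8) & 0xFF;   // Green lives in bits 8-15.
  result.b = argb & 0xFF;          // Blue lives in bits 0-7.
  return result;
}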
274 | |
121 double TestVideoRendererTest::CalculateError( | 275 double TestVideoRendererTest::CalculateError( |
122 const webrtc::DesktopFrame* original_frame, | 276 const webrtc::DesktopFrame* original_frame, |
123 const webrtc::DesktopFrame* decoded_frame) const { | 277 const webrtc::DesktopFrame* decoded_frame) const { |
124 DCHECK(original_frame); | 278 DCHECK(original_frame); |
125 DCHECK(decoded_frame); | 279 DCHECK(decoded_frame); |
126 | 280 |
127 // Check size remains the same after encoding and decoding. | 281 // Check size remains the same after encoding and decoding. |
128 EXPECT_EQ(original_frame->size().width(), decoded_frame->size().width()); | 282 EXPECT_EQ(original_frame->size().width(), decoded_frame->size().width()); |
129 EXPECT_EQ(original_frame->size().height(), decoded_frame->size().height()); | 283 EXPECT_EQ(original_frame->size().height(), decoded_frame->size().height()); |
130 EXPECT_EQ(original_frame->stride(), decoded_frame->stride()); | 284 EXPECT_EQ(original_frame->stride(), decoded_frame->stride()); |
(...skipping 18 matching lines...) | |
149 // | 303 // |
150 for (int height = 0; height < screen_height; ++height) { | 304 for (int height = 0; height < screen_height; ++height) { |
151 uint8_t* original_ptr = original_frame->data() + | 305 uint8_t* original_ptr = original_frame->data() + |
152 height * original_frame->stride(); | 306 height * original_frame->stride(); |
153 uint8_t* decoded_ptr = decoded_frame->data() + | 307 uint8_t* decoded_ptr = decoded_frame->data() + |
154 height * decoded_frame->stride(); | 308 height * decoded_frame->stride(); |
155 | 309 |
156 for (int width = 0; width < screen_width; ++width) { | 310 for (int width = 0; width < screen_width; ++width) { |
157 // Errors are calculated in the R, G, B components. | 311 // Errors are calculated in the R, G, B components. |
158 for (int j = 0; j < 3; ++j) { | 312 for (int j = 0; j < 3; ++j) { |
159 int offset = kBytesPerPixel * width + j; | 313 int offset = webrtc::DesktopFrame::kBytesPerPixel * width + j; |
160 double original_value = static_cast<double>(*(original_ptr + offset)); | 314 double original_value = static_cast<double>(*(original_ptr + offset)); |
161 double decoded_value = static_cast<double>(*(decoded_ptr + offset)); | 315 double decoded_value = static_cast<double>(*(decoded_ptr + offset)); |
162 double error = original_value - decoded_value; | 316 double error = original_value - decoded_value; |
163 | 317 |
164 // Normalize the error to [0, 1]. | 318 // Normalize the error to [0, 1]. |
165 error /= 255.0; | 319 error /= 255.0; |
166 error_sum_squares += error * error; | 320 error_sum_squares += error * error; |
167 } | 321 } |
168 } | 322 } |
169 } | 323 } |
(...skipping 24 matching lines...) | |
194 *p++ = 0; | 348 *p++ = 0; |
195 } | 349 } |
196 } | 350 } |
197 } | 351 } |
198 | 352 |
199 // Verify video decoding for VP8 Codec. | 353 // Verify video decoding for VP8 Codec. |
200 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) { | 354 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) { |
201 encoder_ = VideoEncoderVpx::CreateForVP8(); | 355 encoder_ = VideoEncoderVpx::CreateForVP8(); |
202 test_video_renderer_->SetCodecForDecoding( | 356 test_video_renderer_->SetCodecForDecoding( |
203 protocol::ChannelConfig::CODEC_VP8); | 357 protocol::ChannelConfig::CODEC_VP8); |
204 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 358 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
205 kDefaultErrorLimit); | 359 kDefaultErrorLimit); |
206 } | 360 } |
207 | 361 |
208 // Verify video decoding for VP9 Codec. | 362 // Verify video decoding for VP9 Codec. |
209 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) { | 363 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) { |
210 encoder_ = VideoEncoderVpx::CreateForVP9(); | 364 encoder_ = VideoEncoderVpx::CreateForVP9(); |
211 test_video_renderer_->SetCodecForDecoding( | 365 test_video_renderer_->SetCodecForDecoding( |
212 protocol::ChannelConfig::CODEC_VP9); | 366 protocol::ChannelConfig::CODEC_VP9); |
213 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 367 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
214 kDefaultErrorLimit); | 368 kDefaultErrorLimit); |
215 } | 369 } |
216 | 370 |
217 | 371 |
218 // Verify video decoding for VERBATIM Codec. | 372 // Verify video decoding for VERBATIM Codec. |
219 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) { | 373 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) { |
220 encoder_.reset(new VideoEncoderVerbatim()); | 374 encoder_.reset(new VideoEncoderVerbatim()); |
221 test_video_renderer_->SetCodecForDecoding( | 375 test_video_renderer_->SetCodecForDecoding( |
222 protocol::ChannelConfig::CODEC_VERBATIM); | 376 protocol::ChannelConfig::CODEC_VERBATIM); |
223 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 377 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
224 kDefaultErrorLimit); | 378 kDefaultErrorLimit); |
225 } | 379 } |
226 | 380 |
227 // Verify a set of video packets are processed correctly. | 381 // Verify a set of video packets are processed correctly. |
228 TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) { | 382 TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) { |
229 encoder_ = VideoEncoderVpx::CreateForVP8(); | 383 encoder_ = VideoEncoderVpx::CreateForVP8(); |
230 test_video_renderer_->SetCodecForDecoding( | 384 test_video_renderer_->SetCodecForDecoding( |
231 protocol::ChannelConfig::CODEC_VP8); | 385 protocol::ChannelConfig::CODEC_VP8); |
232 | 386 |
233 // Post multiple tasks to |test_video_renderer_|, and it should not crash. | 387 // Post multiple tasks to |test_video_renderer_|, and it should not crash. |
234 // 20 is chosen because it's large enough to make sure that there will be | 388 // 20 is chosen because it's large enough to make sure that there will be |
235 // more than one task on the video decode thread, while small enough that the | 389 // more than one task on the video decode thread, while small enough that the |
236 // unit test still completes quickly. | 390 // unit test still completes quickly. |
237 const int task_num = 20; | 391 const int task_num = 20; |
238 ScopedVector<VideoPacket> video_packets; | 392 ScopedVector<VideoPacket> video_packets; |
239 for (int i = 0; i < task_num; ++i) { | 393 for (int i = 0; i < task_num; ++i) { |
240 scoped_ptr<webrtc::DesktopFrame> original_frame = | 394 scoped_ptr<webrtc::DesktopFrame> original_frame = |
241 CreateDesktopFrameWithGradient(kDefaultScreenWidth, | 395 CreateDesktopFrameWithGradient(kDefaultScreenWidthPx, |
242 kDefaultScreenHeight); | 396 kDefaultScreenHeightPx); |
243 video_packets.push_back(encoder_->Encode(*original_frame.get())); | 397 video_packets.push_back(encoder_->Encode(*original_frame.get())); |
244 } | 398 } |
245 | 399 |
246 for (int i = 0; i < task_num; ++i) { | 400 for (int i = 0; i < task_num; ++i) { |
247 // Transfer ownership of video packet. | 401 // Transfer ownership of video packet. |
248 VideoPacket* packet = video_packets[i]; | 402 VideoPacket* packet = video_packets[i]; |
249 video_packets[i] = nullptr; | 403 video_packets[i] = nullptr; |
250 test_video_renderer_->ProcessVideoPacket(make_scoped_ptr(packet), | 404 test_video_renderer_->ProcessVideoPacket(make_scoped_ptr(packet), |
251 base::Bind(&base::DoNothing)); | 405 base::Bind(&base::DoNothing)); |
252 } | 406 } |
253 } | 407 } |
254 | 408 |
255 // Verify video packet size change is handled properly. | 409 // Verify video packet size change is handled properly. |
256 TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) { | 410 TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) { |
257 encoder_ = VideoEncoderVpx::CreateForVP8(); | 411 encoder_ = VideoEncoderVpx::CreateForVP8(); |
258 test_video_renderer_->SetCodecForDecoding( | 412 test_video_renderer_->SetCodecForDecoding( |
259 protocol::ChannelConfig::Codec::CODEC_VP8); | 413 protocol::ChannelConfig::Codec::CODEC_VP8); |
260 | 414 |
261 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 415 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
262 kDefaultErrorLimit); | 416 kDefaultErrorLimit); |
263 | 417 |
264 TestVideoPacketProcessing(2 * kDefaultScreenWidth, 2 * kDefaultScreenHeight, | 418 TestVideoPacketProcessing(2 * kDefaultScreenWidthPx, |
265 kDefaultErrorLimit); | 419 2 * kDefaultScreenHeightPx, kDefaultErrorLimit); |
266 | 420 |
267 TestVideoPacketProcessing(kDefaultScreenWidth / 2, kDefaultScreenHeight / 2, | 421 TestVideoPacketProcessing(kDefaultScreenWidthPx / 2, |
268 kDefaultErrorLimit); | 422 kDefaultScreenHeightPx / 2, kDefaultErrorLimit); |
423 } | |
424 | |
425 // Verify setting expected image pattern doesn't break video packet processing. | |
426 TEST_F(TestVideoRendererTest, VerifySetExpectedImagePattern) { | |
427 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
428 test_video_renderer_->SetCodecForDecoding( | |
429 protocol::ChannelConfig::Codec::CODEC_VP8); | |
430 | |
431 DCHECK(encoder_); | |
432 DCHECK(test_video_renderer_); | |
433 | |
434 scoped_ptr<webrtc::DesktopFrame> frame = CreateDesktopFrameWithGradient( | |
435 kDefaultScreenWidthPx, kDefaultScreenHeightPx); | |
436 | |
437 // Since we don't care whether the expected image pattern is matched or not in | |
438 // this case, an expected color is chosen arbitrarily. | |
439 RGBA32 black_color = 0xFF000000; | |
440 | |
441 // Set expected image pattern. | |
442 test_video_renderer_->SetImagePatternAndMatchedCallback( | |
443 kDefaultExpectedRect, black_color, base::Bind(&base::DoNothing)); | |
444 | |
445 // Post test video packet. | |
446 scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); | |
447 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | |
448 base::Bind(&base::DoNothing)); | |
449 } | |
450 | |
451 // Verify correct image pattern can be matched for VP8. | |
452 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP8) { | |
453 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
454 test_video_renderer_->SetCodecForDecoding( | |
455 protocol::ChannelConfig::Codec::CODEC_VP8); | |
456 TestImagePatternMatchAndCallback( | |
457 kDefaultScreenWidthPx, kDefaultScreenHeightPx, kDefaultExpectedRect); | |
458 } | |
459 | |
460 // Verify expected image pattern can be matched for VP9. | |
461 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP9) { | |
462 encoder_ = VideoEncoderVpx::CreateForVP9(); | |
463 test_video_renderer_->SetCodecForDecoding( | |
464 protocol::ChannelConfig::Codec::CODEC_VP9); | |
465 TestImagePatternMatchAndCallback( | |
466 kDefaultScreenWidthPx, kDefaultScreenHeightPx, kDefaultExpectedRect); | |
467 } | |
468 | |
469 // Verify expected image pattern can be matched for VERBATIM. | |
470 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVERBATIM) { | |
471 encoder_.reset(new VideoEncoderVerbatim()); | |
472 test_video_renderer_->SetCodecForDecoding( | |
473 protocol::ChannelConfig::Codec::CODEC_VERBATIM); | |
474 TestImagePatternMatchAndCallback( | |
475 kDefaultScreenWidthPx, kDefaultScreenHeightPx, kDefaultExpectedRect); | |
476 } | |
477 | |
478 // Verify incorrect image pattern shouldn't be matched for VP8. | |
479 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP8) { | |
480 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
481 test_video_renderer_->SetCodecForDecoding( | |
482 protocol::ChannelConfig::Codec::CODEC_VP8); | |
483 TestImagePatternNotMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
484 kDefaultExpectedRect); | |
485 } | |
486 | |
487 // Verify incorrect image pattern shouldn't be matched for VP9. | |
488 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP9) { | |
489 encoder_ = VideoEncoderVpx::CreateForVP9(); | |
490 test_video_renderer_->SetCodecForDecoding( | |
491 protocol::ChannelConfig::Codec::CODEC_VP9); | |
492 TestImagePatternNotMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
493 kDefaultExpectedRect); | |
494 } | |
495 | |
496 // Verify incorrect image pattern shouldn't be matched for VERBATIM. | |
497 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVERBATIM) { | |
498 encoder_.reset(new VideoEncoderVerbatim()); | |
499 test_video_renderer_->SetCodecForDecoding( | |
500 protocol::ChannelConfig::Codec::CODEC_VERBATIM); | |
501 TestImagePatternNotMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
502 kDefaultExpectedRect); | |
269 } | 503 } |
270 | 504 |
271 } // namespace test | 505 } // namespace test |
272 } // namespace remoting | 506 } // namespace remoting |