OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/test/test_video_renderer.h" | 5 #include "remoting/test/test_video_renderer.h" |
6 | 6 |
7 #include <cmath> | 7 #include <cmath> |
8 | 8 |
9 #include "base/memory/scoped_vector.h" | 9 #include "base/memory/scoped_vector.h" |
10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
11 #include "base/run_loop.h" | 11 #include "base/run_loop.h" |
12 #include "base/thread_task_runner_handle.h" | |
12 #include "base/timer/timer.h" | 13 #include "base/timer/timer.h" |
13 #include "media/base/video_frame.h" | 14 #include "media/base/video_frame.h" |
14 #include "remoting/codec/video_encoder.h" | 15 #include "remoting/codec/video_encoder.h" |
15 #include "remoting/codec/video_encoder_verbatim.h" | 16 #include "remoting/codec/video_encoder_verbatim.h" |
16 #include "remoting/codec/video_encoder_vpx.h" | 17 #include "remoting/codec/video_encoder_vpx.h" |
17 #include "remoting/proto/video.pb.h" | 18 #include "remoting/proto/video.pb.h" |
18 #include "testing/gtest/include/gtest/gtest.h" | 19 #include "testing/gtest/include/gtest/gtest.h" |
19 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" | 20 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" |
20 #include "third_party/webrtc/modules/desktop_capture/desktop_region.h" | 21 #include "third_party/webrtc/modules/desktop_capture/desktop_region.h" |
21 | 22 |
22 namespace { | 23 namespace { |
23 const int kBytesPerPixel = 4; | 24 |
24 const int kDefaultScreenWidth = 1024; | 25 // Used to verify whether the image pattern is matched. |
25 const int kDefaultScreenHeight = 768; | 26 void ProcessPacketDoneHandler(const base::Closure& done_closure, |
27 bool* handler_called) { | |
28 *handler_called = true; | |
29 done_closure.Run(); | |
30 } | |
31 | |
32 const int kDefaultScreenWidthPx = 1024; | |
33 const int kDefaultScreenHeightPx = 768; | |
34 | |
 35 // Default max error allowed for encoding and decoding, expressed as a fraction. |
26 const double kDefaultErrorLimit = 0.02; | 36 const double kDefaultErrorLimit = 0.02; |
27 } | 37 |
38 // Default expected rect for image pattern, measured in pixels. | |
39 const webrtc::DesktopRect kDefaultExpectedRect = | |
40 webrtc::DesktopRect::MakeLTRB(100, 100, 200, 200); | |
41 } // namespace | |
28 | 42 |
29 namespace remoting { | 43 namespace remoting { |
30 namespace test { | 44 namespace test { |
31 | 45 |
 32 // Provides basic functionality for the TestVideoRenderer tests below. | 46 // Provides basic functionality for the TestVideoRenderer tests below. |
 33 // This fixture also creates a MessageLoop to test decoding video packets. | 47 // This fixture also creates a MessageLoop to test decoding video packets. |
34 class TestVideoRendererTest : public testing::Test { | 48 class TestVideoRendererTest : public testing::Test { |
35 public: | 49 public: |
36 TestVideoRendererTest(); | 50 TestVideoRendererTest(); |
37 ~TestVideoRendererTest() override; | 51 ~TestVideoRendererTest() override; |
38 | 52 |
 39 // Generate a frame containing a gradient and test decoding of | 53 // Handles creating a frame and sending it to TestVideoRenderer for processing. |
40 // TestVideoRenderer. The original frame is compared to the one obtained from | |
41 // decoding the video packet, and the error at each pixel is the root mean | |
42 // square of the errors in the R, G and B components, each normalized to | |
43 // [0, 1]. This routine checks that the mean error over all pixels do not | |
44 // exceed a given limit. | |
45 void TestVideoPacketProcessing(int screen_width, int screen_height, | 54 void TestVideoPacketProcessing(int screen_width, int screen_height, |
46 double error_limit); | 55 double error_limit); |
47 | 56 |
57 // Handles setting an image pattern and sending a frame to TestVideoRenderer. | |
58 // |expect_to_match| indicates if the image pattern is expected to match. | |
59 void TestImagePatternMatch(int screen_width, | |
60 int screen_height, | |
61 const webrtc::DesktopRect& expected_rect, | |
62 bool expect_to_match); | |
63 | |
48 // Generate a basic desktop frame containing a gradient. | 64 // Generate a basic desktop frame containing a gradient. |
49 scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient( | 65 scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient( |
50 int screen_width, int screen_height) const; | 66 int screen_width, int screen_height) const; |
51 | 67 |
52 protected: | 68 protected: |
53 // Used to post tasks to the message loop. | 69 // Used to post tasks to the message loop. |
54 scoped_ptr<base::RunLoop> run_loop_; | 70 scoped_ptr<base::RunLoop> run_loop_; |
55 | 71 |
56 // Used to set timeouts and delays. | 72 // Used to set timeouts and delays. |
57 scoped_ptr<base::Timer> timer_; | 73 scoped_ptr<base::Timer> timer_; |
58 | 74 |
 59 // Manages the decoder and processes generated video packets. | 75 // Manages the decoder and processes generated video packets. |
60 scoped_ptr<TestVideoRenderer> test_video_renderer_; | 76 scoped_ptr<TestVideoRenderer> test_video_renderer_; |
61 | 77 |
62 // Used to encode desktop frames to generate video packets. | 78 // Used to encode desktop frames to generate video packets. |
63 scoped_ptr<VideoEncoder> encoder_; | 79 scoped_ptr<VideoEncoder> encoder_; |
64 | 80 |
65 private: | 81 private: |
66 // testing::Test interface. | 82 // testing::Test interface. |
67 void SetUp() override; | 83 void SetUp() override; |
68 | 84 |
 69 // return the mean error of two frames. | 85 // Sets the image pattern, sends a video packet and returns whether the |
 86 // expected pattern is matched. |
87 bool SendPacketAndWaitForMatch(scoped_ptr<VideoPacket> packet, | |
88 const webrtc::DesktopRect& expected_rect, | |
89 uint32_t expected_average_color); | |
90 | |
 91 // Returns the average color value of pixels that fall within |rect|. |
92 // NOTE: Callers should not release the objects. | |
93 uint32_t CalculateAverageColorValueForFrame( | |
94 const webrtc::DesktopFrame* frame, | |
95 const webrtc::DesktopRect& rect) const; | |
96 | |
 97 // Returns the mean error of two frames over all pixels, where the error at each |
98 // pixel is the root mean square of the errors in the R, G and B components, | |
99 // each normalized to [0, 1]. | |
70 double CalculateError(const webrtc::DesktopFrame* original_frame, | 100 double CalculateError(const webrtc::DesktopFrame* original_frame, |
71 const webrtc::DesktopFrame* decoded_frame) const; | 101 const webrtc::DesktopFrame* decoded_frame) const; |
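For reference, the metric described above can be written as follows (a sketch only; the exact aggregation performed in the elided body of CalculateError below is an assumption):

  E = \frac{1}{WH} \sum_{x,y} \sqrt{\frac{1}{3} \sum_{c \in \{R,G,B\}} \left(\frac{I_c(x,y) - \hat{I}_c(x,y)}{255}\right)^2}

where I and \hat{I} are the original and decoded frames and W, H are the screen dimensions in pixels.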
72 | 102 |
73 // Fill a desktop frame with a gradient. | 103 // Fill a desktop frame with a gradient. |
74 void FillFrameWithGradient(webrtc::DesktopFrame* frame) const; | 104 void FillFrameWithGradient(webrtc::DesktopFrame* frame) const; |
75 | 105 |
76 // The thread's message loop. Valid only when the thread is alive. | 106 // The thread's message loop. Valid only when the thread is alive. |
77 scoped_ptr<base::MessageLoop> message_loop_; | 107 scoped_ptr<base::MessageLoop> message_loop_; |
78 | 108 |
79 DISALLOW_COPY_AND_ASSIGN(TestVideoRendererTest); | 109 DISALLOW_COPY_AND_ASSIGN(TestVideoRendererTest); |
(...skipping 12 matching lines...) | |
92 } | 122 } |
93 test_video_renderer_.reset(new TestVideoRenderer()); | 123 test_video_renderer_.reset(new TestVideoRenderer()); |
94 } | 124 } |
95 | 125 |
96 void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, | 126 void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, |
97 int screen_height, | 127 int screen_height, |
98 double error_limit) { | 128 double error_limit) { |
99 DCHECK(encoder_); | 129 DCHECK(encoder_); |
100 DCHECK(test_video_renderer_); | 130 DCHECK(test_video_renderer_); |
101 | 131 |
132 // Generate a frame containing a gradient. | |
102 scoped_ptr<webrtc::DesktopFrame> original_frame = | 133 scoped_ptr<webrtc::DesktopFrame> original_frame = |
103 CreateDesktopFrameWithGradient(screen_width, screen_height); | 134 CreateDesktopFrameWithGradient(screen_width, screen_height); |
104 EXPECT_TRUE(original_frame); | 135 EXPECT_TRUE(original_frame); |
136 | |
105 scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get()); | 137 scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get()); |
138 | |
106 DCHECK(!run_loop_ || !run_loop_->running()); | 139 DCHECK(!run_loop_ || !run_loop_->running()); |
140 DCHECK(!timer_->IsRunning()); | |
107 run_loop_.reset(new base::RunLoop()); | 141 run_loop_.reset(new base::RunLoop()); |
108 | 142 |
143 // Set an extremely long time: 10 min to prevent bugs from hanging the system. | |
144 // NOTE: We've seen cases which take up to 1 min to process a packet, so an | |
145 // extremely long time as 10 min is chosen to void being variable/flaky. | |
joedow  2015/07/13 20:53:03  nit: s/void/avoid
| |
146 timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10), | |
147 run_loop_->QuitClosure()); | |
148 | |
109 // Wait for the video packet to be processed and rendered to buffer. | 149 // Wait for the video packet to be processed and rendered to buffer. |
110 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | 150 test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
111 run_loop_->QuitClosure()); | 151 run_loop_->QuitClosure()); |
152 | |
112 run_loop_->Run(); | 153 run_loop_->Run(); |
154 EXPECT_TRUE(timer_->IsRunning()); | |
155 timer_->Stop(); | |
156 run_loop_.reset(); | |
113 | 157 |
114 scoped_ptr<webrtc::DesktopFrame> buffer_copy = | 158 scoped_ptr<webrtc::DesktopFrame> buffer_copy = |
115 test_video_renderer_->GetBufferForTest(); | 159 test_video_renderer_->GetCurrentFrameForTest(); |
116 EXPECT_NE(buffer_copy, nullptr); | 160 EXPECT_NE(buffer_copy, nullptr); |
161 | |
162 // The original frame is compared to the decoded video frame to check that | |
163 // the mean error over all pixels does not exceed a given limit. | |
117 double error = CalculateError(original_frame.get(), buffer_copy.get()); | 164 double error = CalculateError(original_frame.get(), buffer_copy.get()); |
118 EXPECT_LT(error, error_limit); | 165 EXPECT_LT(error, error_limit); |
119 } | 166 } |
120 | 167 |
168 bool TestVideoRendererTest::SendPacketAndWaitForMatch( | |
169 scoped_ptr<VideoPacket> packet, | |
170 const webrtc::DesktopRect& expected_rect, | |
171 uint32_t expected_average_color) { | |
172 DCHECK(!run_loop_ || !run_loop_->running()); | |
173 DCHECK(!timer_->IsRunning()); | |
174 run_loop_.reset(new base::RunLoop()); | |
175 | |
176 // Set an extremely long time: 10 min to prevent bugs from hanging the system. | |
177 // NOTE: We've seen cases which take up to 1 min to process a packet, so an | |
178 // extremely long time as 10 min is chosen to void being variable/flaky. | |
joedow  2015/07/13 20:53:03  nit: s/void/avoid
| |
179 timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10), | |
180 run_loop_->QuitClosure()); | |
181 | |
182 // Set expected image pattern. | |
183 test_video_renderer_->ExpectAverageColorInRect( | |
184 expected_rect, expected_average_color, run_loop_->QuitClosure()); | |
185 | |
186 // Used to verify if the expected image pattern will be matched by |packet|. | |
187 scoped_ptr<VideoPacket> packet_copy(new VideoPacket(*packet.get())); | |
188 | |
189 // Post first test packet: |packet|. | |
190 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | |
191 base::Bind(&base::DoNothing)); | |
192 | |
 193 // Second packet: |packet_copy| is posted, and |second_packet_done_callback| |
 194 // will always be posted back to the main thread; however, whether it is |
 195 // called depends on whether the expected pattern is matched. |
196 bool second_packet_done_is_called = false; | |
197 base::Closure second_packet_done_callback = | |
198 base::Bind(&ProcessPacketDoneHandler, run_loop_->QuitClosure(), | |
199 &second_packet_done_is_called); | |
200 | |
201 test_video_renderer_->ProcessVideoPacket(packet_copy.Pass(), | |
202 second_packet_done_callback); | |
203 | |
204 run_loop_->Run(); | |
205 EXPECT_TRUE(timer_->IsRunning()); | |
206 timer_->Stop(); | |
207 run_loop_.reset(); | |
208 | |
 209 // If the expected image pattern is matched, the QuitClosure of |run_loop_| |
 210 // will be called before |second_packet_done_callback|, which leaves |
 211 // |second_packet_done_is_called| false. |
212 bool image_pattern_is_matched = !second_packet_done_is_called; | |
213 | |
214 return image_pattern_is_matched; | |
215 } | |
216 | |
217 void TestVideoRendererTest::TestImagePatternMatch( | |
218 int screen_width, | |
219 int screen_height, | |
220 const webrtc::DesktopRect& expected_rect, | |
221 bool expect_to_match) { | |
222 DCHECK(encoder_); | |
223 DCHECK(test_video_renderer_); | |
224 | |
225 scoped_ptr<webrtc::DesktopFrame> frame = | |
226 CreateDesktopFrameWithGradient(screen_width, screen_height); | |
227 uint32_t expected_average_color = | |
228 CalculateAverageColorValueForFrame(frame.get(), expected_rect); | |
229 scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); | |
230 | |
231 if (expect_to_match) { | |
232 EXPECT_TRUE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect, | |
233 expected_average_color)); | |
234 } else { | |
235 // Shift each channel by 128. | |
 236 // e.g. (10, 127, 200) -> (138, 0, 73). |
237 // In this way, the error between expected color and true value is always | |
238 // around 0.5. | |
239 int red_shift = (((expected_average_color >> 16) & 0xFF) + 128) % 255; | |
240 int green_shift = (((expected_average_color >> 8) & 0xFF) + 128) % 255; | |
241 int blue_shift = ((expected_average_color & 0xFF) + 128) % 255; | |
242 | |
243 int expected_average_color_shift = | |
244 0xFF000000 | (red_shift << 16) | (green_shift << 8) | blue_shift; | |
245 | |
246 EXPECT_FALSE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect, | |
247 expected_average_color_shift)); | |
248 } | |
249 } | |
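As a quick check of the "around 0.5" claim in the comment above: for the example (10, 127, 200) -> (138, 0, 73), the per-channel normalized errors are |138 - 10| / 255 ≈ 0.50, |0 - 127| / 255 ≈ 0.50 and |73 - 200| / 255 ≈ 0.50, so the shifted average color should not match within any reasonable tolerance.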
250 | |
251 uint32_t TestVideoRendererTest::CalculateAverageColorValueForFrame( | |
252 const webrtc::DesktopFrame* frame, | |
253 const webrtc::DesktopRect& rect) const { | |
254 int red_sum = 0; | |
255 int green_sum = 0; | |
256 int blue_sum = 0; | |
257 | |
 258 // Loop through pixels that fall within |rect| to obtain the |
259 // average color value. | |
260 for (int y = rect.top(); y < rect.bottom(); ++y) { | |
261 uint8_t* frame_pos = | |
262 frame->data() + (y * frame->stride() + | |
263 rect.left() * webrtc::DesktopFrame::kBytesPerPixel); | |
264 | |
265 // Pixels of decoded video frame are presented in ARGB format. | |
266 for (int x = 0; x < rect.width(); ++x) { | |
267 red_sum += frame_pos[2]; | |
268 green_sum += frame_pos[1]; | |
269 blue_sum += frame_pos[0]; | |
270 frame_pos += 4; | |
271 } | |
272 } | |
273 | |
274 int area = rect.width() * rect.height(); | |
275 return 0xFF000000 | ((red_sum / area) << 16) | ((green_sum / area) << 8) | | |
276 (blue_sum / area); | |
277 } | |
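For illustration (hypothetical values): if red_sum / area = 0x12, green_sum / area = 0x34 and blue_sum / area = 0x56, the function returns 0xFF000000 | (0x12 << 16) | (0x34 << 8) | 0x56 = 0xFF123456, i.e. an opaque ARGB value.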
278 | |
121 double TestVideoRendererTest::CalculateError( | 279 double TestVideoRendererTest::CalculateError( |
122 const webrtc::DesktopFrame* original_frame, | 280 const webrtc::DesktopFrame* original_frame, |
123 const webrtc::DesktopFrame* decoded_frame) const { | 281 const webrtc::DesktopFrame* decoded_frame) const { |
124 DCHECK(original_frame); | 282 DCHECK(original_frame); |
125 DCHECK(decoded_frame); | 283 DCHECK(decoded_frame); |
126 | 284 |
127 // Check size remains the same after encoding and decoding. | 285 // Check size remains the same after encoding and decoding. |
128 EXPECT_EQ(original_frame->size().width(), decoded_frame->size().width()); | 286 EXPECT_EQ(original_frame->size().width(), decoded_frame->size().width()); |
129 EXPECT_EQ(original_frame->size().height(), decoded_frame->size().height()); | 287 EXPECT_EQ(original_frame->size().height(), decoded_frame->size().height()); |
130 EXPECT_EQ(original_frame->stride(), decoded_frame->stride()); | 288 EXPECT_EQ(original_frame->stride(), decoded_frame->stride()); |
(...skipping 18 matching lines...) | |
149 // | 307 // |
150 for (int height = 0; height < screen_height; ++height) { | 308 for (int height = 0; height < screen_height; ++height) { |
151 uint8_t* original_ptr = original_frame->data() + | 309 uint8_t* original_ptr = original_frame->data() + |
152 height * original_frame->stride(); | 310 height * original_frame->stride(); |
153 uint8_t* decoded_ptr = decoded_frame->data() + | 311 uint8_t* decoded_ptr = decoded_frame->data() + |
154 height * decoded_frame->stride(); | 312 height * decoded_frame->stride(); |
155 | 313 |
156 for (int width = 0; width < screen_width; ++width) { | 314 for (int width = 0; width < screen_width; ++width) { |
157 // Errors are calculated in the R, G, B components. | 315 // Errors are calculated in the R, G, B components. |
158 for (int j = 0; j < 3; ++j) { | 316 for (int j = 0; j < 3; ++j) { |
159 int offset = kBytesPerPixel * width + j; | 317 int offset = webrtc::DesktopFrame::kBytesPerPixel * width + j; |
160 double original_value = static_cast<double>(*(original_ptr + offset)); | 318 double original_value = static_cast<double>(*(original_ptr + offset)); |
161 double decoded_value = static_cast<double>(*(decoded_ptr + offset)); | 319 double decoded_value = static_cast<double>(*(decoded_ptr + offset)); |
162 double error = original_value - decoded_value; | 320 double error = original_value - decoded_value; |
163 | 321 |
164 // Normalize the error to [0, 1]. | 322 // Normalize the error to [0, 1]. |
165 error /= 255.0; | 323 error /= 255.0; |
166 error_sum_squares += error * error; | 324 error_sum_squares += error * error; |
167 } | 325 } |
168 } | 326 } |
169 } | 327 } |
(...skipping 24 matching lines...) | |
194 *p++ = 0; | 352 *p++ = 0; |
195 } | 353 } |
196 } | 354 } |
197 } | 355 } |
198 | 356 |
199 // Verify video decoding for VP8 Codec. | 357 // Verify video decoding for VP8 Codec. |
200 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) { | 358 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) { |
201 encoder_ = VideoEncoderVpx::CreateForVP8(); | 359 encoder_ = VideoEncoderVpx::CreateForVP8(); |
202 test_video_renderer_->SetCodecForDecoding( | 360 test_video_renderer_->SetCodecForDecoding( |
203 protocol::ChannelConfig::CODEC_VP8); | 361 protocol::ChannelConfig::CODEC_VP8); |
204 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 362 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
205 kDefaultErrorLimit); | 363 kDefaultErrorLimit); |
206 } | 364 } |
207 | 365 |
208 // Verify video decoding for VP9 Codec. | 366 // Verify video decoding for VP9 Codec. |
209 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) { | 367 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) { |
210 encoder_ = VideoEncoderVpx::CreateForVP9(); | 368 encoder_ = VideoEncoderVpx::CreateForVP9(); |
211 test_video_renderer_->SetCodecForDecoding( | 369 test_video_renderer_->SetCodecForDecoding( |
212 protocol::ChannelConfig::CODEC_VP9); | 370 protocol::ChannelConfig::CODEC_VP9); |
213 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 371 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
214 kDefaultErrorLimit); | 372 kDefaultErrorLimit); |
215 } | 373 } |
216 | 374 |
217 | 375 |
218 // Verify video decoding for VERBATIM Codec. | 376 // Verify video decoding for VERBATIM Codec. |
219 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) { | 377 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) { |
220 encoder_.reset(new VideoEncoderVerbatim()); | 378 encoder_.reset(new VideoEncoderVerbatim()); |
221 test_video_renderer_->SetCodecForDecoding( | 379 test_video_renderer_->SetCodecForDecoding( |
222 protocol::ChannelConfig::CODEC_VERBATIM); | 380 protocol::ChannelConfig::CODEC_VERBATIM); |
223 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 381 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
224 kDefaultErrorLimit); | 382 kDefaultErrorLimit); |
225 } | 383 } |
226 | 384 |
227 // Verify a set of video packets are processed correctly. | 385 // Verify a set of video packets are processed correctly. |
228 TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) { | 386 TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) { |
229 encoder_ = VideoEncoderVpx::CreateForVP8(); | 387 encoder_ = VideoEncoderVpx::CreateForVP8(); |
230 test_video_renderer_->SetCodecForDecoding( | 388 test_video_renderer_->SetCodecForDecoding( |
231 protocol::ChannelConfig::CODEC_VP8); | 389 protocol::ChannelConfig::CODEC_VP8); |
232 | 390 |
233 // Post multiple tasks to |test_video_renderer_|, and it should not crash. | 391 // Post multiple tasks to |test_video_renderer_|, and it should not crash. |
234 // 20 is chosen because it's large enough to make sure that there will be | 392 // 20 is chosen because it's large enough to make sure that there will be |
235 // more than one task on the video decode thread, while not too large to wait | 393 // more than one task on the video decode thread, while not too large to wait |
236 // for too long for the unit test to complete. | 394 // for too long for the unit test to complete. |
237 const int task_num = 20; | 395 const int task_num = 20; |
238 ScopedVector<VideoPacket> video_packets; | 396 ScopedVector<VideoPacket> video_packets; |
239 for (int i = 0; i < task_num; ++i) { | 397 for (int i = 0; i < task_num; ++i) { |
240 scoped_ptr<webrtc::DesktopFrame> original_frame = | 398 scoped_ptr<webrtc::DesktopFrame> original_frame = |
241 CreateDesktopFrameWithGradient(kDefaultScreenWidth, | 399 CreateDesktopFrameWithGradient(kDefaultScreenWidthPx, |
242 kDefaultScreenHeight); | 400 kDefaultScreenHeightPx); |
243 video_packets.push_back(encoder_->Encode(*original_frame.get())); | 401 video_packets.push_back(encoder_->Encode(*original_frame.get())); |
244 } | 402 } |
245 | 403 |
246 for (int i = 0; i < task_num; ++i) { | 404 for (int i = 0; i < task_num; ++i) { |
247 // Transfer ownership of video packet. | 405 // Transfer ownership of video packet. |
248 VideoPacket* packet = video_packets[i]; | 406 VideoPacket* packet = video_packets[i]; |
249 video_packets[i] = nullptr; | 407 video_packets[i] = nullptr; |
250 test_video_renderer_->ProcessVideoPacket(make_scoped_ptr(packet), | 408 test_video_renderer_->ProcessVideoPacket(make_scoped_ptr(packet), |
251 base::Bind(&base::DoNothing)); | 409 base::Bind(&base::DoNothing)); |
252 } | 410 } |
253 } | 411 } |
254 | 412 |
255 // Verify video packet size change is handled properly. | 413 // Verify video packet size change is handled properly. |
256 TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) { | 414 TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) { |
257 encoder_ = VideoEncoderVpx::CreateForVP8(); | 415 encoder_ = VideoEncoderVpx::CreateForVP8(); |
258 test_video_renderer_->SetCodecForDecoding( | 416 test_video_renderer_->SetCodecForDecoding( |
259 protocol::ChannelConfig::Codec::CODEC_VP8); | 417 protocol::ChannelConfig::Codec::CODEC_VP8); |
260 | 418 |
261 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 419 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
262 kDefaultErrorLimit); | 420 kDefaultErrorLimit); |
263 | 421 |
264 TestVideoPacketProcessing(2 * kDefaultScreenWidth, 2 * kDefaultScreenHeight, | 422 TestVideoPacketProcessing(2 * kDefaultScreenWidthPx, |
265 kDefaultErrorLimit); | 423 2 * kDefaultScreenHeightPx, kDefaultErrorLimit); |
266 | 424 |
267 TestVideoPacketProcessing(kDefaultScreenWidth / 2, kDefaultScreenHeight / 2, | 425 TestVideoPacketProcessing(kDefaultScreenWidthPx / 2, |
268 kDefaultErrorLimit); | 426 kDefaultScreenHeightPx / 2, kDefaultErrorLimit); |
427 } | |
428 | |
429 // Verify setting expected image pattern doesn't break video packet processing. | |
430 TEST_F(TestVideoRendererTest, VerifySetExpectedImagePattern) { | |
431 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
432 test_video_renderer_->SetCodecForDecoding( | |
433 protocol::ChannelConfig::Codec::CODEC_VP8); | |
434 | |
435 DCHECK(encoder_); | |
436 DCHECK(test_video_renderer_); | |
437 | |
438 scoped_ptr<webrtc::DesktopFrame> frame = CreateDesktopFrameWithGradient( | |
439 kDefaultScreenWidthPx, kDefaultScreenHeightPx); | |
440 | |
441 // Since we don't care whether expected image pattern is matched or not in | |
442 // this case, an expected color is chosen arbitrarily. | |
443 uint32_t black_color = 0xFF000000; | |
444 | |
445 // Set expected image pattern. | |
446 test_video_renderer_->ExpectAverageColorInRect( | |
447 kDefaultExpectedRect, black_color, base::Bind(&base::DoNothing)); | |
448 | |
449 // Post test video packet. | |
450 scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); | |
451 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | |
452 base::Bind(&base::DoNothing)); | |
453 } | |
454 | |
455 // Verify correct image pattern can be matched for VP8. | |
456 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP8) { | |
457 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
458 test_video_renderer_->SetCodecForDecoding( | |
459 protocol::ChannelConfig::Codec::CODEC_VP8); | |
460 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
461 kDefaultExpectedRect, true); | |
462 } | |
463 | |
464 // Verify expected image pattern can be matched for VP9. | |
465 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP9) { | |
466 encoder_ = VideoEncoderVpx::CreateForVP9(); | |
467 test_video_renderer_->SetCodecForDecoding( | |
468 protocol::ChannelConfig::Codec::CODEC_VP9); | |
469 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
470 kDefaultExpectedRect, true); | |
471 } | |
472 | |
473 // Verify expected image pattern can be matched for VERBATIM. | |
474 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVERBATIM) { | |
475 encoder_.reset(new VideoEncoderVerbatim()); | |
476 test_video_renderer_->SetCodecForDecoding( | |
477 protocol::ChannelConfig::Codec::CODEC_VERBATIM); | |
478 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
479 kDefaultExpectedRect, true); | |
480 } | |
481 | |
482 // Verify incorrect image pattern shouldn't be matched for VP8. | |
483 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP8) { | |
484 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
485 test_video_renderer_->SetCodecForDecoding( | |
486 protocol::ChannelConfig::Codec::CODEC_VP8); | |
487 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
488 kDefaultExpectedRect, false); | |
489 } | |
490 | |
491 // Verify incorrect image pattern shouldn't be matched for VP9. | |
492 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP9) { | |
493 encoder_ = VideoEncoderVpx::CreateForVP9(); | |
494 test_video_renderer_->SetCodecForDecoding( | |
495 protocol::ChannelConfig::Codec::CODEC_VP9); | |
 496 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
497 kDefaultExpectedRect, false); | |
498 } | |
499 | |
500 // Verify incorrect image pattern shouldn't be matched for VERBATIM. | |
501 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVERBATIM) { | |
502 encoder_.reset(new VideoEncoderVerbatim()); | |
503 test_video_renderer_->SetCodecForDecoding( | |
504 protocol::ChannelConfig::Codec::CODEC_VERBATIM); | |
505 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
506 kDefaultExpectedRect, false); | |
269 } | 507 } |
270 | 508 |
271 } // namespace test | 509 } // namespace test |
272 } // namespace remoting | 510 } // namespace remoting |