OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/test/test_video_renderer.h" | 5 #include "remoting/test/test_video_renderer.h" |
6 | 6 |
7 #include <cmath> | 7 #include <cmath> |
8 | 8 |
9 #include "base/memory/scoped_vector.h" | 9 #include "base/memory/scoped_vector.h" |
10 #include "base/message_loop/message_loop.h" | 10 #include "base/message_loop/message_loop.h" |
11 #include "base/run_loop.h" | 11 #include "base/run_loop.h" |
12 #include "base/thread_task_runner_handle.h" | |
12 #include "base/timer/timer.h" | 13 #include "base/timer/timer.h" |
13 #include "media/base/video_frame.h" | 14 #include "media/base/video_frame.h" |
14 #include "remoting/codec/video_encoder.h" | 15 #include "remoting/codec/video_encoder.h" |
15 #include "remoting/codec/video_encoder_verbatim.h" | 16 #include "remoting/codec/video_encoder_verbatim.h" |
16 #include "remoting/codec/video_encoder_vpx.h" | 17 #include "remoting/codec/video_encoder_vpx.h" |
17 #include "remoting/proto/video.pb.h" | 18 #include "remoting/proto/video.pb.h" |
18 #include "testing/gtest/include/gtest/gtest.h" | 19 #include "testing/gtest/include/gtest/gtest.h" |
19 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" | 20 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" |
20 #include "third_party/webrtc/modules/desktop_capture/desktop_region.h" | 21 #include "third_party/webrtc/modules/desktop_capture/desktop_region.h" |
21 | 22 |
22 namespace { | 23 namespace { |
23 const int kBytesPerPixel = 4; | 24 |
 24 const int kDefaultScreenWidth = 1024; | 25 // Used to verify whether the image pattern is matched. |
25 const int kDefaultScreenHeight = 768; | 26 void SecondPacketDoneHandler(const base::Closure& done_closure, |
27 bool* second_packet_done_is_called) { | |
28 *second_packet_done_is_called = true; | |
29 done_closure.Run(); | |
30 } | |
31 | |
32 const int kDefaultScreenWidthPx = 1024; | |
33 const int kDefaultScreenHeightPx = 768; | |
34 | |
 35 // Default max error for encoding and decoding, expressed as a fraction in [0, 1]. | |
26 const double kDefaultErrorLimit = 0.02; | 36 const double kDefaultErrorLimit = 0.02; |
27 } | 37 |
38 // Default expected rect for image pattern, measured in pixels. | |
39 const webrtc::DesktopRect kDefaultExpectedRect = | |
40 webrtc::DesktopRect::MakeLTRB(100, 100, 200, 200); | |
41 } // namespace | |
28 | 42 |
29 namespace remoting { | 43 namespace remoting { |
30 namespace test { | 44 namespace test { |
31 | 45 |
 32 // Provides basic functionality for the TestVideoRenderer tests below. | 46 // Provides basic functionality for the TestVideoRenderer tests below. |
 33 // This fixture also creates a MessageLoop to test decoding video packets. | 47 // This fixture also creates a MessageLoop to test decoding video packets. |
34 class TestVideoRendererTest : public testing::Test { | 48 class TestVideoRendererTest : public testing::Test { |
35 public: | 49 public: |
36 TestVideoRendererTest(); | 50 TestVideoRendererTest(); |
37 ~TestVideoRendererTest() override; | 51 ~TestVideoRendererTest() override; |
38 | 52 |
 39 // Generate a frame containing a gradient and test decoding of | 53 // Handles creating a frame and sending it to TestVideoRenderer for processing. |
40 // TestVideoRenderer. The original frame is compared to the one obtained from | |
41 // decoding the video packet, and the error at each pixel is the root mean | |
42 // square of the errors in the R, G and B components, each normalized to | |
43 // [0, 1]. This routine checks that the mean error over all pixels do not | |
44 // exceed a given limit. | |
45 void TestVideoPacketProcessing(int screen_width, int screen_height, | 54 void TestVideoPacketProcessing(int screen_width, int screen_height, |
46 double error_limit); | 55 double error_limit); |
47 | 56 |
57 // Handles setting an image pattern and sending a frame to TestVideoRenderer. | |
58 // |expect_to_match| indicates if the image pattern is expected to match. | |
59 void TestImagePatternMatch(int screen_width, | |
60 int screen_height, | |
61 const webrtc::DesktopRect& expected_rect, | |
62 bool expect_to_match); | |
63 | |
48 // Generate a basic desktop frame containing a gradient. | 64 // Generate a basic desktop frame containing a gradient. |
49 scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient( | 65 scoped_ptr<webrtc::DesktopFrame> CreateDesktopFrameWithGradient( |
50 int screen_width, int screen_height) const; | 66 int screen_width, int screen_height) const; |
51 | 67 |
52 protected: | 68 protected: |
53 // Used to post tasks to the message loop. | 69 // Used to post tasks to the message loop. |
54 scoped_ptr<base::RunLoop> run_loop_; | 70 scoped_ptr<base::RunLoop> run_loop_; |
55 | 71 |
56 // Used to set timeouts and delays. | 72 // Used to set timeouts and delays. |
57 scoped_ptr<base::Timer> timer_; | 73 scoped_ptr<base::Timer> timer_; |
58 | 74 |
 59 // Manages the decoder and processes generated video packets. | 75 // Manages the decoder and processes generated video packets. |
60 scoped_ptr<TestVideoRenderer> test_video_renderer_; | 76 scoped_ptr<TestVideoRenderer> test_video_renderer_; |
61 | 77 |
62 // Used to encode desktop frames to generate video packets. | 78 // Used to encode desktop frames to generate video packets. |
63 scoped_ptr<VideoEncoder> encoder_; | 79 scoped_ptr<VideoEncoder> encoder_; |
64 | 80 |
65 private: | 81 private: |
66 // testing::Test interface. | 82 // testing::Test interface. |
67 void SetUp() override; | 83 void SetUp() override; |
68 | 84 |
 69 // return the mean error of two frames. | 85 // Sets the image pattern, sends a video packet and returns whether the |
 86 // expected pattern is matched. | |
87 bool SendPacketAndWaitForMatch(scoped_ptr<VideoPacket> packet, | |
88 const webrtc::DesktopRect& expected_rect, | |
89 uint32_t expected_color); | |
90 | |
 91 // Returns the average color value of pixels that fall within |rect|. | |
92 // NOTE: Callers should not release the objects. | |
93 uint32_t CalculateAverageColorValueForFrame( | |
94 const webrtc::DesktopFrame* frame, | |
95 const webrtc::DesktopRect& rect) const; | |
96 | |
 97 // Returns the mean error of two frames over all pixels, where the error at each | |
98 // pixel is the root mean square of the errors in the R, G and B components, | |
99 // each normalized to [0, 1]. | |
70 double CalculateError(const webrtc::DesktopFrame* original_frame, | 100 double CalculateError(const webrtc::DesktopFrame* original_frame, |
71 const webrtc::DesktopFrame* decoded_frame) const; | 101 const webrtc::DesktopFrame* decoded_frame) const; |
72 | 102 |
73 // Fill a desktop frame with a gradient. | 103 // Fill a desktop frame with a gradient. |
74 void FillFrameWithGradient(webrtc::DesktopFrame* frame) const; | 104 void FillFrameWithGradient(webrtc::DesktopFrame* frame) const; |
75 | 105 |
76 // The thread's message loop. Valid only when the thread is alive. | 106 // The thread's message loop. Valid only when the thread is alive. |
77 scoped_ptr<base::MessageLoop> message_loop_; | 107 scoped_ptr<base::MessageLoop> message_loop_; |
78 | 108 |
79 DISALLOW_COPY_AND_ASSIGN(TestVideoRendererTest); | 109 DISALLOW_COPY_AND_ASSIGN(TestVideoRendererTest); |
(...skipping 12 matching lines...) | |
92 } | 122 } |
93 test_video_renderer_.reset(new TestVideoRenderer()); | 123 test_video_renderer_.reset(new TestVideoRenderer()); |
94 } | 124 } |
95 | 125 |
96 void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, | 126 void TestVideoRendererTest::TestVideoPacketProcessing(int screen_width, |
97 int screen_height, | 127 int screen_height, |
98 double error_limit) { | 128 double error_limit) { |
99 DCHECK(encoder_); | 129 DCHECK(encoder_); |
100 DCHECK(test_video_renderer_); | 130 DCHECK(test_video_renderer_); |
101 | 131 |
132 // Generate a frame containing a gradient. | |
102 scoped_ptr<webrtc::DesktopFrame> original_frame = | 133 scoped_ptr<webrtc::DesktopFrame> original_frame = |
103 CreateDesktopFrameWithGradient(screen_width, screen_height); | 134 CreateDesktopFrameWithGradient(screen_width, screen_height); |
104 EXPECT_TRUE(original_frame); | 135 EXPECT_TRUE(original_frame); |
136 | |
105 scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get()); | 137 scoped_ptr<VideoPacket> packet = encoder_->Encode(*original_frame.get()); |
138 | |
106 DCHECK(!run_loop_ || !run_loop_->running()); | 139 DCHECK(!run_loop_ || !run_loop_->running()); |
140 DCHECK(!timer_->IsRunning()); | |
107 run_loop_.reset(new base::RunLoop()); | 141 run_loop_.reset(new base::RunLoop()); |
108 | 142 |
 143 // Set an extremely long timeout (10 minutes) to prevent bugs from hanging the system. | |
144 timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10), | |
145 run_loop_->QuitClosure()); | |
146 | |
109 // Wait for the video packet to be processed and rendered to buffer. | 147 // Wait for the video packet to be processed and rendered to buffer. |
110 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | 148 test_video_renderer_->ProcessVideoPacket(packet.Pass(), |
111 run_loop_->QuitClosure()); | 149 run_loop_->QuitClosure()); |
150 | |
112 run_loop_->Run(); | 151 run_loop_->Run(); |
152 EXPECT_TRUE(timer_->IsRunning()); | |
153 timer_->Stop(); | |
154 run_loop_.reset(); | |
113 | 155 |
114 scoped_ptr<webrtc::DesktopFrame> buffer_copy = | 156 scoped_ptr<webrtc::DesktopFrame> buffer_copy = |
115 test_video_renderer_->GetBufferForTest(); | 157 test_video_renderer_->GetCurrentFrameForTest(); |
116 EXPECT_NE(buffer_copy, nullptr); | 158 EXPECT_NE(buffer_copy, nullptr); |
159 | |
160 // The original frame is compared to the decoded video frame to check that | |
161 // the mean error over all pixels does not exceed a given limit. | |
117 double error = CalculateError(original_frame.get(), buffer_copy.get()); | 162 double error = CalculateError(original_frame.get(), buffer_copy.get()); |
118 EXPECT_LT(error, error_limit); | 163 EXPECT_LT(error, error_limit); |
119 } | 164 } |
120 | 165 |
166 bool TestVideoRendererTest::SendPacketAndWaitForMatch( | |
167 scoped_ptr<VideoPacket> packet, | |
168 const webrtc::DesktopRect& expected_rect, | |
169 uint32_t expected_color) { | |
170 DCHECK(!run_loop_ || !run_loop_->running()); | |
171 DCHECK(!timer_->IsRunning()); | |
172 run_loop_.reset(new base::RunLoop()); | |
173 | |
 174 // Set an extremely long timeout (10 minutes) to prevent bugs from hanging the system. | |
175 timer_->Start(FROM_HERE, base::TimeDelta::FromMinutes(10), | |
176 run_loop_->QuitClosure()); | |
177 | |
178 // Set expected image pattern. | |
179 test_video_renderer_->SetImagePatternAndMatchedCallback( | |
180 expected_rect, expected_color, run_loop_->QuitClosure()); | |
181 | |
182 // Used to verify if the expected image pattern will be matched by |packet|. | |
183 scoped_ptr<VideoPacket> packet_copy(new VideoPacket(*packet.get())); | |
184 | |
185 // Post first test packet: |packet|. | |
186 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | |
187 base::Bind(&base::DoNothing)); | |
188 | |
 189 // Second packet: |packet_copy| is posted, and |second_packet_done_callback| | |
 190 // will always be posted back to the main thread; however, whether it is called | |
 191 // before the run loop quits depends on whether the expected pattern is matched. | |
192 bool second_packet_done_is_called = false; | |
193 base::Closure second_packet_done_callback = | |
194 base::Bind(&SecondPacketDoneHandler, run_loop_->QuitClosure(), | |
195 &second_packet_done_is_called); | |
196 | |
197 test_video_renderer_->ProcessVideoPacket(packet_copy.Pass(), | |
198 second_packet_done_callback); | |
199 | |
200 run_loop_->Run(); | |
201 EXPECT_TRUE(timer_->IsRunning()); | |
202 timer_->Stop(); | |
203 run_loop_.reset(); | |
204 | |
 205 // If the expected image pattern is matched, the QuitClosure of |run_loop_| will | |
 206 // be called before |second_packet_done_callback|, which leaves | |
 207 // |second_packet_done_is_called| false. | |
208 bool image_pattern_is_matched = !second_packet_done_is_called; | |
209 | |
210 return image_pattern_is_matched; | |
211 } | |
212 | |
213 void TestVideoRendererTest::TestImagePatternMatch( | |
214 int screen_width, | |
215 int screen_height, | |
216 const webrtc::DesktopRect& expected_rect, | |
217 bool expect_to_match) { | |
218 DCHECK(encoder_); | |
219 DCHECK(test_video_renderer_); | |
220 | |
221 scoped_ptr<webrtc::DesktopFrame> frame = | |
222 CreateDesktopFrameWithGradient(screen_width, screen_height); | |
223 uint32_t expected_color = | |
224 CalculateAverageColorValueForFrame(frame.get(), expected_rect); | |
225 scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); | |
226 | |
227 if (expect_to_match) { | |
228 EXPECT_TRUE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect, | |
229 expected_color)); | |
230 } else { | |
231 // Shift each channel by 128. | |
232 // e.g. (10, 127, 200) -> (138, 255, 73). | |
 233 // In this way, the error between the expected color and the true value is | |
 234 // always around 0.5. | |
235 int red_shift = (((expected_color >> 16) & 0xFF) + 128) % 255; | |
236 int green_shift = (((expected_color >> 8) & 0xFF) + 128) % 255; | |
237 int blue_shift = ((expected_color & 0xFF) + 128) % 255; | |
238 | |
239 int expected_color_shift = | |
240 0xFF000000 | (red_shift << 16) | (green_shift << 8) | blue_shift; | |
241 | |
242 EXPECT_FALSE(SendPacketAndWaitForMatch(packet.Pass(), expected_rect, | |
243 expected_color_shift)); | |
244 } | |
245 } | |
246 | |
247 uint32_t TestVideoRendererTest::CalculateAverageColorValueForFrame( | |
248 const webrtc::DesktopFrame* frame, | |
249 const webrtc::DesktopRect& rect) const { | |
250 int red_sum = 0; | |
251 int green_sum = 0; | |
252 int blue_sum = 0; | |
253 | |
 254 // Loop through pixels that fall within |rect| to obtain the | |
255 // average color value. | |
256 for (int y = rect.top(); y < rect.bottom(); ++y) { | |
257 uint8_t* frame_pos = | |
258 frame->data() + (y * frame->stride() + | |
259 rect.left() * webrtc::DesktopFrame::kBytesPerPixel); | |
260 | |
261 // Pixels of decoded video frame are presented in ARGB format. | |
262 for (int x = 0; x < rect.width(); ++x) { | |
263 red_sum += frame_pos[2]; | |
264 green_sum += frame_pos[1]; | |
265 blue_sum += frame_pos[0]; | |
266 frame_pos += 4; | |
267 } | |
268 } | |
269 | |
270 int area = rect.width() * rect.height(); | |
271 return 0xFF000000 | ((red_sum / area) << 16) | ((green_sum / area) << 8) | | |
272 (blue_sum / area); | |
273 } | |
274 | |
121 double TestVideoRendererTest::CalculateError( | 275 double TestVideoRendererTest::CalculateError( |
122 const webrtc::DesktopFrame* original_frame, | 276 const webrtc::DesktopFrame* original_frame, |
123 const webrtc::DesktopFrame* decoded_frame) const { | 277 const webrtc::DesktopFrame* decoded_frame) const { |
124 DCHECK(original_frame); | 278 DCHECK(original_frame); |
125 DCHECK(decoded_frame); | 279 DCHECK(decoded_frame); |
126 | 280 |
127 // Check size remains the same after encoding and decoding. | 281 // Check size remains the same after encoding and decoding. |
128 EXPECT_EQ(original_frame->size().width(), decoded_frame->size().width()); | 282 EXPECT_EQ(original_frame->size().width(), decoded_frame->size().width()); |
129 EXPECT_EQ(original_frame->size().height(), decoded_frame->size().height()); | 283 EXPECT_EQ(original_frame->size().height(), decoded_frame->size().height()); |
130 EXPECT_EQ(original_frame->stride(), decoded_frame->stride()); | 284 EXPECT_EQ(original_frame->stride(), decoded_frame->stride()); |
(...skipping 18 matching lines...) | |
149 // | 303 // |
150 for (int height = 0; height < screen_height; ++height) { | 304 for (int height = 0; height < screen_height; ++height) { |
151 uint8_t* original_ptr = original_frame->data() + | 305 uint8_t* original_ptr = original_frame->data() + |
152 height * original_frame->stride(); | 306 height * original_frame->stride(); |
153 uint8_t* decoded_ptr = decoded_frame->data() + | 307 uint8_t* decoded_ptr = decoded_frame->data() + |
154 height * decoded_frame->stride(); | 308 height * decoded_frame->stride(); |
155 | 309 |
156 for (int width = 0; width < screen_width; ++width) { | 310 for (int width = 0; width < screen_width; ++width) { |
157 // Errors are calculated in the R, G, B components. | 311 // Errors are calculated in the R, G, B components. |
158 for (int j = 0; j < 3; ++j) { | 312 for (int j = 0; j < 3; ++j) { |
159 int offset = kBytesPerPixel * width + j; | 313 int offset = webrtc::DesktopFrame::kBytesPerPixel * width + j; |
160 double original_value = static_cast<double>(*(original_ptr + offset)); | 314 double original_value = static_cast<double>(*(original_ptr + offset)); |
161 double decoded_value = static_cast<double>(*(decoded_ptr + offset)); | 315 double decoded_value = static_cast<double>(*(decoded_ptr + offset)); |
162 double error = original_value - decoded_value; | 316 double error = original_value - decoded_value; |
163 | 317 |
164 // Normalize the error to [0, 1]. | 318 // Normalize the error to [0, 1]. |
165 error /= 255.0; | 319 error /= 255.0; |
166 error_sum_squares += error * error; | 320 error_sum_squares += error * error; |
167 } | 321 } |
168 } | 322 } |
169 } | 323 } |
(...skipping 24 matching lines...) | |
194 *p++ = 0; | 348 *p++ = 0; |
195 } | 349 } |
196 } | 350 } |
197 } | 351 } |
198 | 352 |
199 // Verify video decoding for VP8 Codec. | 353 // Verify video decoding for VP8 Codec. |
200 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) { | 354 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP8) { |
201 encoder_ = VideoEncoderVpx::CreateForVP8(); | 355 encoder_ = VideoEncoderVpx::CreateForVP8(); |
202 test_video_renderer_->SetCodecForDecoding( | 356 test_video_renderer_->SetCodecForDecoding( |
203 protocol::ChannelConfig::CODEC_VP8); | 357 protocol::ChannelConfig::CODEC_VP8); |
204 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 358 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
205 kDefaultErrorLimit); | 359 kDefaultErrorLimit); |
206 } | 360 } |
207 | 361 |
208 // Verify video decoding for VP9 Codec. | 362 // Verify video decoding for VP9 Codec. |
209 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) { | 363 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVP9) { |
210 encoder_ = VideoEncoderVpx::CreateForVP9(); | 364 encoder_ = VideoEncoderVpx::CreateForVP9(); |
211 test_video_renderer_->SetCodecForDecoding( | 365 test_video_renderer_->SetCodecForDecoding( |
212 protocol::ChannelConfig::CODEC_VP9); | 366 protocol::ChannelConfig::CODEC_VP9); |
213 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 367 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
214 kDefaultErrorLimit); | 368 kDefaultErrorLimit); |
215 } | 369 } |
216 | 370 |
217 | 371 |
218 // Verify video decoding for VERBATIM Codec. | 372 // Verify video decoding for VERBATIM Codec. |
219 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) { | 373 TEST_F(TestVideoRendererTest, VerifyVideoDecodingForVERBATIM) { |
220 encoder_.reset(new VideoEncoderVerbatim()); | 374 encoder_.reset(new VideoEncoderVerbatim()); |
221 test_video_renderer_->SetCodecForDecoding( | 375 test_video_renderer_->SetCodecForDecoding( |
222 protocol::ChannelConfig::CODEC_VERBATIM); | 376 protocol::ChannelConfig::CODEC_VERBATIM); |
223 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 377 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
224 kDefaultErrorLimit); | 378 kDefaultErrorLimit); |
225 } | 379 } |
226 | 380 |
227 // Verify a set of video packets are processed correctly. | 381 // Verify a set of video packets are processed correctly. |
228 TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) { | 382 TEST_F(TestVideoRendererTest, VerifyMultipleVideoProcessing) { |
229 encoder_ = VideoEncoderVpx::CreateForVP8(); | 383 encoder_ = VideoEncoderVpx::CreateForVP8(); |
230 test_video_renderer_->SetCodecForDecoding( | 384 test_video_renderer_->SetCodecForDecoding( |
231 protocol::ChannelConfig::CODEC_VP8); | 385 protocol::ChannelConfig::CODEC_VP8); |
232 | 386 |
233 // Post multiple tasks to |test_video_renderer_|, and it should not crash. | 387 // Post multiple tasks to |test_video_renderer_|, and it should not crash. |
 234 // 20 is chosen because it's large enough to ensure that there will be more | 388 // 20 is chosen because it's large enough to ensure that there will be more |
 235 // than one task on the video decode thread, while small enough that the unit | 389 // than one task on the video decode thread, while small enough that the unit |
 236 // test does not take too long to complete. | 390 // test does not take too long to complete. |
237 const int task_num = 20; | 391 const int task_num = 20; |
238 ScopedVector<VideoPacket> video_packets; | 392 ScopedVector<VideoPacket> video_packets; |
239 for (int i = 0; i < task_num; ++i) { | 393 for (int i = 0; i < task_num; ++i) { |
240 scoped_ptr<webrtc::DesktopFrame> original_frame = | 394 scoped_ptr<webrtc::DesktopFrame> original_frame = |
241 CreateDesktopFrameWithGradient(kDefaultScreenWidth, | 395 CreateDesktopFrameWithGradient(kDefaultScreenWidthPx, |
242 kDefaultScreenHeight); | 396 kDefaultScreenHeightPx); |
243 video_packets.push_back(encoder_->Encode(*original_frame.get())); | 397 video_packets.push_back(encoder_->Encode(*original_frame.get())); |
244 } | 398 } |
245 | 399 |
246 for (int i = 0; i < task_num; ++i) { | 400 for (int i = 0; i < task_num; ++i) { |
247 // Transfer ownership of video packet. | 401 // Transfer ownership of video packet. |
248 VideoPacket* packet = video_packets[i]; | 402 VideoPacket* packet = video_packets[i]; |
249 video_packets[i] = nullptr; | 403 video_packets[i] = nullptr; |
250 test_video_renderer_->ProcessVideoPacket(make_scoped_ptr(packet), | 404 test_video_renderer_->ProcessVideoPacket(make_scoped_ptr(packet), |
251 base::Bind(&base::DoNothing)); | 405 base::Bind(&base::DoNothing)); |
252 } | 406 } |
253 } | 407 } |
254 | 408 |
255 // Verify video packet size change is handled properly. | 409 // Verify video packet size change is handled properly. |
256 TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) { | 410 TEST_F(TestVideoRendererTest, VerifyVideoPacketSizeChange) { |
257 encoder_ = VideoEncoderVpx::CreateForVP8(); | 411 encoder_ = VideoEncoderVpx::CreateForVP8(); |
258 test_video_renderer_->SetCodecForDecoding( | 412 test_video_renderer_->SetCodecForDecoding( |
259 protocol::ChannelConfig::Codec::CODEC_VP8); | 413 protocol::ChannelConfig::Codec::CODEC_VP8); |
260 | 414 |
261 TestVideoPacketProcessing(kDefaultScreenWidth, kDefaultScreenHeight, | 415 TestVideoPacketProcessing(kDefaultScreenWidthPx, kDefaultScreenHeightPx, |
262 kDefaultErrorLimit); | 416 kDefaultErrorLimit); |
263 | 417 |
264 TestVideoPacketProcessing(2 * kDefaultScreenWidth, 2 * kDefaultScreenHeight, | 418 TestVideoPacketProcessing(2 * kDefaultScreenWidthPx, |
265 kDefaultErrorLimit); | 419 2 * kDefaultScreenHeightPx, kDefaultErrorLimit); |
266 | 420 |
267 TestVideoPacketProcessing(kDefaultScreenWidth / 2, kDefaultScreenHeight / 2, | 421 TestVideoPacketProcessing(kDefaultScreenWidthPx / 2, |
268 kDefaultErrorLimit); | 422 kDefaultScreenHeightPx / 2, kDefaultErrorLimit); |
423 } | |
424 | |
425 // Verify setting expected image pattern doesn't break video packet processing. | |
426 TEST_F(TestVideoRendererTest, VerifySetExpectedImagePattern) { | |
427 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
428 test_video_renderer_->SetCodecForDecoding( | |
429 protocol::ChannelConfig::Codec::CODEC_VP8); | |
430 | |
431 DCHECK(encoder_); | |
432 DCHECK(test_video_renderer_); | |
433 | |
434 scoped_ptr<webrtc::DesktopFrame> frame = CreateDesktopFrameWithGradient( | |
435 kDefaultScreenWidthPx, kDefaultScreenHeightPx); | |
436 | |
 437 // Since we don't care whether the expected image pattern is matched in this | |
 438 // case, an expected color is chosen arbitrarily. | |
439 uint32_t black_color = 0xFF000000; | |
440 | |
441 // Set expected image pattern. | |
442 test_video_renderer_->SetImagePatternAndMatchedCallback( | |
443 kDefaultExpectedRect, black_color, base::Bind(&base::DoNothing)); | |
444 | |
445 // Post test video packet. | |
446 scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame.get()); | |
447 test_video_renderer_->ProcessVideoPacket(packet.Pass(), | |
448 base::Bind(&base::DoNothing)); | |
449 } | |
450 | |
451 // Verify correct image pattern can be matched for VP8. | |
452 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP8) { | |
453 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
454 test_video_renderer_->SetCodecForDecoding( | |
455 protocol::ChannelConfig::Codec::CODEC_VP8); | |
456 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
457 kDefaultExpectedRect, true); | |
458 } | |
459 | |
460 // Verify expected image pattern can be matched for VP9. | |
461 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVP9) { | |
462 encoder_ = VideoEncoderVpx::CreateForVP9(); | |
463 test_video_renderer_->SetCodecForDecoding( | |
464 protocol::ChannelConfig::Codec::CODEC_VP9); | |
465 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
466 kDefaultExpectedRect, true); | |
467 } | |
468 | |
469 // Verify expected image pattern can be matched for VERBATIM. | |
470 TEST_F(TestVideoRendererTest, VerifyImagePatternMatchForVERBATIM) { | |
471 encoder_.reset(new VideoEncoderVerbatim()); | |
472 test_video_renderer_->SetCodecForDecoding( | |
473 protocol::ChannelConfig::Codec::CODEC_VERBATIM); | |
474 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
475 kDefaultExpectedRect, true); | |
476 } | |
477 | |
478 // Verify incorrect image pattern shouldn't be matched for VP8. | |
479 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP8) { | |
480 encoder_ = VideoEncoderVpx::CreateForVP8(); | |
481 test_video_renderer_->SetCodecForDecoding( | |
482 protocol::ChannelConfig::Codec::CODEC_VP8); | |
483 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
484 kDefaultExpectedRect, false); | |
485 } | |
486 | |
487 // Verify incorrect image pattern shouldn't be matched for VP9. | |
488 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVP9) { | |
489 encoder_ = VideoEncoderVpx::CreateForVP9(); | |
490 test_video_renderer_->SetCodecForDecoding( | |
491 protocol::ChannelConfig::Codec::CODEC_VP9); | |
 492 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
493 kDefaultExpectedRect, false); | |
494 } | |
495 | |
496 // Verify incorrect image pattern shouldn't be matched for VERBATIM. | |
497 TEST_F(TestVideoRendererTest, VerifyImagePatternNotMatchForVERBATIM) { | |
498 encoder_.reset(new VideoEncoderVerbatim()); | |
499 test_video_renderer_->SetCodecForDecoding( | |
500 protocol::ChannelConfig::Codec::CODEC_VERBATIM); | |
501 TestImagePatternMatch(kDefaultScreenWidthPx, kDefaultScreenHeightPx, | |
502 kDefaultExpectedRect, false); | |
269 } | 503 } |
270 | 504 |
271 } // namespace test | 505 } // namespace test |
272 } // namespace remoting | 506 } // namespace remoting |
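
For reference, here is a minimal standalone sketch (not part of the CL above) of the error metric that CalculateError's comment describes: the error at each pixel is the root mean square of the R, G and B errors, each normalized to [0, 1], and the result is the mean over all pixels. The function name MeanRmsError and the raw byte-buffer interface are illustrative assumptions; the frame layout (4 bytes per pixel in B, G, R, A order with a per-row stride in bytes) follows webrtc::DesktopFrame, and the exact grouping of the averaging in the CL's implementation may differ.

// error_metric_sketch.cc - illustrative only; not part of the CL above.
#include <cmath>
#include <cstdint>
#include <cstdio>
#include <vector>

// Mean over all pixels of the per-pixel RMS error of the R, G and B channels,
// each normalized to [0, 1]. Both frames are assumed to be the same size,
// 4 bytes per pixel (B, G, R, A byte order), with |stride| bytes per row.
double MeanRmsError(const uint8_t* original, const uint8_t* decoded,
                    int width, int height, int stride) {
  const int kBytesPerPixel = 4;
  double error_sum = 0.0;
  for (int y = 0; y < height; ++y) {
    const uint8_t* orig_row = original + y * stride;
    const uint8_t* dec_row = decoded + y * stride;
    for (int x = 0; x < width; ++x) {
      double sum_squares = 0.0;
      // Only the B, G and R channels (offsets 0..2) contribute; alpha is skipped.
      for (int c = 0; c < 3; ++c) {
        double diff = (orig_row[x * kBytesPerPixel + c] -
                       dec_row[x * kBytesPerPixel + c]) / 255.0;
        sum_squares += diff * diff;
      }
      error_sum += std::sqrt(sum_squares / 3.0);
    }
  }
  return error_sum / (width * height);
}

int main() {
  // Two tiny 2x2 frames that differ slightly in one pixel.
  const int width = 2, height = 2, stride = width * 4;
  std::vector<uint8_t> a(height * stride, 100);
  std::vector<uint8_t> b = a;
  b[0] = 110;  // Perturb the blue channel of the first pixel.
  std::printf("mean error = %f\n",
              MeanRmsError(a.data(), b.data(), width, height, stride));
  return 0;
}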