Chromium Code Reviews

Side by Side Diff: remoting/test/test_video_renderer.cc

Issue 1219923011: Added image pattern comparison logic for test interface and fixture. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: "Minor update on naming and comments, also add a 10min timer to prevent bugs from hanging the syste…" Created 5 years, 5 months ago
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "remoting/test/test_video_renderer.h" 5 #include "remoting/test/test_video_renderer.h"
6 6
7 #include <cmath>
8
7 #include "base/bind.h" 9 #include "base/bind.h"
10 #include "base/callback_helpers.h"
8 #include "base/logging.h" 11 #include "base/logging.h"
9 #include "base/synchronization/lock.h" 12 #include "base/synchronization/lock.h"
10 #include "base/thread_task_runner_handle.h" 13 #include "base/thread_task_runner_handle.h"
11 #include "base/threading/thread.h" 14 #include "base/threading/thread.h"
12 #include "remoting/codec/video_decoder.h" 15 #include "remoting/codec/video_decoder.h"
13 #include "remoting/codec/video_decoder_verbatim.h" 16 #include "remoting/codec/video_decoder_verbatim.h"
14 #include "remoting/codec/video_decoder_vpx.h" 17 #include "remoting/codec/video_decoder_vpx.h"
15 #include "remoting/proto/video.pb.h" 18 #include "remoting/proto/video.pb.h"
16 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" 19 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
17 20
21 namespace {
22
 23 // Used to store an RGB color; it can be converted from an RGBA32 value.
24 struct RGBValue {
25 RGBValue(int r, int g, int b) : red(r), green(g), blue(b) {}
joedow 2015/07/13 19:30:03 nit: newline between c'tor and members would be good
liaoyuke 2015/07/13 20:43:18 Done.
26 int red;
27 int green;
28 int blue;
29 };
30
 31 // Converts an RGBA32 color value to a RGBValue.
joedow 2015/07/13 19:30:03 nit: This now converts a uint32_t to RgbValue since …
liaoyuke 2015/07/13 20:43:18 Done.
32 RGBValue ConvertRGBA32Tovalue(uint32_t color) {
33 RGBValue rgb_value((color >> 16) & 0xFF, (color >> 8) & 0xFF, color & 0xFF);
34 return rgb_value;
35 }
36
 37 // Maximum color error allowed, as a fraction, to account for frame resizing and lossy encoding error.
38 const double kMaxColorError = 0.02;
39 } // namespace
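
For reference, a minimal standalone sketch of the unpacking that ConvertRGBA32Tovalue() performs, assuming the packed color is laid out as 0xAARRGGBB (which is what the shifts above imply); the sample color value is hypothetical:

    #include <cstdint>
    #include <cstdio>

    int main() {
      // Red occupies bits 16-23, green bits 8-15, blue bits 0-7 of the packed value.
      uint32_t color = 0x00FF8040;      // hypothetical packed color
      int red = (color >> 16) & 0xFF;   // 0xFF -> 255
      int green = (color >> 8) & 0xFF;  // 0x80 -> 128
      int blue = color & 0xFF;          // 0x40 -> 64
      std::printf("R=%d G=%d B=%d\n", red, green, blue);
      return 0;
    }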
40
18 namespace remoting { 41 namespace remoting {
19 namespace test { 42 namespace test {
20 43
21 // Implements video decoding functionality. 44 // Implements video decoding functionality.
22 class TestVideoRenderer::Core { 45 class TestVideoRenderer::Core {
23 public: 46 public:
24 Core(); 47 Core();
25 ~Core(); 48 ~Core();
26 49
27 // Initializes the internal structures of the class. 50 // Initializes the internal structures of the class.
28 void Initialize(); 51 void Initialize();
29 52
30 // Used to decode video packets. 53 // Used to decode video packets.
31 void ProcessVideoPacket(scoped_ptr<VideoPacket> packet, 54 void ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
32 const base::Closure& done); 55 const base::Closure& done);
33 56
34 // Initialize a decoder to decode video packets. 57 // Initialize a decoder to decode video packets.
35 void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec); 58 void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec);
36 59
37 // Returns a copy of the current buffer. 60 // Returns a copy of the current frame.
38 scoped_ptr<webrtc::DesktopFrame> GetBufferForTest() const; 61 scoped_ptr<webrtc::DesktopFrame> GetCurrentFrameForTest() const;
39 62
40 // Set expected image pattern for comparison and the callback will be called 63 // Set expected image pattern for comparison and the callback will be called
41 // when the pattern is matched. 64 // when the pattern is matched.
42 void SetImagePatternAndMatchedCallback( 65 void SetImagePatternAndMatchedCallback(
43 const webrtc::DesktopRect& expected_rect, 66 const webrtc::DesktopRect& expected_rect,
44 const RgbaColor& expected_color, 67 uint32_t expected_avg_color,
45 const base::Closure& image_pattern_matched_callback); 68 const base::Closure& image_pattern_matched_callback);
46 69
47 private: 70 private:
 71 // Returns the average color of pixels that fall within |rect| on the current frame.
72 RGBValue CalculateAverageColorvalue(const webrtc::DesktopRect& rect) const;
Sergey Ulanov 2015/07/13 19:42:18 s/value/Value/ or just remove "value"
liaoyuke 2015/07/13 20:43:18 Done.
73
74 // Compares |candidate_avg_value| to |expected_avg_color_|.
75 // Returns true if the root mean square of the errors in the R, G and B
76 // components does not exceed a given limit.
77 bool ExpectedColorIsMatched(const RGBValue& candidate_avg_value) const;
78
48 // Used to ensure Core methods are called on the same thread. 79 // Used to ensure Core methods are called on the same thread.
49 base::ThreadChecker thread_checker_; 80 base::ThreadChecker thread_checker_;
50 81
51 // Used to decode video packets. 82 // Used to decode video packets.
52 scoped_ptr<VideoDecoder> decoder_; 83 scoped_ptr<VideoDecoder> decoder_;
53 84
54 // Updated region of the current desktop frame compared to previous one. 85 // Updated region of the current desktop frame compared to previous one.
55 webrtc::DesktopRegion updated_region_; 86 webrtc::DesktopRegion updated_region_;
56 87
57 // Screen size of the remote host. 88 // Screen size of the remote host.
58 webrtc::DesktopSize screen_size_; 89 webrtc::DesktopSize screen_size_;
59 90
60 // Used to post tasks back to main thread. 91 // Used to post tasks back to main thread.
61 scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; 92 scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
62 93
63 // Used to store decoded video frame. 94 // Used to store decoded video frame.
64 scoped_ptr<webrtc::DesktopFrame> buffer_; 95 scoped_ptr<webrtc::DesktopFrame> frame_;
65 96
66 // Protects access to |buffer_|. 97 // Protects access to |frame_|.
67 mutable base::Lock lock_; 98 mutable base::Lock lock_;
68 99
69 // Used to store the expected image pattern. 100 // Used to store the expected image pattern.
70 webrtc::DesktopRect expected_rect_; 101 webrtc::DesktopRect expected_rect_;
71 RgbaColor expected_color_; 102 uint32_t expected_avg_color_;
72
73 // Maintains accumulating image pattern.
74 webrtc::DesktopRect accumulating_rect_;
75 RgbaColor accumulating_color_;
76 103
77 // Used to store the callback when expected pattern is matched. 104 // Used to store the callback when expected pattern is matched.
78 base::Closure image_pattern_matched_callback_; 105 base::Closure image_pattern_matched_callback_;
79 106
80 DISALLOW_COPY_AND_ASSIGN(Core); 107 DISALLOW_COPY_AND_ASSIGN(Core);
81 }; 108 };
82 109
83 TestVideoRenderer::Core::Core() 110 TestVideoRenderer::Core::Core()
84 : main_task_runner_(base::ThreadTaskRunnerHandle::Get()) { 111 : main_task_runner_(base::ThreadTaskRunnerHandle::Get()) {
85 thread_checker_.DetachFromThread(); 112 thread_checker_.DetachFromThread();
(...skipping 31 matching lines...)
117 decoder_.reset(new VideoDecoderVerbatim()); 144 decoder_.reset(new VideoDecoderVerbatim());
118 break; 145 break;
119 } 146 }
120 default: { 147 default: {
121 NOTREACHED() << "Unsupported codec: " << codec; 148 NOTREACHED() << "Unsupported codec: " << codec;
122 } 149 }
123 } 150 }
124 } 151 }
125 152
126 scoped_ptr<webrtc::DesktopFrame> 153 scoped_ptr<webrtc::DesktopFrame>
127 TestVideoRenderer::Core::GetBufferForTest() const { 154 TestVideoRenderer::Core::GetCurrentFrameForTest() const {
128 base::AutoLock auto_lock(lock_); 155 base::AutoLock auto_lock(lock_);
129 DCHECK(buffer_); 156 DCHECK(frame_);
130 return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*buffer_.get())); 157 return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*frame_.get()));
131 } 158 }
132 159
133 void TestVideoRenderer::Core::ProcessVideoPacket( 160 void TestVideoRenderer::Core::ProcessVideoPacket(
134 scoped_ptr<VideoPacket> packet, const base::Closure& done) { 161 scoped_ptr<VideoPacket> packet, const base::Closure& done) {
135 DCHECK(thread_checker_.CalledOnValidThread()); 162 DCHECK(thread_checker_.CalledOnValidThread());
136 DCHECK(decoder_); 163 DCHECK(decoder_);
137 DCHECK(packet); 164 DCHECK(packet);
138 165
139 VLOG(2) << "TestVideoRenderer::Core::ProcessVideoPacket() Called"; 166 VLOG(2) << "TestVideoRenderer::Core::ProcessVideoPacket() Called";
140 167
141 // Screen size is attached on the first packet as well as when the 168 // Screen size is attached on the first packet as well as when the
142 // host screen is resized. 169 // host screen is resized.
143 if (packet->format().has_screen_width() && 170 if (packet->format().has_screen_width() &&
144 packet->format().has_screen_height()) { 171 packet->format().has_screen_height()) {
145 webrtc::DesktopSize source_size(packet->format().screen_width(), 172 webrtc::DesktopSize source_size(packet->format().screen_width(),
146 packet->format().screen_height()); 173 packet->format().screen_height());
147 if (!screen_size_.equals(source_size)) { 174 if (!screen_size_.equals(source_size)) {
148 screen_size_ = source_size; 175 screen_size_ = source_size;
149 decoder_->Initialize(screen_size_); 176 decoder_->Initialize(screen_size_);
150 buffer_.reset(new webrtc::BasicDesktopFrame(screen_size_)); 177 frame_.reset(new webrtc::BasicDesktopFrame(screen_size_));
151 } 178 }
152 } 179 }
153 180
154 // To make life easier, assume that the desktop shape is a single rectangle. 181 // To make life easier, assume that the desktop shape is a single rectangle.
155 packet->clear_use_desktop_shape(); 182 packet->clear_use_desktop_shape();
156 if (!decoder_->DecodePacket(*packet.get())) { 183 if (!decoder_->DecodePacket(*packet.get())) {
157 LOG(ERROR) << "Decoder::DecodePacket() failed."; 184 LOG(ERROR) << "Decoder::DecodePacket() failed.";
158 return; 185 return;
159 } 186 }
160 187
161 { 188 {
162 base::AutoLock auto_lock(lock_); 189 base::AutoLock auto_lock(lock_);
163 190
164 // Render the decoded packet and write results to the buffer. 191 // Render the decoded packet and write results to the buffer.
165 // Note that the |updated_region_| maintains the changed regions compared to 192 // Note that the |updated_region_| maintains the changed regions compared to
166 // previous video frame. 193 // previous video frame.
167 decoder_->RenderFrame(screen_size_, 194 decoder_->RenderFrame(screen_size_,
168 webrtc::DesktopRect::MakeWH(screen_size_.width(), 195 webrtc::DesktopRect::MakeWH(screen_size_.width(),
169 screen_size_.height()), buffer_->data(), 196 screen_size_.height()),
170 buffer_->stride(), &updated_region_); 197 frame_->data(), frame_->stride(), &updated_region_);
171 } 198 }
172 199
173 main_task_runner_->PostTask(FROM_HERE, done); 200 main_task_runner_->PostTask(FROM_HERE, done);
174 201
 175 176 // TODO(liaoyuke): Update |accumulating_rect_| and |accumulating_color_|, then 202 // Check to see if an image pattern matched callback has been set, and whether
176 // compare to the expected image pattern to check whether the pattern is 203 // the |expected_rect_| falls within the current frame.
177 // matched or not and update |image_pattern_matched| accordingly. 204 if (image_pattern_matched_callback_.is_null() ||
205 expected_rect_.right() > frame_->size().width() ||
206 expected_rect_.bottom() > frame_->size().height()) {
207 return;
208 }
209
210 // Compare the expected image pattern with the corresponding rectangle region
211 // on the current frame.
212 RGBValue accumulating_avg_value = CalculateAverageColorvalue(expected_rect_);
213 VLOG(2) << accumulating_avg_value.red << " " << accumulating_avg_value.green
214 << " " << accumulating_avg_value.blue;
215
216 if (ExpectedColorIsMatched(accumulating_avg_value)) {
217 main_task_runner_->PostTask(
218 FROM_HERE, base::ResetAndReturn(&image_pattern_matched_callback_));
219 }
178 } 220 }
179 221
180 void TestVideoRenderer::Core::SetImagePatternAndMatchedCallback( 222 void TestVideoRenderer::Core::SetImagePatternAndMatchedCallback(
181 const webrtc::DesktopRect& expected_rect, 223 const webrtc::DesktopRect& expected_rect,
182 const RgbaColor& expected_color, 224 uint32_t expected_avg_color,
183 const base::Closure& image_pattern_matched_callback) { 225 const base::Closure& image_pattern_matched_callback) {
184 DCHECK(thread_checker_.CalledOnValidThread()); 226 DCHECK(thread_checker_.CalledOnValidThread());
185 227
186 expected_rect_ = expected_rect; 228 expected_rect_ = expected_rect;
187 expected_color_ = expected_color; 229 expected_avg_color_ = expected_avg_color;
188 image_pattern_matched_callback_ = image_pattern_matched_callback; 230 image_pattern_matched_callback_ = image_pattern_matched_callback;
189 } 231 }
190 232
233 RGBValue TestVideoRenderer::Core::CalculateAverageColorvalue(
234 const webrtc::DesktopRect& rect) const {
235 int red_sum = 0;
236 int green_sum = 0;
237 int blue_sum = 0;
238
 239 // Loop through pixels that fall within |rect| to obtain the
240 // average color value.
241 for (int y = rect.top(); y < rect.bottom(); ++y) {
242 uint8_t* frame_pos =
243 frame_->data() + (y * frame_->stride() +
244 rect.left() * webrtc::DesktopFrame::kBytesPerPixel);
245
246 // Pixels of decoded video frame are presented in ARGB format.
247 for (int x = 0; x < rect.width(); ++x) {
248 red_sum += frame_pos[2];
249 green_sum += frame_pos[1];
250 blue_sum += frame_pos[0];
251 frame_pos += 4;
252 }
253 }
254
255 int area = rect.width() * rect.height();
256 RGBValue rgb_value(red_sum / area, green_sum / area, blue_sum / area);
257 return rgb_value;
258 }
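
The pointer arithmetic in CalculateAverageColorvalue() assumes the webrtc::DesktopFrame layout of 4 bytes per pixel stored as B, G, R, A in memory, which is what the [0]/[1]/[2] indexing above implies. A self-contained sketch of the same addressing, with a hypothetical buffer standing in for the decoded frame:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
      const int kBytesPerPixel = 4;               // matches webrtc::DesktopFrame
      const int width = 4;
      const int height = 2;
      const int stride = width * kBytesPerPixel;  // bytes per row (no row padding here)
      std::vector<uint8_t> data(stride * height, 0);

      // Write the pixel at (x=2, y=1) as B, G, R, A and read the channels back
      // the same way the averaging loop does.
      const int x = 2;
      const int y = 1;
      uint8_t* pos = data.data() + y * stride + x * kBytesPerPixel;
      pos[0] = 64;   // blue
      pos[1] = 128;  // green
      pos[2] = 255;  // red
      pos[3] = 0;    // alpha, ignored by the comparison
      std::printf("R=%d G=%d B=%d\n", pos[2], pos[1], pos[0]);
      return 0;
    }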
259
260 bool TestVideoRenderer::Core::ExpectedColorIsMatched(
261 const RGBValue& candidate_avg_value) const {
262 RGBValue expected_avg_value = ConvertRGBA32Tovalue(expected_avg_color_);
263 double error_sum_squares = 0;
264 double red_error = expected_avg_value.red - candidate_avg_value.red;
265 double green_error = expected_avg_value.green - candidate_avg_value.green;
266 double blue_error = expected_avg_value.blue - candidate_avg_value.blue;
267 error_sum_squares = red_error * red_error + green_error * green_error +
268 blue_error * blue_error;
269 error_sum_squares /= (255.0 * 255.0);
270
271 return sqrt(error_sum_squares / 3) < kMaxColorError;
272 }
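
In other words, the frame is accepted when the root-mean-square error across the three channels, normalized to the 0-255 range, stays below kMaxColorError (0.02). A worked example with hypothetical averages, expected (200, 100, 50) versus measured (205, 98, 47):

    error = sqrt(((205 - 200)^2 + (98 - 100)^2 + (47 - 50)^2) / (3 * 255^2))
          = sqrt((25 + 4 + 9) / 195075)
          ≈ 0.014 < 0.02, so image_pattern_matched_callback_ would fire.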
273
191 TestVideoRenderer::TestVideoRenderer() 274 TestVideoRenderer::TestVideoRenderer()
192 : video_decode_thread_( 275 : video_decode_thread_(
193 new base::Thread("TestVideoRendererVideoDecodingThread")), 276 new base::Thread("TestVideoRendererVideoDecodingThread")),
194 weak_factory_(this) { 277 weak_factory_(this) {
195 DCHECK(thread_checker_.CalledOnValidThread()); 278 DCHECK(thread_checker_.CalledOnValidThread());
196 279
197 core_.reset(new Core()); 280 core_.reset(new Core());
198 if (!video_decode_thread_->Start()) { 281 if (!video_decode_thread_->Start()) {
199 LOG(ERROR) << "Cannot start TestVideoRenderer"; 282 LOG(ERROR) << "Cannot start TestVideoRenderer";
200 } else { 283 } else {
(...skipping 59 matching lines...)
260 const protocol::ChannelConfig::Codec codec) { 343 const protocol::ChannelConfig::Codec codec) {
261 DCHECK(thread_checker_.CalledOnValidThread()); 344 DCHECK(thread_checker_.CalledOnValidThread());
262 345
263 VLOG(2) << "TestVideoRenderer::SetDecoder() Called"; 346 VLOG(2) << "TestVideoRenderer::SetDecoder() Called";
264 video_decode_task_runner_->PostTask( 347 video_decode_task_runner_->PostTask(
265 FROM_HERE, base::Bind(&Core::SetCodecForDecoding, 348 FROM_HERE, base::Bind(&Core::SetCodecForDecoding,
266 base::Unretained(core_.get()), 349 base::Unretained(core_.get()),
267 codec)); 350 codec));
268 } 351 }
269 352
270 scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetBufferForTest() const { 353 scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetCurrentFrameForTest()
354 const {
271 DCHECK(thread_checker_.CalledOnValidThread()); 355 DCHECK(thread_checker_.CalledOnValidThread());
272 356
273 return core_->GetBufferForTest(); 357 return core_->GetCurrentFrameForTest();
274 } 358 }
275 359
276 void TestVideoRenderer::SetImagePatternAndMatchedCallback( 360 void TestVideoRenderer::SetImagePatternAndMatchedCallback(
277 const webrtc::DesktopRect& expected_rect, 361 const webrtc::DesktopRect& expected_rect,
278 const RgbaColor& expected_color, 362 uint32_t expected_avg_color,
279 const base::Closure& image_pattern_matched_callback) { 363 const base::Closure& image_pattern_matched_callback) {
280 DCHECK(thread_checker_.CalledOnValidThread()); 364 DCHECK(thread_checker_.CalledOnValidThread());
365 DCHECK(!expected_rect.is_empty()) << "Expected rect cannot be empty";
281 366
282 DVLOG(2) << "TestVideoRenderer::SetImagePatternAndMatchedCallback() Called"; 367 DVLOG(2) << "TestVideoRenderer::SetImagePatternAndMatchedCallback() Called";
283 video_decode_task_runner_->PostTask( 368 video_decode_task_runner_->PostTask(
284 FROM_HERE, base::Bind(&Core::SetImagePatternAndMatchedCallback, 369 FROM_HERE,
285 base::Unretained(core_.get()), expected_rect, 370 base::Bind(&Core::SetImagePatternAndMatchedCallback,
286 expected_color, image_pattern_matched_callback)); 371 base::Unretained(core_.get()), expected_rect,
372 expected_avg_color, image_pattern_matched_callback));
287 } 373 }
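
As a usage illustration only (the real wiring lives in the test fixture elsewhere in this CL), a caller could drive the new API roughly as follows; the renderer pointer, rect, color, and RunLoop plumbing below are all hypothetical:

    // Wait until the average color inside |match_rect| on the decoded frame
    // converges on |expected_avg_color| (packed as 0xAARRGGBB).
    base::RunLoop run_loop;
    webrtc::DesktopRect match_rect = webrtc::DesktopRect::MakeXYWH(10, 10, 100, 100);
    uint32_t expected_avg_color = 0x00FF8040;

    test_video_renderer->SetImagePatternAndMatchedCallback(
        match_rect, expected_avg_color, run_loop.QuitClosure());
    run_loop.Run();  // Returns once ExpectedColorIsMatched() reports a match.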
288 374
289 } // namespace test 375 } // namespace test
290 } // namespace remoting 376 } // namespace remoting