Chromium Code Reviews

Side by Side Diff: remoting/test/test_video_renderer.cc

Issue 1219923011: Added image pattern comparison logic for test interface and fixture. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: "Minor update on naming and comments: addressed feedback from Joe and Sergey" Created 5 years, 5 months ago
1 // Copyright 2015 The Chromium Authors. All rights reserved. 1 // Copyright 2015 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "remoting/test/test_video_renderer.h" 5 #include "remoting/test/test_video_renderer.h"
6 6
7 #include <cmath>
8
7 #include "base/bind.h" 9 #include "base/bind.h"
10 #include "base/callback_helpers.h"
8 #include "base/logging.h" 11 #include "base/logging.h"
9 #include "base/synchronization/lock.h" 12 #include "base/synchronization/lock.h"
10 #include "base/thread_task_runner_handle.h" 13 #include "base/thread_task_runner_handle.h"
11 #include "base/threading/thread.h" 14 #include "base/threading/thread.h"
12 #include "remoting/codec/video_decoder.h" 15 #include "remoting/codec/video_decoder.h"
13 #include "remoting/codec/video_decoder_verbatim.h" 16 #include "remoting/codec/video_decoder_verbatim.h"
14 #include "remoting/codec/video_decoder_vpx.h" 17 #include "remoting/codec/video_decoder_vpx.h"
15 #include "remoting/proto/video.pb.h" 18 #include "remoting/proto/video.pb.h"
16 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" 19 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
17 20
21 namespace {
22
23 // Used to store an RGB color; can be converted from a uint32_t.
24 struct RGBValue {
25 RGBValue(int r, int g, int b) : red(r), green(g), blue(b) {}
26
27 int red;
28 int green;
29 int blue;
30 };
31
32 // Converts a uint32_t (0x00RRGGBB) to an RGBValue.
33 RGBValue ConvertUint32ToRGBValue(uint32_t color) {
34 RGBValue rgb_value((color >> 16) & 0xFF, (color >> 8) & 0xFF, color & 0xFF);
35 return rgb_value;
36 }
37
38 // Maximum allowed color error (a fraction of full scale), to account for frame resizing and lossy encoding.
39 const double kMaxColorError = 0.02;
40 } // namespace
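
For reference, ConvertUint32ToRGBValue() above assumes the 0x00RRGGBB packing implied by its bit shifts. A minimal sketch of what it produces (the literal color value is purely illustrative):

  // Hypothetical example: 0x3366CC unpacks to red = 0x33, green = 0x66, blue = 0xCC.
  RGBValue value = ConvertUint32ToRGBValue(0x3366CC);
  DCHECK_EQ(0x33, value.red);
  DCHECK_EQ(0x66, value.green);
  DCHECK_EQ(0xCC, value.blue);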
41
18 namespace remoting { 42 namespace remoting {
19 namespace test { 43 namespace test {
20 44
21 // Implements video decoding functionality. 45 // Implements video decoding functionality.
22 class TestVideoRenderer::Core { 46 class TestVideoRenderer::Core {
23 public: 47 public:
24 Core(); 48 Core();
25 ~Core(); 49 ~Core();
26 50
27 // Initializes the internal structures of the class. 51 // Initializes the internal structures of the class.
28 void Initialize(); 52 void Initialize();
29 53
30 // Used to decode video packets. 54 // Used to decode video packets.
31 void ProcessVideoPacket(scoped_ptr<VideoPacket> packet, 55 void ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
32 const base::Closure& done); 56 const base::Closure& done);
33 57
34 // Initialize a decoder to decode video packets. 58 // Initialize a decoder to decode video packets.
35 void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec); 59 void SetCodecForDecoding(const protocol::ChannelConfig::Codec codec);
36 60
37 // Returns a copy of the current buffer. 61 // Returns a copy of the current frame.
38 scoped_ptr<webrtc::DesktopFrame> GetBufferForTest() const; 62 scoped_ptr<webrtc::DesktopFrame> GetCurrentFrameForTest() const;
39 63
40 // Sets the expected image pattern for comparison; the callback will be called 64 // Sets the expected image pattern for comparison; the callback will be called
41 // when the pattern is matched. 65 // when the pattern is matched.
42 void SetImagePatternAndMatchedCallback( 66 void ExpectAverageColorInRect(
43 const webrtc::DesktopRect& expected_rect, 67 const webrtc::DesktopRect& expected_rect,
44 const RgbaColor& expected_color, 68 uint32_t expected_avg_color,
45 const base::Closure& image_pattern_matched_callback); 69 const base::Closure& image_pattern_matched_callback);
46 70
47 private: 71 private:
72 // Returns the average color of pixels that fall within |rect| on the current frame.
73 RGBValue CalculateAverageColorValue(const webrtc::DesktopRect& rect) const;
74
75 // Compares |candidate_avg_value| to |expected_avg_color_|.
76 // Returns true if the root mean square of the errors in the R, G and B
77 // components, normalized to [0, 1], is below kMaxColorError.
78 bool ExpectedAverageColorIsMatched(const RGBValue& candidate_avg_value) const;
79
48 // Used to ensure Core methods are called on the same thread. 80 // Used to ensure Core methods are called on the same thread.
49 base::ThreadChecker thread_checker_; 81 base::ThreadChecker thread_checker_;
50 82
51 // Used to decode video packets. 83 // Used to decode video packets.
52 scoped_ptr<VideoDecoder> decoder_; 84 scoped_ptr<VideoDecoder> decoder_;
53 85
54 // Updated region of the current desktop frame compared to the previous one. 86 // Updated region of the current desktop frame compared to the previous one.
55 webrtc::DesktopRegion updated_region_; 87 webrtc::DesktopRegion updated_region_;
56 88
57 // Screen size of the remote host. 89 // Screen size of the remote host.
58 webrtc::DesktopSize screen_size_; 90 webrtc::DesktopSize screen_size_;
59 91
60 // Used to post tasks back to main thread. 92 // Used to post tasks back to main thread.
61 scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; 93 scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_;
62 94
63 // Used to store decoded video frame. 95 // Used to store decoded video frame.
64 scoped_ptr<webrtc::DesktopFrame> buffer_; 96 scoped_ptr<webrtc::DesktopFrame> frame_;
65 97
66 // Protects access to |buffer_|. 98 // Protects access to |frame_|.
67 mutable base::Lock lock_; 99 mutable base::Lock lock_;
68 100
69 // Used to store the expected image pattern. 101 // Used to store the expected image pattern.
70 webrtc::DesktopRect expected_rect_; 102 webrtc::DesktopRect expected_rect_;
71 RgbaColor expected_color_; 103 uint32_t expected_avg_color_;
72
73 // Maintains accumulating image pattern.
74 webrtc::DesktopRect accumulating_rect_;
75 RgbaColor accumulating_color_;
76 104
77 // Used to store the callback when expected pattern is matched. 105 // Used to store the callback when expected pattern is matched.
78 base::Closure image_pattern_matched_callback_; 106 base::Closure image_pattern_matched_callback_;
79 107
80 DISALLOW_COPY_AND_ASSIGN(Core); 108 DISALLOW_COPY_AND_ASSIGN(Core);
81 }; 109 };
82 110
83 TestVideoRenderer::Core::Core() 111 TestVideoRenderer::Core::Core()
84 : main_task_runner_(base::ThreadTaskRunnerHandle::Get()) { 112 : main_task_runner_(base::ThreadTaskRunnerHandle::Get()) {
85 thread_checker_.DetachFromThread(); 113 thread_checker_.DetachFromThread();
(...skipping 31 matching lines...)
117 decoder_.reset(new VideoDecoderVerbatim()); 145 decoder_.reset(new VideoDecoderVerbatim());
118 break; 146 break;
119 } 147 }
120 default: { 148 default: {
121 NOTREACHED() << "Unsupported codec: " << codec; 149 NOTREACHED() << "Unsupported codec: " << codec;
122 } 150 }
123 } 151 }
124 } 152 }
125 153
126 scoped_ptr<webrtc::DesktopFrame> 154 scoped_ptr<webrtc::DesktopFrame>
127 TestVideoRenderer::Core::GetBufferForTest() const { 155 TestVideoRenderer::Core::GetCurrentFrameForTest() const {
128 base::AutoLock auto_lock(lock_); 156 base::AutoLock auto_lock(lock_);
129 DCHECK(buffer_); 157 DCHECK(frame_);
130 return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*buffer_.get())); 158 return make_scoped_ptr(webrtc::BasicDesktopFrame::CopyOf(*frame_.get()));
131 } 159 }
132 160
133 void TestVideoRenderer::Core::ProcessVideoPacket( 161 void TestVideoRenderer::Core::ProcessVideoPacket(
134 scoped_ptr<VideoPacket> packet, const base::Closure& done) { 162 scoped_ptr<VideoPacket> packet, const base::Closure& done) {
135 DCHECK(thread_checker_.CalledOnValidThread()); 163 DCHECK(thread_checker_.CalledOnValidThread());
136 DCHECK(decoder_); 164 DCHECK(decoder_);
137 DCHECK(packet); 165 DCHECK(packet);
138 166
139 VLOG(2) << "TestVideoRenderer::Core::ProcessVideoPacket() Called"; 167 VLOG(2) << "TestVideoRenderer::Core::ProcessVideoPacket() Called";
140 168
141 // Screen size is attached on the first packet as well as when the 169 // Screen size is attached on the first packet as well as when the
142 // host screen is resized. 170 // host screen is resized.
143 if (packet->format().has_screen_width() && 171 if (packet->format().has_screen_width() &&
144 packet->format().has_screen_height()) { 172 packet->format().has_screen_height()) {
145 webrtc::DesktopSize source_size(packet->format().screen_width(), 173 webrtc::DesktopSize source_size(packet->format().screen_width(),
146 packet->format().screen_height()); 174 packet->format().screen_height());
147 if (!screen_size_.equals(source_size)) { 175 if (!screen_size_.equals(source_size)) {
148 screen_size_ = source_size; 176 screen_size_ = source_size;
149 decoder_->Initialize(screen_size_); 177 decoder_->Initialize(screen_size_);
150 buffer_.reset(new webrtc::BasicDesktopFrame(screen_size_)); 178 frame_.reset(new webrtc::BasicDesktopFrame(screen_size_));
151 } 179 }
152 } 180 }
153 181
154 // To make life easier, assume that the desktop shape is a single rectangle. 182 // To make life easier, assume that the desktop shape is a single rectangle.
155 packet->clear_use_desktop_shape(); 183 packet->clear_use_desktop_shape();
156 if (!decoder_->DecodePacket(*packet.get())) { 184 if (!decoder_->DecodePacket(*packet.get())) {
157 LOG(ERROR) << "Decoder::DecodePacket() failed."; 185 LOG(ERROR) << "Decoder::DecodePacket() failed.";
158 return; 186 return;
159 } 187 }
160 188
161 { 189 {
162 base::AutoLock auto_lock(lock_); 190 base::AutoLock auto_lock(lock_);
163 191
164 // Render the decoded packet and write results to the buffer. 192 // Render the decoded packet and write the results to |frame_|.
165 // Note that the |updated_region_| maintains the changed regions compared to 193 // Note that the |updated_region_| maintains the changed regions compared to
166 // the previous video frame. 194 // the previous video frame.
167 decoder_->RenderFrame(screen_size_, 195 decoder_->RenderFrame(screen_size_,
168 webrtc::DesktopRect::MakeWH(screen_size_.width(), 196 webrtc::DesktopRect::MakeWH(screen_size_.width(),
169 screen_size_.height()), buffer_->data(), 197 screen_size_.height()),
170 buffer_->stride(), &updated_region_); 198 frame_->data(), frame_->stride(), &updated_region_);
171 } 199 }
172 200
173 main_task_runner_->PostTask(FROM_HERE, done); 201 main_task_runner_->PostTask(FROM_HERE, done);
174 202
175 // TODO(liaoyuke): Update |accumulating_rect_| and |accumulating_color_|, then 203 // Check whether an image pattern matched callback has been set, and whether
176 // compare to the expected image pattern to check whether the pattern is 204 // the |expected_rect_| falls within the current frame.
177 // matched or not and update |image_pattern_matched| accordingly. 205 if (image_pattern_matched_callback_.is_null() ||
206 expected_rect_.right() > frame_->size().width() ||
207 expected_rect_.bottom() > frame_->size().height()) {
208 return;
209 }
210
211 // Compare the expected image pattern with the corresponding rectangle region
212 // on the current frame.
213 RGBValue accumulating_avg_value = CalculateAverageColorValue(expected_rect_);
214 VLOG(2) << accumulating_avg_value.red << " " << accumulating_avg_value.green
215 << " " << accumulating_avg_value.blue;
216
217 if (ExpectedAverageColorIsMatched(accumulating_avg_value)) {
218 main_task_runner_->PostTask(
219 FROM_HERE, base::ResetAndReturn(&image_pattern_matched_callback_));
220 }
178 } 221 }
179 222
180 void TestVideoRenderer::Core::SetImagePatternAndMatchedCallback( 223 void TestVideoRenderer::Core::ExpectAverageColorInRect(
181 const webrtc::DesktopRect& expected_rect, 224 const webrtc::DesktopRect& expected_rect,
182 const RgbaColor& expected_color, 225 uint32_t expected_avg_color,
183 const base::Closure& image_pattern_matched_callback) { 226 const base::Closure& image_pattern_matched_callback) {
184 DCHECK(thread_checker_.CalledOnValidThread()); 227 DCHECK(thread_checker_.CalledOnValidThread());
185 228
186 expected_rect_ = expected_rect; 229 expected_rect_ = expected_rect;
187 expected_color_ = expected_color; 230 expected_avg_color_ = expected_avg_color;
188 image_pattern_matched_callback_ = image_pattern_matched_callback; 231 image_pattern_matched_callback_ = image_pattern_matched_callback;
189 } 232 }
190 233
234 RGBValue TestVideoRenderer::Core::CalculateAverageColorValue(
235 const webrtc::DesktopRect& rect) const {
236 int red_sum = 0;
237 int green_sum = 0;
238 int blue_sum = 0;
239
240 // Loop through pixels that fall within |rect| to obtain the
241 // average color value.
242 for (int y = rect.top(); y < rect.bottom(); ++y) {
243 uint8_t* frame_pos =
244 frame_->data() + (y * frame_->stride() +
245 rect.left() * webrtc::DesktopFrame::kBytesPerPixel);
246
247 // Pixels of the decoded video frame are in ARGB format (B, G, R, A in memory).
248 for (int x = 0; x < rect.width(); ++x) {
249 red_sum += frame_pos[2];
250 green_sum += frame_pos[1];
251 blue_sum += frame_pos[0];
252 frame_pos += 4;
253 }
254 }
255
256 int area = rect.width() * rect.height();
257 RGBValue rgb_value(red_sum / area, green_sum / area, blue_sum / area);
258 return rgb_value;
259 }
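
As context for the pointer arithmetic above, here is a hedged sketch of reading a single pixel from a webrtc::DesktopFrame; the helper name is hypothetical and not part of this change:

  // Hypothetical helper: returns the color of the pixel at (x, y), assuming
  // 32-bit pixels laid out as B, G, R, A in memory (little-endian ARGB).
  RGBValue ReadPixel(const webrtc::DesktopFrame& frame, int x, int y) {
    const uint8_t* pos = frame.data() + y * frame.stride() +
                         x * webrtc::DesktopFrame::kBytesPerPixel;
    return RGBValue(pos[2], pos[1], pos[0]);
  }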
260
261 bool TestVideoRenderer::Core::ExpectedAverageColorIsMatched(
262 const RGBValue& candidate_avg_value) const {
263 RGBValue expected_avg_value = ConvertUint32ToRGBValue(expected_avg_color_);
264 double error_sum_squares = 0;
265 double red_error = expected_avg_value.red - candidate_avg_value.red;
266 double green_error = expected_avg_value.green - candidate_avg_value.green;
267 double blue_error = expected_avg_value.blue - candidate_avg_value.blue;
268 error_sum_squares = red_error * red_error + green_error * green_error +
269 blue_error * blue_error;
270 error_sum_squares /= (255.0 * 255.0);
271
272 return sqrt(error_sum_squares / 3) < kMaxColorError;
273 }
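
As a worked example of the threshold (values chosen for illustration): with an expected average of (200, 200, 200) and a measured average of (203, 198, 201), the squared errors are 9 + 4 + 1 = 14; dividing by 255 * 255 and by 3 and taking the square root gives roughly 0.0085, which is below kMaxColorError (0.02), so the frame is treated as a match. Equivalently, the tolerance corresponds to an RMS per-channel error of about 5 levels out of 255.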
274
191 TestVideoRenderer::TestVideoRenderer() 275 TestVideoRenderer::TestVideoRenderer()
192 : video_decode_thread_( 276 : video_decode_thread_(
193 new base::Thread("TestVideoRendererVideoDecodingThread")), 277 new base::Thread("TestVideoRendererVideoDecodingThread")),
194 weak_factory_(this) { 278 weak_factory_(this) {
195 DCHECK(thread_checker_.CalledOnValidThread()); 279 DCHECK(thread_checker_.CalledOnValidThread());
196 280
197 core_.reset(new Core()); 281 core_.reset(new Core());
198 if (!video_decode_thread_->Start()) { 282 if (!video_decode_thread_->Start()) {
199 LOG(ERROR) << "Cannot start TestVideoRenderer"; 283 LOG(ERROR) << "Cannot start TestVideoRenderer";
200 } else { 284 } else {
(...skipping 59 matching lines...)
260 const protocol::ChannelConfig::Codec codec) { 344 const protocol::ChannelConfig::Codec codec) {
261 DCHECK(thread_checker_.CalledOnValidThread()); 345 DCHECK(thread_checker_.CalledOnValidThread());
262 346
263 VLOG(2) << "TestVideoRenderer::SetDecoder() Called"; 347 VLOG(2) << "TestVideoRenderer::SetDecoder() Called";
264 video_decode_task_runner_->PostTask( 348 video_decode_task_runner_->PostTask(
265 FROM_HERE, base::Bind(&Core::SetCodecForDecoding, 349 FROM_HERE, base::Bind(&Core::SetCodecForDecoding,
266 base::Unretained(core_.get()), 350 base::Unretained(core_.get()),
267 codec)); 351 codec));
268 } 352 }
269 353
270 scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetBufferForTest() const { 354 scoped_ptr<webrtc::DesktopFrame> TestVideoRenderer::GetCurrentFrameForTest()
355 const {
271 DCHECK(thread_checker_.CalledOnValidThread()); 356 DCHECK(thread_checker_.CalledOnValidThread());
272 357
273 return core_->GetBufferForTest(); 358 return core_->GetCurrentFrameForTest();
274 } 359 }
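
A short usage sketch for the accessor above, assuming a TestVideoRenderer instance named |renderer| that has already decoded at least one frame (names other than GetCurrentFrameForTest() are illustrative):

  // Snapshot the most recently decoded frame. The returned frame is a deep
  // copy owned by the caller, so it remains valid while the renderer keeps
  // decoding newer frames.
  scoped_ptr<webrtc::DesktopFrame> snapshot = renderer.GetCurrentFrameForTest();
  VLOG(2) << "Snapshot size: " << snapshot->size().width() << "x"
          << snapshot->size().height();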
275 360
276 void TestVideoRenderer::SetImagePatternAndMatchedCallback( 361 void TestVideoRenderer::ExpectAverageColorInRect(
277 const webrtc::DesktopRect& expected_rect, 362 const webrtc::DesktopRect& expected_rect,
278 const RgbaColor& expected_color, 363 uint32_t expected_avg_color,
279 const base::Closure& image_pattern_matched_callback) { 364 const base::Closure& image_pattern_matched_callback) {
280 DCHECK(thread_checker_.CalledOnValidThread()); 365 DCHECK(thread_checker_.CalledOnValidThread());
366 DCHECK(!expected_rect.is_empty()) << "Expected rect cannot be empty";
281 367
282 DVLOG(2) << "TestVideoRenderer::SetImagePatternAndMatchedCallback() Called"; 368 DVLOG(2) << "TestVideoRenderer::ExpectAverageColorInRect() Called";
283 video_decode_task_runner_->PostTask( 369 video_decode_task_runner_->PostTask(
284 FROM_HERE, base::Bind(&Core::SetImagePatternAndMatchedCallback, 370 FROM_HERE,
285 base::Unretained(core_.get()), expected_rect, 371 base::Bind(&Core::ExpectAverageColorInRect, base::Unretained(core_.get()),
286 expected_color, image_pattern_matched_callback)); 372 expected_rect, expected_avg_color,
373 image_pattern_matched_callback));
287 } 374 }
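
A minimal sketch of how a test might drive this API, assuming a connected TestVideoRenderer named |renderer| that is already receiving video packets and that base/run_loop.h is available; the rectangle and expected color are illustrative:

  base::RunLoop run_loop;
  renderer.ExpectAverageColorInRect(
      webrtc::DesktopRect::MakeXYWH(0, 0, 100, 100),  // Region to inspect.
      0x3366CC,  // Expected average color, packed as 0x00RRGGBB.
      run_loop.QuitClosure());
  // Run() returns once a decoded frame's average color in the rectangle
  // matches the expectation within kMaxColorError.
  run_loop.Run();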
288 375
289 } // namespace test 376 } // namespace test
290 } // namespace remoting 377 } // namespace remoting
