OLD | NEW |
| (Empty) |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "remoting/client/rectangle_update_decoder.h" | |
6 | |
7 #include "base/bind.h" | |
8 #include "base/callback.h" | |
9 #include "base/callback_helpers.h" | |
10 #include "base/location.h" | |
11 #include "base/logging.h" | |
12 #include "base/single_thread_task_runner.h" | |
13 #include "remoting/base/util.h" | |
14 #include "remoting/client/frame_consumer.h" | |
15 #include "remoting/codec/video_decoder.h" | |
16 #include "remoting/codec/video_decoder_verbatim.h" | |
17 #include "remoting/codec/video_decoder_vpx.h" | |
18 #include "remoting/protocol/session_config.h" | |
19 #include "third_party/libyuv/include/libyuv/convert_argb.h" | |
20 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" | |
21 | |
22 using base::Passed; | |
23 using remoting::protocol::ChannelConfig; | |
24 using remoting::protocol::SessionConfig; | |
25 | |
26 namespace remoting { | |
27 | |
28 // This class wraps a VideoDecoder and byte-swaps the pixels for compatibility | |
29 // with the android.graphics.Bitmap class. | |
30 // TODO(lambroslambrou): Refactor so that the VideoDecoder produces data | |
31 // in the right byte-order, instead of swapping it here. | |
32 class RgbToBgrVideoDecoderFilter : public VideoDecoder { | |
33 public: | |
34 RgbToBgrVideoDecoderFilter(scoped_ptr<VideoDecoder> parent) | |
35 : parent_(parent.Pass()) { | |
36 } | |
37 | |
38 virtual void Initialize(const webrtc::DesktopSize& screen_size) OVERRIDE { | |
39 parent_->Initialize(screen_size); | |
40 } | |
41 | |
42 virtual bool DecodePacket(const VideoPacket& packet) OVERRIDE { | |
43 return parent_->DecodePacket(packet); | |
44 } | |
45 | |
46 virtual void Invalidate(const webrtc::DesktopSize& view_size, | |
47 const webrtc::DesktopRegion& region) OVERRIDE { | |
48 return parent_->Invalidate(view_size, region); | |
49 } | |
50 | |
51 virtual void RenderFrame(const webrtc::DesktopSize& view_size, | |
52 const webrtc::DesktopRect& clip_area, | |
53 uint8* image_buffer, | |
54 int image_stride, | |
55 webrtc::DesktopRegion* output_region) OVERRIDE { | |
56 parent_->RenderFrame(view_size, clip_area, image_buffer, image_stride, | |
57 output_region); | |
58 | |
59 for (webrtc::DesktopRegion::Iterator i(*output_region); !i.IsAtEnd(); | |
60 i.Advance()) { | |
61 webrtc::DesktopRect rect = i.rect(); | |
62 uint8* pixels = image_buffer + (rect.top() * image_stride) + | |
63 (rect.left() * kBytesPerPixel); | |
64 libyuv::ABGRToARGB(pixels, image_stride, pixels, image_stride, | |
65 rect.width(), rect.height()); | |
66 } | |
67 } | |
68 | |
69 virtual const webrtc::DesktopRegion* GetImageShape() OVERRIDE { | |
70 return parent_->GetImageShape(); | |
71 } | |
72 | |
73 private: | |
74 scoped_ptr<VideoDecoder> parent_; | |
75 }; | |
76 | |
// Stores the task runners and the consumer proxy. No painting is scheduled
// and no decoder exists until Initialize() is called.
RectangleUpdateDecoder::RectangleUpdateDecoder(
    scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
    scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
    scoped_refptr<FrameConsumerProxy> consumer)
    : main_task_runner_(main_task_runner),
      decode_task_runner_(decode_task_runner),
      consumer_(consumer),
      paint_scheduled_(false),
      latest_sequence_number_(0) {
}
87 | |
// Note: any frames still queued in |buffers_| are not returned to the
// consumer here; callers are expected to drain them via
// RequestReturnBuffers() before destruction.
RectangleUpdateDecoder::~RectangleUpdateDecoder() {
}
90 | |
91 void RectangleUpdateDecoder::Initialize(const SessionConfig& config) { | |
92 if (!decode_task_runner_->BelongsToCurrentThread()) { | |
93 decode_task_runner_->PostTask( | |
94 FROM_HERE, base::Bind(&RectangleUpdateDecoder::Initialize, this, | |
95 config)); | |
96 return; | |
97 } | |
98 | |
99 // Initialize decoder based on the selected codec. | |
100 ChannelConfig::Codec codec = config.video_config().codec; | |
101 if (codec == ChannelConfig::CODEC_VERBATIM) { | |
102 decoder_.reset(new VideoDecoderVerbatim()); | |
103 } else if (codec == ChannelConfig::CODEC_VP8) { | |
104 decoder_ = VideoDecoderVpx::CreateForVP8(); | |
105 } else if (codec == ChannelConfig::CODEC_VP9) { | |
106 decoder_ = VideoDecoderVpx::CreateForVP9(); | |
107 } else { | |
108 NOTREACHED() << "Invalid Encoding found: " << codec; | |
109 } | |
110 | |
111 if (consumer_->GetPixelFormat() == FrameConsumer::FORMAT_RGBA) { | |
112 scoped_ptr<VideoDecoder> wrapper( | |
113 new RgbToBgrVideoDecoderFilter(decoder_.Pass())); | |
114 decoder_ = wrapper.Pass(); | |
115 } | |
116 } | |
117 | |
// Decodes one video packet on the decode thread, updating cached source
// size/DPI from the packet header first. |done| is always run, on every
// return path, via the ScopedClosureRunner below.
void RectangleUpdateDecoder::DecodePacket(scoped_ptr<VideoPacket> packet,
                                          const base::Closure& done) {
  DCHECK(decode_task_runner_->BelongsToCurrentThread());

  // Ensures |done| runs no matter which early-return path is taken.
  base::ScopedClosureRunner done_runner(done);

  bool decoder_needs_reset = false;
  bool notify_size_or_dpi_change = false;

  // If the packet includes screen size or DPI information, store them.
  if (packet->format().has_screen_width() &&
      packet->format().has_screen_height()) {
    webrtc::DesktopSize source_size(packet->format().screen_width(),
                                    packet->format().screen_height());
    if (!source_size_.equals(source_size)) {
      // A size change invalidates the decoder's internal buffers, so it
      // must be re-initialized before decoding this packet.
      source_size_ = source_size;
      decoder_needs_reset = true;
      notify_size_or_dpi_change = true;
    }
  }
  if (packet->format().has_x_dpi() && packet->format().has_y_dpi()) {
    webrtc::DesktopVector source_dpi(packet->format().x_dpi(),
                                     packet->format().y_dpi());
    if (!source_dpi.equals(source_dpi_)) {
      source_dpi_ = source_dpi;
      notify_size_or_dpi_change = true;
    }
  }

  // If we've never seen a screen size, ignore the packet.
  if (source_size_.is_empty())
    return;

  if (decoder_needs_reset)
    decoder_->Initialize(source_size_);
  if (notify_size_or_dpi_change)
    consumer_->SetSourceSize(source_size_, source_dpi_);

  if (decoder_->DecodePacket(*packet.get())) {
    // New frame data is available; request an asynchronous repaint.
    SchedulePaint();
  } else {
    LOG(ERROR) << "DecodePacket() failed.";
  }
}
162 | |
163 void RectangleUpdateDecoder::SchedulePaint() { | |
164 if (paint_scheduled_) | |
165 return; | |
166 paint_scheduled_ = true; | |
167 decode_task_runner_->PostTask( | |
168 FROM_HERE, base::Bind(&RectangleUpdateDecoder::DoPaint, this)); | |
169 } | |
170 | |
171 void RectangleUpdateDecoder::DoPaint() { | |
172 DCHECK(paint_scheduled_); | |
173 paint_scheduled_ = false; | |
174 | |
175 // If the view size is empty or we have no output buffers ready, return. | |
176 if (buffers_.empty() || view_size_.is_empty()) | |
177 return; | |
178 | |
179 // If no Decoder is initialized, or the host dimensions are empty, return. | |
180 if (!decoder_.get() || source_size_.is_empty()) | |
181 return; | |
182 | |
183 // Draw the invalidated region to the buffer. | |
184 webrtc::DesktopFrame* buffer = buffers_.front(); | |
185 webrtc::DesktopRegion output_region; | |
186 decoder_->RenderFrame(view_size_, clip_area_, | |
187 buffer->data(), | |
188 buffer->stride(), | |
189 &output_region); | |
190 | |
191 // Notify the consumer that painting is done. | |
192 if (!output_region.is_empty()) { | |
193 buffers_.pop_front(); | |
194 consumer_->ApplyBuffer(view_size_, clip_area_, buffer, output_region); | |
195 } | |
196 } | |
197 | |
198 void RectangleUpdateDecoder::RequestReturnBuffers(const base::Closure& done) { | |
199 if (!decode_task_runner_->BelongsToCurrentThread()) { | |
200 decode_task_runner_->PostTask( | |
201 FROM_HERE, base::Bind(&RectangleUpdateDecoder::RequestReturnBuffers, | |
202 this, done)); | |
203 return; | |
204 } | |
205 | |
206 while (!buffers_.empty()) { | |
207 consumer_->ReturnBuffer(buffers_.front()); | |
208 buffers_.pop_front(); | |
209 } | |
210 | |
211 if (!done.is_null()) | |
212 done.Run(); | |
213 } | |
214 | |
// Queues |buffer| as an output target for future paints and schedules one.
// Ownership semantics: the buffer stays in |buffers_| until ApplyBuffer()
// or ReturnBuffer() passes it back to the consumer.
// May be called from any thread; re-posts itself to the decode thread.
void RectangleUpdateDecoder::DrawBuffer(webrtc::DesktopFrame* buffer) {
  if (!decode_task_runner_->BelongsToCurrentThread()) {
    decode_task_runner_->PostTask(
        FROM_HERE, base::Bind(&RectangleUpdateDecoder::DrawBuffer,
                              this, buffer));
    return;
  }

  // The buffer must be large enough to hold the current clipping area.
  DCHECK(clip_area_.width() <= buffer->size().width() &&
         clip_area_.height() <= buffer->size().height());

  buffers_.push_back(buffer);
  SchedulePaint();
}
229 | |
230 void RectangleUpdateDecoder::InvalidateRegion( | |
231 const webrtc::DesktopRegion& region) { | |
232 if (!decode_task_runner_->BelongsToCurrentThread()) { | |
233 decode_task_runner_->PostTask( | |
234 FROM_HERE, base::Bind(&RectangleUpdateDecoder::InvalidateRegion, | |
235 this, region)); | |
236 return; | |
237 } | |
238 | |
239 if (decoder_.get()) { | |
240 decoder_->Invalidate(view_size_, region); | |
241 SchedulePaint(); | |
242 } | |
243 } | |
244 | |
245 void RectangleUpdateDecoder::SetOutputSizeAndClip( | |
246 const webrtc::DesktopSize& view_size, | |
247 const webrtc::DesktopRect& clip_area) { | |
248 if (!decode_task_runner_->BelongsToCurrentThread()) { | |
249 decode_task_runner_->PostTask( | |
250 FROM_HERE, base::Bind(&RectangleUpdateDecoder::SetOutputSizeAndClip, | |
251 this, view_size, clip_area)); | |
252 return; | |
253 } | |
254 | |
255 // The whole frame needs to be repainted if the scaling factor has changed. | |
256 if (!view_size_.equals(view_size) && decoder_.get()) { | |
257 webrtc::DesktopRegion region; | |
258 region.AddRect(webrtc::DesktopRect::MakeSize(view_size)); | |
259 decoder_->Invalidate(view_size, region); | |
260 } | |
261 | |
262 if (!view_size_.equals(view_size) || | |
263 !clip_area_.equals(clip_area)) { | |
264 view_size_ = view_size; | |
265 clip_area_ = clip_area; | |
266 | |
267 // Return buffers that are smaller than needed to the consumer for | |
268 // reuse/reallocation. | |
269 std::list<webrtc::DesktopFrame*>::iterator i = buffers_.begin(); | |
270 while (i != buffers_.end()) { | |
271 if ((*i)->size().width() < clip_area_.width() || | |
272 (*i)->size().height() < clip_area_.height()) { | |
273 consumer_->ReturnBuffer(*i); | |
274 i = buffers_.erase(i); | |
275 } else { | |
276 ++i; | |
277 } | |
278 } | |
279 | |
280 SchedulePaint(); | |
281 } | |
282 } | |
283 | |
// Returns the shape of the most recently decoded image, as reported by the
// decoder. NOTE(review): |decoder_| is dereferenced without a null check —
// presumably callers only invoke this after Initialize(); confirm.
const webrtc::DesktopRegion* RectangleUpdateDecoder::GetBufferShape() {
  return decoder_->GetImageShape();
}
287 | |
// Records per-packet statistics on the main (network) thread, then hands the
// packet to the decode thread. |done| eventually runs on the main thread via
// OnPacketDone().
void RectangleUpdateDecoder::ProcessVideoPacket(scoped_ptr<VideoPacket> packet,
                                                const base::Closure& done) {
  DCHECK(main_task_runner_->BelongsToCurrentThread());

  // If the video packet is empty then drop it. Empty packets are used to
  // maintain activity on the network.
  if (!packet->has_data() || packet->data().size() == 0) {
    done.Run();
    return;
  }

  // Add one frame to the counter.
  stats_.video_frame_rate()->Record(1);

  // Record other statistics received from host.
  stats_.video_bandwidth()->Record(packet->data().size());
  if (packet->has_capture_time_ms())
    stats_.video_capture_ms()->Record(packet->capture_time_ms());
  if (packet->has_encode_time_ms())
    stats_.video_encode_ms()->Record(packet->encode_time_ms());
  if (packet->has_client_sequence_number() &&
      packet->client_sequence_number() > latest_sequence_number_) {
    latest_sequence_number_ = packet->client_sequence_number();
    // The sequence number is reinterpreted as a base::Time internal value —
    // presumably a send timestamp this client attached and the host echoed
    // back, so Now() minus that value approximates round-trip latency.
    // TODO: confirm against the host-side protocol implementation.
    base::TimeDelta round_trip_latency =
        base::Time::Now() -
        base::Time::FromInternalValue(packet->client_sequence_number());
    stats_.round_trip_ms()->Record(round_trip_latency.InMilliseconds());
  }

  // Measure the latency between the last packet being received and presented.
  base::Time decode_start = base::Time::Now();

  // OnPacketDone() records the decode time and then invokes |done|.
  base::Closure decode_done = base::Bind(
      &RectangleUpdateDecoder::OnPacketDone, this, decode_start, done);

  decode_task_runner_->PostTask(FROM_HERE, base::Bind(
      &RectangleUpdateDecoder::DecodePacket, this,
      base::Passed(&packet), decode_done));
}
327 | |
328 void RectangleUpdateDecoder::OnPacketDone(base::Time decode_start, | |
329 const base::Closure& done) { | |
330 if (!main_task_runner_->BelongsToCurrentThread()) { | |
331 main_task_runner_->PostTask(FROM_HERE, base::Bind( | |
332 &RectangleUpdateDecoder::OnPacketDone, this, | |
333 decode_start, done)); | |
334 return; | |
335 } | |
336 | |
337 // Record the latency between the packet being received and presented. | |
338 stats_.video_decode_ms()->Record( | |
339 (base::Time::Now() - decode_start).InMilliseconds()); | |
340 | |
341 done.Run(); | |
342 } | |
343 | |
// Exposes the statistics accumulator. Main-thread only, matching where
// ProcessVideoPacket()/OnPacketDone() record into it.
ChromotingStats* RectangleUpdateDecoder::GetStats() {
  DCHECK(main_task_runner_->BelongsToCurrentThread());
  return &stats_;
}
348 | |
349 } // namespace remoting | |
OLD | NEW |