| OLD | NEW |
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "remoting/client/software_video_renderer.h" | 5 #include "remoting/client/software_video_renderer.h" |
| 6 | 6 |
| 7 #include <list> | |
| 8 | |
| 9 #include "base/bind.h" | 7 #include "base/bind.h" |
| 10 #include "base/callback.h" | 8 #include "base/callback.h" |
| 11 #include "base/callback_helpers.h" | 9 #include "base/callback_helpers.h" |
| 12 #include "base/location.h" | 10 #include "base/location.h" |
| 13 #include "base/logging.h" | 11 #include "base/logging.h" |
| 14 #include "base/single_thread_task_runner.h" | 12 #include "base/single_thread_task_runner.h" |
| | 13 #include "base/task_runner_util.h" |
| 15 #include "remoting/base/util.h" | 14 #include "remoting/base/util.h" |
| 16 #include "remoting/client/frame_consumer.h" | 15 #include "remoting/client/frame_consumer.h" |
| 17 #include "remoting/codec/video_decoder.h" | 16 #include "remoting/codec/video_decoder.h" |
| 18 #include "remoting/codec/video_decoder_verbatim.h" | 17 #include "remoting/codec/video_decoder_verbatim.h" |
| 19 #include "remoting/codec/video_decoder_vpx.h" | 18 #include "remoting/codec/video_decoder_vpx.h" |
| | 19 #include "remoting/proto/video.pb.h" |
| 20 #include "remoting/protocol/session_config.h" | 20 #include "remoting/protocol/session_config.h" |
| 21 #include "third_party/libyuv/include/libyuv/convert_argb.h" | 21 #include "third_party/libyuv/include/libyuv/convert_argb.h" |
| 22 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" | 22 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" |
| 23 | 23 |
| 24 using base::Passed; | |
| 25 using remoting::protocol::ChannelConfig; | 24 using remoting::protocol::ChannelConfig; |
| 26 using remoting::protocol::SessionConfig; | 25 using remoting::protocol::SessionConfig; |
| 27 | 26 |
| 28 namespace remoting { | 27 namespace remoting { |
| 29 | 28 |
| | 29 namespace { |
| | 30 |
| 30 // This class wraps a VideoDecoder and byte-swaps the pixels for compatibility | 31 // This class wraps a VideoDecoder and byte-swaps the pixels for compatibility |
| 31 // with the android.graphics.Bitmap class. | 32 // with the android.graphics.Bitmap class. |
| 32 // TODO(lambroslambrou): Refactor so that the VideoDecoder produces data | 33 // TODO(lambroslambrou): Refactor so that the VideoDecoder produces data |
| 33 // in the right byte-order, instead of swapping it here. | 34 // in the right byte-order, instead of swapping it here. |
| 34 class RgbToBgrVideoDecoderFilter : public VideoDecoder { | 35 class RgbToBgrVideoDecoderFilter : public VideoDecoder { |
| 35 public: | 36 public: |
| 36 RgbToBgrVideoDecoderFilter(scoped_ptr<VideoDecoder> parent) | 37 RgbToBgrVideoDecoderFilter(scoped_ptr<VideoDecoder> parent) |
| 37 : parent_(parent.Pass()) { | 38 : parent_(parent.Pass()) {} |
| 38 } | |
| 39 | 39 |
| 40 bool DecodePacket(const VideoPacket& packet) override { | 40 bool DecodePacket(const VideoPacket& packet) override { |
| 41 return parent_->DecodePacket(packet); | 41 return parent_->DecodePacket(packet); |
| 42 } | 42 } |
| 43 | 43 |
| 44 void Invalidate(const webrtc::DesktopSize& view_size, | 44 void Invalidate(const webrtc::DesktopSize& view_size, |
| 45 const webrtc::DesktopRegion& region) override { | 45 const webrtc::DesktopRegion& region) override { |
| 46 return parent_->Invalidate(view_size, region); | 46 return parent_->Invalidate(view_size, region); |
| 47 } | 47 } |
| 48 | 48 |
| 49 void RenderFrame(const webrtc::DesktopSize& view_size, | 49 void RenderFrame(const webrtc::DesktopSize& view_size, |
| 50 const webrtc::DesktopRect& clip_area, | 50 const webrtc::DesktopRect& clip_area, |
| 51 uint8* image_buffer, | 51 uint8* image_buffer, |
| 52 int image_stride, | 52 int image_stride, |
| 53 webrtc::DesktopRegion* output_region) override { | 53 webrtc::DesktopRegion* output_region) override { |
| 54 parent_->RenderFrame(view_size, clip_area, image_buffer, image_stride, | 54 parent_->RenderFrame(view_size, clip_area, image_buffer, image_stride, |
| 55 output_region); | 55 output_region); |
| 56 | 56 |
| 57 for (webrtc::DesktopRegion::Iterator i(*output_region); !i.IsAtEnd(); | 57 for (webrtc::DesktopRegion::Iterator i(*output_region); !i.IsAtEnd(); |
| 58 i.Advance()) { | 58 i.Advance()) { |
| 59 webrtc::DesktopRect rect = i.rect(); | 59 webrtc::DesktopRect rect = i.rect(); |
| 60 uint8* pixels = image_buffer + (rect.top() * image_stride) + | 60 uint8* pixels = image_buffer + (rect.top() * image_stride) + |
| 61 (rect.left() * kBytesPerPixel); | 61 (rect.left() * kBytesPerPixel); |
| 62 libyuv::ABGRToARGB(pixels, image_stride, pixels, image_stride, | 62 libyuv::ABGRToARGB(pixels, image_stride, pixels, image_stride, |
| 63 rect.width(), rect.height()); | 63 rect.width(), rect.height()); |
| 64 } | 64 } |
| 65 } | 65 } |
| 66 | 66 |
| 67 const webrtc::DesktopRegion* GetImageShape() override { | 67 const webrtc::DesktopRegion* GetImageShape() override { |
| 68 return parent_->GetImageShape(); | 68 return parent_->GetImageShape(); |
| 69 } | 69 } |
| 70 | 70 |
| 71 private: | 71 private: |
| 72 scoped_ptr<VideoDecoder> parent_; | 72 scoped_ptr<VideoDecoder> parent_; |
| 73 }; | 73 }; |
| 74 | 74 |
| 75 class SoftwareVideoRenderer::Core { | 75 scoped_ptr<webrtc::DesktopFrame> DoDecodeFrame( |
| 76 public: | 76 VideoDecoder* decoder, |
| 77 Core(scoped_refptr<base::SingleThreadTaskRunner> main_task_runner, | 77 scoped_ptr<VideoPacket> packet, |
| 78 scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner, | 78 scoped_ptr<webrtc::DesktopFrame> frame) { |
| 79 scoped_ptr<FrameConsumerProxy> consumer); | 79 if (!decoder->DecodePacket(*packet)) |
| 80 ~Core(); | 80 return nullptr; |
| 81 | 81 |
| 82 void OnSessionConfig(const protocol::SessionConfig& config); | 82 decoder->RenderFrame( |
| 83 void DrawBuffer(webrtc::DesktopFrame* buffer); | 83 frame->size(), webrtc::DesktopRect::MakeSize(frame->size()), |
| 84 void InvalidateRegion(const webrtc::DesktopRegion& region); | 84 frame->data(), frame->stride(), frame->mutable_updated_region()); |
| 85 void RequestReturnBuffers(const base::Closure& done); | |
| 86 void SetOutputSizeAndClip( | |
| 87 const webrtc::DesktopSize& view_size, | |
| 88 const webrtc::DesktopRect& clip_area); | |
| 89 | 85 |
| 90 // Decodes the contents of |packet|. DecodePacket may keep a reference to | 86 const webrtc::DesktopRegion* shape = decoder->GetImageShape(); |
| 91 // |packet| so the |packet| must remain alive and valid until |done| is | 87 if (shape) |
| 92 // executed. | 88 frame->set_shape(new webrtc::DesktopRegion(*shape)); |
| 93 void DecodePacket(scoped_ptr<VideoPacket> packet, const base::Closure& done); | |
| 94 | 89 |
| 95 private: | 90 return frame.Pass(); |
| 96 // Paints the invalidated region to the next available buffer and returns it | 91 } |
| 97 // to the consumer. | |
| 98 void SchedulePaint(); | |
| 99 void DoPaint(); | |
| 100 | 92 |
| 101 scoped_refptr<base::SingleThreadTaskRunner> main_task_runner_; | 93 } // namespace |
| 102 scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner_; | |
| 103 scoped_ptr<FrameConsumerProxy> consumer_; | |
| 104 scoped_ptr<VideoDecoder> decoder_; | |
| 105 | 94 |
| 106 // Remote screen size in pixels. | 95 SoftwareVideoRenderer::SoftwareVideoRenderer( |
| 107 webrtc::DesktopSize source_size_; | |
| 108 | |
| 109 // Vertical and horizontal DPI of the remote screen. | |
| 110 webrtc::DesktopVector source_dpi_; | |
| 111 | |
| 112 // The current dimensions of the frame consumer view. | |
| 113 webrtc::DesktopSize view_size_; | |
| 114 webrtc::DesktopRect clip_area_; | |
| 115 | |
| 116 // The drawing buffers supplied by the frame consumer. | |
| 117 std::list<webrtc::DesktopFrame*> buffers_; | |
| 118 | |
| 119 // Flag used to coalesce runs of SchedulePaint()s into a single DoPaint(). | |
| 120 bool paint_scheduled_; | |
| 121 | |
| 122 base::WeakPtrFactory<Core> weak_factory_; | |
| 123 }; | |
| 124 | |
| 125 SoftwareVideoRenderer::Core::Core( | |
| 126 scoped_refptr<base::SingleThreadTaskRunner> main_task_runner, | |
| 127 scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner, | 96 scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner, |
| 128 scoped_ptr<FrameConsumerProxy> consumer) | 97 FrameConsumer* consumer) |
| 129 : main_task_runner_(main_task_runner), | 98 : decode_task_runner_(decode_task_runner), |
| 130 decode_task_runner_(decode_task_runner), | 99 consumer_(consumer), |
| 131 consumer_(consumer.Pass()), | |
| 132 paint_scheduled_(false), | |
| 133 weak_factory_(this) {} | 100 weak_factory_(this) {} |
| 134 | 101 |
| 135 SoftwareVideoRenderer::Core::~Core() { | 102 SoftwareVideoRenderer::~SoftwareVideoRenderer() { |
| | 103 if (decoder_) |
| | 104 decode_task_runner_->DeleteSoon(FROM_HERE, decoder_.release()); |
| 136 } | 105 } |
| 137 | 106 |
| 138 void SoftwareVideoRenderer::Core::OnSessionConfig(const SessionConfig& config) { | 107 void SoftwareVideoRenderer::OnSessionConfig( |
| 139 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | 108 const protocol::SessionConfig& config) { |
| | 109 DCHECK(thread_checker_.CalledOnValidThread()); |
| 140 | 110 |
| 141 // Initialize decoder based on the selected codec. | 111 // Initialize decoder based on the selected codec. |
| 142 ChannelConfig::Codec codec = config.video_config().codec; | 112 ChannelConfig::Codec codec = config.video_config().codec; |
| 143 if (codec == ChannelConfig::CODEC_VERBATIM) { | 113 if (codec == ChannelConfig::CODEC_VERBATIM) { |
| 144 decoder_.reset(new VideoDecoderVerbatim()); | 114 decoder_.reset(new VideoDecoderVerbatim()); |
| 145 } else if (codec == ChannelConfig::CODEC_VP8) { | 115 } else if (codec == ChannelConfig::CODEC_VP8) { |
| 146 decoder_ = VideoDecoderVpx::CreateForVP8(); | 116 decoder_ = VideoDecoderVpx::CreateForVP8(); |
| 147 } else if (codec == ChannelConfig::CODEC_VP9) { | 117 } else if (codec == ChannelConfig::CODEC_VP9) { |
| 148 decoder_ = VideoDecoderVpx::CreateForVP9(); | 118 decoder_ = VideoDecoderVpx::CreateForVP9(); |
| 149 } else { | 119 } else { |
| 150 NOTREACHED() << "Invalid Encoding found: " << codec; | 120 NOTREACHED() << "Invalid Encoding found: " << codec; |
| 151 } | 121 } |
| 152 | 122 |
| 153 if (consumer_->GetPixelFormat() == FrameConsumer::FORMAT_RGBA) { | 123 if (consumer_->GetPixelFormat() == FrameConsumer::FORMAT_RGBA) { |
| 154 scoped_ptr<VideoDecoder> wrapper( | 124 scoped_ptr<VideoDecoder> wrapper( |
| 155 new RgbToBgrVideoDecoderFilter(decoder_.Pass())); | 125 new RgbToBgrVideoDecoderFilter(decoder_.Pass())); |
| 156 decoder_ = wrapper.Pass(); | 126 decoder_ = wrapper.Pass(); |
| 157 } | 127 } |
| 158 } | 128 } |
| 159 | 129 |
| 160 void SoftwareVideoRenderer::Core::DecodePacket(scoped_ptr<VideoPacket> packet, | |
| 161 const base::Closure& done) { | |
| 162 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | |
| 163 | |
| 164 bool notify_size_or_dpi_change = false; | |
| 165 | |
| 166 // If the packet includes screen size or DPI information, store them. | |
| 167 if (packet->format().has_screen_width() && | |
| 168 packet->format().has_screen_height()) { | |
| 169 webrtc::DesktopSize source_size(packet->format().screen_width(), | |
| 170 packet->format().screen_height()); | |
| 171 if (!source_size_.equals(source_size)) { | |
| 172 source_size_ = source_size; | |
| 173 notify_size_or_dpi_change = true; | |
| 174 } | |
| 175 } | |
| 176 if (packet->format().has_x_dpi() && packet->format().has_y_dpi()) { | |
| 177 webrtc::DesktopVector source_dpi(packet->format().x_dpi(), | |
| 178 packet->format().y_dpi()); | |
| 179 if (!source_dpi.equals(source_dpi_)) { | |
| 180 source_dpi_ = source_dpi; | |
| 181 notify_size_or_dpi_change = true; | |
| 182 } | |
| 183 } | |
| 184 | |
| 185 // If we've never seen a screen size, ignore the packet. | |
| 186 if (source_size_.is_empty()) { | |
| 187 main_task_runner_->PostTask(FROM_HERE, base::Bind(done)); | |
| 188 return; | |
| 189 } | |
| 190 | |
| 191 if (notify_size_or_dpi_change) | |
| 192 consumer_->SetSourceSize(source_size_, source_dpi_); | |
| 193 | |
| 194 if (decoder_->DecodePacket(*packet.get())) { | |
| 195 SchedulePaint(); | |
| 196 } else { | |
| 197 LOG(ERROR) << "DecodePacket() failed."; | |
| 198 } | |
| 199 | |
| 200 main_task_runner_->PostTask(FROM_HERE, base::Bind(done)); | |
| 201 } | |
| 202 | |
| 203 void SoftwareVideoRenderer::Core::SchedulePaint() { | |
| 204 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | |
| 205 if (paint_scheduled_) | |
| 206 return; | |
| 207 paint_scheduled_ = true; | |
| 208 decode_task_runner_->PostTask( | |
| 209 FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::DoPaint, | |
| 210 weak_factory_.GetWeakPtr())); | |
| 211 } | |
| 212 | |
| 213 void SoftwareVideoRenderer::Core::DoPaint() { | |
| 214 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | |
| 215 DCHECK(paint_scheduled_); | |
| 216 paint_scheduled_ = false; | |
| 217 | |
| 218 // If the view size is empty or we have no output buffers ready, return. | |
| 219 if (buffers_.empty() || view_size_.is_empty()) | |
| 220 return; | |
| 221 | |
| 222 // If no Decoder is initialized, or the host dimensions are empty, return. | |
| 223 if (!decoder_.get() || source_size_.is_empty()) | |
| 224 return; | |
| 225 | |
| 226 // Draw the invalidated region to the buffer. | |
| 227 webrtc::DesktopFrame* buffer = buffers_.front(); | |
| 228 webrtc::DesktopRegion output_region; | |
| 229 decoder_->RenderFrame(view_size_, clip_area_, | |
| 230 buffer->data(), buffer->stride(), &output_region); | |
| 231 | |
| 232 // Notify the consumer that painting is done. | |
| 233 if (!output_region.is_empty()) { | |
| 234 buffers_.pop_front(); | |
| 235 consumer_->ApplyBuffer(view_size_, clip_area_, buffer, output_region, | |
| 236 decoder_->GetImageShape()); | |
| 237 } | |
| 238 } | |
| 239 | |
| 240 void SoftwareVideoRenderer::Core::RequestReturnBuffers( | |
| 241 const base::Closure& done) { | |
| 242 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | |
| 243 | |
| 244 while (!buffers_.empty()) { | |
| 245 consumer_->ReturnBuffer(buffers_.front()); | |
| 246 buffers_.pop_front(); | |
| 247 } | |
| 248 | |
| 249 if (!done.is_null()) | |
| 250 done.Run(); | |
| 251 } | |
| 252 | |
| 253 void SoftwareVideoRenderer::Core::DrawBuffer(webrtc::DesktopFrame* buffer) { | |
| 254 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | |
| 255 DCHECK(clip_area_.width() <= buffer->size().width() && | |
| 256 clip_area_.height() <= buffer->size().height()); | |
| 257 | |
| 258 buffers_.push_back(buffer); | |
| 259 SchedulePaint(); | |
| 260 } | |
| 261 | |
| 262 void SoftwareVideoRenderer::Core::InvalidateRegion( | |
| 263 const webrtc::DesktopRegion& region) { | |
| 264 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | |
| 265 | |
| 266 if (decoder_.get()) { | |
| 267 decoder_->Invalidate(view_size_, region); | |
| 268 SchedulePaint(); | |
| 269 } | |
| 270 } | |
| 271 | |
| 272 void SoftwareVideoRenderer::Core::SetOutputSizeAndClip( | |
| 273 const webrtc::DesktopSize& view_size, | |
| 274 const webrtc::DesktopRect& clip_area) { | |
| 275 DCHECK(decode_task_runner_->BelongsToCurrentThread()); | |
| 276 | |
| 277 // The whole frame needs to be repainted if the scaling factor has changed. | |
| 278 if (!view_size_.equals(view_size) && decoder_.get()) { | |
| 279 webrtc::DesktopRegion region; | |
| 280 region.AddRect(webrtc::DesktopRect::MakeSize(view_size)); | |
| 281 decoder_->Invalidate(view_size, region); | |
| 282 } | |
| 283 | |
| 284 if (!view_size_.equals(view_size) || | |
| 285 !clip_area_.equals(clip_area)) { | |
| 286 view_size_ = view_size; | |
| 287 clip_area_ = clip_area; | |
| 288 | |
| 289 // Return buffers that are smaller than needed to the consumer for | |
| 290 // reuse/reallocation. | |
| 291 std::list<webrtc::DesktopFrame*>::iterator i = buffers_.begin(); | |
| 292 while (i != buffers_.end()) { | |
| 293 if ((*i)->size().width() < clip_area_.width() || | |
| 294 (*i)->size().height() < clip_area_.height()) { | |
| 295 consumer_->ReturnBuffer(*i); | |
| 296 i = buffers_.erase(i); | |
| 297 } else { | |
| 298 ++i; | |
| 299 } | |
| 300 } | |
| 301 | |
| 302 SchedulePaint(); | |
| 303 } | |
| 304 } | |
| 305 | |
| 306 SoftwareVideoRenderer::SoftwareVideoRenderer( | |
| 307 scoped_refptr<base::SingleThreadTaskRunner> main_task_runner, | |
| 308 scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner, | |
| 309 scoped_ptr<FrameConsumerProxy> consumer) | |
| 310 : decode_task_runner_(decode_task_runner), | |
| 311 core_(new Core(main_task_runner, decode_task_runner, consumer.Pass())), | |
| 312 weak_factory_(this) { | |
| 313 DCHECK(CalledOnValidThread()); | |
| 314 } | |
| 315 | |
| 316 SoftwareVideoRenderer::~SoftwareVideoRenderer() { | |
| 317 DCHECK(CalledOnValidThread()); | |
| 318 bool result = decode_task_runner_->DeleteSoon(FROM_HERE, core_.release()); | |
| 319 DCHECK(result); | |
| 320 } | |
| 321 | |
| 322 void SoftwareVideoRenderer::OnSessionConfig( | |
| 323 const protocol::SessionConfig& config) { | |
| 324 DCHECK(CalledOnValidThread()); | |
| 325 decode_task_runner_->PostTask( | |
| 326 FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::OnSessionConfig, | |
| 327 base::Unretained(core_.get()), config)); | |
| 328 } | |
| 329 | |
| 330 ChromotingStats* SoftwareVideoRenderer::GetStats() { | 130 ChromotingStats* SoftwareVideoRenderer::GetStats() { |
| 331 DCHECK(CalledOnValidThread()); | 131 DCHECK(thread_checker_.CalledOnValidThread()); |
| 332 return &stats_; | 132 return &stats_; |
| 333 } | 133 } |
| 334 | 134 |
| 335 protocol::VideoStub* SoftwareVideoRenderer::GetVideoStub() { | 135 protocol::VideoStub* SoftwareVideoRenderer::GetVideoStub() { |
| | 136 DCHECK(thread_checker_.CalledOnValidThread()); |
| 336 return this; | 137 return this; |
| 337 } | 138 } |
| 338 | 139 |
| 339 void SoftwareVideoRenderer::ProcessVideoPacket(scoped_ptr<VideoPacket> packet, | 140 void SoftwareVideoRenderer::ProcessVideoPacket(scoped_ptr<VideoPacket> packet, |
| 340 const base::Closure& done) { | 141 const base::Closure& done) { |
| 341 DCHECK(CalledOnValidThread()); | 142 DCHECK(thread_checker_.CalledOnValidThread()); |
| | 143 |
| | 144 base::ScopedClosureRunner done_runner(done); |
| 342 | 145 |
| 343 stats_.RecordVideoPacketStats(*packet); | 146 stats_.RecordVideoPacketStats(*packet); |
| 344 | 147 |
| 345 // If the video packet is empty then drop it. Empty packets are used to | 148 // If the video packet is empty then drop it. Empty packets are used to |
| 346 // maintain activity on the network. | 149 // maintain activity on the network. |
| 347 if (!packet->has_data() || packet->data().size() == 0) { | 150 if (!packet->has_data() || packet->data().size() == 0) { |
| 348 done.Run(); | |
| 349 return; | 151 return; |
| 350 } | 152 } |
| 351 | 153 |
| 352 // Measure the latency between the last packet being received and presented. | 154 if (packet->format().has_screen_width() && |
| 353 base::Time decode_start = base::Time::Now(); | 155 packet->format().has_screen_height()) { |
| | 156 source_size_.set(packet->format().screen_width(), |
| | 157 packet->format().screen_height()); |
| | 158 } |
| 354 | 159 |
| 355 base::Closure decode_done = base::Bind(&SoftwareVideoRenderer::OnPacketDone, | 160 if (packet->format().has_x_dpi() && packet->format().has_y_dpi()) { |
| 356 weak_factory_.GetWeakPtr(), | 161 webrtc::DesktopVector source_dpi(packet->format().x_dpi(), |
| 357 decode_start, done); | 162 packet->format().y_dpi()); |
| | 163 if (!source_dpi.equals(source_dpi_)) { |
| | 164 source_dpi_ = source_dpi; |
| | 165 } |
| | 166 } |
| 358 | 167 |
| 359 decode_task_runner_->PostTask(FROM_HERE, base::Bind( | 168 if (source_size_.is_empty()) { |
| 360 &SoftwareVideoRenderer::Core::DecodePacket, | 169 LOG(ERROR) << "Received VideoPacket with unknown size."; |
| 361 base::Unretained(core_.get()), base::Passed(&packet), decode_done)); | 170 return; |
| 171 } |
| | 172 |
| | 173 scoped_ptr<webrtc::DesktopFrame> frame = |
| | 174 consumer_->AllocateFrame(source_size_); |
| | 175 frame->set_dpi(source_dpi_); |
| | 176 |
| | 177 base::PostTaskAndReplyWithResult( |
| | 178 decode_task_runner_.get(), FROM_HERE, |
| | 179 base::Bind(&DoDecodeFrame, decoder_.get(), base::Passed(&packet), |
| | 180 base::Passed(&frame)), |
| | 181 base::Bind(&SoftwareVideoRenderer::RenderFrame, |
| | 182 weak_factory_.GetWeakPtr(), base::TimeTicks::Now(), |
| | 183 done_runner.Release())); |
| 362 } | 184 } |
| 363 | 185 |
| 364 void SoftwareVideoRenderer::DrawBuffer(webrtc::DesktopFrame* buffer) { | 186 void SoftwareVideoRenderer::RenderFrame( |
| 365 decode_task_runner_->PostTask( | 187 base::TimeTicks decode_start_time, |
| 366 FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::DrawBuffer, | 188 const base::Closure& done, |
| 367 base::Unretained(core_.get()), buffer)); | 189 scoped_ptr<webrtc::DesktopFrame> frame) { |
| | 190 DCHECK(thread_checker_.CalledOnValidThread()); |
| | 191 |
| | 192 stats_.RecordDecodeTime( |
| | 193 (base::TimeTicks::Now() - decode_start_time).InMilliseconds()); |
| | 194 |
| | 195 if (!frame) { |
| | 196 if (!done.is_null()) |
| | 197 done.Run(); |
| | 198 return; |
| | 199 } |
| | 200 |
| | 201 consumer_->DrawFrame( |
| | 202 frame.Pass(), |
| | 203 base::Bind(&SoftwareVideoRenderer::OnFrameRendered, |
| | 204 weak_factory_.GetWeakPtr(), base::TimeTicks::Now(), done)); |
| 368 } | 205 } |
| 369 | 206 |
| 370 void SoftwareVideoRenderer::InvalidateRegion( | 207 void SoftwareVideoRenderer::OnFrameRendered(base::TimeTicks paint_start_time, |
| 371 const webrtc::DesktopRegion& region) { | 208 const base::Closure& done) { |
| 372 decode_task_runner_->PostTask( | 209 DCHECK(thread_checker_.CalledOnValidThread()); |
| 373 FROM_HERE, base::Bind(&SoftwareVideoRenderer::Core::InvalidateRegion, | |
| 374 base::Unretained(core_.get()), region)); | |
| 375 } | |
| 376 | 210 |
| 377 void SoftwareVideoRenderer::RequestReturnBuffers(const base::Closure& done) { | 211 stats_.RecordPaintTime( |
| 378 decode_task_runner_->PostTask( | 212 (base::TimeTicks::Now() - paint_start_time).InMilliseconds()); |
| 379 FROM_HERE, | |
| 380 base::Bind(&SoftwareVideoRenderer::Core::RequestReturnBuffers, | |
| 381 base::Unretained(core_.get()), done)); | |
| 382 } | |
| 383 | 213 |
| 384 void SoftwareVideoRenderer::SetOutputSizeAndClip( | 214 if (!done.is_null()) |
| 385 const webrtc::DesktopSize& view_size, | 215 done.Run(); |
| 386 const webrtc::DesktopRect& clip_area) { | |
| 387 decode_task_runner_->PostTask( | |
| 388 FROM_HERE, | |
| 389 base::Bind(&SoftwareVideoRenderer::Core::SetOutputSizeAndClip, | |
| 390 base::Unretained(core_.get()), view_size, clip_area)); | |
| 391 } | |
| 392 | |
| 393 void SoftwareVideoRenderer::OnPacketDone(base::Time decode_start, | |
| 394 const base::Closure& done) { | |
| 395 DCHECK(CalledOnValidThread()); | |
| 396 | |
| 397 // Record the latency between the packet being received and presented. | |
| 398 base::TimeDelta decode_time = base::Time::Now() - decode_start; | |
| 399 stats_.RecordDecodeTime(decode_time.InMilliseconds()); | |
| 400 | |
| 401 done.Run(); | |
| 402 } | 216 } |
| 403 | 217 |
| 404 } // namespace remoting | 218 } // namespace remoting |
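Note (not part of the change above): the rewritten ProcessVideoPacket() uses base::PostTaskAndReplyWithResult() from base/task_runner_util.h to run the decode on the decode task runner and hand the resulting frame back to the calling thread. A minimal sketch of that pattern follows; the Compute/OnResult names and the doubling logic are hypothetical placeholders, not code from this change.

    #include "base/bind.h"
    #include "base/location.h"
    #include "base/memory/ref_counted.h"
    #include "base/single_thread_task_runner.h"
    #include "base/task_runner_util.h"

    namespace {

    // Runs on the worker task runner; its return value becomes the reply's
    // argument.
    int Compute(int input) {
      return input * 2;
    }

    // Runs back on the thread that posted the task.
    void OnResult(int result) {
      // Consume |result| on the originating thread.
    }

    void PostExample(scoped_refptr<base::SingleThreadTaskRunner> worker) {
      base::PostTaskAndReplyWithResult(worker.get(), FROM_HERE,
                                       base::Bind(&Compute, 21),
                                       base::Bind(&OnResult));
    }

    }  // namespace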