OLD | NEW |
---|---|
1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "remoting/protocol/webrtc_video_renderer_adapter.h" | 5 #include "remoting/protocol/webrtc_video_renderer_adapter.h" |
6 | 6 |
7 #include <memory> | 7 #include <memory> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
11 #include "base/callback.h" | 11 #include "base/callback.h" |
12 #include "base/location.h" | 12 #include "base/location.h" |
13 #include "base/memory/ptr_util.h" | 13 #include "base/memory/ptr_util.h" |
14 #include "base/single_thread_task_runner.h" | 14 #include "base/single_thread_task_runner.h" |
15 #include "base/task_runner_util.h" | 15 #include "base/task_runner_util.h" |
16 #include "base/threading/thread_task_runner_handle.h" | 16 #include "base/threading/thread_task_runner_handle.h" |
17 #include "base/threading/worker_pool.h" | 17 #include "base/threading/worker_pool.h" |
18 #include "remoting/protocol/client_video_stats_dispatcher.h" | |
18 #include "remoting/protocol/frame_consumer.h" | 19 #include "remoting/protocol/frame_consumer.h" |
19 #include "remoting/protocol/frame_stats.h" | 20 #include "remoting/protocol/frame_stats.h" |
20 #include "remoting/protocol/video_renderer.h" | 21 #include "remoting/protocol/video_renderer.h" |
21 #include "third_party/libyuv/include/libyuv/convert_argb.h" | 22 #include "remoting/protocol/webrtc_transport.h" |
22 #include "third_party/libyuv/include/libyuv/convert_from.h" | 23 #include "third_party/libyuv/include/libyuv/convert_from.h" |
23 #include "third_party/libyuv/include/libyuv/video_common.h" | |
24 #include "third_party/webrtc/media/base/videoframe.h" | 24 #include "third_party/webrtc/media/base/videoframe.h" |
25 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" | 25 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h" |
26 | 26 |
27 namespace remoting { | 27 namespace remoting { |
28 namespace protocol { | 28 namespace protocol { |
29 | 29 |
30 namespace { | 30 namespace { |
31 | 31 |
32 // Maximum number of ClientFrameStats instances to keep. | |
33 const int kMaxQueuedStats = 200; | |
34 | |
32 std::unique_ptr<webrtc::DesktopFrame> ConvertYuvToRgb( | 35 std::unique_ptr<webrtc::DesktopFrame> ConvertYuvToRgb( |
33 scoped_refptr<webrtc::VideoFrameBuffer> yuv_frame, | 36 scoped_refptr<webrtc::VideoFrameBuffer> yuv_frame, |
34 std::unique_ptr<webrtc::DesktopFrame> rgb_frame, | 37 std::unique_ptr<webrtc::DesktopFrame> rgb_frame, |
35 FrameConsumer::PixelFormat pixel_format) { | 38 FrameConsumer::PixelFormat pixel_format) { |
36 DCHECK(rgb_frame->size().equals( | 39 DCHECK(rgb_frame->size().equals( |
37 webrtc::DesktopSize(yuv_frame->width(), yuv_frame->height()))); | 40 webrtc::DesktopSize(yuv_frame->width(), yuv_frame->height()))); |
38 auto yuv_to_rgb_function = (pixel_format == FrameConsumer::FORMAT_BGRA) | 41 auto yuv_to_rgb_function = (pixel_format == FrameConsumer::FORMAT_BGRA) |
39 ? &libyuv::I420ToARGB | 42 ? &libyuv::I420ToARGB |
40 : &libyuv::I420ToABGR; | 43 : &libyuv::I420ToABGR; |
41 yuv_to_rgb_function(yuv_frame->DataY(), yuv_frame->StrideY(), | 44 yuv_to_rgb_function(yuv_frame->DataY(), yuv_frame->StrideY(), |
42 yuv_frame->DataU(), yuv_frame->StrideU(), | 45 yuv_frame->DataU(), yuv_frame->StrideU(), |
43 yuv_frame->DataV(), yuv_frame->StrideV(), | 46 yuv_frame->DataV(), yuv_frame->StrideV(), |
44 rgb_frame->data(), rgb_frame->stride(), | 47 rgb_frame->data(), rgb_frame->stride(), |
45 yuv_frame->width(), yuv_frame->height()); | 48 yuv_frame->width(), yuv_frame->height()); |
46 | 49 |
47 rgb_frame->mutable_updated_region()->AddRect( | 50 rgb_frame->mutable_updated_region()->AddRect( |
48 webrtc::DesktopRect::MakeSize(rgb_frame->size())); | 51 webrtc::DesktopRect::MakeSize(rgb_frame->size())); |
49 return rgb_frame; | 52 return rgb_frame; |
50 } | 53 } |
51 | 54 |
52 } // namespace | 55 } // namespace |
53 | 56 |
54 WebrtcVideoRendererAdapter::WebrtcVideoRendererAdapter( | 57 WebrtcVideoRendererAdapter::WebrtcVideoRendererAdapter( |
55 scoped_refptr<webrtc::MediaStreamInterface> media_stream, | 58 const std::string& label, |
56 VideoRenderer* video_renderer) | 59 VideoRenderer* video_renderer) |
57 : media_stream_(std::move(media_stream)), | 60 : label_(label), |
58 video_renderer_(video_renderer), | 61 video_renderer_(video_renderer), |
59 task_runner_(base::ThreadTaskRunnerHandle::Get()), | 62 task_runner_(base::ThreadTaskRunnerHandle::Get()), |
60 weak_factory_(this) { | 63 weak_factory_(this) {} |
64 | |
65 WebrtcVideoRendererAdapter::~WebrtcVideoRendererAdapter() { | |
66 DCHECK(task_runner_->BelongsToCurrentThread()); | |
67 | |
68 webrtc::VideoTrackVector video_tracks = media_stream_->GetVideoTracks(); | |
69 DCHECK(!video_tracks.empty()); | |
70 video_tracks[0]->RemoveSink(this); | |
71 } | |
72 | |
73 void WebrtcVideoRendererAdapter::SetMediaStream( | |
74 scoped_refptr<webrtc::MediaStreamInterface> media_stream) { | |
75 DCHECK_EQ(media_stream->label(), label()); | |
76 | |
77 media_stream_ = std::move(media_stream); | |
78 | |
61 webrtc::VideoTrackVector video_tracks = media_stream_->GetVideoTracks(); | 79 webrtc::VideoTrackVector video_tracks = media_stream_->GetVideoTracks(); |
62 if (video_tracks.empty()) { | 80 if (video_tracks.empty()) { |
63 LOG(ERROR) << "Received media stream with no video tracks."; | 81 LOG(ERROR) << "Received media stream with no video tracks."; |
64 return; | 82 return; |
65 } | 83 } |
66 | 84 |
67 if (video_tracks.size() > 1U) { | 85 if (video_tracks.size() > 1U) { |
68 LOG(WARNING) << "Received media stream with multiple video tracks."; | 86 LOG(WARNING) << "Received media stream with multiple video tracks."; |
69 } | 87 } |
70 | 88 |
71 video_tracks[0]->AddOrUpdateSink(this, rtc::VideoSinkWants()); | 89 video_tracks[0]->AddOrUpdateSink(this, rtc::VideoSinkWants()); |
72 } | 90 } |
73 | 91 |
74 WebrtcVideoRendererAdapter::~WebrtcVideoRendererAdapter() { | 92 void WebrtcVideoRendererAdapter::SetVideoStatsChannel( |
75 DCHECK(task_runner_->BelongsToCurrentThread()); | 93 std::unique_ptr<MessagePipe> message_pipe) { |
76 | 94 // Expect that the host also creates the video_stats data channel. |
77 webrtc::VideoTrackVector video_tracks = media_stream_->GetVideoTracks(); | 95 video_stats_dispatcher_.reset(new ClientVideoStatsDispatcher(label_, this)); |
78 DCHECK(!video_tracks.empty()); | 96 video_stats_dispatcher_->Init(std::move(message_pipe), this); |
79 video_tracks[0]->RemoveSink(this); | |
80 } | 97 } |
81 | 98 |
82 void WebrtcVideoRendererAdapter::OnFrame(const cricket::VideoFrame& frame) { | 99 void WebrtcVideoRendererAdapter::OnFrame(const cricket::VideoFrame& frame) { |
83 if (static_cast<uint64_t>(frame.timestamp_us()) >= rtc::TimeMicros()) { | 100 if (static_cast<uint64_t>(frame.timestamp_us()) >= rtc::TimeMicros()) { |
84 // The host sets playout delay to 0, so all incoming frames are expected to | 101 // The host sets playout delay to 0, so all incoming frames are expected to |
85 // be rendered as soon as they are received. | 102 // be rendered as soon as they are received. |
86 LOG(WARNING) << "Received frame with playout delay greater than 0."; | 103 LOG(WARNING) << "Received frame with playout delay greater than 0."; |
87 } | 104 } |
88 | 105 |
106 task_runner_->PostTask( | |
107 FROM_HERE, | |
108 base::Bind(&WebrtcVideoRendererAdapter::HandleFrameOnMainThread, | |
109 weak_factory_.GetWeakPtr(), frame.transport_frame_id(), | |
110 base::TimeTicks::Now(), | |
111 scoped_refptr<webrtc::VideoFrameBuffer>( | |
112 frame.video_frame_buffer().get()))); | |
113 } | |
114 | |
115 void WebrtcVideoRendererAdapter::OnVideoFrameStats( | |
116 uint32_t frame_id, | |
117 const HostFrameStats& host_stats) { | |
118 DCHECK(task_runner_->BelongsToCurrentThread()); | |
119 | |
120 // Drop all ClientFrameStats for frames before |frame_id|. Stats messages are | |
121 // expected to be received in the same order as the corresponding video | |
122 // frames, so we are not going to receive HostFrameStats for the frames before | |
123 // |frame_id|. This may happen only if, for some reason, the host doesn't | |
124 // generate stats messages for all video frames. | |
125 while (!client_stats_queue_.empty() && | |
126 client_stats_queue_.front().first != frame_id) { | |
127 client_stats_queue_.pop_front(); | |
128 } | |
129 | |
130 // If there are no ClientFrameStats in the queue then queue HostFrameStats | |
131 // to be processed in FrameRendered(). | |
132 if (client_stats_queue_.empty()) { | |
133 if (host_stats_queue_.size() > kMaxQueuedStats) { | |
134 LOG(ERROR) << "video_stats channel is out of sync with the video stream. " | |
135 "Performance stats will not be reported."; | |
136 video_stats_dispatcher_.reset(); | |
137 return; | |
138 } | |
139 host_stats_queue_.push_back(std::make_pair(frame_id, host_stats)); | |
140 return; | |
141 } | |
142 | |
143 // The corresponding frame has been received, and now we have both | |
144 // HostFrameStats and ClientFrameStats. Report the stats to FrameStatsConsumer. | |
145 DCHECK_EQ(client_stats_queue_.front().first, frame_id); | |
146 FrameStats frame_stats; | |
147 frame_stats.client_stats = client_stats_queue_.front().second; | |
148 client_stats_queue_.pop_front(); | |
149 frame_stats.host_stats = host_stats; | |
150 video_renderer_->GetFrameStatsConsumer()->OnVideoFrameStats(frame_stats); | |
151 } | |
152 | |
153 void WebrtcVideoRendererAdapter::OnChannelInitialized( | |
154 ChannelDispatcherBase* channel_dispatcher) {} | |
155 | |
156 void WebrtcVideoRendererAdapter::OnChannelClosed( | |
157 ChannelDispatcherBase* channel_dispatcher) { | |
158 LOG(WARNING) << "video_stats channel was closed by the host."; | |
159 } | |
160 | |
161 void WebrtcVideoRendererAdapter::HandleFrameOnMainThread( | |
162 uint32_t frame_id, | |
163 base::TimeTicks time_received, | |
164 scoped_refptr<webrtc::VideoFrameBuffer> frame) { | |
165 DCHECK(task_runner_->BelongsToCurrentThread()); | |
166 | |
89 std::unique_ptr<ClientFrameStats> stats(new ClientFrameStats()); | 167 std::unique_ptr<ClientFrameStats> stats(new ClientFrameStats()); |
90 // TODO(sergeyu): |time_received| is not reported correctly here because the | 168 // TODO(sergeyu): |time_received| is not reported correctly here because the |
91 // frame is already decoded at this point. | 169 // frame is already decoded at this point. |
92 stats->time_received = base::TimeTicks::Now(); | 170 stats->time_received = time_received; |
93 | |
94 task_runner_->PostTask( | |
95 FROM_HERE, | |
96 base::Bind(&WebrtcVideoRendererAdapter::HandleFrameOnMainThread, | |
97 weak_factory_.GetWeakPtr(), base::Passed(&stats), | |
98 scoped_refptr<webrtc::VideoFrameBuffer>( | |
99 frame.video_frame_buffer().get()))); | |
100 } | |
101 | |
102 void WebrtcVideoRendererAdapter::HandleFrameOnMainThread( | |
103 std::unique_ptr<ClientFrameStats> stats, | |
104 scoped_refptr<webrtc::VideoFrameBuffer> frame) { | |
105 DCHECK(task_runner_->BelongsToCurrentThread()); | |
106 | 171 |
107 std::unique_ptr<webrtc::DesktopFrame> rgb_frame = | 172 std::unique_ptr<webrtc::DesktopFrame> rgb_frame = |
108 video_renderer_->GetFrameConsumer()->AllocateFrame( | 173 video_renderer_->GetFrameConsumer()->AllocateFrame( |
109 webrtc::DesktopSize(frame->width(), frame->height())); | 174 webrtc::DesktopSize(frame->width(), frame->height())); |
110 | 175 |
111 base::PostTaskAndReplyWithResult( | 176 base::PostTaskAndReplyWithResult( |
112 base::WorkerPool::GetTaskRunner(false).get(), FROM_HERE, | 177 base::WorkerPool::GetTaskRunner(false).get(), FROM_HERE, |
113 base::Bind(&ConvertYuvToRgb, base::Passed(&frame), | 178 base::Bind(&ConvertYuvToRgb, base::Passed(&frame), |
114 base::Passed(&rgb_frame), | 179 base::Passed(&rgb_frame), |
115 video_renderer_->GetFrameConsumer()->GetPixelFormat()), | 180 video_renderer_->GetFrameConsumer()->GetPixelFormat()), |
116 base::Bind(&WebrtcVideoRendererAdapter::DrawFrame, | 181 base::Bind(&WebrtcVideoRendererAdapter::DrawFrame, |
117 weak_factory_.GetWeakPtr(), base::Passed(&stats))); | 182 weak_factory_.GetWeakPtr(), frame_id, base::Passed(&stats))); |
118 } | 183 } |
119 | 184 |
120 void WebrtcVideoRendererAdapter::DrawFrame( | 185 void WebrtcVideoRendererAdapter::DrawFrame( |
186 uint32_t frame_id, | |
121 std::unique_ptr<ClientFrameStats> stats, | 187 std::unique_ptr<ClientFrameStats> stats, |
122 std::unique_ptr<webrtc::DesktopFrame> frame) { | 188 std::unique_ptr<webrtc::DesktopFrame> frame) { |
123 DCHECK(task_runner_->BelongsToCurrentThread()); | 189 DCHECK(task_runner_->BelongsToCurrentThread()); |
124 stats->time_decoded = base::TimeTicks::Now(); | 190 stats->time_decoded = base::TimeTicks::Now(); |
125 video_renderer_->GetFrameConsumer()->DrawFrame( | 191 video_renderer_->GetFrameConsumer()->DrawFrame( |
126 std::move(frame), | 192 std::move(frame), |
127 base::Bind(&WebrtcVideoRendererAdapter::FrameRendered, | 193 base::Bind(&WebrtcVideoRendererAdapter::FrameRendered, |
128 weak_factory_.GetWeakPtr(), base::Passed(&stats))); | 194 weak_factory_.GetWeakPtr(), frame_id, base::Passed(&stats))); |
129 } | 195 } |
130 | 196 |
131 void WebrtcVideoRendererAdapter::FrameRendered( | 197 void WebrtcVideoRendererAdapter::FrameRendered( |
132 std::unique_ptr<ClientFrameStats> stats) { | 198 uint32_t frame_id, |
133 // TODO(sergeyu): Report stats here | 199 std::unique_ptr<ClientFrameStats> client_stats) { |
200 DCHECK(task_runner_->BelongsToCurrentThread()); | |
201 | |
202 if (!video_stats_dispatcher_ || !video_stats_dispatcher_->is_connected()) | |
203 return; | |
204 | |
205 client_stats->time_rendered = base::TimeTicks::Now(); | |
206 | |
207 // Drop all HostFrameStats for frames before |frame_id|. Stats messages are | |
208 // expected to be received in the same order as the corresponding video | |
209 // frames. This may happen only if the host generates HostFrameStats for a | |
210 // frame that was never received. | |
211 while (!host_stats_queue_.empty() && | |
212 host_stats_queue_.front().first != frame_id) { | |
213 LOG(WARNING) << "Host sent VideoStats message for a frame that was never " | |
214 "received."; | |
215 host_stats_queue_.pop_front(); | |
216 } | |
217 | |
218 // If HostFrameStats hasn't been received for |frame_id| then queue | |
219 // ClientFrameStats to be processed in OnVideoFrameStats(). | |
220 if (host_stats_queue_.empty()) { | |
221 if (client_stats_queue_.size() > kMaxQueuedStats) { | |
(review comment) Irfan 2016/08/10 20:58:49: This should be client_stats_queue ? Sergey Ulanov 2016/08/10 22:29:56: Done.
222 LOG(ERROR) << "video_stats channel is out of sync with the video " | |
223 "stream. Performance stats will not be reported."; | |
224 video_stats_dispatcher_.reset(); | |
225 return; | |
226 } | |
227 client_stats_queue_.push_back(std::make_pair(frame_id, *client_stats)); | |
228 return; | |
229 } | |
230 | |
231 // The corresponding HostFrameStats has already been received, and now we have | |
232 // both HostFrameStats and ClientFrameStats. Report the stats to | |
233 // FrameStatsConsumer. | |
234 DCHECK_EQ(host_stats_queue_.front().first, frame_id); | |
235 FrameStats frame_stats; | |
236 frame_stats.host_stats = host_stats_queue_.front().second; | |
237 frame_stats.client_stats = *client_stats; | |
238 host_stats_queue_.pop_front(); | |
239 video_renderer_->GetFrameStatsConsumer()->OnVideoFrameStats(frame_stats); | |
134 } | 240 } |
135 | 241 |
136 } // namespace protocol | 242 } // namespace protocol |
137 } // namespace remoting | 243 } // namespace remoting |
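
For readers skimming this CL, here is a minimal standalone sketch of the pairing logic that OnVideoFrameStats() and FrameRendered() implement above: client-side and host-side per-frame stats arrive on separate paths and are matched by frame id using two bounded queues. This is not the Chromium code itself; the class name FrameStatsPairer, the Report() hook, and the struct fields are hypothetical.

```cpp
// Sketch of the two-queue frame-stats pairing used in this CL (assumptions:
// FrameStatsPairer, Report(), and the stats fields are illustrative only).
#include <cstddef>
#include <cstdint>
#include <deque>
#include <iostream>
#include <utility>

struct ClientStats { int64_t time_rendered_us = 0; };
struct HostStats { int64_t capture_delay_us = 0; };

class FrameStatsPairer {
 public:
  explicit FrameStatsPairer(size_t max_queued) : max_queued_(max_queued) {}

  // Called when the renderer finishes a frame (analogous to FrameRendered()).
  void OnClientStats(uint32_t frame_id, const ClientStats& client) {
    // Host stats for earlier frames can no longer be paired; drop them.
    while (!host_queue_.empty() && host_queue_.front().first != frame_id)
      host_queue_.pop_front();
    if (host_queue_.empty()) {
      if (client_queue_.size() < max_queued_)
        client_queue_.emplace_back(frame_id, client);
      return;
    }
    Report(frame_id, client, host_queue_.front().second);
    host_queue_.pop_front();
  }

  // Called when a stats message arrives on the video_stats channel
  // (analogous to OnVideoFrameStats()).
  void OnHostStats(uint32_t frame_id, const HostStats& host) {
    // Symmetric to OnClientStats(): drop stale client stats, then pair or queue.
    while (!client_queue_.empty() && client_queue_.front().first != frame_id)
      client_queue_.pop_front();
    if (client_queue_.empty()) {
      if (host_queue_.size() < max_queued_)
        host_queue_.emplace_back(frame_id, host);
      return;
    }
    Report(frame_id, client_queue_.front().second, host);
    client_queue_.pop_front();
  }

 private:
  void Report(uint32_t frame_id, const ClientStats&, const HostStats&) {
    std::cout << "Paired stats for frame " << frame_id << "\n";
  }

  const size_t max_queued_;
  std::deque<std::pair<uint32_t, ClientStats>> client_queue_;
  std::deque<std::pair<uint32_t, HostStats>> host_queue_;
};

int main() {
  FrameStatsPairer pairer(200);
  pairer.OnHostStats(1, HostStats{});     // Queued: client stats not seen yet.
  pairer.OnClientStats(1, ClientStats{}); // Pairs and reports frame 1.
}
```

Keeping both queues bounded (kMaxQueuedStats in the CL) prevents unbounded growth if the video_stats channel and the video stream fall permanently out of sync.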