OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/media_stream_remote_video_source.h" | 5 #include "content/renderer/media/webrtc/media_stream_remote_video_source.h" |
6 | 6 |
7 #include <stdint.h> | 7 #include <stdint.h> |
8 #include <utility> | 8 #include <utility> |
9 | 9 |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
11 #include "base/callback_helpers.h" | 11 #include "base/callback_helpers.h" |
12 #include "base/location.h" | 12 #include "base/location.h" |
13 #include "base/threading/thread_checker.h" | 13 #include "base/threading/thread_checker.h" |
14 #include "base/trace_event/trace_event.h" | 14 #include "base/trace_event/trace_event.h" |
15 #include "content/renderer/media/webrtc/track_observer.h" | 15 #include "content/renderer/media/webrtc/track_observer.h" |
16 #include "media/base/bind_to_current_loop.h" | 16 #include "media/base/bind_to_current_loop.h" |
17 #include "media/base/timestamp_constants.h" | 17 #include "media/base/timestamp_constants.h" |
18 #include "media/base/video_frame.h" | 18 #include "media/base/video_frame.h" |
19 #include "media/base/video_util.h" | 19 #include "media/base/video_util.h" |
20 #include "third_party/webrtc/media/base/videoframe.h" | 20 #include "third_party/webrtc/media/base/videoframe.h" |
| 21 #include "third_party/webrtc/media/base/videosinkinterface.h" |
21 #include "third_party/webrtc/system_wrappers/include/tick_util.h" | 22 #include "third_party/webrtc/system_wrappers/include/tick_util.h" |
22 | 23 |
23 namespace content { | 24 namespace content { |
24 | 25 |
25 // Internal class used for receiving frames from the webrtc track on a | 26 // Internal class used for receiving frames from the webrtc track on a |
26 // libjingle thread and forwarding them to the IO thread. | 27 // libjingle thread and forwarding them to the IO thread. |
27 class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate | 28 class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate |
28 : public base::RefCountedThreadSafe<RemoteVideoSourceDelegate>, | 29 : public base::RefCountedThreadSafe<RemoteVideoSourceDelegate>, |
29 public webrtc::VideoRendererInterface { | 30 public rtc::VideoSinkInterface<cricket::VideoFrame> { |
30 public: | 31 public: |
31 RemoteVideoSourceDelegate( | 32 RemoteVideoSourceDelegate( |
32 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner, | 33 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner, |
33 const VideoCaptureDeliverFrameCB& new_frame_callback); | 34 const VideoCaptureDeliverFrameCB& new_frame_callback); |
34 | 35 |
35 protected: | 36 protected: |
36 friend class base::RefCountedThreadSafe<RemoteVideoSourceDelegate>; | 37 friend class base::RefCountedThreadSafe<RemoteVideoSourceDelegate>; |
37 ~RemoteVideoSourceDelegate() override; | 38 ~RemoteVideoSourceDelegate() override; |
38 | 39 |
39 // Implements webrtc::VideoRendererInterface used for receiving video frames | 40 // Implements rtc::VideoSinkInterface used for receiving video frames |
40 // from the PeerConnection video track. May be called on a libjingle internal | 41 // from the PeerConnection video track. May be called on a libjingle internal |
41 // thread. | 42 // thread. |
42 void RenderFrame(const cricket::VideoFrame* frame) override; | 43 void OnFrame(const cricket::VideoFrame& frame) override; |
43 | 44 |
44 void DoRenderFrameOnIOThread( | 45 void DoRenderFrameOnIOThread( |
45 const scoped_refptr<media::VideoFrame>& video_frame); | 46 const scoped_refptr<media::VideoFrame>& video_frame); |
46 | 47 |
47 private: | 48 private: |
48 // Bound to the render thread. | 49 // Bound to the render thread. |
49 base::ThreadChecker thread_checker_; | 50 base::ThreadChecker thread_checker_; |
50 | 51 |
51 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_; | 52 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_; |
52 | 53 |
(...skipping 17 matching lines...) |
70 // the offset, 2) the rate (i.e., one clock runs faster than the other). | 71 // the offset, 2) the rate (i.e., one clock runs faster than the other). |
71 // See http://crbug/516700 | 72 // See http://crbug/516700 |
72 time_diff_(base::TimeTicks::Now() - base::TimeTicks() - | 73 time_diff_(base::TimeTicks::Now() - base::TimeTicks() - |
73 base::TimeDelta::FromMicroseconds( | 74 base::TimeDelta::FromMicroseconds( |
74 webrtc::TickTime::MicrosecondTimestamp())) {} | 75 webrtc::TickTime::MicrosecondTimestamp())) {} |
75 | 76 |
76 MediaStreamRemoteVideoSource:: | 77 MediaStreamRemoteVideoSource:: |
77 RemoteVideoSourceDelegate::~RemoteVideoSourceDelegate() { | 78 RemoteVideoSourceDelegate::~RemoteVideoSourceDelegate() { |
78 } | 79 } |
79 | 80 |
80 void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::RenderFrame( | 81 void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::OnFrame( |
81 const cricket::VideoFrame* incoming_frame) { | 82 const cricket::VideoFrame& incoming_frame) { |
82 const base::TimeDelta incoming_timestamp = base::TimeDelta::FromMicroseconds( | 83 const base::TimeDelta incoming_timestamp = base::TimeDelta::FromMicroseconds( |
83 incoming_frame->GetTimeStamp() / rtc::kNumNanosecsPerMicrosec); | 84 incoming_frame.GetTimeStamp() / rtc::kNumNanosecsPerMicrosec); |
84 const base::TimeTicks render_time = | 85 const base::TimeTicks render_time = |
85 base::TimeTicks() + incoming_timestamp + time_diff_; | 86 base::TimeTicks() + incoming_timestamp + time_diff_; |
86 | 87 |
87 TRACE_EVENT1("webrtc", "RemoteVideoSourceDelegate::RenderFrame", | 88 TRACE_EVENT1("webrtc", "RemoteVideoSourceDelegate::RenderFrame", |
88 "Ideal Render Instant", render_time.ToInternalValue()); | 89 "Ideal Render Instant", render_time.ToInternalValue()); |
89 | 90 |
90 CHECK_NE(media::kNoTimestamp(), incoming_timestamp); | 91 CHECK_NE(media::kNoTimestamp(), incoming_timestamp); |
91 if (start_timestamp_ == media::kNoTimestamp()) | 92 if (start_timestamp_ == media::kNoTimestamp()) |
92 start_timestamp_ = incoming_timestamp; | 93 start_timestamp_ = incoming_timestamp; |
93 const base::TimeDelta elapsed_timestamp = | 94 const base::TimeDelta elapsed_timestamp = |
94 incoming_timestamp - start_timestamp_; | 95 incoming_timestamp - start_timestamp_; |
95 | 96 |
96 scoped_refptr<media::VideoFrame> video_frame; | 97 scoped_refptr<media::VideoFrame> video_frame; |
97 if (incoming_frame->GetNativeHandle() != NULL) { | 98 if (incoming_frame.GetNativeHandle() != NULL) { |
98 video_frame = | 99 video_frame = |
99 static_cast<media::VideoFrame*>(incoming_frame->GetNativeHandle()); | 100 static_cast<media::VideoFrame*>(incoming_frame.GetNativeHandle()); |
100 video_frame->set_timestamp(elapsed_timestamp); | 101 video_frame->set_timestamp(elapsed_timestamp); |
101 } else { | 102 } else { |
102 const cricket::VideoFrame* frame = | 103 const cricket::VideoFrame* frame = |
103 incoming_frame->GetCopyWithRotationApplied(); | 104 incoming_frame.GetCopyWithRotationApplied(); |
104 | 105 |
105 gfx::Size size(frame->GetWidth(), frame->GetHeight()); | 106 gfx::Size size(frame->GetWidth(), frame->GetHeight()); |
106 | 107 |
107 // Make a shallow copy. Both |frame| and |video_frame| will share a single | 108 // Make a shallow copy. Both |frame| and |video_frame| will share a single |
108 // reference counted frame buffer. Const cast and hope no one will overwrite | 109 // reference counted frame buffer. Const cast and hope no one will overwrite |
109 // the data. | 110 // the data. |
110 // TODO(magjed): Update media::VideoFrame to support const data so we don't | 111 // TODO(magjed): Update media::VideoFrame to support const data so we don't |
111 // need to const cast here. | 112 // need to const cast here. |
112 video_frame = media::VideoFrame::WrapExternalYuvData( | 113 video_frame = media::VideoFrame::WrapExternalYuvData( |
113 media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, | 114 media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, |
(...skipping 57 matching lines...) |
171 | 172 |
172 void MediaStreamRemoteVideoSource::StartSourceImpl( | 173 void MediaStreamRemoteVideoSource::StartSourceImpl( |
173 const media::VideoCaptureFormat& format, | 174 const media::VideoCaptureFormat& format, |
174 const blink::WebMediaConstraints& constraints, | 175 const blink::WebMediaConstraints& constraints, |
175 const VideoCaptureDeliverFrameCB& frame_callback) { | 176 const VideoCaptureDeliverFrameCB& frame_callback) { |
176 DCHECK(CalledOnValidThread()); | 177 DCHECK(CalledOnValidThread()); |
177 DCHECK(!delegate_.get()); | 178 DCHECK(!delegate_.get()); |
178 delegate_ = new RemoteVideoSourceDelegate(io_task_runner(), frame_callback); | 179 delegate_ = new RemoteVideoSourceDelegate(io_task_runner(), frame_callback); |
179 scoped_refptr<webrtc::VideoTrackInterface> video_track( | 180 scoped_refptr<webrtc::VideoTrackInterface> video_track( |
180 static_cast<webrtc::VideoTrackInterface*>(observer_->track().get())); | 181 static_cast<webrtc::VideoTrackInterface*>(observer_->track().get())); |
181 video_track->AddRenderer(delegate_.get()); | 182 video_track->AddOrUpdateSink(delegate_.get(), rtc::VideoSinkWants()); |
182 OnStartDone(MEDIA_DEVICE_OK); | 183 OnStartDone(MEDIA_DEVICE_OK); |
183 } | 184 } |
184 | 185 |
185 void MediaStreamRemoteVideoSource::StopSourceImpl() { | 186 void MediaStreamRemoteVideoSource::StopSourceImpl() { |
186 DCHECK(CalledOnValidThread()); | 187 DCHECK(CalledOnValidThread()); |
187 // StopSourceImpl is called either when MediaStreamTrack.stop is called from | 188 // StopSourceImpl is called either when MediaStreamTrack.stop is called from |
188 // JS, when blink garbage collects the MediaStreamSource object, or when | 189 // JS, when blink garbage collects the MediaStreamSource object, or when |
189 // OnSourceTerminated() is called. Garbage collection will happen after the | 190 // OnSourceTerminated() is called. Garbage collection will happen after the |
190 // PeerConnection no longer receives the video track. | 191 // PeerConnection no longer receives the video track. |
191 if (!observer_) | 192 if (!observer_) |
192 return; | 193 return; |
193 DCHECK(state() != MediaStreamVideoSource::ENDED); | 194 DCHECK(state() != MediaStreamVideoSource::ENDED); |
194 scoped_refptr<webrtc::VideoTrackInterface> video_track( | 195 scoped_refptr<webrtc::VideoTrackInterface> video_track( |
195 static_cast<webrtc::VideoTrackInterface*>(observer_->track().get())); | 196 static_cast<webrtc::VideoTrackInterface*>(observer_->track().get())); |
196 video_track->RemoveRenderer(delegate_.get()); | 197 video_track->RemoveSink(delegate_.get()); |
197 // This removes the references to the webrtc video track. | 198 // This removes the references to the webrtc video track. |
198 observer_.reset(); | 199 observer_.reset(); |
199 } | 200 } |
200 | 201 |
201 webrtc::VideoRendererInterface* | 202 rtc::VideoSinkInterface<cricket::VideoFrame>* |
202 MediaStreamRemoteVideoSource::RenderInterfaceForTest() { | 203 MediaStreamRemoteVideoSource::SinkInterfaceForTest() { |
203 return delegate_.get(); | 204 return delegate_.get(); |
204 } | 205 } |
205 | 206 |
206 void MediaStreamRemoteVideoSource::OnChanged( | 207 void MediaStreamRemoteVideoSource::OnChanged( |
207 webrtc::MediaStreamTrackInterface::TrackState state) { | 208 webrtc::MediaStreamTrackInterface::TrackState state) { |
208 DCHECK(CalledOnValidThread()); | 209 DCHECK(CalledOnValidThread()); |
209 switch (state) { | 210 switch (state) { |
210 case webrtc::MediaStreamTrackInterface::kInitializing: | 211 case webrtc::MediaStreamTrackInterface::kInitializing: |
211 // Ignore the kInitializing state since there is no match in | 212 // Ignore the kInitializing state since there is no match in |
212 // WebMediaStreamSource::ReadyState. | 213 // WebMediaStreamSource::ReadyState. |
213 break; | 214 break; |
214 case webrtc::MediaStreamTrackInterface::kLive: | 215 case webrtc::MediaStreamTrackInterface::kLive: |
215 SetReadyState(blink::WebMediaStreamSource::ReadyStateLive); | 216 SetReadyState(blink::WebMediaStreamSource::ReadyStateLive); |
216 break; | 217 break; |
217 case webrtc::MediaStreamTrackInterface::kEnded: | 218 case webrtc::MediaStreamTrackInterface::kEnded: |
218 SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded); | 219 SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded); |
219 break; | 220 break; |
220 default: | 221 default: |
221 NOTREACHED(); | 222 NOTREACHED(); |
222 break; | 223 break; |
223 } | 224 } |
224 } | 225 } |
225 | 226 |
226 } // namespace content | 227 } // namespace content |
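
The substance of this change is the migration from webrtc::VideoRendererInterface (RenderFrame(), AddRenderer()/RemoveRenderer()) to rtc::VideoSinkInterface<cricket::VideoFrame> (OnFrame(), AddOrUpdateSink()/RemoveSink()). As a rough illustration of the new sink shape, here is a minimal sketch that uses only the headers the CL itself includes; the class name and frame counter are illustrative and not part of the CL.

// Minimal sketch of a sink under the API this CL moves to; assumes the WebRTC
// revision rolled at this point in time. FrameCounterSink is a hypothetical
// name, not Chromium code.
#include "third_party/webrtc/media/base/videoframe.h"
#include "third_party/webrtc/media/base/videosinkinterface.h"

class FrameCounterSink : public rtc::VideoSinkInterface<cricket::VideoFrame> {
 public:
  // Replaces webrtc::VideoRendererInterface::RenderFrame(const VideoFrame*);
  // called on a libjingle thread for every decoded remote frame.
  void OnFrame(const cricket::VideoFrame& frame) override { ++frame_count_; }

  int frame_count() const { return frame_count_; }

 private:
  int frame_count_ = 0;
};

// Attaching and detaching the sink mirrors StartSourceImpl()/StopSourceImpl():
//   video_track->AddOrUpdateSink(&sink, rtc::VideoSinkWants());
//   ...
//   video_track->RemoveSink(&sink);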
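
OnFrame() also maps the frame's timestamp (nanoseconds from GetTimeStamp()) onto Chrome's base::TimeTicks timeline: the first timestamp becomes start_timestamp_ so delivered media::VideoFrames start at zero, and time_diff_, captured once in the constructor, corrects the constant offset between the two clocks (per the comment above it deliberately ignores rate drift; see crbug/516700). A simplified, self-contained sketch of that arithmetic with plain microsecond integers follows; all names here are illustrative, not Chromium code.

#include <cstdint>
#include <cstdio>

// Simplified model of the timestamp math in RemoteVideoSourceDelegate::OnFrame(),
// using raw microsecond counts instead of base::TimeDelta/base::TimeTicks.
const int64_t kNumNanosecsPerMicrosec = 1000;
const int64_t kNoTimestampUs = -1;  // stands in for media::kNoTimestamp()

class RemoteTimestampMapper {
 public:
  // |time_diff_us| is the constant offset between Chrome's clock and the
  // webrtc clock, captured once (like |time_diff_| in the delegate ctor).
  explicit RemoteTimestampMapper(int64_t time_diff_us)
      : time_diff_us_(time_diff_us), start_timestamp_us_(kNoTimestampUs) {}

  // Computes the two values OnFrame() derives: |elapsed_us| is the
  // media::VideoFrame timestamp (relative to the first frame) and
  // |render_time_us| is the "Ideal Render Instant" on Chrome's timeline.
  void Map(int64_t incoming_timestamp_ns,
           int64_t* elapsed_us,
           int64_t* render_time_us) {
    const int64_t incoming_us = incoming_timestamp_ns / kNumNanosecsPerMicrosec;
    if (start_timestamp_us_ == kNoTimestampUs)
      start_timestamp_us_ = incoming_us;  // first frame defines time zero
    *elapsed_us = incoming_us - start_timestamp_us_;
    *render_time_us = incoming_us + time_diff_us_;  // shift onto Chrome's clock
  }

 private:
  const int64_t time_diff_us_;
  int64_t start_timestamp_us_;
};

int main() {
  RemoteTimestampMapper mapper(/*time_diff_us=*/500000);
  int64_t elapsed = 0, render = 0;
  mapper.Map(/*incoming_timestamp_ns=*/2000000000LL, &elapsed, &render);
  std::printf("elapsed=%lld us, render=%lld us\n",
              static_cast<long long>(elapsed), static_cast<long long>(render));
  return 0;
}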