Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/webrtc/media_stream_remote_video_source.h" | 5 #include "content/renderer/media/webrtc/media_stream_remote_video_source.h" |
| 6 | 6 |
| 7 #include "base/bind.h" | 7 #include "base/bind.h" |
| 8 #include "base/callback_helpers.h" | 8 #include "base/callback_helpers.h" |
| 9 #include "base/location.h" | 9 #include "base/location.h" |
| 10 #include "base/threading/thread_checker.h" | 10 #include "base/threading/thread_checker.h" |
| 11 #include "base/trace_event/trace_event.h" | 11 #include "base/trace_event/trace_event.h" |
| 12 #include "content/renderer/media/webrtc/track_observer.h" | 12 #include "content/renderer/media/webrtc/track_observer.h" |
| 13 #include "media/base/bind_to_current_loop.h" | 13 #include "media/base/bind_to_current_loop.h" |
| 14 #include "media/base/timestamp_constants.h" | |
| 14 #include "media/base/video_frame.h" | 15 #include "media/base/video_frame.h" |
| 15 #include "media/base/video_util.h" | 16 #include "media/base/video_util.h" |
| 16 #include "third_party/libjingle/source/talk/media/base/videoframe.h" | 17 #include "third_party/libjingle/source/talk/media/base/videoframe.h" |
| 17 #include "third_party/webrtc/system_wrappers/interface/tick_util.h" | 18 #include "third_party/webrtc/system_wrappers/interface/tick_util.h" |
| 18 | 19 |
| 19 namespace content { | 20 namespace content { |
| 20 | 21 |
| 21 // Internal class used for receiving frames from the webrtc track on a | 22 // Internal class used for receiving frames from the webrtc track on a |
| 22 // libjingle thread and forwarding them to the IO-thread. | 23 // libjingle thread and forwarding them to the IO-thread. |
| 23 class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate | 24 class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate |
| (...skipping 18 matching lines...) | |
| 42 | 43 |
| 43 private: | 44 private: |
| 44 // Bound to the render thread. | 45 // Bound to the render thread. |
| 45 base::ThreadChecker thread_checker_; | 46 base::ThreadChecker thread_checker_; |
| 46 | 47 |
| 47 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_; | 48 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner_; |
| 48 | 49 |
| 49 // |frame_callback_| is accessed on the IO thread. | 50 // |frame_callback_| is accessed on the IO thread. |
| 50 VideoCaptureDeliverFrameCB frame_callback_; | 51 VideoCaptureDeliverFrameCB frame_callback_; |
| 51 | 52 |
| | 53 // Timestamp of the first received frame. |
| | 54 base::TimeDelta start_timestamp_; |
| 52 // WebRTC Chromium timestamp diff | 55 // WebRTC Chromium timestamp diff |
| 53 int64_t time_diff_us_; | 56 const base::TimeDelta time_diff_; |
| 54 }; | 57 }; |
| 55 | 58 |
| 56 MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate:: | 59 MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate:: |
| 57 RemoteVideoSourceDelegate( | 60 RemoteVideoSourceDelegate( |
| 58 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner, | 61 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner, |
| 59 const VideoCaptureDeliverFrameCB& new_frame_callback) | 62 const VideoCaptureDeliverFrameCB& new_frame_callback) |
| 60 : io_task_runner_(io_task_runner), frame_callback_(new_frame_callback) { | 63 : io_task_runner_(io_task_runner), |
| 61 // TODO(qiangchen): There can be two differences between clocks: 1) | 64 frame_callback_(new_frame_callback), |
| 62 // the offset, 2) the rate (i.e., one clock runs faster than the other). | 65 start_timestamp_(media::kNoTimestamp()), |

tommi (sloooow) - chröme, 2015/09/25 07:53:50:
Is kNoTimestamp a function?

magjed_chromium, 2015/09/25 08:21:35:
Yes, it looks like this:
MEDIA_EXPORT inline base:

| 63 // See http://crbug/516700 | 66 // TODO(qiangchen): There can be two differences between clocks: 1) |
| 64 time_diff_us_ = | 67 // the offset, 2) the rate (i.e., one clock runs faster than the other). |
| 65 (base::TimeTicks::Now() - base::TimeTicks()).InMicroseconds() - | 68 // See http://crbug/516700 |
| 66 webrtc::TickTime::MicrosecondTimestamp(); | 69 time_diff_(base::TimeTicks::Now() - base::TimeTicks() - |
| 67 } | 70 base::TimeDelta::FromMicroseconds( |
| | 71 webrtc::TickTime::MicrosecondTimestamp())) {} |
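
The NEW constructor samples a one-time offset between Chromium's base::TimeTicks and WebRTC's tick clock; as the TODO notes, only the constant offset is compensated, not any rate drift between the clocks. A minimal, self-contained sketch of that mapping, using std::chrono clocks and illustrative function names (WebRtcMicroseconds, ChromiumMicroseconds) as stand-ins for the Chromium and WebRTC APIs:

```cpp
#include <chrono>
#include <cstdint>

// Illustrative stand-in for webrtc::TickTime::MicrosecondTimestamp():
// microseconds on a clock whose zero point differs from Chromium's.
int64_t WebRtcMicroseconds() {
  using namespace std::chrono;
  return duration_cast<microseconds>(
             steady_clock::now().time_since_epoch()).count();
}

// Illustrative stand-in for base::TimeTicks::Now() - base::TimeTicks().
int64_t ChromiumMicroseconds() {
  using namespace std::chrono;
  return duration_cast<microseconds>(
             system_clock::now().time_since_epoch()).count();
}

int main() {
  // Sampled once at construction: the constant offset between the clocks.
  // Rate drift between the two clocks is not compensated (see the TODO).
  const int64_t time_diff_us = ChromiumMicroseconds() - WebRtcMicroseconds();

  // Any later WebRTC timestamp maps onto the Chromium clock by adding it.
  const int64_t incoming_us = WebRtcMicroseconds();
  const int64_t render_time_us = incoming_us + time_diff_us;
  (void)render_time_us;
}
```
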
| 68 | 72 |
| 69 MediaStreamRemoteVideoSource:: | 73 MediaStreamRemoteVideoSource:: |
| 70 RemoteVideoSourceDelegate::~RemoteVideoSourceDelegate() { | 74 RemoteVideoSourceDelegate::~RemoteVideoSourceDelegate() { |
| 71 } | 75 } |
| 72 | 76 |
| 73 void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::RenderFrame( | 77 void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::RenderFrame( |
| 74 const cricket::VideoFrame* incoming_frame) { | 78 const cricket::VideoFrame* incoming_frame) { |
| 75 base::TimeTicks render_time = | 79 const base::TimeDelta incoming_timestamp = base::TimeDelta::FromMicroseconds( |
| 76 base::TimeTicks() + | 80 incoming_frame->GetTimeStamp() / rtc::kNumNanosecsPerMicrosec); |
| 77 base::TimeDelta::FromMicroseconds(incoming_frame->GetTimeStamp() / 1000 + | 81 const base::TimeTicks render_time = |
| 78 time_diff_us_); | 82 base::TimeTicks() + incoming_timestamp + time_diff_; |
| 79 | 83 |
| 80 TRACE_EVENT1("webrtc", "RemoteVideoSourceDelegate::RenderFrame", | 84 TRACE_EVENT1("webrtc", "RemoteVideoSourceDelegate::RenderFrame", |
| 81 "Ideal Render Instant", render_time.ToInternalValue()); | 85 "Ideal Render Instant", render_time.ToInternalValue()); |
| 82 | 86 |
| 83 base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds( | 87 if (start_timestamp_ == media::kNoTimestamp()) |
| 84 incoming_frame->GetElapsedTime() / rtc::kNumNanosecsPerMicrosec); | 88 start_timestamp_ = incoming_timestamp; |
| | 89 const base::TimeDelta elapsed_timestamp = |
| | 90 incoming_timestamp - start_timestamp_; |
| 85 | 91 |
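
In the NEW RenderFrame path, the incoming timestamp is converted from nanoseconds to microseconds, shifted onto the Chromium clock via time_diff_ to get the ideal render instant, and rebased against the first received frame's timestamp to produce the media::VideoFrame timestamp. A hedged sketch of that bookkeeping with plain integers; kNoTimestampUs and MapTimestamps are illustrative stand-ins, not Chromium names:

```cpp
#include <cstdint>
#include <limits>

constexpr int64_t kNumNanosecsPerMicrosec = 1000;
// Local stand-in for the media::kNoTimestamp() sentinel ("not set yet").
constexpr int64_t kNoTimestampUs = std::numeric_limits<int64_t>::min();

struct FrameTimes {
  int64_t render_time_us;  // Ideal render instant on the Chromium clock.
  int64_t elapsed_us;      // Timestamp relative to the first received frame.
};

// |incoming_ns|: frame timestamp on the WebRTC clock, in nanoseconds.
// |time_diff_us|: clock offset captured in the constructor.
// |start_us|: set to the first frame's timestamp the first time through.
FrameTimes MapTimestamps(int64_t incoming_ns, int64_t time_diff_us,
                         int64_t* start_us) {
  const int64_t incoming_us = incoming_ns / kNumNanosecsPerMicrosec;
  if (*start_us == kNoTimestampUs)
    *start_us = incoming_us;  // The first frame defines time zero.
  return {incoming_us + time_diff_us, incoming_us - *start_us};
}
```
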
| 86 scoped_refptr<media::VideoFrame> video_frame; | 92 scoped_refptr<media::VideoFrame> video_frame; |
| 87 if (incoming_frame->GetNativeHandle() != NULL) { | 93 if (incoming_frame->GetNativeHandle() != NULL) { |
| 88 video_frame = | 94 video_frame = |
| 89 static_cast<media::VideoFrame*>(incoming_frame->GetNativeHandle()); | 95 static_cast<media::VideoFrame*>(incoming_frame->GetNativeHandle()); |
| 90 video_frame->set_timestamp(timestamp); | 96 video_frame->set_timestamp(elapsed_timestamp); |
| 91 } else { | 97 } else { |
| 92 const cricket::VideoFrame* frame = | 98 const cricket::VideoFrame* frame = |
| 93 incoming_frame->GetCopyWithRotationApplied(); | 99 incoming_frame->GetCopyWithRotationApplied(); |
| 94 | 100 |
| 95 gfx::Size size(frame->GetWidth(), frame->GetHeight()); | 101 gfx::Size size(frame->GetWidth(), frame->GetHeight()); |
| 96 | 102 |
| 97 // Non-square pixels are unsupported. | 103 // Non-square pixels are unsupported. |
| 98 DCHECK_EQ(frame->GetPixelWidth(), 1u); | 104 DCHECK_EQ(frame->GetPixelWidth(), 1u); |
| 99 DCHECK_EQ(frame->GetPixelHeight(), 1u); | 105 DCHECK_EQ(frame->GetPixelHeight(), 1u); |
| 100 | 106 |
| 101 // Make a shallow copy. Both |frame| and |video_frame| will share a single | 107 // Make a shallow copy. Both |frame| and |video_frame| will share a single |
| 102 // reference counted frame buffer. Const cast and hope no one will overwrite | 108 // reference counted frame buffer. Const cast and hope no one will overwrite |
| 103 // the data. | 109 // the data. |
| 104 // TODO(magjed): Update media::VideoFrame to support const data so we don't | 110 // TODO(magjed): Update media::VideoFrame to support const data so we don't |
| 105 // need to const cast here. | 111 // need to const cast here. |
| 106 video_frame = media::VideoFrame::WrapExternalYuvData( | 112 video_frame = media::VideoFrame::WrapExternalYuvData( |
| 107 media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, | 113 media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, |
| 108 frame->GetYPitch(), frame->GetUPitch(), frame->GetVPitch(), | 114 frame->GetYPitch(), frame->GetUPitch(), frame->GetVPitch(), |
| 109 const_cast<uint8_t*>(frame->GetYPlane()), | 115 const_cast<uint8_t*>(frame->GetYPlane()), |
| 110 const_cast<uint8_t*>(frame->GetUPlane()), | 116 const_cast<uint8_t*>(frame->GetUPlane()), |
| 111 const_cast<uint8_t*>(frame->GetVPlane()), timestamp); | 117 const_cast<uint8_t*>(frame->GetVPlane()), elapsed_timestamp); |
| 112 video_frame->AddDestructionObserver( | 118 video_frame->AddDestructionObserver( |
| 113 base::Bind(&base::DeletePointer<cricket::VideoFrame>, frame->Copy())); | 119 base::Bind(&base::DeletePointer<cricket::VideoFrame>, frame->Copy())); |
| 114 } | 120 } |
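
For software frames, the code wraps the cricket frame's YUV planes directly (no pixel copy) and keeps the pixel memory alive by attaching a destruction observer that deletes a shallow copy of the source frame. A simplified sketch of that ownership pattern under stated assumptions: SourceFrame and WrappedFrame are illustrative stand-ins, and the shared_ptr planes model the reference-counted buffer that a shallow Copy() of cricket::VideoFrame shares:

```cpp
#include <cstdint>
#include <functional>
#include <memory>
#include <vector>

// Stand-in for a reference-counted video frame: copying shares the buffers,
// mirroring the shallow Copy() of cricket::VideoFrame.
struct SourceFrame {
  std::shared_ptr<std::vector<uint8_t>> y, u, v;
};

// Stand-in for media::VideoFrame::WrapExternalYuvData(): holds raw plane
// pointers it does not own, plus destruction observers.
class WrappedFrame {
 public:
  WrappedFrame(const uint8_t* y, const uint8_t* u, const uint8_t* v)
      : y_(y), u_(u), v_(v) {}
  ~WrappedFrame() {
    for (auto& observer : destruction_observers_)
      observer();  // The real code runs base::DeletePointer here.
  }
  void AddDestructionObserver(std::function<void()> observer) {
    destruction_observers_.push_back(std::move(observer));
  }

 private:
  const uint8_t *y_, *u_, *v_;
  std::vector<std::function<void()>> destruction_observers_;
};

std::unique_ptr<WrappedFrame> WrapWithoutCopy(const SourceFrame& frame) {
  // Shallow wrap: only plane pointers are taken, no pixel data is copied.
  auto wrapped = std::make_unique<WrappedFrame>(
      frame.y->data(), frame.u->data(), frame.v->data());
  // Pin a shallow copy so the shared plane buffers outlive the wrapper.
  auto* pinned = new SourceFrame(frame);
  wrapped->AddDestructionObserver([pinned] { delete pinned; });
  return wrapped;
}
```

The point of the pattern is that the wrapper never owns the pixels; the pinned copy guarantees the planes stay valid for every consumer of the wrapped frame.
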
| 115 | 121 |
| 116 video_frame->metadata()->SetTimeTicks( | 122 video_frame->metadata()->SetTimeTicks( |
| 117 media::VideoFrameMetadata::REFERENCE_TIME, render_time); | 123 media::VideoFrameMetadata::REFERENCE_TIME, render_time); |
| 118 | 124 |
| 119 io_task_runner_->PostTask( | 125 io_task_runner_->PostTask( |
| 120 FROM_HERE, base::Bind(&RemoteVideoSourceDelegate::DoRenderFrameOnIOThread, | 126 FROM_HERE, base::Bind(&RemoteVideoSourceDelegate::DoRenderFrameOnIOThread, |
| 121 this, video_frame)); | 127 this, video_frame)); |
| (...skipping 73 matching lines...) | |
| 195 case webrtc::MediaStreamTrackInterface::kEnded: | 201 case webrtc::MediaStreamTrackInterface::kEnded: |
| 196 SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded); | 202 SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded); |
| 197 break; | 203 break; |
| 198 default: | 204 default: |
| 199 NOTREACHED(); | 205 NOTREACHED(); |
| 200 break; | 206 break; |
| 201 } | 207 } |
| 202 } | 208 } |
| 203 | 209 |
| 204 } // namespace content | 210 } // namespace content |