OLD | NEW |
---|---|
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/webrtc/media_stream_remote_video_source.h" | 5 #include "content/renderer/media/webrtc/media_stream_remote_video_source.h" |
6 | 6 |
7 #include "base/bind.h" | 7 #include "base/bind.h" |
8 #include "base/callback_helpers.h" | 8 #include "base/callback_helpers.h" |
9 #include "base/location.h" | 9 #include "base/location.h" |
10 #include "base/threading/thread_checker.h" | 10 #include "base/threading/thread_checker.h" |
(...skipping 44 matching lines...) | |
55 const VideoCaptureDeliverFrameCB& new_frame_callback) | 55 const VideoCaptureDeliverFrameCB& new_frame_callback) |
56 : io_task_runner_(io_task_runner), frame_callback_(new_frame_callback) { | 56 : io_task_runner_(io_task_runner), frame_callback_(new_frame_callback) { |
57 } | 57 } |
58 | 58 |
59 MediaStreamRemoteVideoSource:: | 59 MediaStreamRemoteVideoSource:: |
60 RemoteVideoSourceDelegate::~RemoteVideoSourceDelegate() { | 60 RemoteVideoSourceDelegate::~RemoteVideoSourceDelegate() { |
61 } | 61 } |
62 | 62 |
63 void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::RenderFrame( | 63 void MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate::RenderFrame( |
64 const cricket::VideoFrame* incoming_frame) { | 64 const cricket::VideoFrame* incoming_frame) { |
65 TRACE_EVENT0("webrtc", "RemoteVideoSourceDelegate::RenderFrame"); | 65 TRACE_EVENT_BEGIN0("webrtc", "RemoteVideoSourceDelegate::RenderFrame"); |
66 base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds( | 66 base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds( |
67 incoming_frame->GetElapsedTime() / rtc::kNumNanosecsPerMicrosec); | 67 incoming_frame->GetElapsedTime() / rtc::kNumNanosecsPerMicrosec); |
68 | 68 |
69 scoped_refptr<media::VideoFrame> video_frame; | 69 scoped_refptr<media::VideoFrame> video_frame; |
70 if (incoming_frame->GetNativeHandle() != NULL) { | 70 if (incoming_frame->GetNativeHandle() != NULL) { |
71 video_frame = | 71 video_frame = |
72 static_cast<media::VideoFrame*>(incoming_frame->GetNativeHandle()); | 72 static_cast<media::VideoFrame*>(incoming_frame->GetNativeHandle()); |
73 video_frame->set_timestamp(timestamp); | 73 video_frame->set_timestamp(timestamp); |
74 } else { | 74 } else { |
75 const cricket::VideoFrame* frame = | 75 const cricket::VideoFrame* frame = |
(...skipping 13 matching lines...) | |
89 video_frame = media::VideoFrame::WrapExternalYuvData( | 89 video_frame = media::VideoFrame::WrapExternalYuvData( |
90 media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, | 90 media::PIXEL_FORMAT_YV12, size, gfx::Rect(size), size, |
91 frame->GetYPitch(), frame->GetUPitch(), frame->GetVPitch(), | 91 frame->GetYPitch(), frame->GetUPitch(), frame->GetVPitch(), |
92 const_cast<uint8_t*>(frame->GetYPlane()), | 92 const_cast<uint8_t*>(frame->GetYPlane()), |
93 const_cast<uint8_t*>(frame->GetUPlane()), | 93 const_cast<uint8_t*>(frame->GetUPlane()), |
94 const_cast<uint8_t*>(frame->GetVPlane()), timestamp); | 94 const_cast<uint8_t*>(frame->GetVPlane()), timestamp); |
95 video_frame->AddDestructionObserver( | 95 video_frame->AddDestructionObserver( |
96 base::Bind(&base::DeletePointer<cricket::VideoFrame>, frame->Copy())); | 96 base::Bind(&base::DeletePointer<cricket::VideoFrame>, frame->Copy())); |
97 } | 97 } |
98 | 98 |
99 video_frame->set_render_time(base::TimeTicks::FromInternalValue( | |
DaleCurtis (2015/07/30 05:55:24): Using InternalValue like this isn't correct. Why d…
qiangchen (2015/07/30 17:40:26): cricket::VideoFrame's timestamp is a number in uni…
(See the timestamp-conversion sketch after the diff.)
100 incoming_frame->GetTimeStamp() / 1000)); | |
101 | |
102 TRACE_EVENT_END1("webrtc", "RemoteVideoSourceDelegate::RenderFrame", | |
103 "Ideal Render Instant", | |
104 video_frame->render_time().ToInternalValue()); | |
105 | |
99 io_task_runner_->PostTask( | 106 io_task_runner_->PostTask( |
100 FROM_HERE, base::Bind(&RemoteVideoSourceDelegate::DoRenderFrameOnIOThread, | 107 FROM_HERE, base::Bind(&RemoteVideoSourceDelegate::DoRenderFrameOnIOThread, |
101 this, video_frame)); | 108 this, video_frame)); |
102 } | 109 } |
103 | 110 |
104 void MediaStreamRemoteVideoSource:: | 111 void MediaStreamRemoteVideoSource:: |
105 RemoteVideoSourceDelegate::DoRenderFrameOnIOThread( | 112 RemoteVideoSourceDelegate::DoRenderFrameOnIOThread( |
106 const scoped_refptr<media::VideoFrame>& video_frame) { | 113 const scoped_refptr<media::VideoFrame>& video_frame) { |
107 DCHECK(io_task_runner_->BelongsToCurrentThread()); | 114 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
108 TRACE_EVENT0("webrtc", "RemoteVideoSourceDelegate::DoRenderFrameOnIOThread"); | 115 TRACE_EVENT0("webrtc", "RemoteVideoSourceDelegate::DoRenderFrameOnIOThread"); |
(...skipping 66 matching lines...) | |
175 case webrtc::MediaStreamTrackInterface::kEnded: | 182 case webrtc::MediaStreamTrackInterface::kEnded: |
176 SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded); | 183 SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded); |
177 break; | 184 break; |
178 default: | 185 default: |
179 NOTREACHED(); | 186 NOTREACHED(); |
180 break; | 187 break; |
181 } | 188 } |
182 } | 189 } |
183 | 190 |
184 } // namespace content | 191 } // namespace content |
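
The reviewer exchange on new lines 99-100 centers on how the WebRTC frame timestamp, reported in nanoseconds, is turned into a render time via base::TimeTicks::FromInternalValue(). Below is a minimal sketch of that conversion under the same assumption the patch makes, namely that GetTimeStamp()/GetElapsedTime() are nanosecond values and that rtc::kNumNanosecsPerMicrosec == 1000; the helper names are illustrative and are not part of the patch.

#include <stdint.h>

#include "base/time/time.h"

namespace {

// Converts a nanosecond frame timestamp into a base::TimeDelta, mirroring
// the elapsed-time conversion on new lines 66-67 of the patch.
base::TimeDelta NanosToTimeDelta(int64_t timestamp_ns) {
  const int64_t kNanosecsPerMicrosec = 1000;  // rtc::kNumNanosecsPerMicrosec
  return base::TimeDelta::FromMicroseconds(timestamp_ns / kNanosecsPerMicrosec);
}

// One way to address the concern DaleCurtis raises: express the render time
// as an explicit origin plus a delta instead of calling
// base::TimeTicks::FromInternalValue(), so the code does not depend on
// TimeTicks' internal unit being microseconds.
base::TimeTicks RenderTimeFromNanos(base::TimeTicks origin,
                                    int64_t timestamp_ns) {
  return origin + NanosToTimeDelta(timestamp_ns);
}

}  // namespace

The arithmetic is identical to what the patch does (divide nanoseconds by 1000 to get microseconds); the only difference is that the render time is built from a TimeDelta and an origin rather than from TimeTicks' internal representation.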