// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/filters/rtc_video_decoder.h"

#include <deque>

#include "base/task.h"
#include "googleurl/src/gurl.h"
#include "media/base/callback.h"
#include "media/base/filter_host.h"
#include "media/base/filters.h"
#include "media/base/limits.h"
#include "media/base/media_format.h"
#include "media/base/video_frame.h"

namespace media {

static const char kMediaScheme[] = "media";

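// The default frame size is QCIF (176x144) until FrameSizeChange() reports
// the actual dimensions of the incoming stream.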
RTCVideoDecoder::RTCVideoDecoder(MessageLoop* message_loop,
                                 const std::string& url)
    : message_loop_(message_loop),
      width_(176),
      height_(144),
      url_(url),
      state_(kUnInitialized) {
}

RTCVideoDecoder::~RTCVideoDecoder() {
}

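// The filter entry points below must run on |message_loop_|; when invoked on
// another thread they re-post themselves to that loop and return immediately.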
void RTCVideoDecoder::Initialize(DemuxerStream* demuxer_stream,
                                 FilterCallback* filter_callback,
                                 StatisticsCallback* stat_callback) {
  if (MessageLoop::current() != message_loop_) {
    message_loop_->PostTask(
        FROM_HERE,
        NewRunnableMethod(this,
                          &RTCVideoDecoder::Initialize,
                          make_scoped_refptr(demuxer_stream),
                          filter_callback, stat_callback));
    return;
  }

  DCHECK_EQ(MessageLoop::current(), message_loop_);

  lock_.Acquire();
  frame_queue_available_.clear();
  lock_.Release();
  media_format_.SetAsInteger(MediaFormat::kWidth, width_);
  media_format_.SetAsInteger(MediaFormat::kHeight, height_);
  media_format_.SetAsInteger(MediaFormat::kSurfaceType,
                             static_cast<int>(VideoFrame::TYPE_SYSTEM_MEMORY));
  media_format_.SetAsInteger(MediaFormat::kSurfaceFormat,
                             static_cast<int>(VideoFrame::YV12));

  state_ = kNormal;

  filter_callback->Run();
  delete filter_callback;

  // TODO(acolwell): Implement stats.
  delete stat_callback;
}

void RTCVideoDecoder::Play(FilterCallback* callback) {
  if (MessageLoop::current() != message_loop_) {
    message_loop_->PostTask(FROM_HERE,
                            NewRunnableMethod(this,
                                              &RTCVideoDecoder::Play,
                                              callback));
    return;
  }

  DCHECK_EQ(MessageLoop::current(), message_loop_);

  VideoDecoder::Play(callback);
}

void RTCVideoDecoder::Pause(FilterCallback* callback) {
  if (MessageLoop::current() != message_loop_) {
    message_loop_->PostTask(FROM_HERE,
                            NewRunnableMethod(this,
                                              &RTCVideoDecoder::Pause,
                                              callback));
    return;
  }

  DCHECK_EQ(MessageLoop::current(), message_loop_);

  state_ = kPaused;

  VideoDecoder::Pause(callback);
}

void RTCVideoDecoder::Stop(FilterCallback* callback) {
  if (MessageLoop::current() != message_loop_) {
    message_loop_->PostTask(FROM_HERE,
                            NewRunnableMethod(this,
                                              &RTCVideoDecoder::Stop,
                                              callback));
    return;
  }

  DCHECK_EQ(MessageLoop::current(), message_loop_);

  state_ = kStopped;

  VideoDecoder::Stop(callback);

  // TODO(ronghuawu): Stop rtc.
}

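// Seek() pre-allocates the pool of output frames. Each frame is filled with
// YUV black (Y = 0x00, U = V = 0x80) and handed to the renderer so that the
// renderer can complete the seek.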
void RTCVideoDecoder::Seek(base::TimeDelta time, const FilterStatusCB& cb) {
  if (MessageLoop::current() != message_loop_) {
    message_loop_->PostTask(FROM_HERE,
                            NewRunnableMethod(this, &RTCVideoDecoder::Seek,
                                              time, cb));
    return;
  }

  DCHECK_EQ(MessageLoop::current(), message_loop_);

  state_ = kSeeking;
  // Create output buffer pool and pass the frames to the renderer
  // so that the renderer can complete the seeking.
  for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) {
    scoped_refptr<VideoFrame> video_frame;
    VideoFrame::CreateFrame(VideoFrame::YV12,
                            width_,
                            height_,
                            kNoTimestamp,
                            kNoTimestamp,
                            &video_frame);
    if (!video_frame.get()) {
      break;
    }

    // Create a black frame.
    const uint8 kBlackY = 0x00;
    const uint8 kBlackUV = 0x80;
    // Fill the Y plane.
    uint8* y_plane = video_frame->data(VideoFrame::kYPlane);
    for (size_t row = 0; row < height_; ++row) {
      memset(y_plane, kBlackY, width_);
      y_plane += video_frame->stride(VideoFrame::kYPlane);
    }
    // Fill the U and V planes.
    uint8* u_plane = video_frame->data(VideoFrame::kUPlane);
    uint8* v_plane = video_frame->data(VideoFrame::kVPlane);
    for (size_t row = 0; row < (height_ / 2); ++row) {
      memset(u_plane, kBlackUV, width_ / 2);
      memset(v_plane, kBlackUV, width_ / 2);
      u_plane += video_frame->stride(VideoFrame::kUPlane);
      v_plane += video_frame->stride(VideoFrame::kVPlane);
    }

    VideoFrameReady(video_frame);
  }

  state_ = kNormal;

  cb.Run(PIPELINE_OK);

  // TODO(ronghuawu): Start rtc.
}

const MediaFormat& RTCVideoDecoder::media_format() {
  return media_format_;
}

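// The renderer requests output by handing an available frame to
// ProduceVideoFrame(); the frame is queued under |lock_| and later filled and
// returned by DeliverFrame().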
void RTCVideoDecoder::ProduceVideoFrame(
    scoped_refptr<VideoFrame> video_frame) {
  if (MessageLoop::current() != message_loop_) {
    message_loop_->PostTask(
        FROM_HERE,
        NewRunnableMethod(this,
                          &RTCVideoDecoder::ProduceVideoFrame, video_frame));
    return;
  }
  DCHECK_EQ(MessageLoop::current(), message_loop_);
  lock_.Acquire();
  frame_queue_available_.push_back(video_frame);
  lock_.Release();
}

bool RTCVideoDecoder::ProvidesBuffer() {
  return true;
}

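// Called when the incoming stream's resolution changes (presumably by the
// same RTC engine that feeds DeliverFrame()); updates the advertised media
// format and tells the host about the new size.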
int RTCVideoDecoder::FrameSizeChange(unsigned int width,
                                     unsigned int height,
                                     unsigned int number_of_streams) {
  width_ = width;
  height_ = height;

  media_format_.SetAsInteger(MediaFormat::kWidth, width_);
  media_format_.SetAsInteger(MediaFormat::kHeight, height_);
  host()->SetVideoSize(width_, height_);
  return 0;
}

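// |buffer| is assumed to hold one packed planar 4:2:0 frame (a full-resolution
// Y plane followed by two half-resolution chroma planes, width_ * height_ *
// 3 / 2 bytes in total). Each plane is copied row by row so that the
// destination VideoFrame's strides are respected.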
int RTCVideoDecoder::DeliverFrame(unsigned char* buffer,
                                  int buffer_size) {
  DCHECK(buffer);

  if (frame_queue_available_.size() == 0)
    return 0;

  if (state_ != kNormal)
    return 0;

  // This is called from another thread.
  lock_.Acquire();
  scoped_refptr<VideoFrame> video_frame = frame_queue_available_.front();
  frame_queue_available_.pop_front();
  lock_.Release();

  // Check if there's a size change.
  if (video_frame->width() != width_ || video_frame->height() != height_) {
    video_frame.release();
    // Allocate a new buffer based on the new size.
    VideoFrame::CreateFrame(VideoFrame::YV12,
                            width_,
                            height_,
                            kNoTimestamp,
                            kNoTimestamp,
                            &video_frame);
    if (!video_frame.get()) {
      return -1;
    }
  }

  video_frame->SetTimestamp(host()->GetTime());
  video_frame->SetDuration(base::TimeDelta::FromMilliseconds(30));

  uint8* y_plane = video_frame->data(VideoFrame::kYPlane);
  for (size_t row = 0; row < video_frame->height(); ++row) {
    memcpy(y_plane, buffer, width_);
    y_plane += video_frame->stride(VideoFrame::kYPlane);
    buffer += width_;
  }
  size_t uv_width = width_ / 2;
  uint8* u_plane = video_frame->data(VideoFrame::kUPlane);
  for (size_t row = 0; row < video_frame->height(); row += 2) {
    memcpy(u_plane, buffer, uv_width);
    u_plane += video_frame->stride(VideoFrame::kUPlane);
    buffer += uv_width;
  }
  uint8* v_plane = video_frame->data(VideoFrame::kVPlane);
  for (size_t row = 0; row < video_frame->height(); row += 2) {
    memcpy(v_plane, buffer, uv_width);
    v_plane += video_frame->stride(VideoFrame::kVPlane);
    buffer += uv_width;
  }

  if (MessageLoop::current() != message_loop_) {
    message_loop_->PostTask(
        FROM_HERE,
        NewRunnableMethod(this,
                          &RTCVideoDecoder::VideoFrameReady,
                          video_frame));
  } else {
    VideoFrameReady(video_frame);
  }

  return 0;
}

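// Only URLs using the "media" scheme (kMediaScheme) are claimed by this
// decoder.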
bool RTCVideoDecoder::IsUrlSupported(const std::string& url) {
  GURL gurl(url);
  return gurl.SchemeIs(kMediaScheme);
}

}  // namespace media