Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/filters/rtc_video_decoder.h" | 5 #include "content/renderer/media/rtc_video_decoder.h" |
| 6 | 6 |
| 7 #include <deque> | 7 #include <deque> |
| 8 | 8 |
| 9 #include "base/task.h" | 9 #include "base/task.h" |
| 10 #include "googleurl/src/gurl.h" | |
| 11 #include "media/base/callback.h" | 10 #include "media/base/callback.h" |
| 12 #include "media/base/filter_host.h" | 11 #include "media/base/filter_host.h" |
| 13 #include "media/base/filters.h" | 12 #include "media/base/filters.h" |
| 14 #include "media/base/limits.h" | 13 #include "media/base/limits.h" |
| 15 #include "media/base/media_format.h" | 14 #include "media/base/media_format.h" |
| 16 #include "media/base/video_frame.h" | 15 #include "media/base/video_frame.h" |
| 17 | 16 |
| 18 namespace media { | |
| 19 | |
| 20 static const char kMediaScheme[] = "media"; | |
| 21 | |
| 22 RTCVideoDecoder::RTCVideoDecoder(MessageLoop* message_loop, | 17 RTCVideoDecoder::RTCVideoDecoder(MessageLoop* message_loop, |
| 23 const std::string& url) | 18 const std::string& url) |
| 24 : message_loop_(message_loop), | 19 : message_loop_(message_loop), |
| 25 width_(176), | 20 width_(176), |
| 26 height_(144), | 21 height_(144), |
| 27 url_(url), | 22 url_(url), |
| 28 state_(kUnInitialized) { | 23 state_(kUnInitialized) { |
| 29 } | 24 } |
| 30 | 25 |
| 31 RTCVideoDecoder::~RTCVideoDecoder() { | 26 RTCVideoDecoder::~RTCVideoDecoder() { |
| 32 } | 27 } |
| 33 | 28 |
| 34 void RTCVideoDecoder::Initialize(DemuxerStream* demuxer_stream, | 29 void RTCVideoDecoder::Initialize(media::DemuxerStream* demuxer_stream, |
|
scherkus (not reviewing), 2011/06/24 21:33:39:
if you want inside .cc files you may use:
using m

Ronghua, 2011/06/27 22:34:51:
Done.
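The suggestion above is truncated ("using m"); it presumably refers to a using declaration for names in the media namespace, which is allowed inside a .cc file (but not a header) and would let the rest of the file drop the media:: prefix. A minimal standalone sketch of that pattern, with a stand-in namespace and type rather than the actual patch code:

```cpp
// Sketch only: this VideoFrame is a stand-in, not media::VideoFrame
// from this patch. It shows how a using declaration placed inside a
// .cc file lets later code drop the namespace qualification.
#include <iostream>

namespace media {
struct VideoFrame {
  enum Format { YV12 = 1 };
};
}  // namespace media

// Allowed in a .cc file: pull a single name out of the namespace.
using media::VideoFrame;

int main() {
  // Without the using declaration this would have to be written as
  // media::VideoFrame::YV12.
  std::cout << VideoFrame::YV12 << std::endl;
  return 0;
}
```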
| 35 FilterCallback* filter_callback, | 30 media::FilterCallback* filter_callback, |
| 36 StatisticsCallback* stat_callback) { | 31 media::StatisticsCallback* stat_callback) { |
| 37 if (MessageLoop::current() != message_loop_) { | 32 if (MessageLoop::current() != message_loop_) { |
| 38 message_loop_->PostTask( | 33 message_loop_->PostTask( |
| 39 FROM_HERE, | 34 FROM_HERE, |
| 40 NewRunnableMethod(this, | 35 NewRunnableMethod(this, |
| 41 &RTCVideoDecoder::Initialize, | 36 &RTCVideoDecoder::Initialize, |
| 42 make_scoped_refptr(demuxer_stream), | 37 make_scoped_refptr(demuxer_stream), |
| 43 filter_callback, stat_callback)); | 38 filter_callback, stat_callback)); |
| 44 return; | 39 return; |
| 45 } | 40 } |
| 46 | 41 |
| 47 DCHECK_EQ(MessageLoop::current(), message_loop_); | 42 DCHECK_EQ(MessageLoop::current(), message_loop_); |
| 48 | 43 |
| 49 lock_.Acquire(); | 44 lock_.Acquire(); |
| 50 frame_queue_available_.clear(); | 45 frame_queue_available_.clear(); |
| 51 lock_.Release(); | 46 lock_.Release(); |
| 52 media_format_.SetAsInteger(MediaFormat::kWidth, width_); | 47 media_format_.SetAsInteger(media::MediaFormat::kWidth, width_); |
| 53 media_format_.SetAsInteger(MediaFormat::kHeight, height_); | 48 media_format_.SetAsInteger(media::MediaFormat::kHeight, height_); |
| 54 media_format_.SetAsInteger(MediaFormat::kSurfaceType, | 49 media_format_.SetAsInteger(media::MediaFormat::kSurfaceType, |
| 55 static_cast<int>(VideoFrame::YV12)); | 50 static_cast<int>(media::VideoFrame::YV12)); |
| 56 media_format_.SetAsInteger(MediaFormat::kSurfaceFormat, | 51 media_format_.SetAsInteger(media::MediaFormat::kSurfaceFormat, |
| 57 static_cast<int>(VideoFrame::TYPE_SYSTEM_MEMORY)); | 52 static_cast<int>(media::VideoFrame::TYPE_SYSTEM_MEMORY)); |
| 58 | 53 |
| 59 state_ = kNormal; | 54 state_ = kNormal; |
| 60 | 55 |
| 61 filter_callback->Run(); | 56 filter_callback->Run(); |
| 62 delete filter_callback; | 57 delete filter_callback; |
| 63 | 58 |
| 64 // TODO(acolwell): Implement stats. | 59 // TODO(acolwell): Implement stats. |
| 65 delete stat_callback; | 60 delete stat_callback; |
| 66 } | 61 } |
| 67 | 62 |
| 68 void RTCVideoDecoder::Play(FilterCallback* callback) { | 63 void RTCVideoDecoder::Play(media::FilterCallback* callback) { |
| 69 if (MessageLoop::current() != message_loop_) { | 64 if (MessageLoop::current() != message_loop_) { |
| 70 message_loop_->PostTask(FROM_HERE, | 65 message_loop_->PostTask(FROM_HERE, |
| 71 NewRunnableMethod(this, | 66 NewRunnableMethod(this, |
| 72 &RTCVideoDecoder::Play, | 67 &RTCVideoDecoder::Play, |
| 73 callback)); | 68 callback)); |
| 74 return; | 69 return; |
| 75 } | 70 } |
| 76 | 71 |
| 77 DCHECK_EQ(MessageLoop::current(), message_loop_); | 72 DCHECK_EQ(MessageLoop::current(), message_loop_); |
| 78 | 73 |
| 79 VideoDecoder::Play(callback); | 74 media::VideoDecoder::Play(callback); |
| 80 } | 75 } |
| 81 | 76 |
| 82 void RTCVideoDecoder::Pause(FilterCallback* callback) { | 77 void RTCVideoDecoder::Pause(media::FilterCallback* callback) { |
| 83 if (MessageLoop::current() != message_loop_) { | 78 if (MessageLoop::current() != message_loop_) { |
| 84 message_loop_->PostTask(FROM_HERE, | 79 message_loop_->PostTask(FROM_HERE, |
| 85 NewRunnableMethod(this, | 80 NewRunnableMethod(this, |
| 86 &RTCVideoDecoder::Pause, | 81 &RTCVideoDecoder::Pause, |
| 87 callback)); | 82 callback)); |
| 88 return; | 83 return; |
| 89 } | 84 } |
| 90 | 85 |
| 91 DCHECK_EQ(MessageLoop::current(), message_loop_); | 86 DCHECK_EQ(MessageLoop::current(), message_loop_); |
| 92 | 87 |
| 93 state_ = kPaused; | 88 state_ = kPaused; |
| 94 | 89 |
| 95 VideoDecoder::Pause(callback); | 90 media::VideoDecoder::Pause(callback); |
| 96 } | 91 } |
| 97 | 92 |
| 98 void RTCVideoDecoder::Stop(FilterCallback* callback) { | 93 void RTCVideoDecoder::Stop(media::FilterCallback* callback) { |
| 99 if (MessageLoop::current() != message_loop_) { | 94 if (MessageLoop::current() != message_loop_) { |
| 100 message_loop_->PostTask(FROM_HERE, | 95 message_loop_->PostTask(FROM_HERE, |
| 101 NewRunnableMethod(this, | 96 NewRunnableMethod(this, |
| 102 &RTCVideoDecoder::Stop, | 97 &RTCVideoDecoder::Stop, |
| 103 callback)); | 98 callback)); |
| 104 return; | 99 return; |
| 105 } | 100 } |
| 106 | 101 |
| 107 DCHECK_EQ(MessageLoop::current(), message_loop_); | 102 DCHECK_EQ(MessageLoop::current(), message_loop_); |
| 108 | 103 |
| 109 state_ = kStopped; | 104 state_ = kStopped; |
| 110 | 105 |
| 111 VideoDecoder::Stop(callback); | 106 media::VideoDecoder::Stop(callback); |
| 112 | 107 |
| 113 // TODO(ronghuawu): Stop rtc | 108 // TODO(ronghuawu): Stop rtc |
| 114 } | 109 } |
| 115 | 110 |
| 116 void RTCVideoDecoder::Seek(base::TimeDelta time, const FilterStatusCB& cb) { | 111 void RTCVideoDecoder::Seek(base::TimeDelta time, |
| | 112 const media::FilterStatusCB& cb) { |
| 117 if (MessageLoop::current() != message_loop_) { | 113 if (MessageLoop::current() != message_loop_) { |
| 118 message_loop_->PostTask(FROM_HERE, | 114 message_loop_->PostTask(FROM_HERE, |
| 119 NewRunnableMethod(this, &RTCVideoDecoder::Seek, | 115 NewRunnableMethod(this, &RTCVideoDecoder::Seek, |
| 120 time, cb)); | 116 time, cb)); |
| 121 return; | 117 return; |
| 122 } | 118 } |
| 123 | 119 |
| 124 DCHECK_EQ(MessageLoop::current(), message_loop_); | 120 DCHECK_EQ(MessageLoop::current(), message_loop_); |
| 125 | 121 |
| 126 state_ = kSeeking; | 122 state_ = kSeeking; |
| 127 // Create output buffer pool and pass the frames to renderer | 123 // Create output buffer pool and pass the frames to renderer |
| 128 // so that the renderer can complete the seeking | 124 // so that the renderer can complete the seeking |
| 129 for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) { | 125 for (size_t i = 0; i < media::Limits::kMaxVideoFrames; ++i) { |
| 130 scoped_refptr<VideoFrame> video_frame; | 126 scoped_refptr<media::VideoFrame> video_frame; |
| 131 VideoFrame::CreateFrame(VideoFrame::YV12, | 127 media::VideoFrame::CreateFrame(media::VideoFrame::YV12, |
| 132 width_, | 128 width_, |
| 133 height_, | 129 height_, |
| 134 kNoTimestamp, | 130 media::kNoTimestamp, |
| 135 kNoTimestamp, | 131 media::kNoTimestamp, |
| 136 &video_frame); | 132 &video_frame); |
| 137 if (!video_frame.get()) { | 133 if (!video_frame.get()) { |
| 138 break; | 134 break; |
| 139 } | 135 } |
| 140 | 136 |
| 141 // Create black frame | 137 // Create black frame |
| 142 const uint8 kBlackY = 0x00; | 138 const uint8 kBlackY = 0x00; |
| 143 const uint8 kBlackUV = 0x80; | 139 const uint8 kBlackUV = 0x80; |
| 144 // Fill the Y plane. | 140 // Fill the Y plane. |
| 145 uint8* y_plane = video_frame->data(VideoFrame::kYPlane); | 141 uint8* y_plane = video_frame->data(media::VideoFrame::kYPlane); |
| 146 for (size_t i = 0; i < height_; ++i) { | 142 for (size_t i = 0; i < height_; ++i) { |
| 147 memset(y_plane, kBlackY, width_); | 143 memset(y_plane, kBlackY, width_); |
| 148 y_plane += video_frame->stride(VideoFrame::kYPlane); | 144 y_plane += video_frame->stride(media::VideoFrame::kYPlane); |
| 149 } | 145 } |
| 150 // Fill the U and V planes. | 146 // Fill the U and V planes. |
| 151 uint8* u_plane = video_frame->data(VideoFrame::kUPlane); | 147 uint8* u_plane = video_frame->data(media::VideoFrame::kUPlane); |
| 152 uint8* v_plane = video_frame->data(VideoFrame::kVPlane); | 148 uint8* v_plane = video_frame->data(media::VideoFrame::kVPlane); |
| 153 for (size_t i = 0; i < (height_ / 2); ++i) { | 149 for (size_t i = 0; i < (height_ / 2); ++i) { |
| 154 memset(u_plane, kBlackUV, width_ / 2); | 150 memset(u_plane, kBlackUV, width_ / 2); |
| 155 memset(v_plane, kBlackUV, width_ / 2); | 151 memset(v_plane, kBlackUV, width_ / 2); |
| 156 u_plane += video_frame->stride(VideoFrame::kUPlane); | 152 u_plane += video_frame->stride(media::VideoFrame::kUPlane); |
| 157 v_plane += video_frame->stride(VideoFrame::kVPlane); | 153 v_plane += video_frame->stride(media::VideoFrame::kVPlane); |
| 158 } | 154 } |
| 159 | 155 |
| 160 VideoFrameReady(video_frame); | 156 VideoFrameReady(video_frame); |
| 161 } | 157 } |
| 162 | 158 |
| 163 state_ = kNormal; | 159 state_ = kNormal; |
| 164 | 160 |
| 165 cb.Run(PIPELINE_OK); | 161 cb.Run(media::PIPELINE_OK); |
| 166 | 162 |
| 167 // TODO(ronghuawu): Start rtc | 163 // TODO(ronghuawu): Start rtc |
| 168 } | 164 } |
| 169 | 165 |
| 170 const MediaFormat& RTCVideoDecoder::media_format() { | 166 const media::MediaFormat& RTCVideoDecoder::media_format() { |
| 171 return media_format_; | 167 return media_format_; |
| 172 } | 168 } |
| 173 | 169 |
| 174 void RTCVideoDecoder::ProduceVideoFrame( | 170 void RTCVideoDecoder::ProduceVideoFrame( |
| 175 scoped_refptr<VideoFrame> video_frame) { | 171 scoped_refptr<media::VideoFrame> video_frame) { |
| 176 if (MessageLoop::current() != message_loop_) { | 172 if (MessageLoop::current() != message_loop_) { |
| 177 message_loop_->PostTask( | 173 message_loop_->PostTask( |
| 178 FROM_HERE, | 174 FROM_HERE, |
| 179 NewRunnableMethod(this, | 175 NewRunnableMethod(this, |
| 180 &RTCVideoDecoder::ProduceVideoFrame, video_frame)); | 176 &RTCVideoDecoder::ProduceVideoFrame, video_frame)); |
| 181 return; | 177 return; |
| 182 } | 178 } |
| 183 DCHECK_EQ(MessageLoop::current(), message_loop_); | 179 DCHECK_EQ(MessageLoop::current(), message_loop_); |
| 184 lock_.Acquire(); | 180 lock_.Acquire(); |
| 185 frame_queue_available_.push_back(video_frame); | 181 frame_queue_available_.push_back(video_frame); |
| 186 lock_.Release(); | 182 lock_.Release(); |
| 187 } | 183 } |
| 188 | 184 |
| 189 bool RTCVideoDecoder::ProvidesBuffer() { | 185 bool RTCVideoDecoder::ProvidesBuffer() { |
| 190 return true; | 186 return true; |
| 191 } | 187 } |
| 192 | 188 |
| 193 int RTCVideoDecoder::FrameSizeChange(unsigned int width, | 189 int RTCVideoDecoder::FrameSizeChange(unsigned int width, |
| 194 unsigned int height, | 190 unsigned int height, |
| 195 unsigned int number_of_streams) { | 191 unsigned int number_of_streams) { |
| 196 width_ = width; | 192 width_ = width; |
| 197 height_ = height; | 193 height_ = height; |
| 198 | 194 |
| 199 media_format_.SetAsInteger(MediaFormat::kWidth, width_); | 195 media_format_.SetAsInteger(media::MediaFormat::kWidth, width_); |
| 200 media_format_.SetAsInteger(MediaFormat::kHeight, height_); | 196 media_format_.SetAsInteger(media::MediaFormat::kHeight, height_); |
| 201 host()->SetVideoSize(width_, height_); | 197 host()->SetVideoSize(width_, height_); |
| 202 return 0; | 198 return 0; |
| 203 } | 199 } |
| 204 | 200 |
| 205 int RTCVideoDecoder::DeliverFrame(unsigned char* buffer, | 201 int RTCVideoDecoder::DeliverFrame(unsigned char* buffer, |
| 206 int buffer_size) { | 202 int buffer_size) { |
| 207 DCHECK(buffer); | 203 DCHECK(buffer); |
| 208 | 204 |
| 209 if (frame_queue_available_.size() == 0) | |
| 210 return 0; | |
| 211 | |
| 212 if (state_ != kNormal) | 205 if (state_ != kNormal) |
| 213 return 0; | 206 return 0; |
| 214 | 207 |
| 215 // This is called from another thread | 208 // This is called from another thread |
| 216 lock_.Acquire(); | 209 lock_.Acquire(); |
| 217 scoped_refptr<VideoFrame> video_frame = frame_queue_available_.front(); | 210 if (frame_queue_available_.size() == 0) { |
| | 211 lock_.Release(); |
| | 212 return 0; |
| | 213 } |
| | 214 scoped_refptr<media::VideoFrame> video_frame = |
| | 215 frame_queue_available_.front(); |
| 218 frame_queue_available_.pop_front(); | 216 frame_queue_available_.pop_front(); |
| 219 lock_.Release(); | 217 lock_.Release(); |
| 220 | 218 |
| 221 // Check if there's a size change | 219 // Check if there's a size change |
| 222 if (video_frame->width() != width_ || video_frame->height() != height_) { | 220 if (video_frame->width() != width_ || video_frame->height() != height_) { |
| 223 video_frame.release(); | 221 video_frame.release(); |
| 224 // Allocate new buffer based on the new size | 222 // Allocate new buffer based on the new size |
| 225 VideoFrame::CreateFrame(VideoFrame::YV12, | 223 media::VideoFrame::CreateFrame(media::VideoFrame::YV12, |
| 226 width_, | 224 width_, |
| 227 height_, | 225 height_, |
| 228 kNoTimestamp, | 226 media::kNoTimestamp, |
| 229 kNoTimestamp, | 227 media::kNoTimestamp, |
| 230 &video_frame); | 228 &video_frame); |
| 231 if (!video_frame.get()) { | 229 if (!video_frame.get()) { |
| 232 return -1; | 230 return -1; |
| 233 } | 231 } |
| 234 } | 232 } |
| 235 | 233 |
| 236 video_frame->SetTimestamp(host()->GetTime()); | 234 video_frame->SetTimestamp(host()->GetTime()); |
| 237 video_frame->SetDuration(base::TimeDelta::FromMilliseconds(30)); | 235 video_frame->SetDuration(base::TimeDelta::FromMilliseconds(30)); |
| 238 | 236 |
| 239 uint8* y_plane = video_frame->data(VideoFrame::kYPlane); | 237 uint8* y_plane = video_frame->data(media::VideoFrame::kYPlane); |
| 240 for (size_t row = 0; row < video_frame->height(); ++row) { | 238 for (size_t row = 0; row < video_frame->height(); ++row) { |
| 241 memcpy(y_plane, buffer, width_); | 239 memcpy(y_plane, buffer, width_); |
| 242 y_plane += video_frame->stride(VideoFrame::kYPlane); | 240 y_plane += video_frame->stride(media::VideoFrame::kYPlane); |
| 243 buffer += width_; | 241 buffer += width_; |
| 244 } | 242 } |
| 245 size_t uv_width = width_/2; | 243 size_t uv_width = width_/2; |
| 246 uint8* u_plane = video_frame->data(VideoFrame::kUPlane); | 244 uint8* u_plane = video_frame->data(media::VideoFrame::kUPlane); |
| 247 for (size_t row = 0; row < video_frame->height(); row += 2) { | 245 for (size_t row = 0; row < video_frame->height(); row += 2) { |
| 248 memcpy(u_plane, buffer, uv_width); | 246 memcpy(u_plane, buffer, uv_width); |
| 249 u_plane += video_frame->stride(VideoFrame::kUPlane); | 247 u_plane += video_frame->stride(media::VideoFrame::kUPlane); |
| 250 buffer += uv_width; | 248 buffer += uv_width; |
| 251 } | 249 } |
| 252 uint8* v_plane = video_frame->data(VideoFrame::kVPlane); | 250 uint8* v_plane = video_frame->data(media::VideoFrame::kVPlane); |
| 253 for (size_t row = 0; row < video_frame->height(); row += 2) { | 251 for (size_t row = 0; row < video_frame->height(); row += 2) { |
| 254 memcpy(v_plane, buffer, uv_width); | 252 memcpy(v_plane, buffer, uv_width); |
| 255 v_plane += video_frame->stride(VideoFrame::kVPlane); | 253 v_plane += video_frame->stride(media::VideoFrame::kVPlane); |
| 256 buffer += uv_width; | 254 buffer += uv_width; |
| 257 } | 255 } |
| 258 | 256 |
| 259 if (MessageLoop::current() != message_loop_) { | 257 if (MessageLoop::current() != message_loop_) { |
| 260 message_loop_->PostTask( | 258 message_loop_->PostTask( |
| 261 FROM_HERE, | 259 FROM_HERE, |
| 262 NewRunnableMethod(this, | 260 NewRunnableMethod(this, |
| 263 &RTCVideoDecoder::VideoFrameReady, | 261 &RTCVideoDecoder::VideoFrameReady, |
| 264 video_frame)); | 262 video_frame)); |
| 265 } else { | 263 } else { |
| 266 VideoFrameReady(video_frame); | 264 VideoFrameReady(video_frame); |
| 267 } | 265 } |
| 268 | 266 |
| 269 return 0; | 267 return 0; |
| 270 } | 268 } |
| 271 | |
| 272 bool RTCVideoDecoder::IsUrlSupported(const std::string& url) { | |
| 273 GURL gurl(url); | |
| 274 return gurl.SchemeIs(kMediaScheme); | |
| 275 } | |
| 276 | |
| 277 } // namespace media | |
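Besides the namespace qualification and the header move, the one behavioral change in this diff is in DeliverFrame(): the old code tested frame_queue_available_.size() before acquiring the lock, so another thread could drain the queue before front() ran; the new code performs the emptiness check and the pop under the same lock. A standalone sketch of that check-and-pop-under-one-lock pattern, using std::mutex, std::deque, and a placeholder Frame type as stand-ins for the Chromium lock, queue, and frame classes:

```cpp
// Sketch of the check-and-pop-under-one-lock pattern that the new
// DeliverFrame() uses. std::mutex and std::deque stand in for
// base::Lock and the frame queue; Frame is a placeholder type.
#include <deque>
#include <memory>
#include <mutex>

struct Frame { int id; };

class FrameQueue {
 public:
  void Push(std::shared_ptr<Frame> frame) {
    std::lock_guard<std::mutex> guard(lock_);
    frames_.push_back(std::move(frame));
  }

  // Returns nullptr when no frame is available. The emptiness test
  // and the pop happen under the same lock, so another thread cannot
  // empty the queue between the two steps (the race the old code
  // allowed by checking size() before Acquire()).
  std::shared_ptr<Frame> Pop() {
    std::lock_guard<std::mutex> guard(lock_);
    if (frames_.empty())
      return nullptr;
    std::shared_ptr<Frame> frame = frames_.front();
    frames_.pop_front();
    return frame;
  }

 private:
  std::mutex lock_;
  std::deque<std::shared_ptr<Frame>> frames_;
};
```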