Index: content/renderer/media/rtc_video_decoder.cc |
=================================================================== |
--- content/renderer/media/rtc_video_decoder.cc (revision 89290) |
+++ content/renderer/media/rtc_video_decoder.cc (working copy) |
@@ -2,12 +2,11 @@ |
// Use of this source code is governed by a BSD-style license that can be |
// found in the LICENSE file. |
-#include "media/filters/rtc_video_decoder.h" |
+#include "content/renderer/media/rtc_video_decoder.h" |
#include <deque> |
#include "base/task.h" |
-#include "googleurl/src/gurl.h" |
#include "media/base/callback.h" |
#include "media/base/filter_host.h" |
#include "media/base/filters.h" |
@@ -15,10 +14,6 @@ |
#include "media/base/media_format.h" |
#include "media/base/video_frame.h" |
-namespace media { |
- |
-static const char kMediaScheme[] = "media"; |
- |
RTCVideoDecoder::RTCVideoDecoder(MessageLoop* message_loop, |
const std::string& url) |
: message_loop_(message_loop), |
@@ -31,9 +26,9 @@ |
RTCVideoDecoder::~RTCVideoDecoder() { |
} |
-void RTCVideoDecoder::Initialize(DemuxerStream* demuxer_stream, |
- FilterCallback* filter_callback, |
- StatisticsCallback* stat_callback) { |
+void RTCVideoDecoder::Initialize(media::DemuxerStream* demuxer_stream, |
scherkus (not reviewing)
2011/06/24 21:33:39
If you want, inside .cc files you may use using-declarations, e.g.:
using media::VideoFrame;  [comment truncated in extraction — suggestion was to add `using` declarations for media:: names rather than qualifying each use]
Ronghua
2011/06/27 22:34:51
Done.
|
+ media::FilterCallback* filter_callback, |
+ media::StatisticsCallback* stat_callback) { |
if (MessageLoop::current() != message_loop_) { |
message_loop_->PostTask( |
FROM_HERE, |
@@ -49,12 +44,12 @@ |
lock_.Acquire(); |
frame_queue_available_.clear(); |
lock_.Release(); |
- media_format_.SetAsInteger(MediaFormat::kWidth, width_); |
- media_format_.SetAsInteger(MediaFormat::kHeight, height_); |
- media_format_.SetAsInteger(MediaFormat::kSurfaceType, |
- static_cast<int>(VideoFrame::YV12)); |
- media_format_.SetAsInteger(MediaFormat::kSurfaceFormat, |
- static_cast<int>(VideoFrame::TYPE_SYSTEM_MEMORY)); |
+ media_format_.SetAsInteger(media::MediaFormat::kWidth, width_); |
+ media_format_.SetAsInteger(media::MediaFormat::kHeight, height_); |
+ media_format_.SetAsInteger(media::MediaFormat::kSurfaceType, |
+ static_cast<int>(media::VideoFrame::YV12)); |
+ media_format_.SetAsInteger(media::MediaFormat::kSurfaceFormat, |
+ static_cast<int>(media::VideoFrame::TYPE_SYSTEM_MEMORY)); |
state_ = kNormal; |
@@ -65,7 +60,7 @@ |
delete stat_callback; |
} |
-void RTCVideoDecoder::Play(FilterCallback* callback) { |
+void RTCVideoDecoder::Play(media::FilterCallback* callback) { |
if (MessageLoop::current() != message_loop_) { |
message_loop_->PostTask(FROM_HERE, |
NewRunnableMethod(this, |
@@ -76,10 +71,10 @@ |
DCHECK_EQ(MessageLoop::current(), message_loop_); |
- VideoDecoder::Play(callback); |
+ media::VideoDecoder::Play(callback); |
} |
-void RTCVideoDecoder::Pause(FilterCallback* callback) { |
+void RTCVideoDecoder::Pause(media::FilterCallback* callback) { |
if (MessageLoop::current() != message_loop_) { |
message_loop_->PostTask(FROM_HERE, |
NewRunnableMethod(this, |
@@ -92,10 +87,10 @@ |
state_ = kPaused; |
- VideoDecoder::Pause(callback); |
+ media::VideoDecoder::Pause(callback); |
} |
-void RTCVideoDecoder::Stop(FilterCallback* callback) { |
+void RTCVideoDecoder::Stop(media::FilterCallback* callback) { |
if (MessageLoop::current() != message_loop_) { |
message_loop_->PostTask(FROM_HERE, |
NewRunnableMethod(this, |
@@ -108,12 +103,13 @@ |
state_ = kStopped; |
- VideoDecoder::Stop(callback); |
+ media::VideoDecoder::Stop(callback); |
// TODO(ronghuawu): Stop rtc |
} |
-void RTCVideoDecoder::Seek(base::TimeDelta time, const FilterStatusCB& cb) { |
+void RTCVideoDecoder::Seek(base::TimeDelta time, |
+ const media::FilterStatusCB& cb) { |
if (MessageLoop::current() != message_loop_) { |
message_loop_->PostTask(FROM_HERE, |
NewRunnableMethod(this, &RTCVideoDecoder::Seek, |
@@ -126,13 +122,13 @@ |
state_ = kSeeking; |
// Create output buffer pool and pass the frames to renderer |
// so that the renderer can complete the seeking |
- for (size_t i = 0; i < Limits::kMaxVideoFrames; ++i) { |
- scoped_refptr<VideoFrame> video_frame; |
- VideoFrame::CreateFrame(VideoFrame::YV12, |
+ for (size_t i = 0; i < media::Limits::kMaxVideoFrames; ++i) { |
+ scoped_refptr<media::VideoFrame> video_frame; |
+ media::VideoFrame::CreateFrame(media::VideoFrame::YV12, |
width_, |
height_, |
- kNoTimestamp, |
- kNoTimestamp, |
+ media::kNoTimestamp, |
+ media::kNoTimestamp, |
&video_frame); |
if (!video_frame.get()) { |
break; |
@@ -142,19 +138,19 @@ |
const uint8 kBlackY = 0x00; |
const uint8 kBlackUV = 0x80; |
// Fill the Y plane. |
- uint8* y_plane = video_frame->data(VideoFrame::kYPlane); |
+ uint8* y_plane = video_frame->data(media::VideoFrame::kYPlane); |
for (size_t i = 0; i < height_; ++i) { |
memset(y_plane, kBlackY, width_); |
- y_plane += video_frame->stride(VideoFrame::kYPlane); |
+ y_plane += video_frame->stride(media::VideoFrame::kYPlane); |
} |
// Fill the U and V planes. |
- uint8* u_plane = video_frame->data(VideoFrame::kUPlane); |
- uint8* v_plane = video_frame->data(VideoFrame::kVPlane); |
+ uint8* u_plane = video_frame->data(media::VideoFrame::kUPlane); |
+ uint8* v_plane = video_frame->data(media::VideoFrame::kVPlane); |
for (size_t i = 0; i < (height_ / 2); ++i) { |
memset(u_plane, kBlackUV, width_ / 2); |
memset(v_plane, kBlackUV, width_ / 2); |
- u_plane += video_frame->stride(VideoFrame::kUPlane); |
- v_plane += video_frame->stride(VideoFrame::kVPlane); |
+ u_plane += video_frame->stride(media::VideoFrame::kUPlane); |
+ v_plane += video_frame->stride(media::VideoFrame::kVPlane); |
} |
VideoFrameReady(video_frame); |
@@ -162,17 +158,17 @@ |
state_ = kNormal; |
- cb.Run(PIPELINE_OK); |
+ cb.Run(media::PIPELINE_OK); |
// TODO(ronghuawu): Start rtc |
} |
-const MediaFormat& RTCVideoDecoder::media_format() { |
+const media::MediaFormat& RTCVideoDecoder::media_format() { |
return media_format_; |
} |
void RTCVideoDecoder::ProduceVideoFrame( |
- scoped_refptr<VideoFrame> video_frame) { |
+ scoped_refptr<media::VideoFrame> video_frame) { |
if (MessageLoop::current() != message_loop_) { |
message_loop_->PostTask( |
FROM_HERE, |
@@ -196,8 +192,8 @@ |
width_ = width; |
height_ = height; |
- media_format_.SetAsInteger(MediaFormat::kWidth, width_); |
- media_format_.SetAsInteger(MediaFormat::kHeight, height_); |
+ media_format_.SetAsInteger(media::MediaFormat::kWidth, width_); |
+ media_format_.SetAsInteger(media::MediaFormat::kHeight, height_); |
host()->SetVideoSize(width_, height_); |
return 0; |
} |
@@ -206,15 +202,17 @@ |
int buffer_size) { |
DCHECK(buffer); |
- if (frame_queue_available_.size() == 0) |
- return 0; |
- |
if (state_ != kNormal) |
return 0; |
// This is called from another thread |
lock_.Acquire(); |
- scoped_refptr<VideoFrame> video_frame = frame_queue_available_.front(); |
+ if (frame_queue_available_.size() == 0) { |
+ lock_.Release(); |
+ return 0; |
+ } |
+ scoped_refptr<media::VideoFrame> video_frame = |
+ frame_queue_available_.front(); |
frame_queue_available_.pop_front(); |
lock_.Release(); |
@@ -222,11 +220,11 @@ |
if (video_frame->width() != width_ || video_frame->height() != height_) { |
video_frame.release(); |
// Allocate new buffer based on the new size |
- VideoFrame::CreateFrame(VideoFrame::YV12, |
+ media::VideoFrame::CreateFrame(media::VideoFrame::YV12, |
width_, |
height_, |
- kNoTimestamp, |
- kNoTimestamp, |
+ media::kNoTimestamp, |
+ media::kNoTimestamp, |
&video_frame); |
if (!video_frame.get()) { |
return -1; |
@@ -236,23 +234,23 @@ |
video_frame->SetTimestamp(host()->GetTime()); |
video_frame->SetDuration(base::TimeDelta::FromMilliseconds(30)); |
- uint8* y_plane = video_frame->data(VideoFrame::kYPlane); |
+ uint8* y_plane = video_frame->data(media::VideoFrame::kYPlane); |
for (size_t row = 0; row < video_frame->height(); ++row) { |
memcpy(y_plane, buffer, width_); |
- y_plane += video_frame->stride(VideoFrame::kYPlane); |
+ y_plane += video_frame->stride(media::VideoFrame::kYPlane); |
buffer += width_; |
} |
size_t uv_width = width_/2; |
- uint8* u_plane = video_frame->data(VideoFrame::kUPlane); |
+ uint8* u_plane = video_frame->data(media::VideoFrame::kUPlane); |
for (size_t row = 0; row < video_frame->height(); row += 2) { |
memcpy(u_plane, buffer, uv_width); |
- u_plane += video_frame->stride(VideoFrame::kUPlane); |
+ u_plane += video_frame->stride(media::VideoFrame::kUPlane); |
buffer += uv_width; |
} |
- uint8* v_plane = video_frame->data(VideoFrame::kVPlane); |
+ uint8* v_plane = video_frame->data(media::VideoFrame::kVPlane); |
for (size_t row = 0; row < video_frame->height(); row += 2) { |
memcpy(v_plane, buffer, uv_width); |
- v_plane += video_frame->stride(VideoFrame::kVPlane); |
+ v_plane += video_frame->stride(media::VideoFrame::kVPlane); |
buffer += uv_width; |
} |
@@ -268,10 +266,3 @@ |
return 0; |
} |
- |
-bool RTCVideoDecoder::IsUrlSupported(const std::string& url) { |
- GURL gurl(url); |
- return gurl.SchemeIs(kMediaScheme); |
-} |
- |
-} // namespace media |