| Index: content/renderer/media/media_stream_video_track.cc
|
| diff --git a/content/renderer/media/media_stream_video_track.cc b/content/renderer/media/media_stream_video_track.cc
|
| index 70383236bac5f23d6ba73890fca640c6aec05185..80d202b08463cab68bd672af966fe3d2e3f06a62 100644
|
| --- a/content/renderer/media/media_stream_video_track.cc
|
| +++ b/content/renderer/media/media_stream_video_track.cc
|
| @@ -5,55 +5,80 @@
|
| #include "content/renderer/media/media_stream_video_track.h"
|
|
|
| #include "content/renderer/media/media_stream_dependency_factory.h"
|
| -#include "content/renderer/media/media_stream_video_source.h"
|
| #include "content/renderer/media/webrtc/webrtc_video_sink_adapter.h"
|
|
|
| namespace content {
|
|
|
| -// Wrapper which allows to use std::find_if() when adding and removing
|
| -// sinks to/from |sinks_|.
|
| -struct SinkWrapper {
|
| - explicit SinkWrapper(MediaStreamVideoSink* sink) : sink_(sink) {}
|
| - bool operator()(
|
| - const WebRtcVideoSinkAdapter* owner) {
|
| - return owner->sink() == sink_;
|
| - }
|
| - MediaStreamVideoSink* sink_;
|
| -};
|
| +// Empty method used for keeping a reference to the original media::VideoFrame
|
| +// in MediaStreamVideoTrack::OnVideoFrame if a color conversion between I420 and
|
| +// YV12 is needed.
|
| +static void ReleaseOriginalFrame(
|
| + const scoped_refptr<media::VideoFrame>& frame) {
|
| +}
|
|
|
| -MediaStreamVideoTrack::MediaStreamVideoTrack(
|
| - webrtc::VideoTrackInterface* track)
|
| - : MediaStreamTrack(track, false),
|
| - factory_(NULL) {
|
| +// static
|
| +blink::WebMediaStreamTrack MediaStreamVideoTrack::CreateVideoTrack(
|
| + MediaStreamVideoSource* source,
|
| + const blink::WebMediaConstraints& constraints,
|
| + const MediaStreamVideoSource::ConstraintsCallback& callback,
|
| + bool enabled,
|
| + MediaStreamDependencyFactory* factory) {
|
| + blink::WebMediaStreamTrack track;
|
| + track.initialize(source->owner());
|
| + track.setExtraData(new MediaStreamVideoTrack(source,
|
| + constraints,
|
| + callback,
|
| + enabled,
|
| + factory));
|
| + return track;
|
| +}
|
| +
|
| +// static
|
| +MediaStreamVideoTrack* MediaStreamVideoTrack::GetVideoTrack(
|
| + const blink::WebMediaStreamTrack& track) {
|
| + return static_cast<MediaStreamVideoTrack*>(track.extraData());
|
| }
|
|
|
| MediaStreamVideoTrack::MediaStreamVideoTrack(
|
| + MediaStreamVideoSource* source,
|
| + const blink::WebMediaConstraints& constraints,
|
| + const MediaStreamVideoSource::ConstraintsCallback& callback,
|
| + bool enabled,
|
| MediaStreamDependencyFactory* factory)
|
| : MediaStreamTrack(NULL, true),
|
| + enabled_(enabled),
|
| + source_(source),
|
| factory_(factory) {
|
| - DCHECK(factory_);
|
| + // TODO(perkj): source can be NULL if this is actually a remote video track.
|
| + // Remove as soon as we only have one implementation of video tracks.
|
| + if (source)
|
| + source->AddTrack(this, constraints, callback);
|
| }
|
|
|
| MediaStreamVideoTrack::~MediaStreamVideoTrack() {
|
| DCHECK(sinks_.empty());
|
| + // TODO(perkj): source can be NULL if this is actually a remote video track.
|
| + // Remove as soon as we only have one implementation of video tracks.
|
| + if (source_)
|
| + source_->RemoveTrack(this);
|
| }
|
|
|
| void MediaStreamVideoTrack::AddSink(MediaStreamVideoSink* sink) {
|
| DCHECK(thread_checker_.CalledOnValidThread());
|
| - DCHECK(std::find_if(sinks_.begin(), sinks_.end(),
|
| - SinkWrapper(sink)) == sinks_.end());
|
| - sinks_.push_back(new WebRtcVideoSinkAdapter(GetVideoAdapter(), sink));
|
| + DCHECK(std::find(sinks_.begin(), sinks_.end(), sink) == sinks_.end());
|
| + sinks_.push_back(sink);
|
| }
|
|
|
| void MediaStreamVideoTrack::RemoveSink(MediaStreamVideoSink* sink) {
|
| DCHECK(thread_checker_.CalledOnValidThread());
|
| - ScopedVector<WebRtcVideoSinkAdapter>::iterator it =
|
| - std::find_if(sinks_.begin(), sinks_.end(), SinkWrapper(sink));
|
| + std::vector<MediaStreamVideoSink*>::iterator it =
|
| + std::find(sinks_.begin(), sinks_.end(), sink);
|
| DCHECK(it != sinks_.end());
|
| sinks_.erase(it);
|
| }
|
|
|
| webrtc::VideoTrackInterface* MediaStreamVideoTrack::GetVideoAdapter() {
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| DCHECK_EQ(owner().source().type(), blink::WebMediaStreamSource::TypeVideo);
|
| if (!track_.get()) {
|
| MediaStreamVideoSource* source =
|
| @@ -67,4 +92,95 @@ webrtc::VideoTrackInterface* MediaStreamVideoTrack::GetVideoAdapter() {
|
| return static_cast<webrtc::VideoTrackInterface*>(track_.get());
|
| }
|
|
|
| +void MediaStreamVideoTrack::SetEnabled(bool enabled) {
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + enabled_ = enabled;
|
| + MediaStreamTrack::SetEnabled(enabled);
|
| + for (std::vector<MediaStreamVideoSink*>::iterator it = sinks_.begin();
|
| + it != sinks_.end(); ++it) {
|
| + (*it)->OnEnabledChanged(enabled);
|
| + }
|
| +}
|
| +
|
| +void MediaStreamVideoTrack::OnVideoFrame(
|
| + const scoped_refptr<media::VideoFrame>& frame) {
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + if (!enabled_)
|
| + return;
|
| +
|
| + scoped_refptr<media::VideoFrame> video_frame = frame;
|
| + if (frame->format() == media::VideoFrame::I420) {
|
| + // Rendering does not support I420, but video capture uses I420.
|
| + // The only difference between YV12 and I420 is the order of U and V plane.
|
| + // To solve this, the I420 frame is simply wrapped in a YV12 video frame.
|
| + // crbug/341452.
|
| + video_frame = media::VideoFrame::WrapExternalYuvData(
|
| + media::VideoFrame::YV12,
|
| + frame->coded_size(),
|
| + frame->visible_rect(),
|
| + frame->natural_size(),
|
| + frame->stride(media::VideoFrame::kYPlane),
|
| + frame->stride(media::VideoFrame::kUPlane),
|
| + frame->stride(media::VideoFrame::kVPlane),
|
| + frame->data(media::VideoFrame::kYPlane),
|
| + frame->data(media::VideoFrame::kUPlane),
|
| + frame->data(media::VideoFrame::kVPlane),
|
| + frame->GetTimestamp(),
|
| + base::Bind(&ReleaseOriginalFrame, frame));
|
| + }
|
| +
|
| + for (std::vector<MediaStreamVideoSink*>::iterator it = sinks_.begin();
|
| + it != sinks_.end(); ++it) {
|
| + (*it)->OnVideoFrame(video_frame);
|
| + }
|
| +}
|
| +
|
| +void MediaStreamVideoTrack::OnReadyStateChanged(
|
| + blink::WebMediaStreamSource::ReadyState state) {
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + for (std::vector<MediaStreamVideoSink*>::iterator it = sinks_.begin();
|
| + it != sinks_.end(); ++it) {
|
| + (*it)->OnReadyStateChanged(state);
|
| + }
|
| +}
|
| +
|
| +// Wrapper that makes it possible to use std::find_if() when adding and removing
|
| +// sinks to/from |sinks_|.
|
| +struct SinkWrapper {
|
| + explicit SinkWrapper(MediaStreamVideoSink* sink) : sink_(sink) {}
|
| + bool operator()(
|
| + const WebRtcVideoSinkAdapter* owner) {
|
| + return owner->sink() == sink_;
|
| + }
|
| + MediaStreamVideoSink* sink_;
|
| +};
|
| +
|
| +WebRtcMediaStreamVideoTrack::WebRtcMediaStreamVideoTrack(
|
| + webrtc::VideoTrackInterface* track)
|
| + : MediaStreamVideoTrack(NULL,
|
| + blink::WebMediaConstraints(),
|
| + MediaStreamVideoSource::ConstraintsCallback(),
|
| + track->enabled(),
|
| + NULL) {
|
| + track_ = track;
|
| +}
|
| +
|
| +WebRtcMediaStreamVideoTrack::~WebRtcMediaStreamVideoTrack() {
|
| +}
|
| +
|
| +void WebRtcMediaStreamVideoTrack::AddSink(MediaStreamVideoSink* sink) {
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + DCHECK(std::find_if(sinks_.begin(), sinks_.end(),
|
| + SinkWrapper(sink)) == sinks_.end());
|
| + sinks_.push_back(new WebRtcVideoSinkAdapter(GetVideoAdapter(), sink));
|
| +}
|
| +
|
| +void WebRtcMediaStreamVideoTrack::RemoveSink(MediaStreamVideoSink* sink) {
|
| + DCHECK(thread_checker_.CalledOnValidThread());
|
| + ScopedVector<WebRtcVideoSinkAdapter>::iterator it =
|
| + std::find_if(sinks_.begin(), sinks_.end(), SinkWrapper(sink));
|
| + DCHECK(it != sinks_.end());
|
| + sinks_.erase(it);
|
| +}
|
| +
|
| } // namespace content
|
|
|