Chromium Code Reviews| Index: content/renderer/media/media_stream_dependency_factory.cc |
| diff --git a/content/renderer/media/media_stream_dependency_factory.cc b/content/renderer/media/media_stream_dependency_factory.cc |
| index 31e88140f1cb74645126515b0ba9afdc872c6401..0e84d67fefc88a0f3247b7a36c2865f1332f93c0 100644 |
| --- a/content/renderer/media/media_stream_dependency_factory.cc |
| +++ b/content/renderer/media/media_stream_dependency_factory.cc |
| @@ -12,8 +12,9 @@ |
| #include "content/common/media/media_stream_messages.h" |
| #include "content/public/common/content_switches.h" |
| #include "content/renderer/media/media_stream_audio_processor_options.h" |
| -#include "content/renderer/media/media_stream_source_extra_data.h" |
| +#include "content/renderer/media/media_stream_audio_source.h" |
| #include "content/renderer/media/media_stream_track_extra_data.h" |
| +#include "content/renderer/media/media_stream_video_source.h" |
| #include "content/renderer/media/media_stream_video_track.h" |
| #include "content/renderer/media/peer_connection_identity_service.h" |
| #include "content/renderer/media/rtc_media_constraints.h" |
| @@ -123,82 +124,6 @@ class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface { |
| blink::WebFrame* web_frame_; |
| }; |
| -// SourceStateObserver is a help class used for observing the startup state |
| -// transition of webrtc media sources such as a camera or microphone. |
| -// An instance of the object deletes itself after use. |
| -// Usage: |
| -// 1. Create an instance of the object with the blink::WebMediaStream |
| -// the observed sources belongs to a callback. |
| -// 2. Add the sources to the observer using AddSource. |
| -// 3. Call StartObserving() |
| -// 4. The callback will be triggered when all sources have transitioned from |
| -// webrtc::MediaSourceInterface::kInitializing. |
| -class SourceStateObserver : public webrtc::ObserverInterface, |
| - public base::NonThreadSafe { |
| - public: |
| - SourceStateObserver( |
| - blink::WebMediaStream* web_stream, |
| - const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback) |
| - : web_stream_(web_stream), |
| - ready_callback_(callback), |
| - live_(true) { |
| - } |
| - |
| - void AddSource(webrtc::MediaSourceInterface* source) { |
| - DCHECK(CalledOnValidThread()); |
| - switch (source->state()) { |
| - case webrtc::MediaSourceInterface::kInitializing: |
| - sources_.push_back(source); |
| - source->RegisterObserver(this); |
| - break; |
| - case webrtc::MediaSourceInterface::kLive: |
| - // The source is already live so we don't need to wait for it. |
| - break; |
| - case webrtc::MediaSourceInterface::kEnded: |
| - // The source have already failed. |
| - live_ = false; |
| - break; |
| - default: |
| - NOTREACHED(); |
| - } |
| - } |
| - |
| - void StartObservering() { |
| - DCHECK(CalledOnValidThread()); |
| - CheckIfSourcesAreLive(); |
| - } |
| - |
| - virtual void OnChanged() OVERRIDE { |
| - DCHECK(CalledOnValidThread()); |
| - CheckIfSourcesAreLive(); |
| - } |
| - |
| - private: |
| - void CheckIfSourcesAreLive() { |
| - ObservedSources::iterator it = sources_.begin(); |
| - while (it != sources_.end()) { |
| - if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) { |
| - live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive; |
| - (*it)->UnregisterObserver(this); |
| - it = sources_.erase(it); |
| - } else { |
| - ++it; |
| - } |
| - } |
| - if (sources_.empty()) { |
| - ready_callback_.Run(web_stream_, live_); |
| - delete this; |
| - } |
| - } |
| - |
| - blink::WebMediaStream* web_stream_; |
| - MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_; |
| - bool live_; |
| - typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> > |
| - ObservedSources; |
| - ObservedSources sources_; |
| -}; |
| - |
| MediaStreamDependencyFactory::MediaStreamDependencyFactory( |
| P2PSocketDispatcher* p2p_socket_dispatcher) |
| : network_manager_(NULL), |
| @@ -232,58 +157,23 @@ MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler( |
| return new RTCPeerConnectionHandler(client, this); |
| } |
| -void MediaStreamDependencyFactory::CreateNativeMediaSources( |
| +bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource( |
|
no longer working on chromium
2014/01/17 14:51:51
Curiously, is this method going to initialize a ve
perkj_chrome
2014/01/19 15:52:39
I have left this pretty much untouched and hope yo
|
| int render_view_id, |
| const blink::WebMediaConstraints& audio_constraints, |
| - const blink::WebMediaConstraints& video_constraints, |
| - blink::WebMediaStream* web_stream, |
| - const MediaSourcesCreatedCallback& sources_created) { |
| - DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()"; |
| + const blink::WebVector<blink::WebMediaStreamSource>& audio_sources) { |
| + DVLOG(1) << "InitializeMediaStreamAudioSource()"; |
|
no longer working on chromium
2014/01/17 14:51:51
is it CreateNativeMediaStreamAudioSources or Initi
perkj_chrome
2014/01/19 15:52:39
Done.
|
| if (!EnsurePeerConnectionFactory()) { |
| - sources_created.Run(web_stream, false); |
| - return; |
| - } |
| - |
| - // |source_observer| clean up itself when it has completed |
| - // source_observer->StartObservering. |
| - SourceStateObserver* source_observer = |
| - new SourceStateObserver(web_stream, sources_created); |
| - |
| - // Create local video sources. |
| - RTCMediaConstraints native_video_constraints(video_constraints); |
| - blink::WebVector<blink::WebMediaStreamTrack> video_tracks; |
| - web_stream->videoTracks(video_tracks); |
| - for (size_t i = 0; i < video_tracks.size(); ++i) { |
| - const blink::WebMediaStreamSource& source = video_tracks[i].source(); |
| - MediaStreamSourceExtraData* source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| - |
| - // Check if the source has already been created. This happens when the same |
| - // source is used in multiple MediaStreams as a result of calling |
| - // getUserMedia. |
| - if (source_data->video_source()) |
| - continue; |
| - |
| - const bool is_screencast = |
| - source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE || |
| - source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; |
| - source_data->SetVideoSource( |
| - CreateLocalVideoSource(source_data->device_info().session_id, |
| - is_screencast, |
| - &native_video_constraints).get()); |
| - source_observer->AddSource(source_data->video_source()); |
| + return false; |
| } |
| // Do additional source initialization if the audio source is a valid |
| // microphone or tab audio. |
| RTCMediaConstraints native_audio_constraints(audio_constraints); |
| ApplyFixedAudioConstraints(&native_audio_constraints); |
| - blink::WebVector<blink::WebMediaStreamTrack> audio_tracks; |
| - web_stream->audioTracks(audio_tracks); |
| - for (size_t i = 0; i < audio_tracks.size(); ++i) { |
| - const blink::WebMediaStreamSource& source = audio_tracks[i].source(); |
| - MediaStreamSourceExtraData* source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| + for (size_t i = 0; i < audio_sources.size(); ++i) { |
| + const blink::WebMediaStreamSource& source = audio_sources[i]; |
| + MediaStreamAudioSource* source_data = |
| + static_cast<MediaStreamAudioSource*>(source.extraData()); |
| // Check if the source has already been created. This happens when the same |
| // source is used in multiple MediaStreams as a result of calling |
| @@ -329,12 +219,11 @@ void MediaStreamDependencyFactory::CreateNativeMediaSources( |
| if (!capturer.get()) { |
| DLOG(WARNING) << "Failed to create the capturer for device " |
| << device_info.device.id; |
| - sources_created.Run(web_stream, false); |
| // TODO(xians): Don't we need to check if source_observer is observing |
| // something? If not, then it looks like we have a leak here. |
| // OTOH, if it _is_ observing something, then the callback might |
| // be called multiple times which is likely also a bug. |
| - return; |
| + return false; |
| } |
| source_data->SetAudioCapturer(capturer); |
| @@ -342,10 +231,16 @@ void MediaStreamDependencyFactory::CreateNativeMediaSources( |
| // TODO(xians): The option should apply to the track instead of the source. |
| source_data->SetLocalAudioSource( |
| CreateLocalAudioSource(&constraints).get()); |
| - source_observer->AddSource(source_data->local_audio_source()); |
| } |
| + return true; |
| +} |
| - source_observer->StartObservering(); |
| +cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer( |
| + const StreamDeviceInfo& info) { |
| + bool is_screencast = |
| + info.device.type == MEDIA_TAB_VIDEO_CAPTURE || |
| + info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; |
| + return new RtcVideoCapturer(info.session_id, is_screencast); |
| } |
| void MediaStreamDependencyFactory::CreateNativeLocalMediaStream( |
| @@ -393,8 +288,8 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( |
| const blink::WebMediaStreamTrack& track) { |
| blink::WebMediaStreamSource source = track.source(); |
| DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio); |
| - MediaStreamSourceExtraData* source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| + MediaStreamAudioSource* source_data = |
| + static_cast<MediaStreamAudioSource*>(source.extraData()); |
| // In the future the constraints will belong to the track itself, but |
| // right now they're on the source, so we fetch them from there. |
| @@ -411,7 +306,7 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( |
| // Create a specific capturer for each WebAudio consumer. |
| webaudio_source = CreateWebAudioSource(&source, track_constraints); |
| source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| + static_cast<MediaStreamAudioSource*>(source.extraData()); |
| } else { |
| // TODO(perkj): Implement support for sources from |
| // remote MediaStreams. |
| @@ -445,10 +340,12 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack( |
| scoped_refptr<webrtc::VideoTrackInterface> |
| MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( |
| const blink::WebMediaStreamTrack& track) { |
| + DCHECK(track.extraData() == NULL); |
| blink::WebMediaStreamSource source = track.source(); |
| DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo); |
| - MediaStreamSourceExtraData* source_data = |
| - static_cast<MediaStreamSourceExtraData*>(source.extraData()); |
| + |
| + MediaStreamVideoSource* source_data = |
| + static_cast<MediaStreamVideoSource*>(source.extraData()); |
| if (!source_data) { |
| // TODO(perkj): Implement support for sources from |
| @@ -457,14 +354,15 @@ MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack( |
| return NULL; |
| } |
| - std::string track_id = base::UTF16ToUTF8(track.id()); |
| - scoped_refptr<webrtc::VideoTrackInterface> video_track( |
| - CreateLocalVideoTrack(track_id, source_data->video_source())); |
| - AddNativeTrackToBlinkTrack(video_track.get(), track, true); |
| + // Create native track from the source. |
| + scoped_refptr<webrtc::VideoTrackInterface> webrtc_track = |
| + CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter()); |
| + |
| + AddNativeTrackToBlinkTrack(webrtc_track, track, true); |
| - video_track->set_enabled(track.isEnabled()); |
| + webrtc_track->set_enabled(track.isEnabled()); |
| - return video_track; |
| + return webrtc_track; |
| } |
| void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack( |
| @@ -575,6 +473,9 @@ scoped_refptr<webrtc::VideoSourceInterface> |
| MediaStreamDependencyFactory::CreateVideoSource( |
| cricket::VideoCapturer* capturer, |
| const webrtc::MediaConstraintsInterface* constraints) { |
| + if (!EnsurePeerConnectionFactory()) { |
| + return NULL; |
| + } |
| scoped_refptr<webrtc::VideoSourceInterface> source = |
| pc_factory_->CreateVideoSource(capturer, constraints).get(); |
| return source; |
| @@ -690,20 +591,6 @@ MediaStreamDependencyFactory::CreateLocalAudioSource( |
| return source; |
| } |
| -scoped_refptr<webrtc::VideoSourceInterface> |
| -MediaStreamDependencyFactory::CreateLocalVideoSource( |
| - int video_session_id, |
| - bool is_screencast, |
| - const webrtc::MediaConstraintsInterface* constraints) { |
| - RtcVideoCapturer* capturer = new RtcVideoCapturer( |
| - video_session_id, is_screencast); |
| - |
| - // The video source takes ownership of |capturer|. |
| - scoped_refptr<webrtc::VideoSourceInterface> source = |
| - CreateVideoSource(capturer, constraints); |
| - return source; |
| -} |
| - |
| scoped_refptr<WebAudioCapturerSource> |
| MediaStreamDependencyFactory::CreateWebAudioSource( |
| blink::WebMediaStreamSource* source, |
| @@ -713,7 +600,7 @@ MediaStreamDependencyFactory::CreateWebAudioSource( |
| scoped_refptr<WebAudioCapturerSource> |
| webaudio_capturer_source(new WebAudioCapturerSource()); |
| - MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData(); |
| + MediaStreamAudioSource* source_data = new MediaStreamAudioSource(); |
| // Create a LocalAudioSource object which holds audio options. |
| // SetLocalAudioSource() affects core audio parts in third_party/Libjingle. |