Index: content/renderer/media/media_stream_dependency_factory.cc
diff --git a/content/renderer/media/media_stream_dependency_factory.cc b/content/renderer/media/media_stream_dependency_factory.cc
index 31e88140f1cb74645126515b0ba9afdc872c6401..5f20442ebcf8b2acdfd92abf6a6c2ed6f901a3a5 100644
--- a/content/renderer/media/media_stream_dependency_factory.cc
+++ b/content/renderer/media/media_stream_dependency_factory.cc
@@ -12,8 +12,9 @@
 #include "content/common/media/media_stream_messages.h"
 #include "content/public/common/content_switches.h"
 #include "content/renderer/media/media_stream_audio_processor_options.h"
-#include "content/renderer/media/media_stream_source_extra_data.h"
+#include "content/renderer/media/media_stream_audio_source.h"
 #include "content/renderer/media/media_stream_track_extra_data.h"
+#include "content/renderer/media/media_stream_video_source.h"
 #include "content/renderer/media/media_stream_video_track.h"
 #include "content/renderer/media/peer_connection_identity_service.h"
 #include "content/renderer/media/rtc_media_constraints.h"
@@ -123,82 +124,6 @@ class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
   blink::WebFrame* web_frame_;
 };
 
-// SourceStateObserver is a help class used for observing the startup state
-// transition of webrtc media sources such as a camera or microphone.
-// An instance of the object deletes itself after use.
-// Usage:
-// 1. Create an instance of the object with the blink::WebMediaStream
-//    the observed sources belongs to a callback.
-// 2. Add the sources to the observer using AddSource.
-// 3. Call StartObserving()
-// 4. The callback will be triggered when all sources have transitioned from
-//    webrtc::MediaSourceInterface::kInitializing.
-class SourceStateObserver : public webrtc::ObserverInterface,
-                            public base::NonThreadSafe {
- public:
-  SourceStateObserver(
-      blink::WebMediaStream* web_stream,
-      const MediaStreamDependencyFactory::MediaSourcesCreatedCallback& callback)
-      : web_stream_(web_stream),
-        ready_callback_(callback),
-        live_(true) {
-  }
-
-  void AddSource(webrtc::MediaSourceInterface* source) {
-    DCHECK(CalledOnValidThread());
-    switch (source->state()) {
-      case webrtc::MediaSourceInterface::kInitializing:
-        sources_.push_back(source);
-        source->RegisterObserver(this);
-        break;
-      case webrtc::MediaSourceInterface::kLive:
-        // The source is already live so we don't need to wait for it.
-        break;
-      case webrtc::MediaSourceInterface::kEnded:
-        // The source have already failed.
-        live_ = false;
-        break;
-      default:
-        NOTREACHED();
-    }
-  }
-
-  void StartObservering() {
-    DCHECK(CalledOnValidThread());
-    CheckIfSourcesAreLive();
-  }
-
-  virtual void OnChanged() OVERRIDE {
-    DCHECK(CalledOnValidThread());
-    CheckIfSourcesAreLive();
-  }
-
- private:
-  void CheckIfSourcesAreLive() {
-    ObservedSources::iterator it = sources_.begin();
-    while (it != sources_.end()) {
-      if ((*it)->state() != webrtc::MediaSourceInterface::kInitializing) {
-        live_ &= (*it)->state() == webrtc::MediaSourceInterface::kLive;
-        (*it)->UnregisterObserver(this);
-        it = sources_.erase(it);
-      } else {
-        ++it;
-      }
-    }
-    if (sources_.empty()) {
-      ready_callback_.Run(web_stream_, live_);
-      delete this;
-    }
-  }
-
-  blink::WebMediaStream* web_stream_;
-  MediaStreamDependencyFactory::MediaSourcesCreatedCallback ready_callback_;
-  bool live_;
-  typedef std::vector<scoped_refptr<webrtc::MediaSourceInterface> >
-      ObservedSources;
-  ObservedSources sources_;
-};
-
 MediaStreamDependencyFactory::MediaStreamDependencyFactory(
     P2PSocketDispatcher* p2p_socket_dispatcher)
     : network_manager_(NULL),
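The deleted SourceStateObserver was a self-deleting helper that waited for
every webrtc source to leave webrtc::MediaSourceInterface::kInitializing and
then fired a single MediaSourcesCreatedCallback. With it gone, audio-source
setup in the next hunk reports success synchronously through a bool return
value. A minimal caller sketch under that assumption (the OnSourceReady()
hook is hypothetical, not part of this patch):

    // Sketch only: failure is now reported immediately, with no observer.
    const bool ok = dependency_factory->InitializeMediaStreamAudioSource(
        render_view_id, audio_constraints, audio_source);
    OnSourceReady(audio_source, ok);  // Hypothetical caller hook.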
@@ -232,120 +157,78 @@ MediaStreamDependencyFactory::CreateRTCPeerConnectionHandler(
   return new RTCPeerConnectionHandler(client, this);
 }
 
-void MediaStreamDependencyFactory::CreateNativeMediaSources(
+bool MediaStreamDependencyFactory::InitializeMediaStreamAudioSource(
     int render_view_id,
     const blink::WebMediaConstraints& audio_constraints,
-    const blink::WebMediaConstraints& video_constraints,
-    blink::WebMediaStream* web_stream,
-    const MediaSourcesCreatedCallback& sources_created) {
-  DVLOG(1) << "MediaStreamDependencyFactory::CreateNativeMediaSources()";
-  if (!EnsurePeerConnectionFactory()) {
-    sources_created.Run(web_stream, false);
-    return;
-  }
-
-  // |source_observer| clean up itself when it has completed
-  // source_observer->StartObservering.
-  SourceStateObserver* source_observer =
-      new SourceStateObserver(web_stream, sources_created);
-
-  // Create local video sources.
-  RTCMediaConstraints native_video_constraints(video_constraints);
-  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
-  web_stream->videoTracks(video_tracks);
-  for (size_t i = 0; i < video_tracks.size(); ++i) {
-    const blink::WebMediaStreamSource& source = video_tracks[i].source();
-    MediaStreamSourceExtraData* source_data =
-        static_cast<MediaStreamSourceExtraData*>(source.extraData());
-
-    // Check if the source has already been created. This happens when the same
-    // source is used in multiple MediaStreams as a result of calling
-    // getUserMedia.
-    if (source_data->video_source())
-      continue;
-
-    const bool is_screencast =
-        source_data->device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE ||
-        source_data->device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
-    source_data->SetVideoSource(
-        CreateLocalVideoSource(source_data->device_info().session_id,
-                               is_screencast,
-                               &native_video_constraints).get());
-    source_observer->AddSource(source_data->video_source());
-  }
+    const blink::WebMediaStreamSource& audio_source) {
+  DVLOG(1) << "InitializeMediaStreamAudioSource()";
 
   // Do additional source initialization if the audio source is a valid
   // microphone or tab audio.
   RTCMediaConstraints native_audio_constraints(audio_constraints);
   ApplyFixedAudioConstraints(&native_audio_constraints);
-  blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
-  web_stream->audioTracks(audio_tracks);
-  for (size_t i = 0; i < audio_tracks.size(); ++i) {
-    const blink::WebMediaStreamSource& source = audio_tracks[i].source();
-    MediaStreamSourceExtraData* source_data =
-        static_cast<MediaStreamSourceExtraData*>(source.extraData());
-
-    // Check if the source has already been created. This happens when the same
-    // source is used in multiple MediaStreams as a result of calling
-    // getUserMedia.
-    if (source_data->local_audio_source())
-      continue;
-
-    // TODO(xians): Create a new capturer for difference microphones when we
-    // support multiple microphones. See issue crbug/262117 .
-    StreamDeviceInfo device_info = source_data->device_info();
-    RTCMediaConstraints constraints = native_audio_constraints;
-
-    // If any platform effects are available, check them against the
-    // constraints. Disable effects to match false constraints, but if a
-    // constraint is true, set the constraint to false to later disable the
-    // software effect.
-    int effects = device_info.device.input.effects;
-    if (effects != media::AudioParameters::NO_EFFECTS) {
-      for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
-        bool value;
-        if (!webrtc::FindConstraint(&constraints,
-            kConstraintEffectMap[i].constraint, &value, NULL) || !value) {
-          // If the constraint is false, or does not exist, disable the platform
-          // effect.
-          effects &= ~kConstraintEffectMap[i].effect;
-          DVLOG(1) << "Disabling constraint: "
-                   << kConstraintEffectMap[i].constraint;
-        } else if (effects & kConstraintEffectMap[i].effect) {
-          // If the constraint is true, leave the platform effect enabled, and
-          // set the constraint to false to later disable the software effect.
-          constraints.AddMandatory(kConstraintEffectMap[i].constraint,
-              webrtc::MediaConstraintsInterface::kValueFalse, true);
-          DVLOG(1) << "Disabling platform effect: "
-                   << kConstraintEffectMap[i].constraint;
-        }
-      }
-      device_info.device.input.effects = effects;
-    }
 
-    scoped_refptr<WebRtcAudioCapturer> capturer(
-        MaybeCreateAudioCapturer(render_view_id, device_info,
-                                 audio_constraints));
-    if (!capturer.get()) {
-      DLOG(WARNING) << "Failed to create the capturer for device "
-                    << device_info.device.id;
-      sources_created.Run(web_stream, false);
-      // TODO(xians): Don't we need to check if source_observer is observing
-      // something? If not, then it looks like we have a leak here.
-      // OTOH, if it _is_ observing something, then the callback might
-      // be called multiple times which is likely also a bug.
-      return;
+  MediaStreamAudioSource* source_data =
+      static_cast<MediaStreamAudioSource*>(audio_source.extraData());
+
+  // TODO(xians): Create a new capturer for different microphones when we
+  // support multiple microphones. See issue crbug.com/262117.
+  StreamDeviceInfo device_info = source_data->device_info();
+  RTCMediaConstraints constraints = native_audio_constraints;
+
+  // If any platform effects are available, check them against the
+  // constraints. Disable effects to match false constraints, but if a
+  // constraint is true, set the constraint to false to later disable the
+  // software effect.
+  int effects = device_info.device.input.effects;
+  if (effects != media::AudioParameters::NO_EFFECTS) {
+    for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
+      bool value;
+      if (!webrtc::FindConstraint(&constraints,
+                                  kConstraintEffectMap[i].constraint, &value,
+                                  NULL) || !value) {
+        // If the constraint is false, or does not exist, disable the platform
+        // effect.
+        effects &= ~kConstraintEffectMap[i].effect;
+        DVLOG(1) << "Disabling constraint: "
+                 << kConstraintEffectMap[i].constraint;
+      } else if (effects & kConstraintEffectMap[i].effect) {
+        // If the constraint is true, leave the platform effect enabled, and
+        // set the constraint to false to later disable the software effect.
+        constraints.AddMandatory(kConstraintEffectMap[i].constraint,
+                                 webrtc::MediaConstraintsInterface::kValueFalse,
+                                 true);
+        DVLOG(1) << "Disabling platform effect: "
+                 << kConstraintEffectMap[i].constraint;
+      }
+    }
     }
-    source_data->SetAudioCapturer(capturer);
+    device_info.device.input.effects = effects;
+  }
 
-    // Creates a LocalAudioSource object which holds audio options.
-    // TODO(xians): The option should apply to the track instead of the source.
-    source_data->SetLocalAudioSource(
-        CreateLocalAudioSource(&constraints).get());
-    source_observer->AddSource(source_data->local_audio_source());
+  scoped_refptr<WebRtcAudioCapturer> capturer(
+      MaybeCreateAudioCapturer(render_view_id, device_info,
+                               audio_constraints));
+  if (!capturer.get()) {
+    DLOG(WARNING) << "Failed to create the capturer for device "
+                  << device_info.device.id;
+    return false;
   }
+  source_data->SetAudioCapturer(capturer);
 
-  source_observer->StartObservering();
+  // Creates a LocalAudioSource object which holds audio options.
+  // TODO(xians): The option should apply to the track instead of the source.
+  source_data->SetLocalAudioSource(
+      CreateLocalAudioSource(&constraints).get());
+
+  return true;
+}
+
+cricket::VideoCapturer* MediaStreamDependencyFactory::CreateVideoCapturer(
+    const StreamDeviceInfo& info) {
+  bool is_screencast =
+      info.device.type == MEDIA_TAB_VIDEO_CAPTURE ||
+      info.device.type == MEDIA_DESKTOP_VIDEO_CAPTURE;
+  return new RtcVideoCapturer(info.session_id, is_screencast);
 }
 
 void MediaStreamDependencyFactory::CreateNativeLocalMediaStream(
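The constraint-to-effect loop in the new InitializeMediaStreamAudioSource()
indexes kConstraintEffectMap, which the hunk references but does not show. A
sketch of its assumed shape, inferred from how the loop uses it (the real
table is defined elsewhere in this file and may list different entries):

    // Assumed shape; each entry pairs a WebRTC constraint name with the
    // platform effect it controls, so that at most one of the hardware
    // effect and the software fallback stays enabled.
    struct {
      const char* constraint;
      media::AudioParameters::PlatformEffectsMask effect;
    } const kConstraintEffectMap[] = {
      { webrtc::MediaConstraintsInterface::kEchoCancellation,
        media::AudioParameters::ECHO_CANCELLER },
    };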
@@ -393,8 +276,8 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack(
     const blink::WebMediaStreamTrack& track) {
   blink::WebMediaStreamSource source = track.source();
   DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
-  MediaStreamSourceExtraData* source_data =
-      static_cast<MediaStreamSourceExtraData*>(source.extraData());
+  MediaStreamAudioSource* source_data =
+      static_cast<MediaStreamAudioSource*>(source.extraData());
 
   // In the future the constraints will belong to the track itself, but
   // right now they're on the source, so we fetch them from there.
@@ -411,7 +294,7 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack(
     // Create a specific capturer for each WebAudio consumer.
     webaudio_source = CreateWebAudioSource(&source, track_constraints);
     source_data =
-        static_cast<MediaStreamSourceExtraData*>(source.extraData());
+        static_cast<MediaStreamAudioSource*>(source.extraData());
   } else {
     // TODO(perkj): Implement support for sources from
     // remote MediaStreams.
@@ -445,10 +328,12 @@ MediaStreamDependencyFactory::CreateNativeAudioMediaStreamTrack(
 scoped_refptr<webrtc::VideoTrackInterface>
 MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack(
     const blink::WebMediaStreamTrack& track) {
+  DCHECK(track.extraData() == NULL);
   blink::WebMediaStreamSource source = track.source();
   DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeVideo);
-  MediaStreamSourceExtraData* source_data =
-      static_cast<MediaStreamSourceExtraData*>(source.extraData());
+
+  MediaStreamVideoSource* source_data =
+      static_cast<MediaStreamVideoSource*>(source.extraData());
 
   if (!source_data) {
     // TODO(perkj): Implement support for sources from
@@ -457,14 +342,15 @@ MediaStreamDependencyFactory::CreateNativeVideoMediaStreamTrack(
     return NULL;
   }
 
-  std::string track_id = base::UTF16ToUTF8(track.id());
-  scoped_refptr<webrtc::VideoTrackInterface> video_track(
-      CreateLocalVideoTrack(track_id, source_data->video_source()));
-  AddNativeTrackToBlinkTrack(video_track.get(), track, true);
+  // Create native track from the source.
+  scoped_refptr<webrtc::VideoTrackInterface> webrtc_track =
+      CreateLocalVideoTrack(track.id().utf8(), source_data->GetAdapter());
 
-  video_track->set_enabled(track.isEnabled());
+  AddNativeTrackToBlinkTrack(webrtc_track, track, true);
 
-  return video_track;
+  webrtc_track->set_enabled(track.isEnabled());
+
+  return webrtc_track;
 }
 
 void MediaStreamDependencyFactory::CreateNativeMediaStreamTrack(
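CreateNativeVideoMediaStreamTrack() now asserts that no native track is
attached yet and builds the webrtc track from the adapter held by
MediaStreamVideoSource, instead of from a video source stored on the old
extra-data object. A sketch of the assumed end-to-end wiring (that
GetAdapter() returns the webrtc source is inferred from this hunk; the
wiring itself is illustrative, not code from this patch):

    // Illustrative wiring only.
    cricket::VideoCapturer* capturer = factory->CreateVideoCapturer(info);
    scoped_refptr<webrtc::VideoSourceInterface> adapter =
        factory->CreateVideoSource(capturer, &video_constraints);
    // Assumed: MediaStreamVideoSource stores |adapter| and hands it back
    // through GetAdapter() when the track is created.
    scoped_refptr<webrtc::VideoTrackInterface> webrtc_track =
        factory->CreateLocalVideoTrack(track.id().utf8(), adapter.get());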
@@ -575,6 +461,9 @@ scoped_refptr<webrtc::VideoSourceInterface>
 MediaStreamDependencyFactory::CreateVideoSource(
     cricket::VideoCapturer* capturer,
     const webrtc::MediaConstraintsInterface* constraints) {
+  if (!EnsurePeerConnectionFactory()) {
+    return NULL;
+  }
   scoped_refptr<webrtc::VideoSourceInterface> source =
       pc_factory_->CreateVideoSource(capturer, constraints).get();
   return source;
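Because CreateVideoSource() can now return NULL when the PeerConnection
factory cannot be created, callers have to treat the returned source as
optional. A failure-handling sketch (the caller shown is illustrative):

    scoped_refptr<webrtc::VideoSourceInterface> source =
        factory->CreateVideoSource(capturer, constraints);
    if (!source.get()) {
      // Assumed: on this early failure the webrtc source never took
      // ownership of |capturer|, so the caller still has to delete it.
      delete capturer;
      return false;
    }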
@@ -690,20 +579,6 @@ MediaStreamDependencyFactory::CreateLocalAudioSource(
   return source;
 }
 
-scoped_refptr<webrtc::VideoSourceInterface>
-MediaStreamDependencyFactory::CreateLocalVideoSource(
-    int video_session_id,
-    bool is_screencast,
-    const webrtc::MediaConstraintsInterface* constraints) {
-  RtcVideoCapturer* capturer = new RtcVideoCapturer(
-      video_session_id, is_screencast);
-
-  // The video source takes ownership of |capturer|.
-  scoped_refptr<webrtc::VideoSourceInterface> source =
-      CreateVideoSource(capturer, constraints);
-  return source;
-}
-
 scoped_refptr<WebAudioCapturerSource>
 MediaStreamDependencyFactory::CreateWebAudioSource(
     blink::WebMediaStreamSource* source,
@@ -713,7 +588,7 @@ MediaStreamDependencyFactory::CreateWebAudioSource(
 
   scoped_refptr<WebAudioCapturerSource>
       webaudio_capturer_source(new WebAudioCapturerSource());
-  MediaStreamSourceExtraData* source_data = new MediaStreamSourceExtraData();
+  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();
 
   // Create a LocalAudioSource object which holds audio options.
   // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
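Across the patch, the single MediaStreamSourceExtraData type is replaced by
the more specific MediaStreamAudioSource and MediaStreamVideoSource. A sketch
of the audio side as its usage in this file implies it (the real declaration
lives in media_stream_audio_source.h and may differ):

    // Assumed interface, inferred from the calls made in this file.
    class MediaStreamAudioSource
        : public blink::WebMediaStreamSource::ExtraData {
     public:
      const StreamDeviceInfo& device_info() const;
      void SetAudioCapturer(const scoped_refptr<WebRtcAudioCapturer>& capturer);
      void SetLocalAudioSource(webrtc::AudioSourceInterface* source);
      webrtc::AudioSourceInterface* local_audio_source() const;
    };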
|