Index: content/renderer/media/webrtc/media_stream_remote_audio_track.h
diff --git a/content/renderer/media/webrtc/media_stream_remote_audio_track.h b/content/renderer/media/webrtc/media_stream_remote_audio_track.h
index 639c2e9616bf96c3e14f3b007a37c615f5cbbab3..1f0a7b5a1e89623cb203bbd467ac83b9af7b9ae9 100644
--- a/content/renderer/media/webrtc/media_stream_remote_audio_track.h
+++ b/content/renderer/media/webrtc/media_stream_remote_audio_track.h
@@ -6,13 +6,16 @@
#define CONTENT_RENDERER_MEDIA_WEBRTC_MEDIA_STREAM_REMOTE_AUDIO_TRACK_H_
#include "base/memory/ref_counted.h"
-#include "content/renderer/media/media_stream_track.h"
+#include "content/renderer/media/media_stream_audio_track.h"
namespace content {
// MediaStreamRemoteAudioTrack is a WebRTC specific implementation of an
// audio track received from a PeerConnection.
-class MediaStreamRemoteAudioTrack : public MediaStreamTrack {
+// TODO(tommi): Chrome shouldn't have to care about remote vs local so
+// we should have a single track implementation that delegates to the
+// sources that do different things depending on the type of source.
+class MediaStreamRemoteAudioTrack : public MediaStreamAudioTrack {
 public:
  explicit MediaStreamRemoteAudioTrack(
      const scoped_refptr<webrtc::AudioTrackInterface>& track);
@@ -21,9 +24,15 @@ class MediaStreamRemoteAudioTrack : public MediaStreamTrack {
  void SetEnabled(bool enabled) override;
  void Stop() override;
+  void AddSink(MediaStreamAudioSink* sink) override;
+  void RemoveSink(MediaStreamAudioSink* sink) override;
+  media::AudioParameters GetOutputFormat() const override;
+
  webrtc::AudioTrackInterface* GetAudioAdapter() override;
 private:
+  class AudioSink;
+  scoped_ptr<AudioSink> sink_;
  const scoped_refptr<webrtc::AudioTrackInterface> track_;
};