Index: content/renderer/speech_recognition_dispatcher.h |
diff --git a/content/renderer/speech_recognition_dispatcher.h b/content/renderer/speech_recognition_dispatcher.h |
index bae0e5d92a484cbd706533010a8ffd28e3f9e5fa..15ed361ae8c9b67f5fd8a78bbf878ca4a9e2de05 100644 |
--- a/content/renderer/speech_recognition_dispatcher.h |
+++ b/content/renderer/speech_recognition_dispatcher.h |
@@ -8,14 +8,24 @@ |
#include <map> |
#include "base/basictypes.h" |
+#include "base/memory/shared_memory.h" |
+#include "base/message_loop/message_loop_proxy.h" |
burnik
2014/09/22 07:43:12
Removed.
|
+#include "base/sync_socket.h" |
#include "content/public/common/speech_recognition_result.h" |
#include "content/public/renderer/render_view_observer.h" |
+#include "content/renderer/media/speech_recognition_audio_source_provider.h" |
no longer working on chromium
2014/09/23 10:09:13
Remove this include, because you forward-declare the class below.
burnik
2014/09/23 12:39:21
Done.
|
+#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" |
#include "third_party/WebKit/public/platform/WebVector.h" |
#include "third_party/WebKit/public/web/WebSpeechRecognitionHandle.h" |
#include "third_party/WebKit/public/web/WebSpeechRecognizer.h" |
+namespace media { |
+class AudioParameters; |
+} |
+ |
namespace content { |
class RenderViewImpl; |
+class SpeechRecognitionAudioSourceProvider; |
struct SpeechRecognitionError; |
struct SpeechRecognitionResult; |
@@ -53,6 +63,10 @@ class SpeechRecognitionDispatcher : public RenderViewObserver, |
void OnRecognitionEnded(int request_id); |
void OnResultsRetrieved(int request_id, |
const SpeechRecognitionResults& result); |
+ void OnAudioTrackReady(int session_id, const media::AudioParameters& params, |
+ base::SharedMemoryHandle handle, |
+ base::SyncSocket::TransitDescriptor socket); |
+ virtual void OnAudioTrackStopped(); |
burnik
2014/09/22 07:43:12
Removed 'virtual'.
|
int GetOrCreateIDForHandle(const blink::WebSpeechRecognitionHandle& handle); |
bool HandleExists(const blink::WebSpeechRecognitionHandle& handle); |
@@ -61,6 +75,13 @@ class SpeechRecognitionDispatcher : public RenderViewObserver, |
// The WebKit client class that we use to send events back to the JS world. |
blink::WebSpeechRecognizerClient* recognizer_client_; |
+ // Media stream audio track that the speech recognition connects to. |
+ // Accessed on the render thread. |
+ blink::WebMediaStreamTrack audio_track_; |
+ |
+ // Audio sink used to provide audio from the track. |
+ scoped_ptr<SpeechRecognitionAudioSourceProvider> audio_source_provider_; |
+ |
typedef std::map<int, blink::WebSpeechRecognitionHandle> HandleMap; |
HandleMap handle_map_; |
int next_id_; |