OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "components/test_runner/mock_web_speech_recognizer.h" | 5 #include "components/test_runner/mock_web_speech_recognizer.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 | 8 |
9 #include "base/logging.h" | 9 #include "base/logging.h" |
10 #include "base/macros.h" | 10 #include "base/macros.h" |
(...skipping 93 matching lines...) | (...skipping 93 matching lines...) |
104 private: | 104 private: |
105 blink::WebSpeechRecognizerClient::ErrorCode code_; | 105 blink::WebSpeechRecognizerClient::ErrorCode code_; |
106 blink::WebString message_; | 106 blink::WebString message_; |
107 | 107 |
108 DISALLOW_COPY_AND_ASSIGN(ErrorTask); | 108 DISALLOW_COPY_AND_ASSIGN(ErrorTask); |
109 }; | 109 }; |
110 | 110 |
111 // Task for tidying up after recognition task has ended. | 111 // Task for tidying up after recognition task has ended. |
112 class EndedTask : public MockWebSpeechRecognizer::Task { | 112 class EndedTask : public MockWebSpeechRecognizer::Task { |
113 public: | 113 public: |
114 EndedTask(MockWebSpeechRecognizer* mock, | 114 EndedTask(MockWebSpeechRecognizer* mock) |
115 blink::WebSpeechRecognitionHandle handle) | 115 : MockWebSpeechRecognizer::Task(mock) {} |
116 : MockWebSpeechRecognizer::Task(mock), handle_(handle) {} | |
117 | 116 |
118 ~EndedTask() override {} | 117 ~EndedTask() override {} |
119 | 118 |
120 void run() override { | 119 void run() override { |
121 // Reset recognizer's handle if it hasn't been replaced. | 120 blink::WebSpeechRecognitionHandle handle = recognizer_->Handle(); |
122 if (recognizer_->Handle() == handle_) | 121 recognizer_->Handle().reset(); |
123 recognizer_->Handle().reset(); | 122 recognizer_->Client()->didEnd(handle); |
124 handle_.reset(); | |
125 } | 123 } |
126 | 124 |
127 private: | 125 private: |
128 blink::WebSpeechRecognitionHandle handle_; | |
129 | |
130 DISALLOW_COPY_AND_ASSIGN(EndedTask); | 126 DISALLOW_COPY_AND_ASSIGN(EndedTask); |
131 }; | 127 }; |
132 | 128 |
133 } // namespace | 129 } // namespace |
134 | 130 |
135 MockWebSpeechRecognizer::MockWebSpeechRecognizer() | 131 MockWebSpeechRecognizer::MockWebSpeechRecognizer() |
136 : was_aborted_(false), task_queue_running_(false), delegate_(0) { | 132 : was_aborted_(false), task_queue_running_(false), delegate_(0) { |
137 } | 133 } |
138 | 134 |
139 MockWebSpeechRecognizer::~MockWebSpeechRecognizer() { | 135 MockWebSpeechRecognizer::~MockWebSpeechRecognizer() { |
(...skipping 28 matching lines...) | (...skipping 28 matching lines...) |
168 | 164 |
169 mock_transcripts_.clear(); | 165 mock_transcripts_.clear(); |
170 mock_confidences_.clear(); | 166 mock_confidences_.clear(); |
171 } else | 167 } else |
172 task_queue_.push_back(new NoMatchTask(this)); | 168 task_queue_.push_back(new NoMatchTask(this)); |
173 | 169 |
174 task_queue_.push_back( | 170 task_queue_.push_back( |
175 new ClientCallTask(this, &blink::WebSpeechRecognizerClient::didEndSound)); | 171 new ClientCallTask(this, &blink::WebSpeechRecognizerClient::didEndSound)); |
176 task_queue_.push_back( | 172 task_queue_.push_back( |
177 new ClientCallTask(this, &blink::WebSpeechRecognizerClient::didEndAudio)); | 173 new ClientCallTask(this, &blink::WebSpeechRecognizerClient::didEndAudio)); |
178 task_queue_.push_back( | 174 task_queue_.push_back(new EndedTask(this)); |
179 new ClientCallTask(this, &blink::WebSpeechRecognizerClient::didEnd)); | |
180 task_queue_.push_back(new EndedTask(this, handle_)); | |
181 | 175 |
182 StartTaskQueue(); | 176 StartTaskQueue(); |
183 } | 177 } |
184 | 178 |
185 void MockWebSpeechRecognizer::stop( | 179 void MockWebSpeechRecognizer::stop( |
186 const blink::WebSpeechRecognitionHandle& handle, | 180 const blink::WebSpeechRecognitionHandle& handle, |
187 blink::WebSpeechRecognizerClient* client) { | 181 blink::WebSpeechRecognizerClient* client) { |
188 handle_ = handle; | 182 handle_ = handle; |
189 client_ = client; | 183 client_ = client; |
190 | 184 |
191 // FIXME: Implement. | 185 // FIXME: Implement. |
192 NOTREACHED(); | 186 NOTREACHED(); |
193 } | 187 } |
194 | 188 |
195 void MockWebSpeechRecognizer::abort( | 189 void MockWebSpeechRecognizer::abort( |
196 const blink::WebSpeechRecognitionHandle& handle, | 190 const blink::WebSpeechRecognitionHandle& handle, |
197 blink::WebSpeechRecognizerClient* client) { | 191 blink::WebSpeechRecognizerClient* client) { |
198 handle_ = handle; | 192 handle_ = handle; |
199 client_ = client; | 193 client_ = client; |
200 | 194 |
201 ClearTaskQueue(); | 195 ClearTaskQueue(); |
202 was_aborted_ = true; | 196 was_aborted_ = true; |
203 task_queue_.push_back( | 197 task_queue_.push_back(new EndedTask(this)); |
204 new ClientCallTask(this, &blink::WebSpeechRecognizerClient::didEnd)); | |
205 task_queue_.push_back(new EndedTask(this, handle_)); | |
206 | 198 |
207 StartTaskQueue(); | 199 StartTaskQueue(); |
208 } | 200 } |
209 | 201 |
210 void MockWebSpeechRecognizer::AddMockResult(const blink::WebString& transcript, | 202 void MockWebSpeechRecognizer::AddMockResult(const blink::WebString& transcript, |
211 float confidence) { | 203 float confidence) { |
212 mock_transcripts_.push_back(transcript); | 204 mock_transcripts_.push_back(transcript); |
213 mock_confidences_.push_back(confidence); | 205 mock_confidences_.push_back(confidence); |
214 } | 206 } |
215 | 207 |
(...skipping 16 matching lines...) | (...skipping 16 matching lines...) |
232 code = blink::WebSpeechRecognizerClient::ServiceNotAllowedError; | 224 code = blink::WebSpeechRecognizerClient::ServiceNotAllowedError; |
233 else if (error == "BadGrammarError") | 225 else if (error == "BadGrammarError") |
234 code = blink::WebSpeechRecognizerClient::BadGrammarError; | 226 code = blink::WebSpeechRecognizerClient::BadGrammarError; |
235 else if (error == "LanguageNotSupportedError") | 227 else if (error == "LanguageNotSupportedError") |
236 code = blink::WebSpeechRecognizerClient::LanguageNotSupportedError; | 228 code = blink::WebSpeechRecognizerClient::LanguageNotSupportedError; |
237 else | 229 else |
238 return; | 230 return; |
239 | 231 |
240 ClearTaskQueue(); | 232 ClearTaskQueue(); |
241 task_queue_.push_back(new ErrorTask(this, code, message)); | 233 task_queue_.push_back(new ErrorTask(this, code, message)); |
242 task_queue_.push_back( | 234 task_queue_.push_back(new EndedTask(this)); |
243 new ClientCallTask(this, &blink::WebSpeechRecognizerClient::didEnd)); | |
244 task_queue_.push_back(new EndedTask(this, handle_)); | |
245 | 235 |
246 StartTaskQueue(); | 236 StartTaskQueue(); |
247 } | 237 } |
248 | 238 |
249 void MockWebSpeechRecognizer::StartTaskQueue() { | 239 void MockWebSpeechRecognizer::StartTaskQueue() { |
250 if (task_queue_running_) | 240 if (task_queue_running_) |
251 return; | 241 return; |
252 delegate_->PostTask(new StepTask(this)); | 242 delegate_->PostTask(new StepTask(this)); |
253 task_queue_running_ = true; | 243 task_queue_running_ = true; |
254 } | 244 } |
(...skipping 19 matching lines...) | (...skipping 19 matching lines...) |
274 | 264 |
275 if (object_->task_queue_.empty()) { | 265 if (object_->task_queue_.empty()) { |
276 object_->task_queue_running_ = false; | 266 object_->task_queue_running_ = false; |
277 return; | 267 return; |
278 } | 268 } |
279 | 269 |
280 object_->delegate_->PostTask(new StepTask(object_)); | 270 object_->delegate_->PostTask(new StepTask(object_)); |
281 } | 271 } |
282 | 272 |
283 } // namespace test_runner | 273 } // namespace test_runner |
OLD | NEW |