Index: Source/modules/speech/SpeechRecognition.cpp
diff --git a/Source/modules/speech/SpeechRecognition.cpp b/Source/modules/speech/SpeechRecognition.cpp
index 06196eedf0925623ae539f848c1aa4669dddeea7..f0487ed95e01ea58d52974e3d4d4c03a5eea77f9 100644
--- a/Source/modules/speech/SpeechRecognition.cpp
+++ b/Source/modules/speech/SpeechRecognition.cpp
@@ -38,16 +38,31 @@
 namespace blink {
-SpeechRecognition* SpeechRecognition::create(ExecutionContext* context)
+SpeechRecognition* SpeechRecognition::create(ExecutionContext* context, ExceptionState& exceptionState)
 {
-    SpeechRecognition* speechRecognition = new SpeechRecognition(context);
+    const char* notSupportedErrorMessage = "cannot create a new object for a closed window.";
+    if (!context) {
[haraken, 2015/02/27 10:47:39] By calling ConstructorCallWith=ExecutionContext, c…
[sof, 2015/02/27 10:50:36] The bindings code calls currentExecutionContext()…
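As context for the (truncated) thread above: with ConstructorCallWith=ExecutionContext on the interface, presumably together with RaisesException=Constructor for the new ExceptionState parameter, the generated V8 constructor callback is the code that picks the "current" ExecutionContext and forwards it to create(). The snippet below is only a rough sketch of that caller, written from memory of 2015-era Blink bindings; it is not the generated V8SpeechRecognition.cpp, and the function name and include paths are approximations.

// Rough sketch only -- not the actual generated bindings code.
#include "bindings/core/v8/ExceptionState.h"
#include "bindings/core/v8/V8Binding.h"
#include "modules/speech/SpeechRecognition.h"

namespace blink {

// Hypothetical stand-in for the generated constructor callback.
static void speechRecognitionConstructorSketch(const v8::FunctionCallbackInfo<v8::Value>& info)
{
    ExceptionState exceptionState(ExceptionState::ConstructionContext, "SpeechRecognition", info.Holder(), info.GetIsolate());
    // The bindings layer, not the caller, decides which ExecutionContext is
    // "current"; for a closed window it can apparently be null, which is the
    // case the new null check in create() guards against.
    ExecutionContext* context = currentExecutionContext(info.GetIsolate());
    SpeechRecognition* impl = SpeechRecognition::create(context, exceptionState);
    if (exceptionState.throwIfNeeded())
        return;
    // ... wrapping of |impl| and setting the return value elided ...
}

} // namespace blink

This is also why the Page lookup moves out of the constructor and into create(): document->page() can be null for a detached document, so create() can report NotSupportedError instead of hitting the old ASSERT(page).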
+        exceptionState.throwDOMException(NotSupportedError, notSupportedErrorMessage);
+        return nullptr;
+    }
+    ASSERT(context->isDocument());
+    Document* document = toDocument(context);
+    ASSERT(document);
+    Page* page = document->page();
+    if (!page) {
+        exceptionState.throwDOMException(NotSupportedError, notSupportedErrorMessage);
+        return nullptr;
+    }
+    SpeechRecognition* speechRecognition = new SpeechRecognition(page, context);
     speechRecognition->suspendIfNeeded();
     return speechRecognition;
 }
 void SpeechRecognition::start(ExceptionState& exceptionState)
 {
-    ASSERT(m_controller);
+    if (!m_controller)
+        return;
+
     if (m_started) {
         exceptionState.throwDOMException(InvalidStateError, "recognition has already started.");
         return;
@@ -60,7 +75,9 @@ void SpeechRecognition::start(ExceptionState& exceptionState)
 void SpeechRecognition::stopFunction()
 {
-    ASSERT(m_controller);
+    if (!m_controller)
+        return;
+
     if (m_started && !m_stopping) {
         m_stopping = true;
         m_controller->stop(this);
@@ -69,7 +86,9 @@ void SpeechRecognition::stopFunction()
 void SpeechRecognition::abort()
 {
-    ASSERT(m_controller);
+    if (!m_controller)
+        return;
+
     if (m_started && !m_stopping) {
         m_stopping = true;
         m_controller->abort(this);
@@ -166,26 +185,20 @@ bool SpeechRecognition::hasPendingActivity() const
     return m_started;
 }
-SpeechRecognition::SpeechRecognition(ExecutionContext* context)
-    : ActiveDOMObject(context)
+SpeechRecognition::SpeechRecognition(Page* page, ExecutionContext* context)
+    : PageLifecycleObserver(page)
+    , ActiveDOMObject(context)
     , m_grammars(SpeechGrammarList::create()) // FIXME: The spec is not clear on the default value for the grammars attribute.
     , m_audioTrack(nullptr)
     , m_continuous(false)
     , m_interimResults(false)
     , m_maxAlternatives(1)
-    , m_controller(nullptr)
+    , m_controller(SpeechRecognitionController::from(page))
     , m_stoppedByActiveDOMObject(false)
     , m_started(false)
     , m_stopping(false)
 {
-    Document* document = toDocument(executionContext());
-
-    Page* page = document->page();
-    ASSERT(page);
-
-    m_controller = SpeechRecognitionController::from(page);
     ASSERT(m_controller);
-
     // FIXME: Need to hook up with Page to get notified when the visibility changes.
 }
@@ -193,6 +206,12 @@ SpeechRecognition::~SpeechRecognition()
 {
 }
+void SpeechRecognition::contextDestroyed()
+{
+    m_controller = nullptr;
+    PageLifecycleObserver::contextDestroyed();
+}
+
 DEFINE_TRACE(SpeechRecognition)
 {
     visitor->trace(m_grammars);
@@ -202,6 +221,7 @@ DEFINE_TRACE(SpeechRecognition)
 #endif
     visitor->trace(m_finalResults);
     RefCountedGarbageCollectedEventTargetWithInlineData<SpeechRecognition>::trace(visitor);
+    PageLifecycleObserver::trace(visitor);
     ActiveDOMObject::trace(visitor);
 }