Index: Source/modules/webaudio/AudioContext.cpp |
diff --git a/Source/modules/webaudio/AudioContext.cpp b/Source/modules/webaudio/AudioContext.cpp |
index 645145c54ff0c92282c932f40525833a6296e1d6..33a8eb4a76aa79f3f167238a67273aef640ab502 100644 |
--- a/Source/modules/webaudio/AudioContext.cpp |
+++ b/Source/modules/webaudio/AudioContext.cpp |
@@ -99,16 +99,16 @@ AudioContext* AudioContext::create(Document& document, ExceptionState& exception |
// Constructor for rendering to the audio hardware. |
AudioContext::AudioContext(Document* document) |
: ActiveDOMObject(document) |
+ , m_destinationNode(nullptr) |
+ , m_didInitializeContextGraphMutex(false) |
+ , m_contextState(Suspended) |
, m_isStopScheduled(false) |
, m_isCleared(false) |
, m_isInitialized(false) |
- , m_destinationNode(nullptr) |
, m_isResolvingResumePromises(false) |
, m_connectionCount(0) |
- , m_didInitializeContextGraphMutex(false) |
, m_deferredTaskHandler(DeferredTaskHandler::create()) |
, m_isOfflineContext(false) |
- , m_contextState(Suspended) |
, m_cachedSampleFrame(0) |
{ |
m_didInitializeContextGraphMutex = true; |
@@ -120,19 +120,20 @@ AudioContext::AudioContext(Document* document) |
// Constructor for offline (non-realtime) rendering. |
AudioContext::AudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate) |
: ActiveDOMObject(document) |
+ , m_destinationNode(nullptr) |
+ , m_didInitializeContextGraphMutex(false) |
+ , m_contextState(Suspended) |
, m_isStopScheduled(false) |
, m_isCleared(false) |
, m_isInitialized(false) |
- , m_destinationNode(nullptr) |
, m_isResolvingResumePromises(false) |
, m_connectionCount(0) |
- , m_didInitializeContextGraphMutex(false) |
, m_deferredTaskHandler(DeferredTaskHandler::create()) |
, m_isOfflineContext(true) |
- , m_contextState(Suspended) |
, m_cachedSampleFrame(0) |
{ |
m_didInitializeContextGraphMutex = true; |
+ |
// Create a new destination for offline rendering. |
m_renderTarget = AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate); |
if (m_renderTarget.get()) |
@@ -733,8 +734,10 @@ void AudioContext::setContextState(AudioContextState newState) |
m_contextState = newState; |
// Notify context that state changed |
- if (executionContext()) |
- executionContext()->postTask(FROM_HERE, createSameThreadTask(&AudioContext::notifyStateChange, this)); |
+ if (executionContext()) { |
+ executionContext()->postTask(FROM_HERE, |
+ createSameThreadTask(&AudioContext::notifyStateChange, this)); |
+ } |
Raymond Toy
2015/05/13 17:16:08
Why?
hongchan
2015/05/13 17:30:53
Added some line breaks to make the code read better.
|
} |
void AudioContext::notifyStateChange() |
@@ -1023,6 +1026,16 @@ void AudioContext::stopRendering() |
 } |
 } |
+// Transitions the context into the Suspended state; must run on the main thread. |
+void AudioContext::fireSuspendEvent() |
+{ |
+ ASSERT(isMainThread()); |
+ if (!isMainThread()) |
+ return; |
+ |
+ setContextState(Suspended); |
+} |
+ |
void AudioContext::fireCompletionEvent() |
{ |
ASSERT(isMainThread()); |
@@ -1105,6 +1129,14 @@ ScriptPromise AudioContext::closeContext(ScriptState* scriptState) |
 return promise; |
 } |
+// Suspension is only meaningful for OfflineAudioContext; reaching this |
+// base-class implementation is a programming error, so the assert must fire. |
+bool AudioContext::suspendIfNecessary() |
+{ |
+ ASSERT_WITH_MESSAGE(false, "suspendIfNecessary() only valid for offline audio context"); |
+ return false; |
+} |
+ |
} // namespace blink |
#endif // ENABLE(WEB_AUDIO) |