Index: Source/modules/webaudio/AudioContext.cpp |
diff --git a/Source/modules/webaudio/AudioContext.cpp b/Source/modules/webaudio/AudioContext.cpp |
index 845d729fa5598817b70e1853536a9758697f6fdb..5774bf36da5e6f62ab9e3d3e8545686cc15a0153 100644 |
--- a/Source/modules/webaudio/AudioContext.cpp |
+++ b/Source/modules/webaudio/AudioContext.cpp |
@@ -99,16 +99,16 @@ AudioContext* AudioContext::create(Document& document, ExceptionState& exception |
// Constructor for rendering to the audio hardware. |
AudioContext::AudioContext(Document* document) |
: ActiveDOMObject(document) |
+ , m_destinationNode(nullptr) |
+ , m_didInitializeContextGraphMutex(false) |
+ , m_contextState(Suspended) |
, m_isStopScheduled(false) |
, m_isCleared(false) |
, m_isInitialized(false) |
- , m_destinationNode(nullptr) |
, m_isResolvingResumePromises(false) |
, m_connectionCount(0) |
- , m_didInitializeContextGraphMutex(false) |
, m_deferredTaskHandler(DeferredTaskHandler::create()) |
, m_isOfflineContext(false) |
- , m_contextState(Suspended) |
, m_cachedSampleFrame(0) |
{ |
m_didInitializeContextGraphMutex = true; |
@@ -120,19 +120,20 @@ AudioContext::AudioContext(Document* document) |
// Constructor for offline (non-realtime) rendering. |
AudioContext::AudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate) |
: ActiveDOMObject(document) |
+ , m_destinationNode(nullptr) |
+ , m_didInitializeContextGraphMutex(false) |
+ , m_contextState(Suspended) |
, m_isStopScheduled(false) |
, m_isCleared(false) |
, m_isInitialized(false) |
- , m_destinationNode(nullptr) |
, m_isResolvingResumePromises(false) |
, m_connectionCount(0) |
- , m_didInitializeContextGraphMutex(false) |
, m_deferredTaskHandler(DeferredTaskHandler::create()) |
, m_isOfflineContext(true) |
- , m_contextState(Suspended) |
, m_cachedSampleFrame(0) |
{ |
m_didInitializeContextGraphMutex = true; |
+ |
// Create a new destination for offline rendering. |
m_renderTarget = AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate); |
if (m_renderTarget.get()) |
@@ -736,8 +737,10 @@ void AudioContext::setContextState(AudioContextState newState) |
m_contextState = newState; |
// Notify context that state changed |
- if (executionContext()) |
- executionContext()->postTask(FROM_HERE, createSameThreadTask(&AudioContext::notifyStateChange, this)); |
+ if (executionContext()) { |
+ executionContext()->postTask(FROM_HERE, |
+ createSameThreadTask(&AudioContext::notifyStateChange, this)); |
+ } |
} |
void AudioContext::notifyStateChange() |
@@ -989,6 +992,27 @@ void AudioContext::stopRendering() |
} |
} |
+void AudioContext::fireSuspendEvent() |
+{ |
+    // Called on the main thread when offline rendering reaches a |
+    // scheduled suspend point. |
+    // |
+    // Currently a stub: the Suspended state transition, the "statechange" |
+    // event dispatch and the suspend-promise resolution are not |
+    // implemented yet. |
+    ASSERT(isMainThread()); |
+    if (!isMainThread()) |
+        return; |
+ |
+    // TODO: transition to Suspended via setContextState(Suspended); that |
+    // helper already posts the statechange notification to script. |
+ |
+    // TODO: resolve the pending suspend promise, guarding on |
+    // executionContext() so nothing is dispatched after the document has |
+    // gone away (m_offlineResolver->resolve(renderedBuffer)). |
+    // (See fireCompletionEvent for the analogous completion path.) |
+} |
+ |
void AudioContext::fireCompletionEvent() |
{ |
ASSERT(isMainThread()); |
@@ -1070,6 +1094,12 @@ ScriptPromise AudioContext::closeContext(ScriptState* scriptState) |
return promise; |
} |
+bool AudioContext::suspendIfNecessary() |
+{ |
+    ASSERT_WITH_MESSAGE(0, "suspendIfNecessary() only valid for offline audio context"); |
+    return false; |
+} |
+ |
} // namespace blink |
#endif // ENABLE(WEB_AUDIO) |