| Index: third_party/WebKit/Source/modules/webaudio/AbstractAudioContext.h
|
| diff --git a/third_party/WebKit/Source/modules/webaudio/AbstractAudioContext.h b/third_party/WebKit/Source/modules/webaudio/AbstractAudioContext.h
|
| index 10c587fc0621f0e3013ba084c314bd4288190cb7..b79cbe0717565afd3d92ce25ee1074f5c62e9e79 100644
|
| --- a/third_party/WebKit/Source/modules/webaudio/AbstractAudioContext.h
|
| +++ b/third_party/WebKit/Source/modules/webaudio/AbstractAudioContext.h
|
| @@ -167,8 +167,11 @@ public:
|
| // Close
|
| virtual ScriptPromise closeContext(ScriptState*) = 0;
|
|
|
| - // Suspend/Resume
|
| + // Suspend for online and offline contexts.
|
| virtual ScriptPromise suspendContext(ScriptState*) = 0;
|
| + virtual ScriptPromise suspendContext(ScriptState*, double) = 0;
|
| +
|
| + // Resume
|
| virtual ScriptPromise resumeContext(ScriptState*) = 0;
|
|
|
| // When a source node has started processing and needs to be protected,
|
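This hunk splits the old combined "Suspend/Resume" comment and adds a second pure-virtual suspendContext overload taking a double, so an offline context can schedule a suspend at a given render time while the online context keeps its immediate, argument-free suspend. The following is a minimal, self-contained sketch of the scheduling idea only; the class name, member names, and the render-quantum quantization are assumptions for illustration, not the actual Blink overrides.

    // Illustrative sketch only: a simplified offline-style context that schedules a
    // suspend at a time quantized to the render quantum, mirroring the intent of the
    // new suspendContext(ScriptState*, double) overload. All names are hypothetical.
    #include <cmath>
    #include <cstddef>
    #include <functional>
    #include <iostream>
    #include <map>

    class FakeOfflineContext {
    public:
        FakeOfflineContext(float sampleRate, std::size_t renderQuantumFrames)
            : m_sampleRate(sampleRate), m_renderQuantumFrames(renderQuantumFrames) {}

        // Schedule a suspend at |when| seconds; the callback stands in for resolving
        // the ScriptPromise once rendering reaches the quantized frame.
        bool suspendAt(double when, std::function<void()> onSuspended) {
            if (when < 0)
                return false;  // A real implementation would reject the promise here.
            std::size_t frame = quantizeToRenderQuantum(when);
            m_scheduledSuspends[frame] = std::move(onSuspended);
            return true;
        }

        // Called from the (simulated) rendering loop at the start of each quantum.
        void maybeSuspend(std::size_t currentFrame) {
            auto it = m_scheduledSuspends.find(currentFrame);
            if (it == m_scheduledSuspends.end())
                return;
            it->second();  // "Resolve" the suspend promise.
            m_scheduledSuspends.erase(it);
        }

    private:
        std::size_t quantizeToRenderQuantum(double when) const {
            double frame = when * m_sampleRate;
            return static_cast<std::size_t>(std::floor(frame / m_renderQuantumFrames)) * m_renderQuantumFrames;
        }

        float m_sampleRate;
        std::size_t m_renderQuantumFrames;
        std::map<std::size_t, std::function<void()>> m_scheduledSuspends;
    };

    int main() {
        FakeOfflineContext context(44100.0f, 128);
        context.suspendAt(0.01, [] { std::cout << "suspended\n"; });
        for (std::size_t frame = 0; frame < 1024; frame += 128)
            context.maybeSuspend(frame);  // Prints "suspended" at frame 384.
    }
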
| @@ -227,11 +230,9 @@ public:
|
| const AtomicString& interfaceName() const final;
|
| ExecutionContext* executionContext() const final;
|
|
|
| - DEFINE_ATTRIBUTE_EVENT_LISTENER(complete);
|
| DEFINE_ATTRIBUTE_EVENT_LISTENER(statechange);
|
|
|
| void startRendering();
|
| - void fireCompletionEvent();
|
| void notifyStateChange();
|
|
|
| // A context is considered closed if:
|
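This hunk removes the offline-only completion plumbing (the complete event attribute and fireCompletionEvent()) from the shared base class, leaving only the statechange listener and notifyStateChange(). Presumably the completion event now belongs to the offline subclass. Below is a minimal sketch of that split under those assumptions; the class and member names are made up for illustration and are not Blink's.

    // Illustrative sketch only: the base context exposes only the state-change
    // notification, while completion is an offline-only concern handled by the
    // subclass. All names here are hypothetical.
    #include <functional>
    #include <iostream>
    #include <string>

    class BaseContextSketch {
    public:
        virtual ~BaseContextSketch() = default;
        std::function<void(const std::string&)> onstatechange;  // ~ DEFINE_ATTRIBUTE_EVENT_LISTENER(statechange)

    protected:
        void notifyStateChange(const std::string& newState) {
            if (onstatechange)
                onstatechange(newState);
        }
    };

    class OfflineContextSketch : public BaseContextSketch {
    public:
        std::function<void()> oncomplete;  // Offline-only, so it no longer lives in the base class.

        void finishRendering() {
            notifyStateChange("closed");
            if (oncomplete)
                oncomplete();  // Stand-in for firing the completion event.
        }
    };

    int main() {
        OfflineContextSketch context;
        context.onstatechange = [](const std::string& s) { std::cout << "state: " << s << "\n"; };
        context.oncomplete = [] { std::cout << "complete\n"; };
        context.finishRendering();
    }
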
| @@ -246,20 +247,27 @@ protected:
|
| explicit AbstractAudioContext(Document*);
|
| AbstractAudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);
|
|
|
| + void initialize();
|
| + void uninitialize();
|
| +
|
| void setContextState(AudioContextState);
|
| +
|
| virtual void didClose() {}
|
| - void uninitialize();
|
|
|
| - Member<ScriptPromiseResolver> m_offlineResolver;
|
| + // Tries to handle AudioBufferSourceNodes that were started but became disconnected or were never
|
| + // connected. Because these never get pulled anymore, they will stay around forever. So if we
|
| + // can, try to stop them so they can be collected.
|
| + void handleStoppableSourceNodes();
|
| +
|
| + Member<AudioDestinationNode> m_destinationNode;
|
|
|
| // FIXME(dominicc): Move m_resumeResolvers to AudioContext, because only
|
| // it creates these Promises.
|
| // Vector of promises created by resume(). It takes time to handle them, so we collect all of
|
| // the promises here until they can be resolved or rejected.
|
| HeapVector<Member<ScriptPromiseResolver>> m_resumeResolvers;
|
| -private:
|
| - void initialize();
|
|
|
| +private:
|
| bool m_isCleared;
|
| void clear();
|
|
|
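The comment now attached to handleStoppableSourceNodes() explains the intent: source nodes that were started but are disconnected (or never connected) are never pulled again, so they would stay alive indefinitely unless the context stops them and lets them be collected. The sketch below illustrates that idea as a standalone simplification; the node structure and the stoppability test are assumptions, not Blink's actual logic.

    // Illustrative sketch only: scan active source nodes and stop the ones that can
    // never produce audible output again, so they become collectable.
    #include <algorithm>
    #include <cstddef>
    #include <iostream>
    #include <memory>
    #include <vector>

    struct SourceNodeSketch {
        bool started = false;
        bool connected = false;
        bool stopped = false;
    };

    class ContextSketch {
    public:
        void addActiveSourceNode(std::shared_ptr<SourceNodeSketch> node) {
            m_activeSourceNodes.push_back(std::move(node));
        }

        // ~ handleStoppableSourceNodes(): stop nodes that were started but are not
        // connected to anything, then drop the context's reference so they can go away.
        void handleStoppableSourceNodes() {
            for (auto& node : m_activeSourceNodes) {
                if (node->started && !node->connected)
                    node->stopped = true;
            }
            m_activeSourceNodes.erase(
                std::remove_if(m_activeSourceNodes.begin(), m_activeSourceNodes.end(),
                               [](const std::shared_ptr<SourceNodeSketch>& n) { return n->stopped; }),
                m_activeSourceNodes.end());
        }

        std::size_t activeCount() const { return m_activeSourceNodes.size(); }

    private:
        std::vector<std::shared_ptr<SourceNodeSketch>> m_activeSourceNodes;
    };

    int main() {
        ContextSketch context;
        auto orphan = std::make_shared<SourceNodeSketch>();
        orphan->started = true;  // Started but never connected: will never be pulled.
        context.addActiveSourceNode(orphan);
        context.handleStoppableSourceNodes();
        std::cout << "active nodes: " << context.activeCount() << "\n";  // Prints 0.
    }
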
| @@ -269,7 +277,6 @@ private:
|
| // haven't finished playing. Make sure to release them here.
|
| void releaseActiveSourceNodes();
|
|
|
| - Member<AudioDestinationNode> m_destinationNode;
|
| Member<AudioListener> m_listener;
|
|
|
| // Only accessed in the audio thread.
|
| @@ -304,18 +311,11 @@ private:
|
| bool m_didInitializeContextGraphMutex;
|
| RefPtr<DeferredTaskHandler> m_deferredTaskHandler;
|
|
|
| - Member<AudioBuffer> m_renderTarget;
|
| -
|
| // The state of the AbstractAudioContext.
|
| AudioContextState m_contextState;
|
|
|
| AsyncAudioDecoder m_audioDecoder;
|
|
|
| - // Tries to handle AudioBufferSourceNodes that were started but became disconnected or was never
|
| - // connected. Because these never get pulled anymore, they will stay around forever. So if we
|
| - // can, try to stop them so they can be collected.
|
| - void handleStoppableSourceNodes();
|
| -
|
| // This is considering 32 is large enough for multiple channels audio.
|
| // It is somewhat arbitrary and could be increased if necessary.
|
| enum { MaxNumberOfChannels = 32 };
|
|
|