Index: Source/modules/webaudio/AbstractAudioContext.h
diff --git a/Source/modules/webaudio/AudioContext.h b/Source/modules/webaudio/AbstractAudioContext.h
similarity index 90%
copy from Source/modules/webaudio/AudioContext.h
copy to Source/modules/webaudio/AbstractAudioContext.h
index 00ad293d1871fc1652a6a21484e9476e32073193..9c691fbe83906283427b9200f1a43c82ed618ef9 100644
--- a/Source/modules/webaudio/AudioContext.h
+++ b/Source/modules/webaudio/AbstractAudioContext.h
@@ -22,8 +22,8 @@
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
-#ifndef AudioContext_h
-#define AudioContext_h
+#ifndef AbstractAudioContext_h
+#define AbstractAudioContext_h
#include "bindings/core/v8/ScriptPromise.h"
#include "bindings/core/v8/ScriptPromiseResolver.h"
@@ -74,12 +74,12 @@ class SecurityOrigin;
class StereoPannerNode;
class WaveShaperNode;
-// AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
+// AbstractAudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
// For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.
-class MODULES_EXPORT AudioContext : public RefCountedGarbageCollectedEventTargetWithInlineData<AudioContext>, public ActiveDOMObject {
- REFCOUNTED_GARBAGE_COLLECTED_EVENT_TARGET(AudioContext);
- WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(AudioContext);
+class MODULES_EXPORT AbstractAudioContext : public RefCountedGarbageCollectedEventTargetWithInlineData<AbstractAudioContext>, public ActiveDOMObject {
+ REFCOUNTED_GARBAGE_COLLECTED_EVENT_TARGET(AbstractAudioContext);
+ WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(AbstractAudioContext);
DEFINE_WRAPPERTYPEINFO();
public:
// The state of an audio context. On creation, the state is Suspended. The state is Running if
@@ -93,14 +93,13 @@ public:
};
// Create an AudioContext for rendering to the audio hardware.
- static AudioContext* create(Document&, ExceptionState&);
+ static AbstractAudioContext* create(Document&, ExceptionState&);
- ~AudioContext() override;
+ ~AbstractAudioContext() override;
DECLARE_VIRTUAL_TRACE();
bool isInitialized() const { return m_isInitialized; }
- bool isOfflineContext() { return m_isOfflineContext; }
// Document notification
void stop() final;
@@ -130,6 +129,8 @@ public:
AudioListener* listener() { return m_listener.get(); }
+ virtual bool hasRealtimeConstraint() = 0;
+
// The AudioNode create methods are called on the main thread (from JavaScript).
AudioBufferSourceNode* createBufferSource(ExceptionState&);
MediaElementAudioSourceNode* createMediaElementSource(HTMLMediaElement*, ExceptionState&);
@@ -157,11 +158,11 @@ public:
PeriodicWave* createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState&);
// Close
- ScriptPromise closeContext(ScriptState*);
+ virtual ScriptPromise closeContext(ScriptState*) = 0;
// Suspend/Resume
- ScriptPromise suspendContext(ScriptState*);
- ScriptPromise resumeContext(ScriptState*);
+ virtual ScriptPromise suspendContext(ScriptState*) = 0;
+ virtual ScriptPromise resumeContext(ScriptState*) = 0;
// When a source node has started processing and needs to be protected,
// this method tells the context to protect the node.
@@ -230,22 +231,28 @@ public:
// - closeContext() has been called, even if the audio HW has not yet been
// stopped. It will be stopped eventually.
// - it has been stopped (or is stopping) by its execution context.
- bool isContextClosed() const { return m_closeResolver || m_isStopScheduled || m_isCleared; }
-
- static unsigned s_hardwareContextCount;
- static unsigned s_contextId;
+ virtual bool isContextClosed() const { return m_isStopScheduled || m_isCleared; }
// Get the security origin for this audio context.
SecurityOrigin* securityOrigin() const;
protected:
- explicit AudioContext(Document*);
- AudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);
+ explicit AbstractAudioContext(Document*);
+ AbstractAudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);
+
+ void setContextState(AudioContextState);
+ virtual void didClose() {}
+ void uninitialize();
RefPtrWillBeMember<ScriptPromiseResolver> m_offlineResolver;
+
+ // FIXME(dominicc): Move m_resumeResolvers to AudioContext, because only
+ // it creates these Promises.
+ // Vector of promises created by resume(). It takes time to handle them, so we collect all of
+ // the promises here until they can be resolved or rejected.
+ WillBeHeapVector<RefPtrWillBeMember<ScriptPromiseResolver>> m_resumeResolvers;
private:
void initialize();
- void uninitialize();
// ExecutionContext calls stop twice.
// We'd like to schedule only one stop action for them.
@@ -278,16 +285,12 @@ private:
// this.
HeapVector<Member<AudioNode>> m_activeSourceNodes;
- // Stop rendering the audio graph.
- void stopRendering();
-
+ // FIXME(dominicc): Move these to AudioContext because only
+ // it creates these Promises.
// Handle Promises for resume() and suspend()
void resolvePromisesForResume();
void resolvePromisesForResumeOnMainThread();
- // Vector of promises created by resume(). It takes time to handle them, so we collect all of
- // the promises here until they can be resolved or rejected.
- WillBeHeapVector<RefPtrWillBeMember<ScriptPromiseResolver>> m_resumeResolvers;
void rejectPendingResolvers();
// True if we're in the process of resolving promises for resume(). Resolving can take some
@@ -303,17 +306,11 @@ private:
Member<AudioBuffer> m_renderTarget;
- bool m_isOfflineContext;
-
- // The state of the AudioContext.
+ // The state of the AbstractAudioContext.
AudioContextState m_contextState;
- void setContextState(AudioContextState);
AsyncAudioDecoder m_audioDecoder;
- // The Promise that is returned by close();
- RefPtrWillBeMember<ScriptPromiseResolver> m_closeResolver;
-
// Tries to handle AudioBufferSourceNodes that were started but became disconnected or was never
// connected. Because these never get pulled anymore, they will stay around forever. So if we
// can, try to stop them so they can be collected.
@@ -322,10 +319,8 @@ private:
// This is considering 32 is large enough for multiple channels audio.
// It is somewhat arbitrary and could be increased if necessary.
enum { MaxNumberOfChannels = 32 };
-
- unsigned m_contextId;
};
} // namespace blink
-#endif // AudioContext_h
+#endif // AbstractAudioContext_h
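
For context, here is a minimal, hypothetical sketch of how a concrete realtime subclass could satisfy the pure virtual hooks this patch introduces (hasRealtimeConstraint(), closeContext(), suspendContext(), resumeContext()) and take over the close() promise that the patch removes from the base class. It assumes the Blink types already included by this header; the class body, member names, and method placement are illustrative assumptions only, not the actual AudioContext/OfflineAudioContext split that eventually landed.

// Hypothetical sketch only -- not the header that landed in Chromium.
class AudioContext final : public AbstractAudioContext {
public:
    static AudioContext* create(Document&, ExceptionState&);

    // A realtime context renders against the audio hardware clock.
    bool hasRealtimeConstraint() final { return true; }

    ScriptPromise closeContext(ScriptState*) final;
    ScriptPromise suspendContext(ScriptState*) final;
    ScriptPromise resumeContext(ScriptState*) final;

    // A realtime context also counts as closed once close() has been
    // called, even before the audio hardware has actually stopped.
    bool isContextClosed() const final
    {
        return m_closeResolver || AbstractAudioContext::isContextClosed();
    }

private:
    explicit AudioContext(Document*);

    // The promise returned by close(); kept in the subclass because only
    // the realtime context resolves it (see the member removed above).
    RefPtrWillBeMember<ScriptPromiseResolver> m_closeResolver;
};

// An offline context would instead return false from hasRealtimeConstraint()
// and resolve m_offlineResolver when rendering of m_renderTarget completes.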