Chromium Code Reviews
| Index: Source/modules/webaudio/AudioContext.h |
| diff --git a/Source/modules/webaudio/AudioContext.h b/Source/modules/webaudio/AudioContext.h |
| index 51ac3ef2afa69a2e7a7fbfe4cde9b9b286344531..4e224fc9d354e0ebee45c7c3c0517f7d472f32d8 100644 |
| --- a/Source/modules/webaudio/AudioContext.h |
| +++ b/Source/modules/webaudio/AudioContext.h |
| @@ -29,6 +29,7 @@ |
| #include "core/dom/ActiveDOMObject.h" |
| #include "core/events/EventListener.h" |
| #include "core/events/EventTarget.h" |
| +#include "heap/Handle.h" |
| #include "platform/audio/AudioBus.h" |
| #include "modules/webaudio/AsyncAudioDecoder.h" |
| #include "modules/webaudio/AudioDestinationNode.h" |
| @@ -73,17 +74,19 @@ class WaveShaperNode; |
| // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it. |
| // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism. |
| -class AudioContext : public ActiveDOMObject, public ScriptWrappable, public ThreadSafeRefCounted<AudioContext>, public EventTargetWithInlineData { |
| - DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCounted<AudioContext>); |
| +class AudioContext : public ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>, public ActiveDOMObject, public ScriptWrappable, public EventTargetWithInlineData { |
|
haraken
2014/04/08 06:02:08
Just to confirm: The fact that this class is ThreadSafeRefCounted… [comment truncated in page extraction]
|
| + DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>); |
| public: |
| // Create an AudioContext for rendering to the audio hardware. |
| - static PassRefPtr<AudioContext> create(Document&, ExceptionState&); |
| + static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, ExceptionState&); |
| // Deprecated: create an AudioContext for offline (non-realtime) rendering. |
| - static PassRefPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); |
| + static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); |
| virtual ~AudioContext(); |
| + virtual void trace(Visitor*); |
| + |
| bool isInitialized() const; |
| // The constructor of an AudioNode must call this to initialize the context. |
| void lazyInitialize(); |
| @@ -103,8 +106,8 @@ public: |
| void incrementActiveSourceCount(); |
| void decrementActiveSourceCount(); |
| - PassRefPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); |
| - PassRefPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, ExceptionState&); |
| // Asynchronous audio file data decoding. |
| void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnPtr<AudioBufferCallback>, ExceptionState&); |
| @@ -112,29 +115,29 @@ public: |
| AudioListener* listener() { return m_listener.get(); } |
| // The AudioNode create methods are called on the main thread (from JavaScript). |
| - PassRefPtr<AudioBufferSourceNode> createBufferSource(); |
| - PassRefPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&); |
| - PassRefPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&); |
| - PassRefPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination(); |
| - PassRefPtr<GainNode> createGain(); |
| - PassRefPtr<BiquadFilterNode> createBiquadFilter(); |
| - PassRefPtr<WaveShaperNode> createWaveShaper(); |
| - PassRefPtr<DelayNode> createDelay(ExceptionState&); |
| - PassRefPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&); |
| - PassRefPtr<PannerNode> createPanner(); |
| - PassRefPtr<ConvolverNode> createConvolver(); |
| - PassRefPtr<DynamicsCompressorNode> createDynamicsCompressor(); |
| - PassRefPtr<AnalyserNode> createAnalyser(); |
| - PassRefPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&); |
| - PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&); |
| - PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&); |
| - PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&); |
| - PassRefPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&); |
| - PassRefPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&); |
| - PassRefPtr<ChannelMergerNode> createChannelMerger(ExceptionState&); |
| - PassRefPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&); |
| - PassRefPtr<OscillatorNode> createOscillator(); |
| - PassRefPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<AudioBufferSourceNode> createBufferSource(); |
| + PassRefPtrWillBeRawPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination(); |
| + PassRefPtrWillBeRawPtr<GainNode> createGain(); |
| + PassRefPtrWillBeRawPtr<BiquadFilterNode> createBiquadFilter(); |
| + PassRefPtrWillBeRawPtr<WaveShaperNode> createWaveShaper(); |
| + PassRefPtrWillBeRawPtr<DelayNode> createDelay(ExceptionState&); |
| + PassRefPtrWillBeRawPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<PannerNode> createPanner(); |
| + PassRefPtrWillBeRawPtr<ConvolverNode> createConvolver(); |
| + PassRefPtrWillBeRawPtr<DynamicsCompressorNode> createDynamicsCompressor(); |
| + PassRefPtrWillBeRawPtr<AnalyserNode> createAnalyser(); |
| + PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&); |
| + PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&); |
| + PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(ExceptionState&); |
| + PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&); |
| + PassRefPtrWillBeRawPtr<OscillatorNode> createOscillator(); |
| + PassRefPtrWillBeRawPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&); |
| // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it. |
| void notifyNodeFinishedProcessing(AudioNode*); |
| @@ -274,8 +277,8 @@ private: |
| // Make sure to dereference them here. |
| void derefUnfinishedSourceNodes(); |
| - RefPtr<AudioDestinationNode> m_destinationNode; |
| - RefPtr<AudioListener> m_listener; |
| + RefPtrWillBeMember<AudioDestinationNode> m_destinationNode; |
| + RefPtrWillBeMember<AudioListener> m_listener; |
| // Only accessed in the audio thread. |
| Vector<AudioNode*> m_finishedNodes; |
| @@ -319,7 +322,7 @@ private: |
| // Only accessed in the audio thread. |
| Vector<AudioNode*> m_deferredFinishDerefList; |
| - RefPtr<AudioBuffer> m_renderTarget; |
| + RefPtrWillBeMember<AudioBuffer> m_renderTarget; |
| bool m_isOfflineContext; |