Chromium Code Reviews

Unified Diff: Source/modules/webaudio/AudioContext.h

Issue 205173002: Move webaudio to oilpan (Closed)
Base URL: svn://svn.chromium.org/blink/trunk
Patch Set: WIP (created 6 years, 9 months ago)
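
Note on the transition types used throughout this patch: the PassRefPtrWillBeRawPtr / RefPtrWillBeMember / ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected aliases come from heap/Handle.h (included below) and let the same header compile in both the reference-counted and the Oilpan (garbage-collected) configurations. A simplified sketch of how such aliases behave, for orientation only; the real definitions in heap/Handle.h may differ in detail:

// Simplified sketch (assumption, not the actual heap/Handle.h definitions).
#if ENABLE(OILPAN)
#define PassRefPtrWillBeRawPtr RawPtr       // factory functions hand out bare GC pointers
#define RefPtrWillBeRawPtr     RawPtr
#define RefPtrWillBeMember     Member       // data members become traced handles
#else
#define PassRefPtrWillBeRawPtr PassRefPtr   // pre-Oilpan builds keep reference counting
#define RefPtrWillBeRawPtr     RefPtr
#define RefPtrWillBeMember     RefPtr
#endif
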
Index: Source/modules/webaudio/AudioContext.h
diff --git a/Source/modules/webaudio/AudioContext.h b/Source/modules/webaudio/AudioContext.h
index 5fe579dfa8adf1334405816f859f0b20c6dfdb90..453487f3573588289cedfb6206a8781085a9d6b3 100644
--- a/Source/modules/webaudio/AudioContext.h
+++ b/Source/modules/webaudio/AudioContext.h
@@ -29,6 +29,7 @@
#include "core/dom/ActiveDOMObject.h"
#include "core/events/EventListener.h"
#include "core/events/EventTarget.h"
+#include "heap/Handle.h"
#include "platform/audio/AudioBus.h"
#include "modules/webaudio/AsyncAudioDecoder.h"
#include "modules/webaudio/AudioDestinationNode.h"
@@ -73,17 +74,19 @@ class WaveShaperNode;
// AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
// For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.
-class AudioContext : public ActiveDOMObject, public ScriptWrappable, public ThreadSafeRefCounted<AudioContext>, public EventTargetWithInlineData {
- DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCounted<AudioContext>);
+class AudioContext : public ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>, public ActiveDOMObject, public ScriptWrappable, public EventTargetWithInlineData {
+ DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>);
public:
// Create an AudioContext for rendering to the audio hardware.
- static PassRefPtr<AudioContext> create(Document&, ExceptionState&);
+ static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, ExceptionState&);
// Deprecated: create an AudioContext for offline (non-realtime) rendering.
- static PassRefPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
+ static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
virtual ~AudioContext();
+ virtual void trace(Visitor*);
+
bool isInitialized() const;
bool isOfflineContext() { return m_isOfflineContext; }
@@ -101,8 +104,8 @@ public:
void incrementActiveSourceCount();
void decrementActiveSourceCount();
- PassRefPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
- PassRefPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, ExceptionState&);
+ PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
+ PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, ExceptionState&);
// Asynchronous audio file data decoding.
void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnPtr<AudioBufferCallback>, ExceptionState&);
@@ -110,29 +113,29 @@ public:
AudioListener* listener() { return m_listener.get(); }
// The AudioNode create methods are called on the main thread (from JavaScript).
- PassRefPtr<AudioBufferSourceNode> createBufferSource();
- PassRefPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&);
- PassRefPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&);
- PassRefPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination();
- PassRefPtr<GainNode> createGain();
- PassRefPtr<BiquadFilterNode> createBiquadFilter();
- PassRefPtr<WaveShaperNode> createWaveShaper();
- PassRefPtr<DelayNode> createDelay(ExceptionState&);
- PassRefPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&);
- PassRefPtr<PannerNode> createPanner();
- PassRefPtr<ConvolverNode> createConvolver();
- PassRefPtr<DynamicsCompressorNode> createDynamicsCompressor();
- PassRefPtr<AnalyserNode> createAnalyser();
- PassRefPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&);
- PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&);
- PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&);
- PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&);
- PassRefPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&);
- PassRefPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&);
- PassRefPtr<ChannelMergerNode> createChannelMerger(ExceptionState&);
- PassRefPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&);
- PassRefPtr<OscillatorNode> createOscillator();
- PassRefPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&);
+ PassRefPtrWillBeRawPtr<AudioBufferSourceNode> createBufferSource();
+ PassRefPtrWillBeRawPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&);
+ PassRefPtrWillBeRawPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&);
+ PassRefPtrWillBeRawPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination();
+ PassRefPtrWillBeRawPtr<GainNode> createGain();
+ PassRefPtrWillBeRawPtr<BiquadFilterNode> createBiquadFilter();
+ PassRefPtrWillBeRawPtr<WaveShaperNode> createWaveShaper();
+ PassRefPtrWillBeRawPtr<DelayNode> createDelay(ExceptionState&);
+ PassRefPtrWillBeRawPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&);
+ PassRefPtrWillBeRawPtr<PannerNode> createPanner();
+ PassRefPtrWillBeRawPtr<ConvolverNode> createConvolver();
+ PassRefPtrWillBeRawPtr<DynamicsCompressorNode> createDynamicsCompressor();
+ PassRefPtrWillBeRawPtr<AnalyserNode> createAnalyser();
+ PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&);
+ PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&);
+ PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&);
+ PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&);
+ PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&);
+ PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&);
+ PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(ExceptionState&);
+ PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&);
+ PassRefPtrWillBeRawPtr<OscillatorNode> createOscillator();
+ PassRefPtrWillBeRawPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&);
// When a source node has no more processing to do (has finished playing), then it tells the context to dereference it.
void notifyNodeFinishedProcessing(AudioNode*);
@@ -272,8 +275,8 @@ private:
// Make sure to dereference them here.
void derefUnfinishedSourceNodes();
- RefPtr<AudioDestinationNode> m_destinationNode;
- RefPtr<AudioListener> m_listener;
+ RefPtrWillBeRawPtr<AudioDestinationNode> m_destinationNode;
Mads Ager (chromium) 2014/03/27 11:06:49 This should be RefPtrWillBeMember and it should be
haraken 2014/03/27 11:44:05 Shouldn't this be a RefPtrWillBeMember? If no, I w
keishi 2014/04/03 06:53:19 Done.
+ RefPtrWillBeMember<AudioListener> m_listener;
// Only accessed in the audio thread.
Vector<AudioNode*> m_finishedNodes;
@@ -317,7 +320,7 @@ private:
// Only accessed in the audio thread.
Vector<AudioNode*> m_deferredFinishDerefList;
- RefPtr<AudioBuffer> m_renderTarget;
+ RefPtrWillBeMember<AudioBuffer> m_renderTarget;
bool m_isOfflineContext;

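Following the inline comments above (members that become RefPtrWillBeMember need to be traced), a minimal sketch of what the matching trace() implementation in AudioContext.cpp might look like, based only on the members visible in this diff; the actual patch may trace additional fields and base classes:

void AudioContext::trace(Visitor* visitor)
{
    // Visit every member that moves from RefPtr to RefPtrWillBeMember so
    // Oilpan keeps the referenced objects alive through this AudioContext.
    visitor->trace(m_destinationNode); // per the review thread, this should also become RefPtrWillBeMember
    visitor->trace(m_listener);
    visitor->trace(m_renderTarget);
    // Tracing of base classes (e.g. EventTargetWithInlineData) may also be
    // required; it is not visible in this diff, so it is omitted here.
}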