Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2010, Google Inc. All rights reserved. | 2 * Copyright (C) 2010, Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
| 6 * are met: | 6 * are met: |
| 7 * 1. Redistributions of source code must retain the above copyright | 7 * 1. Redistributions of source code must retain the above copyright |
| 8 * notice, this list of conditions and the following disclaimer. | 8 * notice, this list of conditions and the following disclaimer. |
| 9 * 2. Redistributions in binary form must reproduce the above copyright | 9 * 2. Redistributions in binary form must reproduce the above copyright |
| 10 * notice, this list of conditions and the following disclaimer in the | 10 * notice, this list of conditions and the following disclaimer in the |
| (...skipping 11 matching lines...) | |
| 22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | 22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 23 */ | 23 */ |
| 24 | 24 |
| 25 #ifndef AudioContext_h | 25 #ifndef AudioContext_h |
| 26 #define AudioContext_h | 26 #define AudioContext_h |
| 27 | 27 |
| 28 #include "bindings/v8/ScriptWrappable.h" | 28 #include "bindings/v8/ScriptWrappable.h" |
| 29 #include "core/dom/ActiveDOMObject.h" | 29 #include "core/dom/ActiveDOMObject.h" |
| 30 #include "core/events/EventListener.h" | 30 #include "core/events/EventListener.h" |
| 31 #include "core/events/EventTarget.h" | 31 #include "core/events/EventTarget.h" |
| 32 #include "heap/Handle.h" | |
| 32 #include "platform/audio/AudioBus.h" | 33 #include "platform/audio/AudioBus.h" |
| 33 #include "modules/webaudio/AsyncAudioDecoder.h" | 34 #include "modules/webaudio/AsyncAudioDecoder.h" |
| 34 #include "modules/webaudio/AudioDestinationNode.h" | 35 #include "modules/webaudio/AudioDestinationNode.h" |
| 35 #include "wtf/HashSet.h" | 36 #include "wtf/HashSet.h" |
| 36 #include "wtf/MainThread.h" | 37 #include "wtf/MainThread.h" |
| 37 #include "wtf/OwnPtr.h" | 38 #include "wtf/OwnPtr.h" |
| 38 #include "wtf/PassRefPtr.h" | 39 #include "wtf/PassRefPtr.h" |
| 39 #include "wtf/RefCounted.h" | 40 #include "wtf/RefCounted.h" |
| 40 #include "wtf/RefPtr.h" | 41 #include "wtf/RefPtr.h" |
| 41 #include "wtf/ThreadSafeRefCounted.h" | 42 #include "wtf/ThreadSafeRefCounted.h" |
| (...skipping 24 matching lines...) | |
| 66 class MediaStreamAudioSourceNode; | 67 class MediaStreamAudioSourceNode; |
| 67 class OscillatorNode; | 68 class OscillatorNode; |
| 68 class PannerNode; | 69 class PannerNode; |
| 69 class PeriodicWave; | 70 class PeriodicWave; |
| 70 class ScriptProcessorNode; | 71 class ScriptProcessorNode; |
| 71 class WaveShaperNode; | 72 class WaveShaperNode; |
| 72 | 73 |
| 73 // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it. | 74 // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it. |
| 74 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism. | 75 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism. |
| 75 | 76 |
| 76 class AudioContext : public ActiveDOMObject, public ScriptWrappable, public ThreadSafeRefCounted<AudioContext>, public EventTargetWithInlineData { | 77 class AudioContext : public ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>, public ActiveDOMObject, public ScriptWrappable, public EventTargetWithInlineData { |
| 77 DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCounted<AudioContext>); | 78 DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>); |
| 78 public: | 79 public: |
| 79 // Create an AudioContext for rendering to the audio hardware. | 80 // Create an AudioContext for rendering to the audio hardware. |
| 80 static PassRefPtr<AudioContext> create(Document&, ExceptionState&); | 81 static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, ExceptionState&); |
| 81 | 82 |
| 82 // Deprecated: create an AudioContext for offline (non-realtime) rendering. | 83 // Deprecated: create an AudioContext for offline (non-realtime) rendering. |
| 83 static PassRefPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); | 84 static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); |
| 84 | 85 |
| 85 virtual ~AudioContext(); | 86 virtual ~AudioContext(); |
| 86 | 87 |
| | 88 virtual void trace(Visitor*); |
| | 89 |
| 87 bool isInitialized() const; | 90 bool isInitialized() const; |
| 88 | 91 |
| 89 bool isOfflineContext() { return m_isOfflineContext; } | 92 bool isOfflineContext() { return m_isOfflineContext; } |
| 90 | 93 |
| 91 // Document notification | 94 // Document notification |
| 92 virtual void stop() OVERRIDE FINAL; | 95 virtual void stop() OVERRIDE FINAL; |
| 93 virtual bool hasPendingActivity() const OVERRIDE; | 96 virtual bool hasPendingActivity() const OVERRIDE; |
| 94 | 97 |
| 95 AudioDestinationNode* destination() { return m_destinationNode.get(); } | 98 AudioDestinationNode* destination() { return m_destinationNode.get(); } |
| 96 size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); } | 99 size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); } |
| 97 double currentTime() const { return m_destinationNode->currentTime(); } | 100 double currentTime() const { return m_destinationNode->currentTime(); } |
| 98 float sampleRate() const { return m_destinationNode->sampleRate(); } | 101 float sampleRate() const { return m_destinationNode->sampleRate(); } |
| 99 unsigned long activeSourceCount() const { return static_cast<unsigned long>(m_activeSourceCount); } | 102 unsigned long activeSourceCount() const { return static_cast<unsigned long>(m_activeSourceCount); } |
| 100 | 103 |
| 101 void incrementActiveSourceCount(); | 104 void incrementActiveSourceCount(); |
| 102 void decrementActiveSourceCount(); | 105 void decrementActiveSourceCount(); |
| 103 | 106 |
| 104 PassRefPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); | 107 PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); |
| 105 PassRefPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, ExceptionState&); | 108 PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, ExceptionState&); |
| 106 | 109 |
| 107 // Asynchronous audio file data decoding. | 110 // Asynchronous audio file data decoding. |
| 108 void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnPtr<AudioBufferCallback>, ExceptionState&); | 111 void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnPtr<AudioBufferCallback>, ExceptionState&); |
| 109 | 112 |
| 110 AudioListener* listener() { return m_listener.get(); } | 113 AudioListener* listener() { return m_listener.get(); } |
| 111 | 114 |
| 112 // The AudioNode create methods are called on the main thread (from JavaScript). | 115 // The AudioNode create methods are called on the main thread (from JavaScript). |
| 113 PassRefPtr<AudioBufferSourceNode> createBufferSource(); | 116 PassRefPtrWillBeRawPtr<AudioBufferSourceNode> createBufferSource(); |
| 114 PassRefPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&); | 117 PassRefPtrWillBeRawPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&); |
| 115 PassRefPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&); | 118 PassRefPtrWillBeRawPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&); |
| 116 PassRefPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination(); | 119 PassRefPtrWillBeRawPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination(); |
| 117 PassRefPtr<GainNode> createGain(); | 120 PassRefPtrWillBeRawPtr<GainNode> createGain(); |
| 118 PassRefPtr<BiquadFilterNode> createBiquadFilter(); | 121 PassRefPtrWillBeRawPtr<BiquadFilterNode> createBiquadFilter(); |
| 119 PassRefPtr<WaveShaperNode> createWaveShaper(); | 122 PassRefPtrWillBeRawPtr<WaveShaperNode> createWaveShaper(); |
| 120 PassRefPtr<DelayNode> createDelay(ExceptionState&); | 123 PassRefPtrWillBeRawPtr<DelayNode> createDelay(ExceptionState&); |
| 121 PassRefPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&); | 124 PassRefPtrWillBeRawPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&); |
| 122 PassRefPtr<PannerNode> createPanner(); | 125 PassRefPtrWillBeRawPtr<PannerNode> createPanner(); |
| 123 PassRefPtr<ConvolverNode> createConvolver(); | 126 PassRefPtrWillBeRawPtr<ConvolverNode> createConvolver(); |
| 124 PassRefPtr<DynamicsCompressorNode> createDynamicsCompressor(); | 127 PassRefPtrWillBeRawPtr<DynamicsCompressorNode> createDynamicsCompressor(); |
| 125 PassRefPtr<AnalyserNode> createAnalyser(); | 128 PassRefPtrWillBeRawPtr<AnalyserNode> createAnalyser(); |
| 126 PassRefPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&); | 129 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&); |
| 127 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&); | 130 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&); |
| 128 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&); | 131 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&); |
| 129 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&); | 132 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&); |
| 130 PassRefPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&); | 133 PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&); |
| 131 PassRefPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&); | 134 PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&); |
| 132 PassRefPtr<ChannelMergerNode> createChannelMerger(ExceptionState&); | 135 PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(ExceptionState&); |
| 133 PassRefPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&); | 136 PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&); |
| 134 PassRefPtr<OscillatorNode> createOscillator(); | 137 PassRefPtrWillBeRawPtr<OscillatorNode> createOscillator(); |
| 135 PassRefPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&); | 138 PassRefPtrWillBeRawPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&); |
| 136 | 139 |
| 137 // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it. | 140 // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it. |
| 138 void notifyNodeFinishedProcessing(AudioNode*); | 141 void notifyNodeFinishedProcessing(AudioNode*); |
| 139 | 142 |
| 140 // Called at the start of each render quantum. | 143 // Called at the start of each render quantum. |
| 141 void handlePreRenderTasks(); | 144 void handlePreRenderTasks(); |
| 142 | 145 |
| 143 // Called at the end of each render quantum. | 146 // Called at the end of each render quantum. |
| 144 void handlePostRenderTasks(); | 147 void handlePostRenderTasks(); |
| 145 | 148 |
| (...skipping 119 matching lines...) | |
| 265 // In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode. | 268 // In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode. |
| 266 // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is | 269 // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is |
| 267 // uniquely connected to. See the AudioNode::ref() and AudioNode::deref() methods for more details. | 270 // uniquely connected to. See the AudioNode::ref() and AudioNode::deref() methods for more details. |
| 268 void refNode(AudioNode*); | 271 void refNode(AudioNode*); |
| 269 void derefNode(AudioNode*); | 272 void derefNode(AudioNode*); |
| 270 | 273 |
| 271 // When the context goes away, there might still be some sources which haven't finished playing. | 274 // When the context goes away, there might still be some sources which haven't finished playing. |
| 272 // Make sure to dereference them here. | 275 // Make sure to dereference them here. |
| 273 void derefUnfinishedSourceNodes(); | 276 void derefUnfinishedSourceNodes(); |
| 274 | 277 |
| 275 RefPtr<AudioDestinationNode> m_destinationNode; | 278 RefPtrWillBeRawPtr<AudioDestinationNode> m_destinationNode; |
| Review comments: Mads Ager (chromium), 2014/03/27 11:06:49: "This should be RefPtrWillBeMember and it should be..." / haraken, 2014/03/27 11:44:05: "Shouldn't this be a RefPtrWillBeMember? If no, I w..." / keishi, 2014/04/03 06:53:19: "Done." | |
| 276 RefPtr<AudioListener> m_listener; | 279 RefPtrWillBeMember<AudioListener> m_listener; |
| 277 | 280 |
| 278 // Only accessed in the audio thread. | 281 // Only accessed in the audio thread. |
| 279 Vector<AudioNode*> m_finishedNodes; | 282 Vector<AudioNode*> m_finishedNodes; |
| 280 | 283 |
| 281 // We don't use RefPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation | 284 // We don't use RefPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation |
| 282 // with an optional argument for refType. We need to use the special refType: RefTypeConnection | 285 // with an optional argument for refType. We need to use the special refType: RefTypeConnection |
| 283 // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished. | 286 // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished. |
| 284 Vector<AudioNode*> m_referencedNodes; | 287 Vector<AudioNode*> m_referencedNodes; |
| Review comments: haraken, 2014/03/27 11:44:05: "AudioContext has a bunch of collections of AudioNo..." / keishi, 2014/04/08 02:33:40: "I think this is safe because this keeps a set of A..." | |
| 285 | 288 |
| 286 // Accumulate nodes which need to be deleted here. | 289 // Accumulate nodes which need to be deleted here. |
| 287 // This is copied to m_nodesToDelete at the end of a render cycle in handlePostRenderTasks(), where we're assured of a stable graph | 290 // This is copied to m_nodesToDelete at the end of a render cycle in handlePostRenderTasks(), where we're assured of a stable graph |
| 288 // state which will have no references to any of the nodes in m_nodesToDelete once the context lock is released | 291 // state which will have no references to any of the nodes in m_nodesToDelete once the context lock is released |
| 289 // (when handlePostRenderTasks() has completed). | 292 // (when handlePostRenderTasks() has completed). |
| 290 Vector<AudioNode*> m_nodesMarkedForDeletion; | 293 Vector<AudioNode*> m_nodesMarkedForDeletion; |
| 291 | 294 |
| 292 // They will be scheduled for deletion (on the main thread) at the end of a render cycle (in realtime thread). | 295 // They will be scheduled for deletion (on the main thread) at the end of a render cycle (in realtime thread). |
| 293 Vector<AudioNode*> m_nodesToDelete; | 296 Vector<AudioNode*> m_nodesToDelete; |
| 294 bool m_isDeletionScheduled; | 297 bool m_isDeletionScheduled; |
| (...skipping 15 matching lines...) | |
| 310 unsigned m_connectionCount; | 313 unsigned m_connectionCount; |
| 311 | 314 |
| 312 // Graph locking. | 315 // Graph locking. |
| 313 Mutex m_contextGraphMutex; | 316 Mutex m_contextGraphMutex; |
| 314 volatile ThreadIdentifier m_audioThread; | 317 volatile ThreadIdentifier m_audioThread; |
| 315 volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier | 318 volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier |
| 316 | 319 |
| 317 // Only accessed in the audio thread. | 320 // Only accessed in the audio thread. |
| 318 Vector<AudioNode*> m_deferredFinishDerefList; | 321 Vector<AudioNode*> m_deferredFinishDerefList; |
| 319 | 322 |
| 320 RefPtr<AudioBuffer> m_renderTarget; | 323 RefPtrWillBeMember<AudioBuffer> m_renderTarget; |
| 321 | 324 |
| 322 bool m_isOfflineContext; | 325 bool m_isOfflineContext; |
| 323 | 326 |
| 324 AsyncAudioDecoder m_audioDecoder; | 327 AsyncAudioDecoder m_audioDecoder; |
| 325 | 328 |
| 326 // This is considering 32 is large enough for multiple channels audio. | 329 // This is considering 32 is large enough for multiple channels audio. |
| 327 // It is somewhat arbitrary and could be increased if necessary. | 330 // It is somewhat arbitrary and could be increased if necessary. |
| 328 enum { MaxNumberOfChannels = 32 }; | 331 enum { MaxNumberOfChannels = 32 }; |
| 329 | 332 |
| 330 // Number of AudioBufferSourceNodes that are active (playing). | 333 // Number of AudioBufferSourceNodes that are active (playing). |
| 331 int m_activeSourceCount; | 334 int m_activeSourceCount; |
| 332 }; | 335 }; |
| 333 | 336 |
| 334 } // WebCore | 337 } // WebCore |
| 335 | 338 |
| 336 #endif // AudioContext_h | 339 #endif // AudioContext_h |
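The substance of this patch is the move from plain ThreadSafeRefCounted/RefPtr lifetime management to the transitional Oilpan "WillBe" types plus a trace(Visitor*) method, which is what the comment thread about RefPtrWillBeMember is driving at. The mock below is only a self-contained sketch of that idea, not the Blink definitions (those live behind the newly included heap/Handle.h): one alias that resolves to shared ownership before Oilpan and to a bare, traced handle after, with trace() telling the collector which members to keep alive. Every name ending in Mock, and the ENABLE_OILPAN macro, are invented for illustration.

```cpp
// Illustrative mock, not Blink code.
#include <cstdio>
#include <memory>
#include <vector>

#define ENABLE_OILPAN 1 // flip to 0 for the pre-Oilpan, ref-counted flavour

struct Visitor; // marks reachable objects during a GC pass

struct GarbageCollectedMock {
    virtual ~GarbageCollectedMock() = default;
    virtual void trace(Visitor*) {} // subclasses mark their traced members here
};

struct Visitor {
    std::vector<GarbageCollectedMock*> marked;
    void trace(GarbageCollectedMock* object)
    {
        if (!object)
            return;
        marked.push_back(object); // keep this object alive,
        object->trace(this);      // and everything it points to
    }
};

#if ENABLE_OILPAN
// After the transition: a bare handle that the GC discovers via trace().
template <typename T> using RefPtrWillBeMemberMock = T*;
#else
// Before the transition: shared ownership, standing in for WTF::RefPtr.
template <typename T> using RefPtrWillBeMemberMock = std::shared_ptr<T>;
#endif

struct AudioListenerMock : GarbageCollectedMock {};

struct AudioContextMock : GarbageCollectedMock {
    RefPtrWillBeMemberMock<AudioListenerMock> m_listener {};

    void trace(Visitor* visitor) override
    {
        (void)visitor; // unused in the ref-counted build
#if ENABLE_OILPAN
        visitor->trace(m_listener); // what the reviewers ask for: trace every Member
#endif
    }
};

int main()
{
    AudioListenerMock listener;
    AudioContextMock context;
#if ENABLE_OILPAN
    context.m_listener = &listener; // stays alive only because trace() reports it
#else
    context.m_listener = std::make_shared<AudioListenerMock>(); // ref count keeps it alive
#endif
    Visitor visitor;
    visitor.trace(&context);
    std::printf("marked %zu object(s)\n", visitor.marked.size());
    return 0;
}
```

Flipping ENABLE_OILPAN to 0 shows why the untraced build still works: the shared_ptr keeps the listener alive on its own, so trace() has nothing to report.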
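The class comment and the m_contextGraphMutex / m_audioThread / m_graphOwnerThread members describe a rendering-graph lock shared between the main thread and the real-time audio thread. The sketch below shows the usual discipline for such a lock with standard C++ primitives and invented names; it is an assumption about the pattern, not the WTF::Mutex implementation. The main thread may block while it mutates the graph, but the audio callback only tries the lock once per render quantum and keeps rendering with the old topology if it loses the race, so the real-time thread never blocks.

```cpp
// Sketch of the locking discipline; names and timings are illustrative.
#include <atomic>
#include <chrono>
#include <cstdio>
#include <mutex>
#include <thread>

class GraphLock {
public:
    // Main thread: may block until the graph is safe to mutate.
    void lock() { m_mutex.lock(); }
    void unlock() { m_mutex.unlock(); }

    // Audio thread: must never block inside the real-time callback,
    // so it only *tries* to take the lock once per render quantum.
    bool tryLock() { return m_mutex.try_lock(); }

private:
    std::mutex m_mutex;
};

int main()
{
    GraphLock graphLock;
    std::atomic<bool> running { true };
    std::atomic<int> skippedQuanta { 0 };

    // Stand-in for the real-time audio callback, one iteration per render quantum.
    std::thread audioThread([&] {
        while (running.load()) {
            if (graphLock.tryLock()) {
                // Safe to apply deferred graph changes, then render.
                graphLock.unlock();
            } else {
                // Main thread owns the graph; render with the old topology
                // instead of blocking and glitching the output.
                skippedQuanta.fetch_add(1);
            }
            std::this_thread::sleep_for(std::chrono::microseconds(100));
        }
    });

    // Stand-in for main-thread JavaScript calls like createGain() or connect().
    for (int i = 0; i < 200; ++i) {
        std::lock_guard<GraphLock> hold(graphLock);
        std::this_thread::sleep_for(std::chrono::microseconds(50)); // mutate the graph
    }

    running.store(false);
    audioThread.join();
    std::printf("audio thread skipped %d quanta while the graph was locked\n",
                skippedQuanta.load());
    return 0;
}
```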
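m_nodesMarkedForDeletion and m_nodesToDelete, together with the handlePostRenderTasks() comment, describe a two-stage teardown: nodes are only marked while rendering, handed over in a batch at the end of the render quantum when the graph is stable, and actually freed later on the main thread. The sketch below shows that hand-off with invented mock types; it assumes plain unique ownership where the real code uses the AudioNode ref/deref machinery.

```cpp
// Sketch of the two-stage deletion described in the comments; not Blink code.
#include <cstddef>
#include <cstdio>
#include <memory>
#include <mutex>
#include <utility>
#include <vector>

struct NodeMock { int id; };

class DeletionQueueMock {
public:
    // Audio thread, while the graph lock is held: just remember the node.
    void markForDeletion(std::unique_ptr<NodeMock> node)
    {
        m_nodesMarkedForDeletion.push_back(std::move(node));
    }

    // End of a render quantum (handlePostRenderTasks() in the header): the
    // graph is stable, so hand the batch over for main-thread deletion.
    void handlePostRenderTasks()
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        for (auto& node : m_nodesMarkedForDeletion)
            m_nodesToDelete.push_back(std::move(node));
        m_nodesMarkedForDeletion.clear();
    }

    // Main thread: actually free the nodes, away from the real-time path.
    std::size_t deleteMarkedNodes()
    {
        std::vector<std::unique_ptr<NodeMock>> doomed;
        {
            std::lock_guard<std::mutex> lock(m_mutex);
            doomed.swap(m_nodesToDelete);
        }
        const std::size_t count = doomed.size();
        doomed.clear(); // unique_ptr destructors run here, on the main thread
        return count;
    }

private:
    std::mutex m_mutex;
    std::vector<std::unique_ptr<NodeMock>> m_nodesMarkedForDeletion; // audio thread only
    std::vector<std::unique_ptr<NodeMock>> m_nodesToDelete;          // shared, guarded
};

int main()
{
    DeletionQueueMock queue;
    queue.markForDeletion(std::make_unique<NodeMock>(NodeMock { 1 }));
    queue.markForDeletion(std::make_unique<NodeMock>(NodeMock { 2 }));
    queue.handlePostRenderTasks();
    std::printf("deleted %zu node(s) on the main thread\n", queue.deleteMarkedNodes());
    return 0;
}
```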