Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(128)

Side by Side Diff: Source/modules/webaudio/AudioContext.h

Issue 205173002: Move webaudio to oilpan (Closed) Base URL: svn://svn.chromium.org/blink/trunk
Patch Set: Fixed Created 6 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 /* 1 /*
2 * Copyright (C) 2010, Google Inc. All rights reserved. 2 * Copyright (C) 2010, Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions 5 * modification, are permitted provided that the following conditions
6 * are met: 6 * are met:
7 * 1. Redistributions of source code must retain the above copyright 7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer. 8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright 9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the 10 * notice, this list of conditions and the following disclaimer in the
(...skipping 11 matching lines...) Expand all
22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23 */ 23 */
24 24
25 #ifndef AudioContext_h 25 #ifndef AudioContext_h
26 #define AudioContext_h 26 #define AudioContext_h
27 27
28 #include "bindings/v8/ScriptWrappable.h" 28 #include "bindings/v8/ScriptWrappable.h"
29 #include "core/dom/ActiveDOMObject.h" 29 #include "core/dom/ActiveDOMObject.h"
30 #include "core/events/EventListener.h" 30 #include "core/events/EventListener.h"
31 #include "core/events/EventTarget.h" 31 #include "core/events/EventTarget.h"
32 #include "heap/Handle.h"
32 #include "platform/audio/AudioBus.h" 33 #include "platform/audio/AudioBus.h"
33 #include "modules/webaudio/AsyncAudioDecoder.h" 34 #include "modules/webaudio/AsyncAudioDecoder.h"
34 #include "modules/webaudio/AudioDestinationNode.h" 35 #include "modules/webaudio/AudioDestinationNode.h"
35 #include "wtf/HashSet.h" 36 #include "wtf/HashSet.h"
36 #include "wtf/MainThread.h" 37 #include "wtf/MainThread.h"
37 #include "wtf/OwnPtr.h" 38 #include "wtf/OwnPtr.h"
38 #include "wtf/PassRefPtr.h" 39 #include "wtf/PassRefPtr.h"
39 #include "wtf/RefCounted.h" 40 #include "wtf/RefCounted.h"
40 #include "wtf/RefPtr.h" 41 #include "wtf/RefPtr.h"
41 #include "wtf/ThreadSafeRefCounted.h" 42 #include "wtf/ThreadSafeRefCounted.h"
(...skipping 24 matching lines...) Expand all
66 class MediaStreamAudioSourceNode; 67 class MediaStreamAudioSourceNode;
67 class OscillatorNode; 68 class OscillatorNode;
68 class PannerNode; 69 class PannerNode;
69 class PeriodicWave; 70 class PeriodicWave;
70 class ScriptProcessorNode; 71 class ScriptProcessorNode;
71 class WaveShaperNode; 72 class WaveShaperNode;
72 73
73 // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it. 74 // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
74 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism. 75 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.
75 76
76 class AudioContext : public ActiveDOMObject, public ScriptWrappable, public ThreadSafeRefCounted<AudioContext>, public EventTargetWithInlineData { 77 class AudioContext : public ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>, public ActiveDOMObject, public ScriptWrappable, public EventTargetWithInlineData {
haraken 2014/04/08 06:02:08 Just to confirm: The fact that this class is Threa
77 DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCounted<AudioContext>); 78 DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>);
78 public: 79 public:
79 // Create an AudioContext for rendering to the audio hardware. 80 // Create an AudioContext for rendering to the audio hardware.
80 static PassRefPtr<AudioContext> create(Document&, ExceptionState&); 81 static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, ExceptionState&);
81 82
82 // Deprecated: create an AudioContext for offline (non-realtime) rendering. 83 // Deprecated: create an AudioContext for offline (non-realtime) rendering.
83 static PassRefPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); 84 static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
84 85
85 virtual ~AudioContext(); 86 virtual ~AudioContext();
86 87
88 virtual void trace(Visitor*);
89
87 bool isInitialized() const; 90 bool isInitialized() const;
88 // The constructor of an AudioNode must call this to initialize the context. 91 // The constructor of an AudioNode must call this to initialize the context.
89 void lazyInitialize(); 92 void lazyInitialize();
90 93
91 bool isOfflineContext() { return m_isOfflineContext; } 94 bool isOfflineContext() { return m_isOfflineContext; }
92 95
93 // Document notification 96 // Document notification
94 virtual void stop() OVERRIDE FINAL; 97 virtual void stop() OVERRIDE FINAL;
95 virtual bool hasPendingActivity() const OVERRIDE; 98 virtual bool hasPendingActivity() const OVERRIDE;
96 99
97 AudioDestinationNode* destination() { return m_destinationNode.get(); } 100 AudioDestinationNode* destination() { return m_destinationNode.get(); }
98 size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); } 101 size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); }
99 double currentTime() const { return m_destinationNode->currentTime(); } 102 double currentTime() const { return m_destinationNode->currentTime(); }
100 float sampleRate() const { return m_destinationNode->sampleRate(); } 103 float sampleRate() const { return m_destinationNode->sampleRate(); }
101 unsigned long activeSourceCount() const { return static_cast<unsigned long>(m_activeSourceCount); } 104 unsigned long activeSourceCount() const { return static_cast<unsigned long>(m_activeSourceCount); }
102 105
103 void incrementActiveSourceCount(); 106 void incrementActiveSourceCount();
104 void decrementActiveSourceCount(); 107 void decrementActiveSourceCount();
105 108
106 PassRefPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); 109 PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
107 PassRefPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, ExceptionState&); 110 PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, ExceptionState&);
108 111
109 // Asynchronous audio file data decoding. 112 // Asynchronous audio file data decoding.
110 void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnPtr<AudioBufferCallback>, ExceptionState&); 113 void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnPtr<AudioBufferCallback>, ExceptionState&);
111 114
112 AudioListener* listener() { return m_listener.get(); } 115 AudioListener* listener() { return m_listener.get(); }
113 116
114 // The AudioNode create methods are called on the main thread (from JavaScript). 117 // The AudioNode create methods are called on the main thread (from JavaScript).
115 PassRefPtr<AudioBufferSourceNode> createBufferSource(); 118 PassRefPtrWillBeRawPtr<AudioBufferSourceNode> createBufferSource();
116 PassRefPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&); 119 PassRefPtrWillBeRawPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&);
117 PassRefPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&); 120 PassRefPtrWillBeRawPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&);
118 PassRefPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination(); 121 PassRefPtrWillBeRawPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination();
119 PassRefPtr<GainNode> createGain(); 122 PassRefPtrWillBeRawPtr<GainNode> createGain();
120 PassRefPtr<BiquadFilterNode> createBiquadFilter(); 123 PassRefPtrWillBeRawPtr<BiquadFilterNode> createBiquadFilter();
121 PassRefPtr<WaveShaperNode> createWaveShaper(); 124 PassRefPtrWillBeRawPtr<WaveShaperNode> createWaveShaper();
122 PassRefPtr<DelayNode> createDelay(ExceptionState&); 125 PassRefPtrWillBeRawPtr<DelayNode> createDelay(ExceptionState&);
123 PassRefPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&); 126 PassRefPtrWillBeRawPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&);
124 PassRefPtr<PannerNode> createPanner(); 127 PassRefPtrWillBeRawPtr<PannerNode> createPanner();
125 PassRefPtr<ConvolverNode> createConvolver(); 128 PassRefPtrWillBeRawPtr<ConvolverNode> createConvolver();
126 PassRefPtr<DynamicsCompressorNode> createDynamicsCompressor(); 129 PassRefPtrWillBeRawPtr<DynamicsCompressorNode> createDynamicsCompressor();
127 PassRefPtr<AnalyserNode> createAnalyser(); 130 PassRefPtrWillBeRawPtr<AnalyserNode> createAnalyser();
128 PassRefPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&); 131 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&);
129 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&); 132 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&);
130 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&); 133 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&);
131 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&); 134 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&);
132 PassRefPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&); 135 PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&);
133 PassRefPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&); 136 PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&);
134 PassRefPtr<ChannelMergerNode> createChannelMerger(ExceptionState&); 137 PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(ExceptionState&);
135 PassRefPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&); 138 PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&);
136 PassRefPtr<OscillatorNode> createOscillator(); 139 PassRefPtrWillBeRawPtr<OscillatorNode> createOscillator();
137 PassRefPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&); 140 PassRefPtrWillBeRawPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&);
138 141
139 // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it. 142 // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it.
140 void notifyNodeFinishedProcessing(AudioNode*); 143 void notifyNodeFinishedProcessing(AudioNode*);
141 144
142 // Called at the start of each render quantum. 145 // Called at the start of each render quantum.
143 void handlePreRenderTasks(); 146 void handlePreRenderTasks();
144 147
145 // Called at the end of each render quantum. 148 // Called at the end of each render quantum.
146 void handlePostRenderTasks(); 149 void handlePostRenderTasks();
147 150
(...skipping 119 matching lines...) Expand 10 before | Expand all | Expand 10 after
267 // In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode. 270 // In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode.
268 // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is 271 // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is
269 // uniquely connected to. See the AudioNode::ref() and AudioNode::deref() methods for more details. 272 // uniquely connected to. See the AudioNode::ref() and AudioNode::deref() methods for more details.
270 void refNode(AudioNode*); 273 void refNode(AudioNode*);
271 void derefNode(AudioNode*); 274 void derefNode(AudioNode*);
272 275
273 // When the context goes away, there might still be some sources which haven't finished playing. 276 // When the context goes away, there might still be some sources which haven't finished playing.
274 // Make sure to dereference them here. 277 // Make sure to dereference them here.
275 void derefUnfinishedSourceNodes(); 278 void derefUnfinishedSourceNodes();
276 279
277 RefPtr<AudioDestinationNode> m_destinationNode; 280 RefPtrWillBeMember<AudioDestinationNode> m_destinationNode;
278 RefPtr<AudioListener> m_listener; 281 RefPtrWillBeMember<AudioListener> m_listener;
279 282
280 // Only accessed in the audio thread. 283 // Only accessed in the audio thread.
281 Vector<AudioNode*> m_finishedNodes; 284 Vector<AudioNode*> m_finishedNodes;
282 285
283 // We don't use RefPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation 286 // We don't use RefPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation
284 // with an optional argument for refType. We need to use the special refType: RefTypeConnection 287 // with an optional argument for refType. We need to use the special refType: RefTypeConnection
285 // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished. 288 // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished.
286 Vector<AudioNode*> m_referencedNodes; 289 Vector<AudioNode*> m_referencedNodes;
287 290
288 // Accumulate nodes which need to be deleted here. 291 // Accumulate nodes which need to be deleted here.
289 // This is copied to m_nodesToDelete at the end of a render cycle in handlePostRenderTasks(), where we're assured of a stable graph 292 // This is copied to m_nodesToDelete at the end of a render cycle in handlePostRenderTasks(), where we're assured of a stable graph
290 // state which will have no references to any of the nodes in m_nodesToDelete once the context lock is released 293 // state which will have no references to any of the nodes in m_nodesToDelete once the context lock is released
291 // (when handlePostRenderTasks() has completed). 294 // (when handlePostRenderTasks() has completed).
292 Vector<AudioNode*> m_nodesMarkedForDeletion; 295 Vector<AudioNode*> m_nodesMarkedForDeletion;
293 296
294 // They will be scheduled for deletion (on the main thread) at the end of a render cycle (in realtime thread). 297 // They will be scheduled for deletion (on the main thread) at the end of a render cycle (in realtime thread).
295 Vector<AudioNode*> m_nodesToDelete; 298 Vector<AudioNode*> m_nodesToDelete;
296 bool m_isDeletionScheduled; 299 bool m_isDeletionScheduled;
297 300
298 // Only accessed when the graph lock is held. 301 // Only accessed when the graph lock is held.
299 HashSet<AudioSummingJunction*> m_dirtySummingJunctions; 302 HashSet<AudioSummingJunction*> m_dirtySummingJunctions;
300 HashSet<AudioNodeOutput*> m_dirtyAudioNodeOutputs; 303 HashSet<AudioNodeOutput*> m_dirtyAudioNodeOutputs;
haraken 2014/04/08 06:02:08 At this point AudioSummingJunction and AudioNodeOu
301 void handleDirtyAudioSummingJunctions(); 304 void handleDirtyAudioSummingJunctions();
302 void handleDirtyAudioNodeOutputs(); 305 void handleDirtyAudioNodeOutputs();
303 306
304 // For the sake of thread safety, we maintain a separate Vector of automatic pull nodes for rendering in m_renderingAutomaticPullNodes. 307 // For the sake of thread safety, we maintain a separate Vector of automatic pull nodes for rendering in m_renderingAutomaticPullNodes.
305 // It will be copied from m_automaticPullNodes by updateAutomaticPullNodes() at the very start or end of the rendering quantum. 308 // It will be copied from m_automaticPullNodes by updateAutomaticPullNodes() at the very start or end of the rendering quantum.
306 HashSet<AudioNode*> m_automaticPullNodes; 309 HashSet<AudioNode*> m_automaticPullNodes;
307 Vector<AudioNode*> m_renderingAutomaticPullNodes; 310 Vector<AudioNode*> m_renderingAutomaticPullNodes;
308 // m_automaticPullNodesNeedUpdating keeps track if m_automaticPullNodes is modified. 311 // m_automaticPullNodesNeedUpdating keeps track if m_automaticPullNodes is modified.
309 bool m_automaticPullNodesNeedUpdating; 312 bool m_automaticPullNodesNeedUpdating;
310 void updateAutomaticPullNodes(); 313 void updateAutomaticPullNodes();
311 314
312 unsigned m_connectionCount; 315 unsigned m_connectionCount;
313 316
314 // Graph locking. 317 // Graph locking.
315 Mutex m_contextGraphMutex; 318 Mutex m_contextGraphMutex;
316 volatile ThreadIdentifier m_audioThread; 319 volatile ThreadIdentifier m_audioThread;
317 volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier 320 volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier
318 321
319 // Only accessed in the audio thread. 322 // Only accessed in the audio thread.
320 Vector<AudioNode*> m_deferredFinishDerefList; 323 Vector<AudioNode*> m_deferredFinishDerefList;
321 324
322 RefPtr<AudioBuffer> m_renderTarget; 325 RefPtrWillBeMember<AudioBuffer> m_renderTarget;
323 326
324 bool m_isOfflineContext; 327 bool m_isOfflineContext;
325 328
326 AsyncAudioDecoder m_audioDecoder; 329 AsyncAudioDecoder m_audioDecoder;
327 330
328 // This is considering 32 is large enough for multiple channels audio. 331 // This is considering 32 is large enough for multiple channels audio.
329 // It is somewhat arbitrary and could be increased if necessary. 332 // It is somewhat arbitrary and could be increased if necessary.
330 enum { MaxNumberOfChannels = 32 }; 333 enum { MaxNumberOfChannels = 32 };
331 334
332 // Number of AudioBufferSourceNodes that are active (playing). 335 // Number of AudioBufferSourceNodes that are active (playing).
333 int m_activeSourceCount; 336 int m_activeSourceCount;
334 }; 337 };
335 338
336 } // WebCore 339 } // WebCore
337 340
338 #endif // AudioContext_h 341 #endif // AudioContext_h
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698