Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(160)

Side by Side Diff: Source/modules/webaudio/AudioContext.h

Issue 205173002: Move webaudio to oilpan (Closed) Base URL: svn://svn.chromium.org/blink/trunk
Patch Set: Created 6 years, 9 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch | Annotate | Revision Log
OLDNEW
1 /* 1 /*
2 * Copyright (C) 2010, Google Inc. All rights reserved. 2 * Copyright (C) 2010, Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions 5 * modification, are permitted provided that the following conditions
6 * are met: 6 * are met:
7 * 1. Redistributions of source code must retain the above copyright 7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer. 8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright 9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the 10 * notice, this list of conditions and the following disclaimer in the
(...skipping 11 matching lines...) Expand all
22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
23 */ 23 */
24 24
25 #ifndef AudioContext_h 25 #ifndef AudioContext_h
26 #define AudioContext_h 26 #define AudioContext_h
27 27
28 #include "bindings/v8/ScriptWrappable.h" 28 #include "bindings/v8/ScriptWrappable.h"
29 #include "core/dom/ActiveDOMObject.h" 29 #include "core/dom/ActiveDOMObject.h"
30 #include "core/events/EventListener.h" 30 #include "core/events/EventListener.h"
31 #include "core/events/EventTarget.h" 31 #include "core/events/EventTarget.h"
32 #include "heap/Handle.h"
32 #include "platform/audio/AudioBus.h" 33 #include "platform/audio/AudioBus.h"
33 #include "modules/webaudio/AsyncAudioDecoder.h" 34 #include "modules/webaudio/AsyncAudioDecoder.h"
34 #include "modules/webaudio/AudioDestinationNode.h" 35 #include "modules/webaudio/AudioDestinationNode.h"
35 #include "wtf/HashSet.h" 36 #include "wtf/HashSet.h"
36 #include "wtf/MainThread.h" 37 #include "wtf/MainThread.h"
37 #include "wtf/OwnPtr.h" 38 #include "wtf/OwnPtr.h"
38 #include "wtf/PassRefPtr.h" 39 #include "wtf/PassRefPtr.h"
39 #include "wtf/RefCounted.h" 40 #include "wtf/RefCounted.h"
40 #include "wtf/RefPtr.h" 41 #include "wtf/RefPtr.h"
41 #include "wtf/ThreadSafeRefCounted.h" 42 #include "wtf/ThreadSafeRefCounted.h"
(...skipping 24 matching lines...) Expand all
66 class MediaStreamAudioSourceNode; 67 class MediaStreamAudioSourceNode;
67 class OscillatorNode; 68 class OscillatorNode;
68 class PannerNode; 69 class PannerNode;
69 class PeriodicWave; 70 class PeriodicWave;
70 class ScriptProcessorNode; 71 class ScriptProcessorNode;
71 class WaveShaperNode; 72 class WaveShaperNode;
72 73
73 // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it. 74 // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
74 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism. 75 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.
75 76
76 class AudioContext : public ActiveDOMObject, public ScriptWrappable, public Thre adSafeRefCounted<AudioContext>, public EventTargetWithInlineData { 77 class AudioContext : public ActiveDOMObject, public ScriptWrappable, public Thre adSafeRefCountedWillBeRefCountedGarbageCollected<AudioContext>, public EventTarg etWithInlineData {
Mads Ager (chromium) 2014/03/20 08:30:00 If this is still ref counted it also still needs t
keishi 2014/03/27 07:39:37 Done.
77 DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCounted<AudioContext>); 78 DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCountedWillBeRefCountedGarbageC ollected<AudioContext>);
78 public: 79 public:
79 // Create an AudioContext for rendering to the audio hardware. 80 // Create an AudioContext for rendering to the audio hardware.
80 static PassRefPtr<AudioContext> create(Document&, ExceptionState&); 81 static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, ExceptionState &);
81 82
82 // Deprecated: create an AudioContext for offline (non-realtime) rendering. 83 // Deprecated: create an AudioContext for offline (non-realtime) rendering.
83 static PassRefPtr<AudioContext> create(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&); 84 static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, unsigned numbe rOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
84 85
85 virtual ~AudioContext(); 86 virtual ~AudioContext();
86 87
88 virtual void trace(Visitor*);
89
87 bool isInitialized() const; 90 bool isInitialized() const;
88 91
89 bool isOfflineContext() { return m_isOfflineContext; } 92 bool isOfflineContext() { return m_isOfflineContext; }
90 93
91 // Document notification 94 // Document notification
92 virtual void stop() OVERRIDE FINAL; 95 virtual void stop() OVERRIDE FINAL;
93 96
94 AudioDestinationNode* destination() { return m_destinationNode.get(); } 97 AudioDestinationNode* destination() { return m_destinationNode.get(); }
95 size_t currentSampleFrame() const { return m_destinationNode->currentSampleF rame(); } 98 size_t currentSampleFrame() const { return m_destinationNode->currentSampleF rame(); }
96 double currentTime() const { return m_destinationNode->currentTime(); } 99 double currentTime() const { return m_destinationNode->currentTime(); }
97 float sampleRate() const { return m_destinationNode->sampleRate(); } 100 float sampleRate() const { return m_destinationNode->sampleRate(); }
98 unsigned long activeSourceCount() const { return static_cast<unsigned long>( m_activeSourceCount); } 101 unsigned long activeSourceCount() const { return static_cast<unsigned long>( m_activeSourceCount); }
99 102
100 void incrementActiveSourceCount(); 103 void incrementActiveSourceCount();
101 void decrementActiveSourceCount(); 104 void decrementActiveSourceCount();
102 105
103 PassRefPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numbe rOfFrames, float sampleRate, ExceptionState&); 106 PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
104 PassRefPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMono, Exception State&); 107 PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(ArrayBuffer*, bool mixToMon o, ExceptionState&);
105 108
106 // Asynchronous audio file data decoding. 109 // Asynchronous audio file data decoding.
107 void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnP tr<AudioBufferCallback>, ExceptionState&); 110 void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnP tr<AudioBufferCallback>, ExceptionState&);
108 111
109 AudioListener* listener() { return m_listener.get(); } 112 AudioListener* listener() { return m_listener.get(); }
110 113
111 // The AudioNode create methods are called on the main thread (from JavaScri pt). 114 // The AudioNode create methods are called on the main thread (from JavaScri pt).
112 PassRefPtr<AudioBufferSourceNode> createBufferSource(); 115 PassRefPtrWillBeRawPtr<AudioBufferSourceNode> createBufferSource();
113 PassRefPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaEl ement*, ExceptionState&); 116 PassRefPtrWillBeRawPtr<MediaElementAudioSourceNode> createMediaElementSource (HTMLMediaElement*, ExceptionState&);
114 PassRefPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&); 117 PassRefPtrWillBeRawPtr<MediaStreamAudioSourceNode> createMediaStreamSource(M ediaStream*, ExceptionState&);
115 PassRefPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination(); 118 PassRefPtrWillBeRawPtr<MediaStreamAudioDestinationNode> createMediaStreamDes tination();
116 PassRefPtr<GainNode> createGain(); 119 PassRefPtrWillBeRawPtr<GainNode> createGain();
117 PassRefPtr<BiquadFilterNode> createBiquadFilter(); 120 PassRefPtrWillBeRawPtr<BiquadFilterNode> createBiquadFilter();
118 PassRefPtr<WaveShaperNode> createWaveShaper(); 121 PassRefPtrWillBeRawPtr<WaveShaperNode> createWaveShaper();
119 PassRefPtr<DelayNode> createDelay(ExceptionState&); 122 PassRefPtrWillBeRawPtr<DelayNode> createDelay(ExceptionState&);
120 PassRefPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&); 123 PassRefPtrWillBeRawPtr<DelayNode> createDelay(double maxDelayTime, Exception State&);
121 PassRefPtr<PannerNode> createPanner(); 124 PassRefPtrWillBeRawPtr<PannerNode> createPanner();
122 PassRefPtr<ConvolverNode> createConvolver(); 125 PassRefPtrWillBeRawPtr<ConvolverNode> createConvolver();
123 PassRefPtr<DynamicsCompressorNode> createDynamicsCompressor(); 126 PassRefPtrWillBeRawPtr<DynamicsCompressorNode> createDynamicsCompressor();
124 PassRefPtr<AnalyserNode> createAnalyser(); 127 PassRefPtrWillBeRawPtr<AnalyserNode> createAnalyser();
125 PassRefPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&); 128 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(ExceptionS tate&);
126 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, Exc eptionState&); 129 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t buf ferSize, ExceptionState&);
127 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, siz e_t numberOfInputChannels, ExceptionState&); 130 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t buf ferSize, size_t numberOfInputChannels, ExceptionState&);
128 PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, siz e_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&); 131 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t buf ferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionS tate&);
129 PassRefPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&); 132 PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(ExceptionS tate&);
130 PassRefPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs , ExceptionState&); 133 PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(size_t num berOfOutputs, ExceptionState&);
131 PassRefPtr<ChannelMergerNode> createChannelMerger(ExceptionState&); 134 PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(ExceptionState &);
132 PassRefPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, Exc eptionState&); 135 PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(size_t numberO fInputs, ExceptionState&);
133 PassRefPtr<OscillatorNode> createOscillator(); 136 PassRefPtrWillBeRawPtr<OscillatorNode> createOscillator();
134 PassRefPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array * imag, ExceptionState&); 137 PassRefPtrWillBeRawPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&);
135 138
136 // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it. 139 // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it.
137 void notifyNodeFinishedProcessing(AudioNode*); 140 void notifyNodeFinishedProcessing(AudioNode*);
138 141
139 // Called at the start of each render quantum. 142 // Called at the start of each render quantum.
140 void handlePreRenderTasks(); 143 void handlePreRenderTasks();
141 144
142 // Called at the end of each render quantum. 145 // Called at the end of each render quantum.
143 void handlePostRenderTasks(); 146 void handlePostRenderTasks();
144 147
(...skipping 118 matching lines...) Expand 10 before | Expand all | Expand 10 after
263 // In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode. 266 // In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode.
264 // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is 267 // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is
265 // uniquely connected to. See the AudioNode::ref() and AudioNode::deref() m ethods for more details. 268 // uniquely connected to. See the AudioNode::ref() and AudioNode::deref() m ethods for more details.
266 void refNode(AudioNode*); 269 void refNode(AudioNode*);
267 void derefNode(AudioNode*); 270 void derefNode(AudioNode*);
268 271
269 // When the context goes away, there might still be some sources which haven't finished playing. 272 // When the context goes away, there might still be some sources which haven't finished playing.
270 // Make sure to dereference them here. 273 // Make sure to dereference them here.
271 void derefUnfinishedSourceNodes(); 274 void derefUnfinishedSourceNodes();
272 275
273 RefPtr<AudioDestinationNode> m_destinationNode; 276 RefPtrWillBeRawPtr<AudioDestinationNode> m_destinationNode;
Mads Ager (chromium) 2014/03/20 08:30:00 If this was a RefPtr before it needs to be a Membe
keishi 2014/03/27 07:39:37 Done.
274 RefPtr<AudioListener> m_listener; 277 RefPtrWillBeMember<AudioListener> m_listener;
275 278
276 // Only accessed in the audio thread. 279 // Only accessed in the audio thread.
277 Vector<AudioNode*> m_finishedNodes; 280 Vector<AudioNode*> m_finishedNodes;
278 281
279 // We don't use RefPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation 282 // We don't use RefPtrWillBeRawPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation
Mads Ager (chromium) 2014/03/20 08:30:00 I would leave the comment as is.
keishi 2014/03/27 07:39:37 Done.
280 // with an optional argument for refType. We need to use the special refTyp e: RefTypeConnection 283 // with an optional argument for refType. We need to use the special refTyp e: RefTypeConnection
281 // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished. 284 // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished.
282 Vector<AudioNode*> m_referencedNodes; 285 Vector<AudioNode*> m_referencedNodes;
283 286
284 // Accumulate nodes which need to be deleted here. 287 // Accumulate nodes which need to be deleted here.
285 // This is copied to m_nodesToDelete at the end of a render cycle in handleP ostRenderTasks(), where we're assured of a stable graph 288 // This is copied to m_nodesToDelete at the end of a render cycle in handleP ostRenderTasks(), where we're assured of a stable graph
286 // state which will have no references to any of the nodes in m_nodesToDelet e once the context lock is released 289 // state which will have no references to any of the nodes in m_nodesToDelet e once the context lock is released
287 // (when handlePostRenderTasks() has completed). 290 // (when handlePostRenderTasks() has completed).
288 Vector<AudioNode*> m_nodesMarkedForDeletion; 291 Vector<AudioNode*> m_nodesMarkedForDeletion;
289 292
(...skipping 18 matching lines...) Expand all
308 unsigned m_connectionCount; 311 unsigned m_connectionCount;
309 312
310 // Graph locking. 313 // Graph locking.
311 Mutex m_contextGraphMutex; 314 Mutex m_contextGraphMutex;
312 volatile ThreadIdentifier m_audioThread; 315 volatile ThreadIdentifier m_audioThread;
313 volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier 316 volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier
314 317
315 // Only accessed in the audio thread. 318 // Only accessed in the audio thread.
316 Vector<AudioNode*> m_deferredFinishDerefList; 319 Vector<AudioNode*> m_deferredFinishDerefList;
317 320
318 RefPtr<AudioBuffer> m_renderTarget; 321 RefPtrWillBeMember<AudioBuffer> m_renderTarget;
319 322
320 bool m_isOfflineContext; 323 bool m_isOfflineContext;
321 324
322 AsyncAudioDecoder m_audioDecoder; 325 AsyncAudioDecoder m_audioDecoder;
323 326
324 // This is considering 32 is large enough for multiple channels audio. 327 // This is considering 32 is large enough for multiple channels audio.
325 // It is somewhat arbitrary and could be increased if necessary. 328 // It is somewhat arbitrary and could be increased if necessary.
326 enum { MaxNumberOfChannels = 32 }; 329 enum { MaxNumberOfChannels = 32 };
327 330
328 // Number of AudioBufferSourceNodes that are active (playing). 331 // Number of AudioBufferSourceNodes that are active (playing).
329 int m_activeSourceCount; 332 int m_activeSourceCount;
330 }; 333 };
331 334
332 } // WebCore 335 } // WebCore
333 336
334 #endif // AudioContext_h 337 #endif // AudioContext_h
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698