OLD | NEW |
1 /* | 1 /* |
2 * Copyright (C) 2010, Google Inc. All rights reserved. | 2 * Copyright (C) 2010, Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
6 * are met: | 6 * are met: |
7 * 1. Redistributions of source code must retain the above copyright | 7 * 1. Redistributions of source code must retain the above copyright |
8 * notice, this list of conditions and the following disclaimer. | 8 * notice, this list of conditions and the following disclaimer. |
9 * 2. Redistributions in binary form must reproduce the above copyright | 9 * 2. Redistributions in binary form must reproduce the above copyright |
10 * notice, this list of conditions and the following disclaimer in the | 10 * notice, this list of conditions and the following disclaimer in the |
(...skipping 186 matching lines...)
197 // nodes reference all nodes they're connected to. All nodes are ultimately | 197 // nodes reference all nodes they're connected to. All nodes are ultimately |
198 // connected to the AudioDestinationNode. When the context releases a source | 198 // connected to the AudioDestinationNode. When the context releases a source |
199 // node, it will be deactivated from the rendering graph along with all | 199 // node, it will be deactivated from the rendering graph along with all |
200 // other nodes it is uniquely connected to. | 200 // other nodes it is uniquely connected to. |
201 void notifySourceNodeStartedProcessing(AudioNode*); | 201 void notifySourceNodeStartedProcessing(AudioNode*); |
202 // When a source node has no more processing to do (has finished playing), | 202 // When a source node has no more processing to do (has finished playing), |
203 // this method tells the context to release the corresponding node. | 203 // this method tells the context to release the corresponding node. |
204 void notifySourceNodeFinishedProcessing(AudioHandler*); | 204 void notifySourceNodeFinishedProcessing(AudioHandler*); |
205 | 205 |
206 // Called at the start of each render quantum. | 206 // Called at the start of each render quantum. |
207 void handlePreRenderTasks(); | 207 void handlePreRenderTasks(const WebAudioTimestamp&); |
208 | 208 |
209 // Called at the end of each render quantum. | 209 // Called at the end of each render quantum. |
210 void handlePostRenderTasks(); | 210 void handlePostRenderTasks(); |
211 | 211 |
212 // Called periodically at the end of each render quantum to release finished | 212 // Called periodically at the end of each render quantum to release finished |
213 // source nodes. | 213 // source nodes. |
214 void releaseFinishedSourceNodes(); | 214 void releaseFinishedSourceNodes(); |
215 | 215 |
216 // Keeps track of the number of connections made. | 216 // Keeps track of the number of connections made. |
217 void incrementConnectionCount() | 217 void incrementConnectionCount() |
(...skipping 42 matching lines...)
260 | 260 |
261 // Get the PeriodicWave for the specified oscillator type. The table is initialized internally | 261 // Get the PeriodicWave for the specified oscillator type. The table is initialized internally |
262 // if necessary. | 262 // if necessary. |
263 PeriodicWave* periodicWave(int type); | 263 PeriodicWave* periodicWave(int type); |
264 | 264 |
265 // Check whether the AudioContext requires a user gesture and whether the | 265 // Check whether the AudioContext requires a user gesture and whether the |
266 // current stack is processing a user gesture, and record this information in | 266 // current stack is processing a user gesture, and record this information in |
267 // a histogram. | 267 // a histogram. |
268 void recordUserGestureState(); | 268 void recordUserGestureState(); |
269 | 269 |
| 270 // Gets the audio timestamp of the currently audible signal. |
| 271 virtual void getOutputTimestamp(AudioTimestamp&) {} |
| 272 |
270 protected: | 273 protected: |
271 explicit AbstractAudioContext(Document*); | 274 explicit AbstractAudioContext(Document*); |
272 AbstractAudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate); | 275 AbstractAudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate); |
273 | 276 |
274 void initialize(); | 277 void initialize(); |
275 void uninitialize(); | 278 void uninitialize(); |
276 | 279 |
277 void setContextState(AudioContextState); | 280 void setContextState(AudioContextState); |
278 | 281 |
279 virtual void didClose() {} | 282 virtual void didClose() {} |
280 | 283 |
281 // Tries to handle AudioBufferSourceNodes that were started but became disconnected or were never | 284 // Tries to handle AudioBufferSourceNodes that were started but became disconnected or were never |
282 // connected. Because these never get pulled anymore, they will stay around forever. So if we | 285 // connected. Because these never get pulled anymore, they will stay around forever. So if we |
283 // can, try to stop them so they can be collected. | 286 // can, try to stop them so they can be collected. |
284 void handleStoppableSourceNodes(); | 287 void handleStoppableSourceNodes(); |
285 | 288 |
286 Member<AudioDestinationNode> m_destinationNode; | 289 Member<AudioDestinationNode> m_destinationNode; |
287 | 290 |
288 // FIXME(dominicc): Move m_resumeResolvers to AudioContext, because only | 291 // FIXME(dominicc): Move m_resumeResolvers to AudioContext, because only |
289 // it creates these Promises. | 292 // it creates these Promises. |
290 // Vector of promises created by resume(). It takes time to handle them, so we collect all of | 293 // Vector of promises created by resume(). It takes time to handle them, so we collect all of |
291 // the promises here until they can be resolved or rejected. | 294 // the promises here until they can be resolved or rejected. |
292 HeapVector<Member<ScriptPromiseResolver>> m_resumeResolvers; | 295 HeapVector<Member<ScriptPromiseResolver>> m_resumeResolvers; |
293 | 296 |
294 void setClosedContextSampleRate(float newSampleRate) { m_closedContextSampleRate = newSampleRate; } | 297 void setClosedContextSampleRate(float newSampleRate) { m_closedContextSampleRate = newSampleRate; } |
295 float closedContextSampleRate() const { return m_closedContextSampleRate; } | 298 float closedContextSampleRate() const { return m_closedContextSampleRate; } |
296 | 299 |
297 void rejectPendingDecodeAudioDataResolvers(); | 300 void rejectPendingDecodeAudioDataResolvers(); |
298 | 301 |
| 302 WebAudioTimestamp outputTimestamp(); |
| 303 |
299 private: | 304 private: |
300 bool m_isCleared; | 305 bool m_isCleared; |
301 void clear(); | 306 void clear(); |
302 | 307 |
303 // When the context goes away, there might still be some sources which | 308 // When the context goes away, there might still be some sources which |
304 // haven't finished playing. Make sure to release them here. | 309 // haven't finished playing. Make sure to release them here. |
305 void releaseActiveSourceNodes(); | 310 void releaseActiveSourceNodes(); |
306 | 311 |
307 void removeFinishedSourceNodes(); | 312 void removeFinishedSourceNodes(); |
308 | 313 |
(...skipping 59 matching lines...)
368 // they can be shared with all OscillatorNodes in the context. To conserve memory, these are | 373 // they can be shared with all OscillatorNodes in the context. To conserve memory, these are |
369 // lazily initialized on first use. | 374 // lazily initialized on first use. |
370 Member<PeriodicWave> m_periodicWaveSine; | 375 Member<PeriodicWave> m_periodicWaveSine; |
371 Member<PeriodicWave> m_periodicWaveSquare; | 376 Member<PeriodicWave> m_periodicWaveSquare; |
372 Member<PeriodicWave> m_periodicWaveSawtooth; | 377 Member<PeriodicWave> m_periodicWaveSawtooth; |
373 Member<PeriodicWave> m_periodicWaveTriangle; | 378 Member<PeriodicWave> m_periodicWaveTriangle; |
374 | 379 |
375 // This assumes that 32 is large enough for multi-channel audio. | 380 // This assumes that 32 is large enough for multi-channel audio. |
376 // It is somewhat arbitrary and could be increased if necessary. | 381 // It is somewhat arbitrary and could be increased if necessary. |
377 enum { MaxNumberOfChannels = 32 }; | 382 enum { MaxNumberOfChannels = 32 }; |
| 383 |
| 384 // Output audio stream timestamp. |
| 385 WebAudioTimestamp m_outputTimestamp; |
378 }; | 386 }; |
379 | 387 |
380 } // namespace blink | 388 } // namespace blink |
381 | 389 |
382 #endif // AbstractAudioContext_h | 390 #endif // AbstractAudioContext_h |
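
The substance of this patch is the output-timestamp plumbing: handlePreRenderTasks() now receives the timestamp of the currently audible signal, the context caches it in the new m_outputTimestamp member, and subclasses can surface it through the new getOutputTimestamp() virtual (whose base-class default is a no-op). Below is a minimal standalone C++ sketch of that flow, using stand-in types rather than the real Blink classes; the contextTime/performanceTime fields and the mutex are assumptions for illustration, and the real code's cross-thread synchronization may differ.

    #include <mutex>

    // Stand-in for blink::WebAudioTimestamp / the AudioTimestamp dictionary
    // (field names are assumptions for illustration).
    struct SketchAudioTimestamp {
        double contextTime = 0;      // position in the output stream, in seconds
        double performanceTime = 0;  // wall-clock time of that position
    };

    class SketchAbstractAudioContext {
    public:
        virtual ~SketchAbstractAudioContext() = default;

        // Render thread: called at the start of each render quantum with the
        // timestamp of the signal that is currently audible at the output.
        void handlePreRenderTasks(const SketchAudioTimestamp& outputPosition) {
            std::lock_guard<std::mutex> lock(m_lock);
            m_outputTimestamp = outputPosition;
        }

        // Default is a no-op, matching the patch; a realtime subclass overrides it.
        virtual void getOutputTimestamp(SketchAudioTimestamp&) {}

    protected:
        // Main-thread accessor for the cached value.
        SketchAudioTimestamp outputTimestamp() {
            std::lock_guard<std::mutex> lock(m_lock);
            return m_outputTimestamp;
        }

    private:
        std::mutex m_lock;
        SketchAudioTimestamp m_outputTimestamp;  // output audio stream timestamp
    };

    // A realtime context would expose the cached value to script.
    class SketchAudioContext : public SketchAbstractAudioContext {
    public:
        void getOutputTimestamp(SketchAudioTimestamp& result) override {
            result = outputTimestamp();
        }
    };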
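
The comments around notifySourceNodeStartedProcessing()/notifySourceNodeFinishedProcessing() describe the source-node lifetime protocol: a started source node is kept alive in the rendering graph until it reports that it has finished, and finished nodes are released at the end of a render quantum. A rough standalone model of that bookkeeping follows; the container choice and names are illustrative, and the real code hands the finished notification an AudioHandler* from the audio thread, which this sketch glosses over.

    #include <algorithm>
    #include <vector>

    struct SketchSourceNode {
        bool finishedPlaying = false;
    };

    class SketchSourceNodeTracker {
    public:
        // A start()ed source node must stay alive (and connected to the graph)
        // even if script drops all references to it.
        void notifySourceNodeStartedProcessing(SketchSourceNode* node) {
            m_activeSourceNodes.push_back(node);
        }

        // The node reports that it has no more audio to produce.
        void notifySourceNodeFinishedProcessing(SketchSourceNode* node) {
            node->finishedPlaying = true;
        }

        // End of a render quantum: drop finished nodes so they, and anything
        // uniquely connected to them, can be collected.
        void releaseFinishedSourceNodes() {
            m_activeSourceNodes.erase(
                std::remove_if(m_activeSourceNodes.begin(), m_activeSourceNodes.end(),
                               [](SketchSourceNode* n) { return n->finishedPlaying; }),
                m_activeSourceNodes.end());
        }

    private:
        std::vector<SketchSourceNode*> m_activeSourceNodes;
    };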
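
periodicWave(int type) is documented as lazily initializing the shared wave tables so every OscillatorNode in the context can reuse them. A compact sketch of that lazy, shared-per-context pattern; the table contents and sizes here are placeholders, not the real PeriodicWave math.

    #include <map>
    #include <vector>

    class SketchPeriodicWaveCache {
    public:
        enum WaveType { Sine, Square, Sawtooth, Triangle };

        // Returns the shared table for the oscillator type, building it on
        // first use only, so unused types cost no memory.
        const std::vector<float>& periodicWave(WaveType type) {
            auto& table = m_tables[type];
            if (table.empty())
                table = buildTable(type);
            return table;
        }

    private:
        std::vector<float> buildTable(WaveType) {
            return std::vector<float>(2048, 0.0f);  // placeholder contents
        }

        std::map<WaveType, std::vector<float>> m_tables;
    };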