OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (C) 2010, Google Inc. All rights reserved. | 2 * Copyright (C) 2010, Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
6 * are met: | 6 * are met: |
7 * 1. Redistributions of source code must retain the above copyright | 7 * 1. Redistributions of source code must retain the above copyright |
8 * notice, this list of conditions and the following disclaimer. | 8 * notice, this list of conditions and the following disclaimer. |
9 * 2. Redistributions in binary form must reproduce the above copyright | 9 * 2. Redistributions in binary form must reproduce the above copyright |
10 * notice, this list of conditions and the following disclaimer in the | 10 * notice, this list of conditions and the following disclaimer in the |
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
73 class OscillatorNode; | 73 class OscillatorNode; |
74 class PannerNode; | 74 class PannerNode; |
75 class PeriodicWave; | 75 class PeriodicWave; |
76 class PeriodicWaveConstraints; | 76 class PeriodicWaveConstraints; |
77 class ScriptProcessorNode; | 77 class ScriptProcessorNode; |
78 class ScriptPromiseResolver; | 78 class ScriptPromiseResolver; |
79 class ScriptState; | 79 class ScriptState; |
80 class SecurityOrigin; | 80 class SecurityOrigin; |
81 class StereoPannerNode; | 81 class StereoPannerNode; |
82 class WaveShaperNode; | 82 class WaveShaperNode; |
83 class WebAudioLatencyHint; | |
83 | 84 |
84 // BaseAudioContext is the cornerstone of the web audio API and all AudioNodes | 85 // BaseAudioContext is the cornerstone of the web audio API and all AudioNodes |
85 // are created from it. For thread safety between the audio thread and the main | 86 // are created from it. For thread safety between the audio thread and the main |
86 // thread, it has a rendering graph locking mechanism. | 87 // thread, it has a rendering graph locking mechanism. |
87 | 88 |
88 class MODULES_EXPORT BaseAudioContext : public EventTargetWithInlineData, | 89 class MODULES_EXPORT BaseAudioContext : public EventTargetWithInlineData, |
89 public ActiveScriptWrappable, | 90 public ActiveScriptWrappable, |
90 public ActiveDOMObject { | 91 public ActiveDOMObject { |
91 USING_GARBAGE_COLLECTED_MIXIN(BaseAudioContext); | 92 USING_GARBAGE_COLLECTED_MIXIN(BaseAudioContext); |
92 DEFINE_WRAPPERTYPEINFO(); | 93 DEFINE_WRAPPERTYPEINFO(); |
93 | 94 |
94 public: | 95 public: |
95 // The state of an audio context. On creation, the state is Suspended. The | 96 // The state of an audio context. On creation, the state is Suspended. The |
96 // state is Running if audio is being processed (audio graph is being pulled | 97 // state is Running if audio is being processed (audio graph is being pulled |
97 // for data). The state is Closed if the audio context has been closed. The | 98 // for data). The state is Closed if the audio context has been closed. The |
98 // valid transitions are from Suspended to either Running or Closed; Running | 99 // valid transitions are from Suspended to either Running or Closed; Running |
99 // to Suspended or Closed. Once Closed, there are no valid transitions. | 100 // to Suspended or Closed. Once Closed, there are no valid transitions. |
100 enum AudioContextState { Suspended, Running, Closed }; | 101 enum AudioContextState { Suspended, Running, Closed }; |
101 | 102 |
102 // Create an AudioContext for rendering to the audio hardware. | 103 // Create an AudioContext for rendering to the audio hardware. |
103 static BaseAudioContext* create(Document&, ExceptionState&); | 104 static BaseAudioContext* create(Document&, |
105 const WebAudioLatencyHint&, | |
hongchan
2016/12/02 17:40:12
BaseAudioContext needs |AudioContextOptions|. |Au
Andrew MacPherson
2016/12/05 14:12:53
Done.
| |
106 ExceptionState&); | |
104 | 107 |
105 ~BaseAudioContext() override; | 108 ~BaseAudioContext() override; |
106 | 109 |
107 DECLARE_VIRTUAL_TRACE(); | 110 DECLARE_VIRTUAL_TRACE(); |
108 | 111 |
109 // Is the destination node initialized and ready to handle audio? | 112 // Is the destination node initialized and ready to handle audio? |
110 bool isDestinationInitialized() const { | 113 bool isDestinationInitialized() const { |
111 AudioDestinationNode* dest = destination(); | 114 AudioDestinationNode* dest = destination(); |
112 return dest ? dest->audioDestinationHandler().isInitialized() : false; | 115 return dest ? dest->audioDestinationHandler().isInitialized() : false; |
113 } | 116 } |
(...skipping 16 matching lines...) Expand all Loading... | |
130 | 133 |
131 double currentTime() const { | 134 double currentTime() const { |
132 // TODO: What is the correct value for the current time if the destination | 135 // TODO: What is the correct value for the current time if the destination |
133 // node has gone away? 0 is a valid time. | 136 // node has gone away? 0 is a valid time. |
134 return m_destinationNode | 137 return m_destinationNode |
135 ? m_destinationNode->audioDestinationHandler().currentTime() | 138 ? m_destinationNode->audioDestinationHandler().currentTime() |
136 : 0; | 139 : 0; |
137 } | 140 } |
138 | 141 |
139 float sampleRate() const { | 142 float sampleRate() const { |
140 return m_destinationNode ? m_destinationNode->handler().sampleRate() : 0; | 143 return m_destinationNode |
144 ? m_destinationNode->audioDestinationHandler().sampleRate() | |
145 : 0; | |
146 } | |
147 | |
148 float framesPerBuffer() const { | |
149 return m_destinationNode | |
150 ? m_destinationNode->audioDestinationHandler().framesPerBuffer() | |
151 : 0; | |
141 } | 152 } |
142 | 153 |
143 String state() const; | 154 String state() const; |
144 AudioContextState contextState() const { return m_contextState; } | 155 AudioContextState contextState() const { return m_contextState; } |
145 void throwExceptionForClosedState(ExceptionState&); | 156 void throwExceptionForClosedState(ExceptionState&); |
146 | 157 |
147 AudioBuffer* createBuffer(unsigned numberOfChannels, | 158 AudioBuffer* createBuffer(unsigned numberOfChannels, |
148 size_t numberOfFrames, | 159 size_t numberOfFrames, |
149 float sampleRate, | 160 float sampleRate, |
150 ExceptionState&); | 161 ExceptionState&); |
(...skipping 147 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
298 // Get the PeriodicWave for the specified oscillator type. The table is | 309 // Get the PeriodicWave for the specified oscillator type. The table is |
299 // initialized internally if necessary. | 310 // initialized internally if necessary. |
300 PeriodicWave* periodicWave(int type); | 311 PeriodicWave* periodicWave(int type); |
301 | 312 |
302 // For metrics purpose, records when start() is called on a | 313 // For metrics purpose, records when start() is called on a |
303 // AudioScheduledSourceHandler or a AudioBufferSourceHandler without a user | 314 // AudioScheduledSourceHandler or a AudioBufferSourceHandler without a user |
304 // gesture while the AudioContext requires a user gesture. | 315 // gesture while the AudioContext requires a user gesture. |
305 void maybeRecordStartAttempt(); | 316 void maybeRecordStartAttempt(); |
306 | 317 |
307 protected: | 318 protected: |
308 explicit BaseAudioContext(Document*); | 319 explicit BaseAudioContext(Document*, const WebAudioLatencyHint&); |
309 BaseAudioContext(Document*, | 320 BaseAudioContext(Document*, |
310 unsigned numberOfChannels, | 321 unsigned numberOfChannels, |
311 size_t numberOfFrames, | 322 size_t numberOfFrames, |
312 float sampleRate); | 323 float sampleRate); |
313 | 324 |
314 void initialize(); | 325 void initialize(); |
315 void uninitialize(); | 326 void uninitialize(); |
316 | 327 |
317 void setContextState(AudioContextState); | 328 void setContextState(AudioContextState); |
318 | 329 |
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
447 // This is considering 32 is large enough for multiple channels audio. | 458 // This is considering 32 is large enough for multiple channels audio. |
448 // It is somewhat arbitrary and could be increased if necessary. | 459 // It is somewhat arbitrary and could be increased if necessary. |
449 enum { MaxNumberOfChannels = 32 }; | 460 enum { MaxNumberOfChannels = 32 }; |
450 | 461 |
451 Optional<AutoplayStatus> m_autoplayStatus; | 462 Optional<AutoplayStatus> m_autoplayStatus; |
452 }; | 463 }; |
453 | 464 |
454 } // namespace blink | 465 } // namespace blink |
455 | 466 |
456 #endif // BaseAudioContext_h | 467 #endif // BaseAudioContext_h |
OLD | NEW |