OLD | NEW |
---|---|
1 /* | 1 /* |
2 * Copyright (C) 2010, Google Inc. All rights reserved. | 2 * Copyright (C) 2010, Google Inc. All rights reserved. |
3 * | 3 * |
4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
6 * are met: | 6 * are met: |
7 * 1. Redistributions of source code must retain the above copyright | 7 * 1. Redistributions of source code must retain the above copyright |
8 * notice, this list of conditions and the following disclaimer. | 8 * notice, this list of conditions and the following disclaimer. |
9 * 2. Redistributions in binary form must reproduce the above copyright | 9 * 2. Redistributions in binary form must reproduce the above copyright |
10 * notice, this list of conditions and the following disclaimer in the | 10 * notice, this list of conditions and the following disclaimer in the |
(...skipping 62 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
73 class OscillatorNode; | 73 class OscillatorNode; |
74 class PannerNode; | 74 class PannerNode; |
75 class PeriodicWave; | 75 class PeriodicWave; |
76 class PeriodicWaveConstraints; | 76 class PeriodicWaveConstraints; |
77 class ScriptProcessorNode; | 77 class ScriptProcessorNode; |
78 class ScriptPromiseResolver; | 78 class ScriptPromiseResolver; |
79 class ScriptState; | 79 class ScriptState; |
80 class SecurityOrigin; | 80 class SecurityOrigin; |
81 class StereoPannerNode; | 81 class StereoPannerNode; |
82 class WaveShaperNode; | 82 class WaveShaperNode; |
83 class WebAudioLatencyHint; | |
83 | 84 |
84 // BaseAudioContext is the cornerstone of the web audio API and all AudioNodes | 85 // BaseAudioContext is the cornerstone of the web audio API and all AudioNodes |
85 // are created from it. For thread safety between the audio thread and the main | 86 // are created from it. For thread safety between the audio thread and the main |
86 // thread, it has a rendering graph locking mechanism. | 87 // thread, it has a rendering graph locking mechanism. |
87 | 88 |
88 class MODULES_EXPORT BaseAudioContext : public EventTargetWithInlineData, | 89 class MODULES_EXPORT BaseAudioContext : public EventTargetWithInlineData, |
89 public ActiveScriptWrappable, | 90 public ActiveScriptWrappable, |
90 public ActiveDOMObject { | 91 public ActiveDOMObject { |
91 USING_GARBAGE_COLLECTED_MIXIN(BaseAudioContext); | 92 USING_GARBAGE_COLLECTED_MIXIN(BaseAudioContext); |
92 DEFINE_WRAPPERTYPEINFO(); | 93 DEFINE_WRAPPERTYPEINFO(); |
93 | 94 |
94 public: | 95 public: |
95 // The state of an audio context. On creation, the state is Suspended. The | 96 // The state of an audio context. On creation, the state is Suspended. The |
96 // state is Running if audio is being processed (audio graph is being pulled | 97 // state is Running if audio is being processed (audio graph is being pulled |
97 // for data). The state is Closed if the audio context has been closed. The | 98 // for data). The state is Closed if the audio context has been closed. The |
98 // valid transitions are from Suspended to either Running or Closed; Running | 99 // valid transitions are from Suspended to either Running or Closed; Running |
99 // to Suspended or Closed. Once Closed, there are no valid transitions. | 100 // to Suspended or Closed. Once Closed, there are no valid transitions. |
100 enum AudioContextState { Suspended, Running, Closed }; | 101 enum AudioContextState { Suspended, Running, Closed }; |
101 | 102 |
102 // Create an AudioContext for rendering to the audio hardware. | 103 // Create an AudioContext for rendering to the audio hardware. |
103 static BaseAudioContext* create(Document&, ExceptionState&); | 104 static BaseAudioContext* create(Document&, |
105 const WebAudioLatencyHint&, | |
106 ExceptionState&); | |
104 | 107 |
105 ~BaseAudioContext() override; | 108 ~BaseAudioContext() override; |
106 | 109 |
107 DECLARE_VIRTUAL_TRACE(); | 110 DECLARE_VIRTUAL_TRACE(); |
108 | 111 |
109 // Is the destination node initialized and ready to handle audio? | 112 // Is the destination node initialized and ready to handle audio? |
110 bool isDestinationInitialized() const { | 113 bool isDestinationInitialized() const { |
111 AudioDestinationNode* dest = destination(); | 114 AudioDestinationNode* dest = destination(); |
112 return dest ? dest->audioDestinationHandler().isInitialized() : false; | 115 return dest ? dest->audioDestinationHandler().isInitialized() : false; |
113 } | 116 } |
114 | 117 |
115 // Document notification | 118 // Document notification |
116 void contextDestroyed() final; | 119 void contextDestroyed() final; |
117 bool hasPendingActivity() const final; | 120 bool hasPendingActivity() const final; |
118 | 121 |
119 // Cannot be called from the audio thread. | 122 // Cannot be called from the audio thread. |
120 AudioDestinationNode* destination() const; | 123 AudioDestinationNode* destination() const; |
121 | 124 |
122 size_t currentSampleFrame() const { | 125 size_t currentSampleFrame() const { |
123 // TODO: What is the correct value for the current frame if the destination | 126 // TODO: What is the correct value for the current frame if the destination |
124 // node has gone away? 0 is a valid frame. | 127 // node has gone away? 0 is a valid frame. |
125 return m_destinationNode | 128 return m_destinationNode |
o1ka
2016/11/30 11:46:26
It does not look like having null m_destinationNod
Andrew MacPherson
2016/12/01 12:11:56
I believe an OfflineAudioContext does not have an
Raymond Toy
2016/12/01 16:26:51
It should. OfflineAudioContext is a subclass of B
Andrew MacPherson
2016/12/02 09:42:55
Sorry, I see it now, the value is being set direct
Raymond Toy
2016/12/02 16:57:45
Yes, as currently implemented closed contexts have
| |
126 ? m_destinationNode->audioDestinationHandler() | 129 ? m_destinationNode->audioDestinationHandler() |
127 .currentSampleFrame() | 130 .currentSampleFrame() |
128 : 0; | 131 : 0; |
129 } | 132 } |
130 | 133 |
131 double currentTime() const { | 134 double currentTime() const { |
132 // TODO: What is the correct value for the current time if the destination | 135 // TODO: What is the correct value for the current time if the destination |
133 // node has gone away? 0 is a valid time. | 136 // node has gone away? 0 is a valid time. |
134 return m_destinationNode | 137 return m_destinationNode |
135 ? m_destinationNode->audioDestinationHandler().currentTime() | 138 ? m_destinationNode->audioDestinationHandler().currentTime() |
136 : 0; | 139 : 0; |
137 } | 140 } |
138 | 141 |
139 float sampleRate() const { | 142 float sampleRate() const { |
140 return m_destinationNode ? m_destinationNode->handler().sampleRate() : 0; | 143 return m_destinationNode |
144 ? m_destinationNode->audioDestinationHandler().sampleRate() | |
145 : 0; | |
146 } | |
147 | |
148 float framesPerBuffer() const { | |
149 return m_destinationNode | |
150 ? m_destinationNode->audioDestinationHandler().framesPerBuffer() | |
151 : 0; | |
141 } | 152 } |
142 | 153 |
143 String state() const; | 154 String state() const; |
155 double baseLatency() const; | |
144 AudioContextState contextState() const { return m_contextState; } | 156 AudioContextState contextState() const { return m_contextState; } |
145 void throwExceptionForClosedState(ExceptionState&); | 157 void throwExceptionForClosedState(ExceptionState&); |
146 | 158 |
147 AudioBuffer* createBuffer(unsigned numberOfChannels, | 159 AudioBuffer* createBuffer(unsigned numberOfChannels, |
148 size_t numberOfFrames, | 160 size_t numberOfFrames, |
149 float sampleRate, | 161 float sampleRate, |
150 ExceptionState&); | 162 ExceptionState&); |
151 | 163 |
152 // Asynchronous audio file data decoding. | 164 // Asynchronous audio file data decoding. |
153 ScriptPromise decodeAudioData(ScriptState*, | 165 ScriptPromise decodeAudioData(ScriptState*, |
(...skipping 144 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
298 // Get the PeriodicWave for the specified oscillator type. The table is | 310 // Get the PeriodicWave for the specified oscillator type. The table is |
299 // initialized internally if necessary. | 311 // initialized internally if necessary. |
300 PeriodicWave* periodicWave(int type); | 312 PeriodicWave* periodicWave(int type); |
301 | 313 |
302 // For metrics purposes, records when start() is called on an | 314 // For metrics purposes, records when start() is called on an |
303 // AudioScheduledSourceHandler or an AudioBufferSourceHandler without a user | 315 // AudioScheduledSourceHandler or an AudioBufferSourceHandler without a user |
304 // gesture while the AudioContext requires a user gesture. | 316 // gesture while the AudioContext requires a user gesture. |
305 void maybeRecordStartAttempt(); | 317 void maybeRecordStartAttempt(); |
306 | 318 |
307 protected: | 319 protected: |
308 explicit BaseAudioContext(Document*); | 320 explicit BaseAudioContext(Document*, const WebAudioLatencyHint&); |
309 BaseAudioContext(Document*, | 321 BaseAudioContext(Document*, |
310 unsigned numberOfChannels, | 322 unsigned numberOfChannels, |
311 size_t numberOfFrames, | 323 size_t numberOfFrames, |
312 float sampleRate); | 324 float sampleRate); |
313 | 325 |
314 void initialize(); | 326 void initialize(); |
315 void uninitialize(); | 327 void uninitialize(); |
316 | 328 |
317 void setContextState(AudioContextState); | 329 void setContextState(AudioContextState); |
318 | 330 |
(...skipping 128 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
447 // This is considering 32 is large enough for multiple channels audio. | 459 // This is considering 32 is large enough for multiple channels audio. |
448 // It is somewhat arbitrary and could be increased if necessary. | 460 // It is somewhat arbitrary and could be increased if necessary. |
449 enum { MaxNumberOfChannels = 32 }; | 461 enum { MaxNumberOfChannels = 32 }; |
450 | 462 |
451 Optional<AutoplayStatus> m_autoplayStatus; | 463 Optional<AutoplayStatus> m_autoplayStatus; |
452 }; | 464 }; |
453 | 465 |
454 } // namespace blink | 466 } // namespace blink |
455 | 467 |
456 #endif // BaseAudioContext_h | 468 #endif // BaseAudioContext_h |
OLD | NEW |