Chromium Code Reviews

Unified diff: third_party/WebKit/Source/modules/webaudio/AbstractAudioContext.cpp

Issue 2159403002: Replace ASSERT with DCHECK in WebAudio (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 5 months ago
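
For context on the change itself: both Blink's ASSERT() and Chromium's DCHECK() family are debug-only checks, so the replacement should not change release behavior. The sketch below is illustrative only (it is not code from this patch) and assumes the semantics of Chromium's base/logging.h macros, where the comparison forms additionally log the operand values on failure.

    // Illustrative sketch, not part of this CL; assumes base/logging.h semantics.
    #include "base/logging.h"

    void sketch(float rate)
    {
        // Legacy Blink macro being removed by this CL:
        //   ASSERT(rate > 0);
        // Its direct replacement:
        DCHECK(rate > 0);    // on failure, logs only the stringified condition
        // The comparison form used at line 291 of this patch:
        DCHECK_GT(rate, 0);  // on failure, also logs the actual value of |rate|
    }

This difference is why the review comments below push the plain DCHECK(a == b) checks toward DCHECK_EQ/DCHECK_NE.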
  1   /*
  2    * Copyright (C) 2010, Google Inc. All rights reserved.
  3    *
  4    * Redistribution and use in source and binary forms, with or without
  5    * modification, are permitted provided that the following conditions
  6    * are met:
  7    * 1. Redistributions of source code must retain the above copyright
  8    *    notice, this list of conditions and the following disclaimer.
  9    * 2. Redistributions in binary form must reproduce the above copyright
 10    *    notice, this list of conditions and the following disclaimer in the
(...skipping 133 matching lines...)
144       // in order to record metrics, re-using the HTMLMediaElement setting is
145       // probably the simplest solution.
146       if (document->settings() && document->settings()->mediaPlaybackRequiresUserGesture())
147           m_userGestureRequired = true;
148   }
149
150   AbstractAudioContext::~AbstractAudioContext()
151   {
152       deferredTaskHandler().contextWillBeDestroyed();
153       // AudioNodes keep a reference to their context, so there should be no way to be in the destructor if there are still AudioNodes around.
154 -     ASSERT(!isDestinationInitialized());
154 +     DCHECK(!isDestinationInitialized());
155 -     ASSERT(!m_activeSourceNodes.size());
155 +     DCHECK(!m_activeSourceNodes.size());
156 -     ASSERT(!m_finishedSourceHandlers.size());
156 +     DCHECK(!m_finishedSourceHandlers.size());
157 -     ASSERT(!m_isResolvingResumePromises);
157 +     DCHECK(!m_isResolvingResumePromises);
158 -     ASSERT(!m_resumeResolvers.size());
158 +     DCHECK(!m_resumeResolvers.size());
159   }
160
161   void AbstractAudioContext::initialize()
162   {
163       if (isDestinationInitialized())
164           return;
165
166       FFTFrame::initialize();
167
168       if (m_destinationNode) {
169           m_destinationNode->handler().initialize();
170           // The AudioParams in the listener need access to the destination node, so only create the
171           // listener if the destination node exists.
172           m_listener = AudioListener::create(*this);
173       }
174   }
175
176   void AbstractAudioContext::clear()
177   {
178       m_destinationNode.clear();
179       // The audio rendering thread is dead. Nobody will schedule AudioHandler
180       // deletion. Let's do it ourselves.
181       deferredTaskHandler().clearHandlersToBeDeleted();
182       m_isCleared = true;
183   }
184
185   void AbstractAudioContext::uninitialize()
186   {
187 -     ASSERT(isMainThread());
187 +     DCHECK(isMainThread());
188
189       if (!isDestinationInitialized())
190           return;
191
192       // This stops the audio thread and all audio rendering.
193       if (m_destinationNode)
194           m_destinationNode->handler().uninitialize();
195
196       // Get rid of the sources which may still be playing.
197       releaseActiveSourceNodes();
198
199       // Reject any pending resolvers before we go away.
200       rejectPendingResolvers();
201       didClose();
202
203 -     ASSERT(m_listener);
203 +     DCHECK(m_listener);
204       m_listener->waitForHRTFDatabaseLoaderThreadCompletion();
205
206       clear();
207   }
208
209   void AbstractAudioContext::stop()
210   {
211       uninitialize();
212   }
213
214   bool AbstractAudioContext::hasPendingActivity() const
215   {
216       // There's no pending activity if the audio context has been cleared.
217       return !m_isCleared;
218   }
219
220   AudioDestinationNode* AbstractAudioContext::destination() const
221   {
222       // Cannot be called from the audio thread because this method touches objects managed by Oilpan,
223       // and the audio thread is not managed by Oilpan.
224 -     ASSERT(!isAudioThread());
224 +     DCHECK(!isAudioThread());
225       return m_destinationNode;
226   }
227
228   void AbstractAudioContext::throwExceptionForClosedState(ExceptionState& exceptionState)
229   {
230       exceptionState.throwDOMException(InvalidStateError, "AudioContext has been closed.");
231   }
232
233   AudioBuffer* AbstractAudioContext::createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState& exceptionState)
234   {
(...skipping 38 matching lines...)
273               float ratio = 100 * sampleRate / this->sampleRate();
274               audioBufferSampleRateRatioHistogram.count(static_cast<int>(0.5 + ratio));
275           }
276       }
277
278       return buffer;
279   }
280
281   ScriptPromise AbstractAudioContext::decodeAudioData(ScriptState* scriptState, DOMArrayBuffer* audioData, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback, ExceptionState& exceptionState)
282   {
283 -     ASSERT(isMainThread());
283 +     DCHECK(isMainThread());
284 -     ASSERT(audioData);
284 +     DCHECK(audioData);
285
286       ScriptPromiseResolver* resolver = ScriptPromiseResolver::create(scriptState);
287       ScriptPromise promise = resolver->promise();
288
289       float rate = isContextClosed() ? closedContextSampleRate() : sampleRate();
290
291 -     ASSERT(rate > 0);
291 +     DCHECK_GT(rate, 0);
292
293       m_decodeAudioResolvers.add(resolver);
294       m_audioDecoder.decodeAsync(audioData, rate, successCallback, errorCallback, resolver, this);
295
296       return promise;
297   }
298
299   void AbstractAudioContext::handleDecodeAudioData(AudioBuffer* audioBuffer, ScriptPromiseResolver* resolver, AudioBufferCallback* successCallback, AudioBufferCallback* errorCallback)
300   {
301 -     ASSERT(isMainThread());
301 +     DCHECK(isMainThread());
302
303       if (audioBuffer) {
304           // Resolve promise successfully and run the success callback
305           resolver->resolve(audioBuffer);
306           if (successCallback)
307               successCallback->handleEvent(audioBuffer);
308       } else {
309           // Reject the promise and run the error callback
310           DOMException* error = DOMException::create(EncodingError, "Unable to decode audio data");
311           resolver->reject(error);
312           if (errorCallback)
313               errorCallback->handleEvent(error);
314       }
315
316       // We've resolved the promise. Remove it now.
317 -     ASSERT(m_decodeAudioResolvers.contains(resolver));
317 +     DCHECK(m_decodeAudioResolvers.contains(resolver));
318       m_decodeAudioResolvers.remove(resolver);
319   }
320
321   AudioBufferSourceNode* AbstractAudioContext::createBufferSource(ExceptionState& exceptionState)
322   {
323 -     ASSERT(isMainThread());
323 +     DCHECK(isMainThread());
324
325       AudioBufferSourceNode* node = AudioBufferSourceNode::create(*this, exceptionState);
326
327       // Do not add a reference to this source node now. The reference will be added when start() is
328       // called.
329
330       return node;
331   }
332
333   MediaElementAudioSourceNode* AbstractAudioContext::createMediaElementSource(HTMLMediaElement* mediaElement, ExceptionState& exceptionState)
334   {
335 -     ASSERT(isMainThread());
335 +     DCHECK(isMainThread());
336
337       return MediaElementAudioSourceNode::create(*this, *mediaElement, exceptionState);
338   }
339
340   MediaStreamAudioSourceNode* AbstractAudioContext::createMediaStreamSource(MediaStream* mediaStream, ExceptionState& exceptionState)
341   {
342 -     ASSERT(isMainThread());
342 +     DCHECK(isMainThread());
343
344       return MediaStreamAudioSourceNode::create(*this, *mediaStream, exceptionState);
345   }
346
347   MediaStreamAudioDestinationNode* AbstractAudioContext::createMediaStreamDestination(ExceptionState& exceptionState)
348   {
349       DCHECK(isMainThread());
350
351       // Set number of output channels to stereo by default.
352       return MediaStreamAudioDestinationNode::create(*this, 2, exceptionState);
(...skipping 15 matching lines...)
368
369   ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState& exceptionState)
370   {
371       DCHECK(isMainThread());
372
373       return ScriptProcessorNode::create(*this, bufferSize, numberOfInputChannels, exceptionState);
374   }
375
376   ScriptProcessorNode* AbstractAudioContext::createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState& exceptionState)
377   {
378 -     ASSERT(isMainThread());
378 +     DCHECK(isMainThread());
379
380       return ScriptProcessorNode::create(
381           *this,
382           bufferSize,
383           numberOfInputChannels,
384           numberOfOutputChannels,
385           exceptionState);
386   }
387
388   StereoPannerNode* AbstractAudioContext::createStereoPanner(ExceptionState& exceptionState)
389   {
390 -     ASSERT(isMainThread());
390 +     DCHECK(isMainThread());
391
392       return StereoPannerNode::create(*this, exceptionState);
393   }
394
395   BiquadFilterNode* AbstractAudioContext::createBiquadFilter(ExceptionState& exceptionState)
396   {
397 -     ASSERT(isMainThread());
397 +     DCHECK(isMainThread());
398
399       return BiquadFilterNode::create(*this, exceptionState);
400   }
401
402   WaveShaperNode* AbstractAudioContext::createWaveShaper(ExceptionState& exceptionState)
403   {
404 -     ASSERT(isMainThread());
404 +     DCHECK(isMainThread());
405
406       return WaveShaperNode::create(*this, exceptionState);
407   }
408
409   PannerNode* AbstractAudioContext::createPanner(ExceptionState& exceptionState)
410   {
411 -     ASSERT(isMainThread());
411 +     DCHECK(isMainThread());
412
413       return PannerNode::create(*this, exceptionState);
414   }
415
416   ConvolverNode* AbstractAudioContext::createConvolver(ExceptionState& exceptionState)
417   {
418 -     ASSERT(isMainThread());
418 +     DCHECK(isMainThread());
419
420       return ConvolverNode::create(*this, exceptionState);
421   }
422
423   DynamicsCompressorNode* AbstractAudioContext::createDynamicsCompressor(ExceptionState& exceptionState)
424   {
425 -     ASSERT(isMainThread());
425 +     DCHECK(isMainThread());
426
427       return DynamicsCompressorNode::create(*this, exceptionState);
428   }
429
430   AnalyserNode* AbstractAudioContext::createAnalyser(ExceptionState& exceptionState)
431   {
432 -     ASSERT(isMainThread());
432 +     DCHECK(isMainThread());
433
434       return AnalyserNode::create(*this, exceptionState);
435   }
436
437   GainNode* AbstractAudioContext::createGain(ExceptionState& exceptionState)
438   {
439 -     ASSERT(isMainThread());
439 +     DCHECK(isMainThread());
440
441       return GainNode::create(*this, exceptionState);
442   }
443
444   DelayNode* AbstractAudioContext::createDelay(ExceptionState& exceptionState)
445   {
446       DCHECK(isMainThread());
447
448       return DelayNode::create(*this, exceptionState);
449   }
450
451   DelayNode* AbstractAudioContext::createDelay(double maxDelayTime, ExceptionState& exceptionState)
452   {
453 -     ASSERT(isMainThread());
453 +     DCHECK(isMainThread());
454
455       return DelayNode::create(*this, maxDelayTime, exceptionState);
456   }
457
458   ChannelSplitterNode* AbstractAudioContext::createChannelSplitter(ExceptionState& exceptionState)
459   {
460       DCHECK(isMainThread());
461
462       return ChannelSplitterNode::create(*this, exceptionState);
463   }
464
465   ChannelSplitterNode* AbstractAudioContext::createChannelSplitter(size_t numberOfOutputs, ExceptionState& exceptionState)
466   {
467 -     ASSERT(isMainThread());
467 +     DCHECK(isMainThread());
468
469       return ChannelSplitterNode::create(*this, numberOfOutputs, exceptionState);
470   }
471
472   ChannelMergerNode* AbstractAudioContext::createChannelMerger(ExceptionState& exceptionState)
473   {
474       DCHECK(isMainThread());
475
476       return ChannelMergerNode::create(*this, exceptionState);
477   }
478
479   ChannelMergerNode* AbstractAudioContext::createChannelMerger(size_t numberOfInputs, ExceptionState& exceptionState)
480   {
481 -     ASSERT(isMainThread());
481 +     DCHECK(isMainThread());
482
483       return ChannelMergerNode::create(*this, numberOfInputs, exceptionState);
484   }
485
486   OscillatorNode* AbstractAudioContext::createOscillator(ExceptionState& exceptionState)
487   {
488 -     ASSERT(isMainThread());
488 +     DCHECK(isMainThread());
489
490       return OscillatorNode::create(*this, exceptionState);
491   }
492
493   PeriodicWave* AbstractAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, ExceptionState& exceptionState)
494   {
495       DCHECK(isMainThread());
496
497       return PeriodicWave::create(*this, real, imag, false, exceptionState);
498   }
499
500   PeriodicWave* AbstractAudioContext::createPeriodicWave(DOMFloat32Array* real, DOMFloat32Array* imag, const PeriodicWaveConstraints& options, ExceptionState& exceptionState)
501   {
502 -     ASSERT(isMainThread());
502 +     DCHECK(isMainThread());
503
504       bool disable = options.hasDisableNormalization() ? options.disableNormalization() : false;
505
506       return PeriodicWave::create(*this, real, imag, disable, exceptionState);
507   }
508
509   IIRFilterNode* AbstractAudioContext::createIIRFilter(Vector<double> feedforwardCoef, Vector<double> feedbackCoef, ExceptionState& exceptionState)
510   {
511 -     ASSERT(isMainThread());
511 +     DCHECK(isMainThread());
512
513       return IIRFilterNode::create(*this, feedforwardCoef, feedbackCoef, exceptionState);
514   }
515
516   PeriodicWave* AbstractAudioContext::periodicWave(int type)
517   {
518       switch (type) {
519       case OscillatorHandler::SINE:
520           // Initialize the table if necessary
521           if (!m_periodicWaveSine)
(...skipping 49 matching lines...)
571           return "running";
572       case Closed:
573           return "closed";
574       }
575       ASSERT_NOT_REACHED();
576       return "";
577   }
578
579   void AbstractAudioContext::setContextState(AudioContextState newState)
580   {
581 -     ASSERT(isMainThread());
581 +     DCHECK(isMainThread());
582
583       // Validate the transitions. The valid transitions are Suspended->Running, Running->Suspended,
584       // and anything->Closed.
585       switch (newState) {
586       case Suspended:
587 -         ASSERT(m_contextState == Running);
587 +         DCHECK(m_contextState == Running);

        hongchan 2016/07/20 16:11:19: Perhaps use DCHECK_EQ?
        HyungwookLee 2016/07/22 03:30:33: Done.

588           break;
589       case Running:
590 -         ASSERT(m_contextState == Suspended);
590 +         DCHECK(m_contextState == Suspended);

        hongchan 2016/07/20 16:11:19: ditto.
        HyungwookLee 2016/07/22 03:30:33: Done.

591           break;
592       case Closed:
593 -         ASSERT(m_contextState != Closed);
593 +         DCHECK(m_contextState != Closed);

        hongchan 2016/07/20 16:11:19: DCHECK_NE here?
        HyungwookLee 2016/07/22 03:30:33: Done.

594           break;
595       }
596
597       if (newState == m_contextState) {
598           // ASSERTs above failed; just return.

        hongchan 2016/07/20 16:11:19: Change the comment accordingly.
        HyungwookLee 2016/07/22 03:30:33: Done.

599           return;
600       }
601
602       m_contextState = newState;
603
604       // Notify context that state changed
605       if (getExecutionContext())
606           getExecutionContext()->postTask(BLINK_FROM_HERE, createSameThreadTask(&AbstractAudioContext::notifyStateChange, wrapPersistent(this)));
607   }
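
As a rough sketch of where the exchange above lands (hypothetical: the follow-up patch set is not shown on this page, and the enum below is redeclared only to keep the snippet self-contained), the transition checks would use the comparison macros so that a failure reports both state values:

    // Hypothetical shape of setContextState()'s checks after the review comments,
    // assuming Chromium's base/logging.h macros; not code from this patch set.
    #include "base/logging.h"

    enum AudioContextState { Suspended, Running, Closed };

    static void validateTransition(AudioContextState currentState, AudioContextState newState)
    {
        switch (newState) {
        case Suspended:
            DCHECK_EQ(currentState, Running);
            break;
        case Running:
            DCHECK_EQ(currentState, Suspended);
            break;
        case Closed:
            DCHECK_NE(currentState, Closed);
            break;
        }
    }

The comment on line 598 would presumably become "// DCHECKs above failed; just return." per the "Change the comment accordingly" note.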
608
609   void AbstractAudioContext::notifyStateChange()
610   {
611       dispatchEvent(Event::create(EventTypeNames::statechange));
612   }
613
614   void AbstractAudioContext::notifySourceNodeFinishedProcessing(AudioHandler* handler)
615   {
616 -     ASSERT(isAudioThread());
616 +     DCHECK(isAudioThread());
617       m_finishedSourceHandlers.append(handler);
618   }
619
620   void AbstractAudioContext::removeFinishedSourceNodes()
621   {
622 -     ASSERT(isMainThread());
622 +     DCHECK(isMainThread());
623       AutoLocker locker(this);
624       // Quadratic worst case, but sizes of both vectors are considered
625       // manageable, especially |m_finishedSourceNodes| is likely to be short.
626       for (AudioNode* node : m_finishedSourceNodes) {
627           size_t i = m_activeSourceNodes.find(node);
628           if (i != kNotFound)
629               m_activeSourceNodes.remove(i);
630       }
631       m_finishedSourceNodes.clear();
632   }
633
634   void AbstractAudioContext::releaseFinishedSourceNodes()
635   {
636       ASSERT(isGraphOwner());
637 -     ASSERT(isAudioThread());
637 +     DCHECK(isAudioThread());
638       bool didRemove = false;
639       for (AudioHandler* handler : m_finishedSourceHandlers) {
640           for (AudioNode* node : m_activeSourceNodes) {
641               if (m_finishedSourceNodes.contains(node))
642                   continue;
643               if (handler == &node->handler()) {
644                   handler->breakConnection();
645                   m_finishedSourceNodes.add(node);
646                   didRemove = true;
647                   break;
648               }
649           }
650       }
651       if (didRemove)
652           Platform::current()->mainThread()->getWebTaskRunner()->postTask(BLINK_FROM_HERE, crossThreadBind(&AbstractAudioContext::removeFinishedSourceNodes, wrapCrossThreadPersistent(this)));
653
654       m_finishedSourceHandlers.clear();
655   }
656
657   void AbstractAudioContext::notifySourceNodeStartedProcessing(AudioNode* node)
658   {
659 -     ASSERT(isMainThread());
659 +     DCHECK(isMainThread());
660       AutoLocker locker(this);
661
662       m_activeSourceNodes.append(node);
663       node->handler().makeConnection();
664   }
665
666   void AbstractAudioContext::releaseActiveSourceNodes()
667   {
668 -     ASSERT(isMainThread());
668 +     DCHECK(isMainThread());
669       for (auto& sourceNode : m_activeSourceNodes)
670           sourceNode->handler().breakConnection();
671
672       m_activeSourceNodes.clear();
673   }
674
675   void AbstractAudioContext::handleStoppableSourceNodes()
676   {
677       ASSERT(isGraphOwner());
678
679       // Find AudioBufferSourceNodes to see if we can stop playing them.
680       for (AudioNode* node : m_activeSourceNodes) {
681           // If the AudioNode has been marked as finished and released by
682           // the audio thread, but not yet removed by the main thread
683           // (see releaseActiveSourceNodes() above), |node| must not be
684           // touched as its handler may have been released already.
685           if (m_finishedSourceNodes.contains(node))
686               continue;
687           if (node->handler().getNodeType() == AudioHandler::NodeTypeAudioBufferSource) {
688               AudioBufferSourceNode* sourceNode = static_cast<AudioBufferSourceNode*>(node);
689               sourceNode->audioBufferSourceHandler().handleStoppableSourceNode();
690           }
691       }
692   }
693
694   void AbstractAudioContext::handlePreRenderTasks()
695   {
696 -     ASSERT(isAudioThread());
696 +     DCHECK(isAudioThread());
697
698       // At the beginning of every render quantum, try to update the internal rendering graph state (from main thread changes).
699       // It's OK if the tryLock() fails, we'll just take slightly longer to pick up the changes.
700       if (tryLock()) {
701           deferredTaskHandler().handleDeferredTasks();
702
703           resolvePromisesForResume();
704
705           // Check to see if source nodes can be stopped because the end time has passed.
706           handleStoppableSourceNodes();
707
708           // Update the dirty state of the listener.
709           listener()->updateState();
710
711           unlock();
712       }
713   }
714
715   void AbstractAudioContext::handlePostRenderTasks()
716   {
717 -     ASSERT(isAudioThread());
717 +     DCHECK(isAudioThread());
718
719       // Must use a tryLock() here too. Don't worry, the lock will very rarely be contended and this method is called frequently.
720       // The worst that can happen is that there will be some nodes which will take slightly longer than usual to be deleted or removed
721       // from the render graph (in which case they'll render silence).
722       if (tryLock()) {
723           // Take care of AudioNode tasks where the tryLock() failed previously.
724           deferredTaskHandler().breakConnections();
725
726           // Dynamically clean up nodes which are no longer needed.
727           releaseFinishedSourceNodes();
728
729           deferredTaskHandler().handleDeferredTasks();
730           deferredTaskHandler().requestToDeleteHandlersOnMainThread();
731
732           unlock();
733       }
734   }
735
736   void AbstractAudioContext::resolvePromisesForResumeOnMainThread()
737   {
738 -     ASSERT(isMainThread());
738 +     DCHECK(isMainThread());
739       AutoLocker locker(this);
740
741       for (auto& resolver : m_resumeResolvers) {
742           if (m_contextState == Closed) {
743               resolver->reject(
744                   DOMException::create(InvalidStateError, "Cannot resume a context that has been closed"));
745           } else {
746               resolver->resolve();
747           }
748       }
749
750       m_resumeResolvers.clear();
751       m_isResolvingResumePromises = false;
752   }
753
754   void AbstractAudioContext::resolvePromisesForResume()
755   {
756       // This runs inside the AbstractAudioContext's lock when handling pre-render tasks.
757 -     ASSERT(isAudioThread());
757 +     DCHECK(isAudioThread());
758       ASSERT(isGraphOwner());
759
760       // Resolve any pending promises created by resume(). Only do this if we haven't already started
761       // resolving these promises. This gets called very often and it takes some time to resolve the
762       // promises in the main thread.
763       if (!m_isResolvingResumePromises && m_resumeResolvers.size() > 0) {
764           m_isResolvingResumePromises = true;
765           Platform::current()->mainThread()->getWebTaskRunner()->postTask(BLINK_FROM_HERE, crossThreadBind(&AbstractAudioContext::resolvePromisesForResumeOnMainThread, wrapCrossThreadPersistent(this)));
766       }
767   }
768
769   void AbstractAudioContext::rejectPendingDecodeAudioDataResolvers()
770   {
771       // Now reject any pending decodeAudioData resolvers
772       for (auto& resolver : m_decodeAudioResolvers)
773           resolver->reject(DOMException::create(InvalidStateError, "Audio context is going away"));
774       m_decodeAudioResolvers.clear();
775   }
776
777   void AbstractAudioContext::rejectPendingResolvers()
778   {
779 -     ASSERT(isMainThread());
779 +     DCHECK(isMainThread());
780
781       // Audio context is closing down so reject any resume promises that are still pending.
782
783       for (auto& resolver : m_resumeResolvers) {
784           resolver->reject(DOMException::create(InvalidStateError, "Audio context is going away"));
785       }
786       m_resumeResolvers.clear();
787       m_isResolvingResumePromises = false;
788
789       rejectPendingDecodeAudioDataResolvers();
790   }
791
792   const AtomicString& AbstractAudioContext::interfaceName() const
793   {
794       return EventTargetNames::AudioContext;
795   }
796
797   ExecutionContext* AbstractAudioContext::getExecutionContext() const
798   {
799       return ActiveDOMObject::getExecutionContext();
800   }
801
802   void AbstractAudioContext::startRendering()
803   {
804       // This is called for both online and offline contexts.
805 -     ASSERT(isMainThread());
805 +     DCHECK(isMainThread());
806 -     ASSERT(m_destinationNode);
806 +     DCHECK(m_destinationNode);
807
808       recordUserGestureState();
809
810       if (m_contextState == Suspended) {
811           destination()->audioDestinationHandler().startRendering();
812           setContextState(Running);
813       }
814   }
815
816   DEFINE_TRACE(AbstractAudioContext)
(...skipping 14 matching lines...)
831
832   SecurityOrigin* AbstractAudioContext::getSecurityOrigin() const
833   {
834       if (getExecutionContext())
835           return getExecutionContext()->getSecurityOrigin();
836
837       return nullptr;
838   }
839
840   } // namespace blink
