Chromium Code Reviews

Side by Side Diff: Source/modules/webaudio/AudioContext.cpp

Issue 1214463003: Split "Online" and "Offline" AudioContext processing (Closed) Base URL: svn://svn.chromium.org/blink/trunk
Patch Set: Created 5 years, 6 months ago
1 /* 1 /*
2 * Copyright (C) 2010, Google Inc. All rights reserved. 2 * Copyright (C) 2010, Google Inc. All rights reserved.
3 * 3 *
4 * Redistribution and use in source and binary forms, with or without 4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions 5 * modification, are permitted provided that the following conditions
6 * are met: 6 * are met:
7 * 1. Redistributions of source code must retain the above copyright 7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer. 8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright 9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the 10 * notice, this list of conditions and the following disclaimer in the
(...skipping 39 matching lines...)
50 #include "modules/webaudio/DefaultAudioDestinationNode.h" 50 #include "modules/webaudio/DefaultAudioDestinationNode.h"
51 #include "modules/webaudio/DelayNode.h" 51 #include "modules/webaudio/DelayNode.h"
52 #include "modules/webaudio/DynamicsCompressorNode.h" 52 #include "modules/webaudio/DynamicsCompressorNode.h"
53 #include "modules/webaudio/GainNode.h" 53 #include "modules/webaudio/GainNode.h"
54 #include "modules/webaudio/MediaElementAudioSourceNode.h" 54 #include "modules/webaudio/MediaElementAudioSourceNode.h"
55 #include "modules/webaudio/MediaStreamAudioDestinationNode.h" 55 #include "modules/webaudio/MediaStreamAudioDestinationNode.h"
56 #include "modules/webaudio/MediaStreamAudioSourceNode.h" 56 #include "modules/webaudio/MediaStreamAudioSourceNode.h"
57 #include "modules/webaudio/OfflineAudioCompletionEvent.h" 57 #include "modules/webaudio/OfflineAudioCompletionEvent.h"
58 #include "modules/webaudio/OfflineAudioContext.h" 58 #include "modules/webaudio/OfflineAudioContext.h"
59 #include "modules/webaudio/OfflineAudioDestinationNode.h" 59 #include "modules/webaudio/OfflineAudioDestinationNode.h"
60 #include "modules/webaudio/OnlineAudioContext.h"
60 #include "modules/webaudio/OscillatorNode.h" 61 #include "modules/webaudio/OscillatorNode.h"
61 #include "modules/webaudio/PannerNode.h" 62 #include "modules/webaudio/PannerNode.h"
62 #include "modules/webaudio/PeriodicWave.h" 63 #include "modules/webaudio/PeriodicWave.h"
63 #include "modules/webaudio/ScriptProcessorNode.h" 64 #include "modules/webaudio/ScriptProcessorNode.h"
64 #include "modules/webaudio/StereoPannerNode.h" 65 #include "modules/webaudio/StereoPannerNode.h"
65 #include "modules/webaudio/WaveShaperNode.h" 66 #include "modules/webaudio/WaveShaperNode.h"
66 #include "platform/ThreadSafeFunctional.h" 67 #include "platform/ThreadSafeFunctional.h"
67 #include "public/platform/Platform.h" 68 #include "public/platform/Platform.h"
68 #include "wtf/text/WTFString.h" 69 #include "wtf/text/WTFString.h"
69 70
70 #if DEBUG_AUDIONODE_REFERENCES
71 #include <stdio.h>
72 #endif
73
74 namespace blink { 71 namespace blink {
75 72
76 // Don't allow more than this number of simultaneous AudioContexts talking to hardware.
77 const unsigned MaxHardwareContexts = 6;
78 unsigned AudioContext::s_hardwareContextCount = 0;
79 unsigned AudioContext::s_contextId = 0;
80
81 AudioContext* AudioContext::create(Document& document, ExceptionState& exceptionState) 73 AudioContext* AudioContext::create(Document& document, ExceptionState& exceptionState)
82 { 74 {
83 ASSERT(isMainThread()); 75 return OnlineAudioContext::create(document, exceptionState);
84 if (s_hardwareContextCount >= MaxHardwareContexts) { 76 }
85 exceptionState.throwDOMException(
86 NotSupportedError,
87 ExceptionMessages::indexExceedsMaximumBound(
88 "number of hardware contexts",
89 s_hardwareContextCount,
90 MaxHardwareContexts));
91 return nullptr;
92 }
93 77
94 AudioContext* audioContext = new AudioContext(&document); 78 // FIXME(dominicc): Devolve these constructors to OnlineAudioContext
95 audioContext->suspendIfNeeded(); 79 // and OfflineAudioContext respectively.
96 return audioContext;
97 }
98 80
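The hardware-context limit deleted from create() above presumably follows the online-specific behavior into OnlineAudioContext; a minimal sketch of such a factory, assuming it simply absorbs the deleted lines (OnlineAudioContext's definition is not shown in this CL, so its signature and the placement of s_hardwareContextCount/MaxHardwareContexts there are assumptions):

    // Hypothetical OnlineAudioContext.cpp -- mirrors the logic removed from
    // AudioContext::create above; names and placement are assumptions.
    OnlineAudioContext* OnlineAudioContext::create(Document& document, ExceptionState& exceptionState)
    {
        ASSERT(isMainThread());
        if (s_hardwareContextCount >= MaxHardwareContexts) {
            exceptionState.throwDOMException(
                NotSupportedError,
                ExceptionMessages::indexExceedsMaximumBound(
                    "number of hardware contexts",
                    s_hardwareContextCount,
                    MaxHardwareContexts));
            return nullptr;
        }

        OnlineAudioContext* audioContext = new OnlineAudioContext(&document);
        audioContext->suspendIfNeeded();
        return audioContext;
    }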
99 // Constructor for rendering to the audio hardware. 81 // Constructor for rendering to the audio hardware.
100 AudioContext::AudioContext(Document* document) 82 AudioContext::AudioContext(Document* document)
101 : ActiveDOMObject(document) 83 : ActiveDOMObject(document)
102 , m_isStopScheduled(false) 84 , m_isStopScheduled(false)
103 , m_isCleared(false) 85 , m_isCleared(false)
104 , m_isInitialized(false) 86 , m_isInitialized(false)
105 , m_destinationNode(nullptr) 87 , m_destinationNode(nullptr)
106 , m_isResolvingResumePromises(false) 88 , m_isResolvingResumePromises(false)
107 , m_connectionCount(0) 89 , m_connectionCount(0)
108 , m_didInitializeContextGraphMutex(false) 90 , m_didInitializeContextGraphMutex(false)
109 , m_deferredTaskHandler(DeferredTaskHandler::create()) 91 , m_deferredTaskHandler(DeferredTaskHandler::create())
110 , m_isOfflineContext(false)
111 , m_contextState(Suspended) 92 , m_contextState(Suspended)
112 { 93 {
113 m_didInitializeContextGraphMutex = true; 94 m_didInitializeContextGraphMutex = true;
114 m_destinationNode = DefaultAudioDestinationNode::create(this); 95 m_destinationNode = DefaultAudioDestinationNode::create(this);
115 96
116 initialize(); 97 initialize();
117 } 98 }
118 99
119 // Constructor for offline (non-realtime) rendering. 100 // Constructor for offline (non-realtime) rendering.
120 AudioContext::AudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate) 101 AudioContext::AudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate)
121 : ActiveDOMObject(document) 102 : ActiveDOMObject(document)
122 , m_isStopScheduled(false) 103 , m_isStopScheduled(false)
123 , m_isCleared(false) 104 , m_isCleared(false)
124 , m_isInitialized(false) 105 , m_isInitialized(false)
125 , m_destinationNode(nullptr) 106 , m_destinationNode(nullptr)
126 , m_isResolvingResumePromises(false) 107 , m_isResolvingResumePromises(false)
127 , m_connectionCount(0) 108 , m_connectionCount(0)
128 , m_didInitializeContextGraphMutex(false) 109 , m_didInitializeContextGraphMutex(false)
129 , m_deferredTaskHandler(DeferredTaskHandler::create()) 110 , m_deferredTaskHandler(DeferredTaskHandler::create())
130 , m_isOfflineContext(true)
131 , m_contextState(Suspended) 111 , m_contextState(Suspended)
132 { 112 {
133 m_didInitializeContextGraphMutex = true; 113 m_didInitializeContextGraphMutex = true;
134 // Create a new destination for offline rendering. 114 // Create a new destination for offline rendering.
135 m_renderTarget = AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate); 115 m_renderTarget = AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate);
136 if (m_renderTarget.get()) 116 if (m_renderTarget.get())
137 m_destinationNode = OfflineAudioDestinationNode::create(this, m_renderTarget.get()); 117 m_destinationNode = OfflineAudioDestinationNode::create(this, m_renderTarget.get());
138 118
139 initialize(); 119 initialize();
140 } 120 }
141 121
142 AudioContext::~AudioContext() 122 AudioContext::~AudioContext()
143 { 123 {
144 #if DEBUG_AUDIONODE_REFERENCES
145 fprintf(stderr, "%p: AudioContext::~AudioContext(): %u\n", this, m_contextId);
146 #endif
147 deferredTaskHandler().contextWillBeDestroyed(); 124 deferredTaskHandler().contextWillBeDestroyed();
148 // AudioNodes keep a reference to their context, so there should be no way to be in the destructor if there are still AudioNodes around. 125 // AudioNodes keep a reference to their context, so there should be no way to be in the destructor if there are still AudioNodes around.
149 ASSERT(!m_isInitialized); 126 ASSERT(!m_isInitialized);
150 ASSERT(!m_activeSourceNodes.size()); 127 ASSERT(!m_activeSourceNodes.size());
151 ASSERT(!m_finishedSourceHandlers.size()); 128 ASSERT(!m_finishedSourceHandlers.size());
152 ASSERT(!m_isResolvingResumePromises); 129 ASSERT(!m_isResolvingResumePromises);
153 ASSERT(!m_resumeResolvers.size()); 130 ASSERT(!m_resumeResolvers.size());
154 } 131 }
155 132
156 void AudioContext::initialize() 133 void AudioContext::initialize()
157 { 134 {
158 if (isInitialized()) 135 if (isInitialized())
159 return; 136 return;
160 137
161 FFTFrame::initialize(); 138 FFTFrame::initialize();
162 m_listener = AudioListener::create(); 139 m_listener = AudioListener::create();
163 140
164 if (m_destinationNode.get()) { 141 if (m_destinationNode.get()) {
165 m_destinationNode->handler().initialize(); 142 m_destinationNode->handler().initialize();
166
167 if (!isOfflineContext()) {
168 // This starts the audio thread. The destination node's provideInput() method will now be called repeatedly to render audio.
169 // Each time provideInput() is called, a portion of the audio stream is rendered. Let's call this time period a "render quantum".
170 // NOTE: for now default AudioContext does not need an explicit startRendering() call from JavaScript.
171 // We may want to consider requiring it for symmetry with OfflineAudioContext.
172 startRendering();
173 ++s_hardwareContextCount;
174 }
175
176 m_contextId = s_contextId++;
177 m_isInitialized = true; 143 m_isInitialized = true;
178 #if DEBUG_AUDIONODE_REFERENCES
179 fprintf(stderr, "%p: AudioContext::AudioContext(): %u #%u\n",
180 this, m_contextId, AudioContext::s_hardwareContextCount);
181 #endif
182 } 144 }
183 } 145 }
184 146
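The online-only startup block deleted from initialize() above (starting the audio thread and bumping s_hardwareContextCount) has to live somewhere on the online path; one hedged possibility is a hook that OnlineAudioContext overrides. The didInitialize() name and the idea that initialize() would call it are assumptions, since the subclass side is not part of this file:

    // Hypothetical hook in OnlineAudioContext -- restores the behavior removed
    // from AudioContext::initialize(); hook name and call site are assumptions.
    void OnlineAudioContext::didInitialize()
    {
        // Starting the destination's rendering also starts the audio thread;
        // provideInput() is then called once per render quantum.
        startRendering();
        ++s_hardwareContextCount;
    }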
185 void AudioContext::clear() 147 void AudioContext::clear()
186 { 148 {
187 m_destinationNode.clear(); 149 m_destinationNode.clear();
188 // The audio rendering thread is dead. Nobody will schedule AudioHandler 150 // The audio rendering thread is dead. Nobody will schedule AudioHandler
189 // deletion. Let's do it ourselves. 151 // deletion. Let's do it ourselves.
190 deferredTaskHandler().clearHandlersToBeDeleted(); 152 deferredTaskHandler().clearHandlersToBeDeleted();
191 m_isCleared = true; 153 m_isCleared = true;
192 } 154 }
193 155
194 void AudioContext::uninitialize() 156 void AudioContext::uninitialize()
195 { 157 {
196 ASSERT(isMainThread()); 158 ASSERT(isMainThread());
197 159
198 if (!isInitialized()) 160 if (!isInitialized())
199 return; 161 return;
200 162
201 m_isInitialized = false; 163 m_isInitialized = false;
202 164
203 // This stops the audio thread and all audio rendering. 165 // This stops the audio thread and all audio rendering.
204 if (m_destinationNode) 166 if (m_destinationNode)
205 m_destinationNode->handler().uninitialize(); 167 m_destinationNode->handler().uninitialize();
206 168
207 if (!isOfflineContext()) {
208 ASSERT(s_hardwareContextCount);
209 --s_hardwareContextCount;
210 }
211
212 // Get rid of the sources which may still be playing. 169 // Get rid of the sources which may still be playing.
213 releaseActiveSourceNodes(); 170 releaseActiveSourceNodes();
214 171
215 // Reject any pending resolvers before we go away. 172 // Reject any pending resolvers before we go away.
216 rejectPendingResolvers(); 173 rejectPendingResolvers();
217 174 didClose();
218 // For an offline audio context, the completion event will set the state to closed. For an
219 // online context, we need to do it here. We only want to set the closed state once.
220 if (!isOfflineContext())
221 setContextState(Closed);
222
223 // Resolve the promise now, if any
224 if (m_closeResolver)
225 m_closeResolver->resolve();
226 175
227 ASSERT(m_listener); 176 ASSERT(m_listener);
228 m_listener->waitForHRTFDatabaseLoaderThreadCompletion(); 177 m_listener->waitForHRTFDatabaseLoaderThreadCompletion();
229 178
230 clear(); 179 clear();
231 } 180 }
232 181
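uninitialize() now ends with didClose() in place of the online-only shutdown that was removed (setting the Closed state and resolving any pending close promise). A plausible reading, sketched below, is that didClose() is a virtual hook that OnlineAudioContext overrides with exactly that removed logic; the override is an assumption and is not shown in this diff:

    // Hypothetical override in OnlineAudioContext, mirroring the lines removed
    // from AudioContext::uninitialize() above.
    void OnlineAudioContext::didClose()
    {
        // For an offline context the completion event sets the state to Closed;
        // for an online context it has to happen here, exactly once.
        setContextState(Closed);

        // Resolve the promise returned by close(), if close() was called.
        if (m_closeResolver)
            m_closeResolver->resolve();
    }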
233 void AudioContext::stop() 182 void AudioContext::stop()
234 { 183 {
235 // Usually ExecutionContext calls stop twice. 184 // Usually ExecutionContext calls stop twice.
(...skipping 484 matching lines...)
720 // Notify context that state changed 669 // Notify context that state changed
721 if (executionContext()) 670 if (executionContext())
722 executionContext()->postTask(FROM_HERE, createSameThreadTask(&AudioContext::notifyStateChange, this)); 671 executionContext()->postTask(FROM_HERE, createSameThreadTask(&AudioContext::notifyStateChange, this));
723 } 672 }
724 673
725 void AudioContext::notifyStateChange() 674 void AudioContext::notifyStateChange()
726 { 675 {
727 dispatchEvent(Event::create(EventTypeNames::statechange)); 676 dispatchEvent(Event::create(EventTypeNames::statechange));
728 } 677 }
729 678
730 ScriptPromise AudioContext::suspendContext(ScriptState* scriptState)
731 {
732 ASSERT(isMainThread());
733 AutoLocker locker(this);
734
735 if (isOfflineContext()) {
736 return ScriptPromise::rejectWithDOMException(
737 scriptState,
738 DOMException::create(
739 InvalidAccessError,
740 "cannot suspend an OfflineAudioContext"));
741 }
742
743 RefPtrWillBeRawPtr<ScriptPromiseResolver> resolver = ScriptPromiseResolver::create(scriptState);
744 ScriptPromise promise = resolver->promise();
745
746 if (m_contextState == Closed) {
747 resolver->reject(
748 DOMException::create(InvalidStateError, "Cannot suspend a context that has been closed"));
749 } else {
750 // Stop rendering now.
751 if (m_destinationNode)
752 stopRendering();
753
754 // Since we don't have any way of knowing when the hardware actually stops, we'll just
755 // resolve the promise now.
756 resolver->resolve();
757 }
758
759 return promise;
760 }
761
762 ScriptPromise AudioContext::resumeContext(ScriptState* scriptState)
763 {
764 ASSERT(isMainThread());
765 AutoLocker locker(this);
766
767 if (isOfflineContext()) {
768 return ScriptPromise::rejectWithDOMException(
769 scriptState,
770 DOMException::create(
771 InvalidAccessError,
772 "cannot resume an OfflineAudioContext"));
773 }
774
775 if (isContextClosed()) {
776 return ScriptPromise::rejectWithDOMException(
777 scriptState,
778 DOMException::create(
779 InvalidAccessError,
780 "cannot resume a closed AudioContext"));
781 }
782
783 RefPtrWillBeRawPtr<ScriptPromiseResolver> resolver = ScriptPromiseResolver::create(scriptState);
784 ScriptPromise promise = resolver->promise();
785
786 // Restart the destination node to pull on the audio graph.
787 if (m_destinationNode)
788 startRendering();
789
790 // Save the resolver which will get resolved when the destination node starts pulling on the
791 // graph again.
792 m_resumeResolvers.append(resolver);
793
794 return promise;
795 }
796
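suspendContext() and resumeContext() disappear from this file along with their isOfflineContext() guards, which only makes sense if they reappear on OnlineAudioContext, where the guard is unnecessary. A sketch of the suspend side under that assumption (method placement is not shown in this CL):

    // Hypothetical OnlineAudioContext::suspendContext -- the removed method
    // minus the offline guard; assumes stopRendering() also stays reachable.
    ScriptPromise OnlineAudioContext::suspendContext(ScriptState* scriptState)
    {
        ASSERT(isMainThread());
        AutoLocker locker(this);

        RefPtrWillBeRawPtr<ScriptPromiseResolver> resolver = ScriptPromiseResolver::create(scriptState);
        ScriptPromise promise = resolver->promise();

        if (m_contextState == Closed) {
            resolver->reject(
                DOMException::create(InvalidStateError, "Cannot suspend a context that has been closed"));
        } else {
            // Stop rendering now; since there is no way to know when the hardware
            // actually stops, resolve the promise immediately.
            if (m_destinationNode)
                stopRendering();
            resolver->resolve();
        }

        return promise;
    }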
797 void AudioContext::notifySourceNodeFinishedProcessing(AudioHandler* handler) 679 void AudioContext::notifySourceNodeFinishedProcessing(AudioHandler* handler)
798 { 680 {
799 ASSERT(isAudioThread()); 681 ASSERT(isAudioThread());
800 m_finishedSourceHandlers.append(handler); 682 m_finishedSourceHandlers.append(handler);
801 } 683 }
802 684
803 void AudioContext::releaseFinishedSourceNodes() 685 void AudioContext::releaseFinishedSourceNodes()
804 { 686 {
805 ASSERT(isGraphOwner()); 687 ASSERT(isGraphOwner());
806 ASSERT(isAudioThread()); 688 ASSERT(isAudioThread());
(...skipping 141 matching lines...)
948 // This is called for both online and offline contexts. 830 // This is called for both online and offline contexts.
949 ASSERT(isMainThread()); 831 ASSERT(isMainThread());
950 ASSERT(m_destinationNode); 832 ASSERT(m_destinationNode);
951 833
952 if (m_contextState == Suspended) { 834 if (m_contextState == Suspended) {
953 destination()->audioDestinationHandler().startRendering(); 835 destination()->audioDestinationHandler().startRendering();
954 setContextState(Running); 836 setContextState(Running);
955 } 837 }
956 } 838 }
957 839
958 void AudioContext::stopRendering()
959 {
960 ASSERT(isMainThread());
961 ASSERT(m_destinationNode);
962 ASSERT(!isOfflineContext());
963
964 if (m_contextState == Running) {
965 destination()->audioDestinationHandler().stopRendering();
966 setContextState(Suspended);
967 deferredTaskHandler().clearHandlersToBeDeleted();
968 }
969 }
970
971 void AudioContext::fireCompletionEvent() 840 void AudioContext::fireCompletionEvent()
972 { 841 {
973 ASSERT(isMainThread()); 842 ASSERT(isMainThread());
974 if (!isMainThread()) 843 if (!isMainThread())
975 return; 844 return;
976 845
977 AudioBuffer* renderedBuffer = m_renderTarget.get(); 846 AudioBuffer* renderedBuffer = m_renderTarget.get();
978 847
979 // For an offline context, we set the state to closed here so that the oncomplete handler sees 848 // For an offline context, we set the state to closed here so that the oncomplete handler sees
980 // that the context has been closed. 849 // that the context has been closed.
981 setContextState(Closed); 850 setContextState(Closed);
982 851
983 ASSERT(renderedBuffer); 852 ASSERT(renderedBuffer);
984 if (!renderedBuffer) 853 if (!renderedBuffer)
985 return; 854 return;
986 855
987 // Avoid firing the event if the document has already gone away. 856 // Avoid firing the event if the document has already gone away.
988 if (executionContext()) { 857 if (executionContext()) {
989 // Call the offline rendering completion event listener and resolve the promise too. 858 // Call the offline rendering completion event listener and resolve the promise too.
990 dispatchEvent(OfflineAudioCompletionEvent::create(renderedBuffer)); 859 dispatchEvent(OfflineAudioCompletionEvent::create(renderedBuffer));
991 m_offlineResolver->resolve(renderedBuffer); 860 m_offlineResolver->resolve(renderedBuffer);
992 } 861 }
993 } 862 }
994 863
995 DEFINE_TRACE(AudioContext) 864 DEFINE_TRACE(AudioContext)
996 { 865 {
997 visitor->trace(m_closeResolver);
998 visitor->trace(m_offlineResolver); 866 visitor->trace(m_offlineResolver);
999 visitor->trace(m_renderTarget); 867 visitor->trace(m_renderTarget);
1000 visitor->trace(m_destinationNode); 868 visitor->trace(m_destinationNode);
1001 visitor->trace(m_listener); 869 visitor->trace(m_listener);
1002 // trace() can be called in AudioContext constructor, and 870 // trace() can be called in AudioContext constructor, and
1003 // m_contextGraphMutex might be unavailable. 871 // m_contextGraphMutex might be unavailable.
1004 if (m_didInitializeContextGraphMutex) { 872 if (m_didInitializeContextGraphMutex) {
1005 AutoLocker lock(this); 873 AutoLocker lock(this);
1006 visitor->trace(m_activeSourceNodes); 874 visitor->trace(m_activeSourceNodes);
1007 } else { 875 } else {
1008 visitor->trace(m_activeSourceNodes); 876 visitor->trace(m_activeSourceNodes);
1009 } 877 }
1010 visitor->trace(m_resumeResolvers); 878 visitor->trace(m_resumeResolvers);
1011 RefCountedGarbageCollectedEventTargetWithInlineData<AudioContext>::trace(visitor); 879 RefCountedGarbageCollectedEventTargetWithInlineData<AudioContext>::trace(visitor);
1012 ActiveDOMObject::trace(visitor); 880 ActiveDOMObject::trace(visitor);
1013 } 881 }
1014 882
1015 SecurityOrigin* AudioContext::securityOrigin() const 883 SecurityOrigin* AudioContext::securityOrigin() const
1016 { 884 {
1017 if (executionContext()) 885 if (executionContext())
1018 return executionContext()->securityOrigin(); 886 return executionContext()->securityOrigin();
1019 887
1020 return nullptr; 888 return nullptr;
1021 } 889 }
1022 890
1023 ScriptPromise AudioContext::closeContext(ScriptState* scriptState)
1024 {
1025 if (isOfflineContext()) {
1026 return ScriptPromise::rejectWithDOMException(
1027 scriptState,
1028 DOMException::create(InvalidAccessError, "cannot close an OfflineAudioContext."));
1029 }
1030
1031 if (isContextClosed()) {
1032 // We've already closed the context previously, but it hasn't yet been resolved, so just
1033 // create a new promise and reject it.
1034 return ScriptPromise::rejectWithDOMException(
1035 scriptState,
1036 DOMException::create(InvalidStateError,
1037 "Cannot close a context that is being closed or has already been closed."));
1038 }
1039
1040 m_closeResolver = ScriptPromiseResolver::create(scriptState);
1041 ScriptPromise promise = m_closeResolver->promise();
1042
1043 // Stop the audio context. This will stop the destination node from pulling audio anymore. And
1044 // since we have disconnected the destination from the audio graph, and thus has no references,
1045 // the destination node can GCed if JS has no references. stop() will also resolve the Promise
1046 // created here.
1047 stop();
1048
1049 return promise;
1050 }
1051
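Similarly, closeContext() and its m_closeResolver bookkeeping leave this file (m_closeResolver is also dropped from trace() earlier in the diff), presumably landing on OnlineAudioContext. A sketch, assuming the body moves essentially unchanged apart from dropping the offline guard:

    // Hypothetical OnlineAudioContext::closeContext, mirroring the removed method.
    ScriptPromise OnlineAudioContext::closeContext(ScriptState* scriptState)
    {
        if (isContextClosed()) {
            // close() was already called but not yet resolved; reject a fresh promise.
            return ScriptPromise::rejectWithDOMException(
                scriptState,
                DOMException::create(InvalidStateError,
                    "Cannot close a context that is being closed or has already been closed."));
        }

        m_closeResolver = ScriptPromiseResolver::create(scriptState);
        ScriptPromise promise = m_closeResolver->promise();

        // Stopping the context stops the destination node from pulling audio;
        // stop() eventually resolves the promise created here.
        stop();

        return promise;
    }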
1052 } // namespace blink 891 } // namespace blink
1053 892
1054 #endif // ENABLE(WEB_AUDIO) 893 #endif // ENABLE(WEB_AUDIO)
