| OLD | NEW |
| 1 /* | 1 /* |
| 2 * Copyright (C) 2010, Google Inc. All rights reserved. | 2 * Copyright (C) 2010, Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
| 6 * are met: | 6 * are met: |
| 7 * 1. Redistributions of source code must retain the above copyright | 7 * 1. Redistributions of source code must retain the above copyright |
| 8 * notice, this list of conditions and the following disclaimer. | 8 * notice, this list of conditions and the following disclaimer. |
| 9 * 2. Redistributions in binary form must reproduce the above copyright | 9 * 2. Redistributions in binary form must reproduce the above copyright |
| 10 * notice, this list of conditions and the following disclaimer in the | 10 * notice, this list of conditions and the following disclaimer in the |
| (...skipping 69 matching lines...) |
| 80 // and OfflineAudioContext respectively. | 80 // and OfflineAudioContext respectively. |
| 81 | 81 |
| 82 // Constructor for rendering to the audio hardware. | 82 // Constructor for rendering to the audio hardware. |
| 83 AbstractAudioContext::AbstractAudioContext(Document* document) | 83 AbstractAudioContext::AbstractAudioContext(Document* document) |
| 84 : ActiveScriptWrappable(this) | 84 : ActiveScriptWrappable(this) |
| 85 , ActiveDOMObject(document) | 85 , ActiveDOMObject(document) |
| 86 , m_destinationNode(nullptr) | 86 , m_destinationNode(nullptr) |
| 87 , m_isCleared(false) | 87 , m_isCleared(false) |
| 88 , m_isResolvingResumePromises(false) | 88 , m_isResolvingResumePromises(false) |
| 89 , m_connectionCount(0) | 89 , m_connectionCount(0) |
| 90 , m_didInitializeContextGraphMutex(false) | |
| 91 , m_deferredTaskHandler(DeferredTaskHandler::create()) | 90 , m_deferredTaskHandler(DeferredTaskHandler::create()) |
| 92 , m_contextState(Suspended) | 91 , m_contextState(Suspended) |
| 93 , m_closedContextSampleRate(-1) | 92 , m_closedContextSampleRate(-1) |
| 94 , m_periodicWaveSine(nullptr) | 93 , m_periodicWaveSine(nullptr) |
| 95 , m_periodicWaveSquare(nullptr) | 94 , m_periodicWaveSquare(nullptr) |
| 96 , m_periodicWaveSawtooth(nullptr) | 95 , m_periodicWaveSawtooth(nullptr) |
| 97 , m_periodicWaveTriangle(nullptr) | 96 , m_periodicWaveTriangle(nullptr) |
| 98 { | 97 { |
| 99 m_didInitializeContextGraphMutex = true; | |
| 100 m_destinationNode = DefaultAudioDestinationNode::create(this); | 98 m_destinationNode = DefaultAudioDestinationNode::create(this); |
| 101 | 99 |
| 102 initialize(); | 100 initialize(); |
| 103 } | 101 } |
| 104 | 102 |
| 105 // Constructor for offline (non-realtime) rendering. | 103 // Constructor for offline (non-realtime) rendering. |
| 106 AbstractAudioContext::AbstractAudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate) | 104 AbstractAudioContext::AbstractAudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate) |
| 107 : ActiveScriptWrappable(this) | 105 : ActiveScriptWrappable(this) |
| 108 , ActiveDOMObject(document) | 106 , ActiveDOMObject(document) |
| 109 , m_destinationNode(nullptr) | 107 , m_destinationNode(nullptr) |
| 110 , m_isCleared(false) | 108 , m_isCleared(false) |
| 111 , m_isResolvingResumePromises(false) | 109 , m_isResolvingResumePromises(false) |
| 112 , m_connectionCount(0) | 110 , m_connectionCount(0) |
| 113 , m_didInitializeContextGraphMutex(false) | |
| 114 , m_deferredTaskHandler(DeferredTaskHandler::create()) | 111 , m_deferredTaskHandler(DeferredTaskHandler::create()) |
| 115 , m_contextState(Suspended) | 112 , m_contextState(Suspended) |
| 116 , m_closedContextSampleRate(-1) | 113 , m_closedContextSampleRate(-1) |
| 117 , m_periodicWaveSine(nullptr) | 114 , m_periodicWaveSine(nullptr) |
| 118 , m_periodicWaveSquare(nullptr) | 115 , m_periodicWaveSquare(nullptr) |
| 119 , m_periodicWaveSawtooth(nullptr) | 116 , m_periodicWaveSawtooth(nullptr) |
| 120 , m_periodicWaveTriangle(nullptr) | 117 , m_periodicWaveTriangle(nullptr) |
| 121 { | 118 { |
| 122 m_didInitializeContextGraphMutex = true; | |
| 123 } | 119 } |
| 124 | 120 |
| 125 AbstractAudioContext::~AbstractAudioContext() | 121 AbstractAudioContext::~AbstractAudioContext() |
| 126 { | 122 { |
| 127 deferredTaskHandler().contextWillBeDestroyed(); | 123 deferredTaskHandler().contextWillBeDestroyed(); |
| 128 // AudioNodes keep a reference to their context, so there should be no way to be in the destructor if there are still AudioNodes around. | 124 // AudioNodes keep a reference to their context, so there should be no way to be in the destructor if there are still AudioNodes around. |
| 129 ASSERT(!isDestinationInitialized()); | 125 ASSERT(!isDestinationInitialized()); |
| 130 ASSERT(!m_activeSourceNodes.size()); | 126 ASSERT(!m_activeSourceNodes.size()); |
| 131 ASSERT(!m_finishedSourceHandlers.size()); | 127 ASSERT(!m_finishedSourceHandlers.size()); |
| 132 ASSERT(!m_isResolvingResumePromises); | 128 ASSERT(!m_isResolvingResumePromises); |
| 133 ASSERT(!m_resumeResolvers.size()); | 129 ASSERT(!m_resumeResolvers.size()); |
| 134 } | 130 } |
| 135 | 131 |
| 136 void AbstractAudioContext::initialize() | 132 void AbstractAudioContext::initialize() |
| 137 { | 133 { |
| 138 if (isDestinationInitialized()) | 134 if (isDestinationInitialized()) |
| 139 return; | 135 return; |
| 140 | 136 |
| 141 FFTFrame::initialize(); | 137 FFTFrame::initialize(); |
| 142 m_listener = AudioListener::create(); | 138 m_listener = AudioListener::create(); |
| 143 | 139 |
| 144 if (m_destinationNode.get()) { | 140 if (m_destinationNode) { |
| 145 m_destinationNode->handler().initialize(); | 141 m_destinationNode->handler().initialize(); |
| 146 } | 142 } |
| 147 } | 143 } |
| 148 | 144 |
| 149 void AbstractAudioContext::clear() | 145 void AbstractAudioContext::clear() |
| 150 { | 146 { |
| 151 m_destinationNode.clear(); | 147 m_destinationNode.clear(); |
| 152 // The audio rendering thread is dead. Nobody will schedule AudioHandler | 148 // The audio rendering thread is dead. Nobody will schedule AudioHandler |
| 153 // deletion. Let's do it ourselves. | 149 // deletion. Let's do it ourselves. |
| 154 deferredTaskHandler().clearHandlersToBeDeleted(); | 150 deferredTaskHandler().clearHandlersToBeDeleted(); |
| (...skipping 33 matching lines...) |
| 188 { | 184 { |
| 189 // There's no pending activity if the audio context has been cleared. | 185 // There's no pending activity if the audio context has been cleared. |
| 190 return !m_isCleared; | 186 return !m_isCleared; |
| 191 } | 187 } |
| 192 | 188 |
| 193 AudioDestinationNode* AbstractAudioContext::destination() const | 189 AudioDestinationNode* AbstractAudioContext::destination() const |
| 194 { | 190 { |
| 195 // Cannot be called from the audio thread because this method touches objects managed by Oilpan, | 191 // Cannot be called from the audio thread because this method touches objects managed by Oilpan, |
| 196 // and the audio thread is not managed by Oilpan. | 192 // and the audio thread is not managed by Oilpan. |
| 197 ASSERT(!isAudioThread()); | 193 ASSERT(!isAudioThread()); |
| 198 return m_destinationNode.get(); | 194 return m_destinationNode; |
| 199 } | 195 } |
| 200 | 196 |
| 201 void AbstractAudioContext::throwExceptionForClosedState(ExceptionState& exceptionState) | 197 void AbstractAudioContext::throwExceptionForClosedState(ExceptionState& exceptionState) |
| 202 { | 198 { |
| 203 exceptionState.throwDOMException(InvalidStateError, "AudioContext has been closed."); | 199 exceptionState.throwDOMException(InvalidStateError, "AudioContext has been closed."); |
| 204 } | 200 } |
| 205 | 201 |
| 206 AudioBuffer* AbstractAudioContext::createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState& exceptionState) | 202 AudioBuffer* AbstractAudioContext::createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState& exceptionState) |
| 207 { | 203 { |
| 208 // It's ok to call createBuffer, even if the context is closed because the AudioBuffer doesn't | 204 // It's ok to call createBuffer, even if the context is closed because the AudioBuffer doesn't |
| (...skipping 555 matching lines...) |
| 764 { | 760 { |
| 765 dispatchEvent(Event::create(EventTypeNames::statechange)); | 761 dispatchEvent(Event::create(EventTypeNames::statechange)); |
| 766 } | 762 } |
| 767 | 763 |
| 768 void AbstractAudioContext::notifySourceNodeFinishedProcessing(AudioHandler* handler) | 764 void AbstractAudioContext::notifySourceNodeFinishedProcessing(AudioHandler* handler) |
| 769 { | 765 { |
| 770 ASSERT(isAudioThread()); | 766 ASSERT(isAudioThread()); |
| 771 m_finishedSourceHandlers.append(handler); | 767 m_finishedSourceHandlers.append(handler); |
| 772 } | 768 } |
| 773 | 769 |
| 770 void AbstractAudioContext::removeFinishedSourceNodes() |
| 771 { |
| 772 ASSERT(isMainThread()); |
| 773 AutoLocker locker(this); |
| 774 // Quadratic worst case, but sizes of both vectors are considered |
| 775 // manageable, especially |m_finishedSourceNodes| is likely to be short. |
| 776 for (AudioNode* node : m_finishedSourceNodes) { |
| 777 size_t i = m_activeSourceNodes.find(node); |
| 778 if (i != kNotFound) |
| 779 m_activeSourceNodes.remove(i); |
| 780 } |
| 781 m_finishedSourceNodes.clear(); |
| 782 } |
| 783 |
| 774 void AbstractAudioContext::releaseFinishedSourceNodes() | 784 void AbstractAudioContext::releaseFinishedSourceNodes() |
| 775 { | 785 { |
| 776 ASSERT(isGraphOwner()); | 786 ASSERT(isGraphOwner()); |
| 777 ASSERT(isAudioThread()); | 787 ASSERT(isAudioThread()); |
| 788 bool didRemove = false; |
| 778 for (AudioHandler* handler : m_finishedSourceHandlers) { | 789 for (AudioHandler* handler : m_finishedSourceHandlers) { |
| 779 for (unsigned i = 0; i < m_activeSourceNodes.size(); ++i) { | 790 for (unsigned i = 0; i < m_activeSourceNodes.size(); ++i) { |
| 780 if (handler == &m_activeSourceNodes[i]->handler()) { | 791 if (handler == &m_activeSourceNodes[i]->handler()) { |
| 781 handler->breakConnection(); | 792 handler->breakConnection(); |
| 782 m_activeSourceNodes.remove(i); | 793 m_finishedSourceNodes.append(m_activeSourceNodes[i]); |
| 794 didRemove = true; |
| 783 break; | 795 break; |
| 784 } | 796 } |
| 785 } | 797 } |
| 786 } | 798 } |
| 799 if (didRemove) |
| 800 Platform::current()->mainThread()->getWebTaskRunner()->postTask(BLINK_FROM_HERE, threadSafeBind(&AbstractAudioContext::removeFinishedSourceNodes, this)); |
| 787 | 801 |
| 788 m_finishedSourceHandlers.clear(); | 802 m_finishedSourceHandlers.clear(); |
| 789 } | 803 } |
| 790 | 804 |
| 791 void AbstractAudioContext::notifySourceNodeStartedProcessing(AudioNode* node) | 805 void AbstractAudioContext::notifySourceNodeStartedProcessing(AudioNode* node) |
| 792 { | 806 { |
| 793 ASSERT(isMainThread()); | 807 ASSERT(isMainThread()); |
| 794 AutoLocker locker(this); | 808 AutoLocker locker(this); |
| 795 | 809 |
| 796 m_activeSourceNodes.append(node); | 810 m_activeSourceNodes.append(node); |
| (...skipping 136 matching lines...) |
| 933 if (m_contextState == Suspended) { | 947 if (m_contextState == Suspended) { |
| 934 destination()->audioDestinationHandler().startRendering(); | 948 destination()->audioDestinationHandler().startRendering(); |
| 935 setContextState(Running); | 949 setContextState(Running); |
| 936 } | 950 } |
| 937 } | 951 } |
| 938 | 952 |
| 939 DEFINE_TRACE(AbstractAudioContext) | 953 DEFINE_TRACE(AbstractAudioContext) |
| 940 { | 954 { |
| 941 visitor->trace(m_destinationNode); | 955 visitor->trace(m_destinationNode); |
| 942 visitor->trace(m_listener); | 956 visitor->trace(m_listener); |
| 943 // trace() can be called in AbstractAudioContext constructor, and | 957 visitor->trace(m_activeSourceNodes); |
| 944 // m_contextGraphMutex might be unavailable. | |
| 945 if (m_didInitializeContextGraphMutex) { | |
| 946 AutoLocker lock(this); | |
| 947 visitor->trace(m_activeSourceNodes); | |
| 948 } else { | |
| 949 visitor->trace(m_activeSourceNodes); | |
| 950 } | |
| 951 visitor->trace(m_resumeResolvers); | 958 visitor->trace(m_resumeResolvers); |
| 952 visitor->trace(m_decodeAudioResolvers); | 959 visitor->trace(m_decodeAudioResolvers); |
| 953 | 960 |
| 954 visitor->trace(m_periodicWaveSine); | 961 visitor->trace(m_periodicWaveSine); |
| 955 visitor->trace(m_periodicWaveSquare); | 962 visitor->trace(m_periodicWaveSquare); |
| 956 visitor->trace(m_periodicWaveSawtooth); | 963 visitor->trace(m_periodicWaveSawtooth); |
| 957 visitor->trace(m_periodicWaveTriangle); | 964 visitor->trace(m_periodicWaveTriangle); |
| 958 EventTargetWithInlineData::trace(visitor); | 965 EventTargetWithInlineData::trace(visitor); |
| 959 ActiveDOMObject::trace(visitor); | 966 ActiveDOMObject::trace(visitor); |
| 960 } | 967 } |
| 961 | 968 |
| 962 SecurityOrigin* AbstractAudioContext::getSecurityOrigin() const | 969 SecurityOrigin* AbstractAudioContext::getSecurityOrigin() const |
| 963 { | 970 { |
| 964 if (getExecutionContext()) | 971 if (getExecutionContext()) |
| 965 return getExecutionContext()->getSecurityOrigin(); | 972 return getExecutionContext()->getSecurityOrigin(); |
| 966 | 973 |
| 967 return nullptr; | 974 return nullptr; |
| 968 } | 975 } |
| 969 | 976 |
| 970 } // namespace blink | 977 } // namespace blink |
| 971 | 978 |
| OLD | NEW |
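
For context, the pattern this patch moves to can be sketched outside Blink: the rendering thread only records which source nodes have finished (breaking their connections under the graph lock), and the list of active nodes is pruned later on the owning thread via a posted task, so the heap-managed vector is mutated on one thread only. The sketch below is a simplified standalone analogue in plain C++, not Blink code: std::mutex and a direct call stand in for the graph lock and the main-thread task runner, and the names Context, noteFinished and removeFinished are illustrative only.

    // Standalone sketch of the handoff: render thread records finishes,
    // owning thread prunes the active list later.
    #include <algorithm>
    #include <iostream>
    #include <mutex>
    #include <vector>

    struct Node { int id; };

    class Context {
    public:
        void noteStarted(Node* node) {                 // owning thread
            std::lock_guard<std::mutex> lock(m_mutex);
            m_active.push_back(node);
        }

        void noteFinished(Node* node) {                // render thread
            std::lock_guard<std::mutex> lock(m_mutex);
            m_finished.push_back(node);
            // Real code would now post removeFinished() to the owning
            // thread's task runner instead of touching m_active here.
        }

        void removeFinished() {                        // owning thread, posted task
            std::lock_guard<std::mutex> lock(m_mutex);
            // Quadratic worst case, but both vectors are expected to stay short.
            for (Node* node : m_finished) {
                auto it = std::find(m_active.begin(), m_active.end(), node);
                if (it != m_active.end())
                    m_active.erase(it);
            }
            m_finished.clear();
        }

        size_t activeCount() {
            std::lock_guard<std::mutex> lock(m_mutex);
            return m_active.size();
        }

    private:
        std::mutex m_mutex;
        std::vector<Node*> m_active;
        std::vector<Node*> m_finished;
    };

    int main() {
        Context context;
        Node a{1}, b{2};
        context.noteStarted(&a);
        context.noteStarted(&b);
        context.noteFinished(&a);   // render thread records the finish
        context.removeFinished();   // owning thread prunes later
        std::cout << context.activeCount() << "\n";  // prints 1
    }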