Chromium Code Reviews

| OLD | NEW |
|---|---|
| 1 /* | 1 /* |
| 2 * Copyright (C) 2010, Google Inc. All rights reserved. | 2 * Copyright (C) 2010, Google Inc. All rights reserved. |
| 3 * | 3 * |
| 4 * Redistribution and use in source and binary forms, with or without | 4 * Redistribution and use in source and binary forms, with or without |
| 5 * modification, are permitted provided that the following conditions | 5 * modification, are permitted provided that the following conditions |
| 6 * are met: | 6 * are met: |
| 7 * 1. Redistributions of source code must retain the above copyright | 7 * 1. Redistributions of source code must retain the above copyright |
| 8 * notice, this list of conditions and the following disclaimer. | 8 * notice, this list of conditions and the following disclaimer. |
| 9 * 2. Redistributions in binary form must reproduce the above copyright | 9 * 2. Redistributions in binary form must reproduce the above copyright |
| 10 * notice, this list of conditions and the following disclaimer in the | 10 * notice, this list of conditions and the following disclaimer in the |
| (...skipping 81 matching lines...) | |
| 92 } | 92 } |
| 93 | 93 |
| 94 AudioContext* audioContext = new AudioContext(&document); | 94 AudioContext* audioContext = new AudioContext(&document); |
| 95 audioContext->suspendIfNeeded(); | 95 audioContext->suspendIfNeeded(); |
| 96 return audioContext; | 96 return audioContext; |
| 97 } | 97 } |
| 98 | 98 |
| 99 // Constructor for rendering to the audio hardware. | 99 // Constructor for rendering to the audio hardware. |
| 100 AudioContext::AudioContext(Document* document) | 100 AudioContext::AudioContext(Document* document) |
| 101 : ActiveDOMObject(document) | 101 : ActiveDOMObject(document) |
| | 102 , m_destinationNode(nullptr) |
| | 103 , m_didInitializeContextGraphMutex(false) |
| | 104 , m_contextState(Suspended) |
| 102 , m_isStopScheduled(false) | 105 , m_isStopScheduled(false) |
| 103 , m_isCleared(false) | 106 , m_isCleared(false) |
| 104 , m_isInitialized(false) | 107 , m_isInitialized(false) |
| 105 , m_destinationNode(nullptr) | |
| 106 , m_isResolvingResumePromises(false) | 108 , m_isResolvingResumePromises(false) |
| 107 , m_connectionCount(0) | 109 , m_connectionCount(0) |
| 108 , m_didInitializeContextGraphMutex(false) | |
| 109 , m_deferredTaskHandler(DeferredTaskHandler::create()) | 110 , m_deferredTaskHandler(DeferredTaskHandler::create()) |
| 110 , m_isOfflineContext(false) | 111 , m_isOfflineContext(false) |
| 111 , m_contextState(Suspended) | |
| 112 , m_cachedSampleFrame(0) | 112 , m_cachedSampleFrame(0) |
| 113 { | 113 { |
| 114 m_didInitializeContextGraphMutex = true; | 114 m_didInitializeContextGraphMutex = true; |
| 115 m_destinationNode = DefaultAudioDestinationNode::create(this); | 115 m_destinationNode = DefaultAudioDestinationNode::create(this); |
| 116 | 116 |
| 117 initialize(); | 117 initialize(); |
| 118 } | 118 } |
| 119 | 119 |
| 120 // Constructor for offline (non-realtime) rendering. | 120 // Constructor for offline (non-realtime) rendering. |
| 121 AudioContext::AudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate) | 121 AudioContext::AudioContext(Document* document, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate) |
| 122 : ActiveDOMObject(document) | 122 : ActiveDOMObject(document) |
| | 123 , m_destinationNode(nullptr) |
| | 124 , m_didInitializeContextGraphMutex(false) |
| | 125 , m_contextState(Suspended) |
| 123 , m_isStopScheduled(false) | 126 , m_isStopScheduled(false) |
| 124 , m_isCleared(false) | 127 , m_isCleared(false) |
| 125 , m_isInitialized(false) | 128 , m_isInitialized(false) |
| 126 , m_destinationNode(nullptr) | |
| 127 , m_isResolvingResumePromises(false) | 129 , m_isResolvingResumePromises(false) |
| 128 , m_connectionCount(0) | 130 , m_connectionCount(0) |
| 129 , m_didInitializeContextGraphMutex(false) | |
| 130 , m_deferredTaskHandler(DeferredTaskHandler::create()) | 131 , m_deferredTaskHandler(DeferredTaskHandler::create()) |
| 131 , m_isOfflineContext(true) | 132 , m_isOfflineContext(true) |
| 132 , m_contextState(Suspended) | |
| 133 , m_cachedSampleFrame(0) | 133 , m_cachedSampleFrame(0) |
| 134 { | 134 { |
| 135 m_didInitializeContextGraphMutex = true; | 135 m_didInitializeContextGraphMutex = true; |
| | 136 |
| 136 // Create a new destination for offline rendering. | 137 // Create a new destination for offline rendering. |
| 137 m_renderTarget = AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate); | 138 m_renderTarget = AudioBuffer::create(numberOfChannels, numberOfFrames, sampleRate); |
| 138 if (m_renderTarget.get()) | 139 if (m_renderTarget.get()) |
| 139 m_destinationNode = OfflineAudioDestinationNode::create(this, m_renderTarget.get()); | 140 m_destinationNode = OfflineAudioDestinationNode::create(this, m_renderTarget.get()); |
| 140 | 141 |
| 141 initialize(); | 142 initialize(); |
| 142 } | 143 } |
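
The initializer-list reordering in both constructors above appears to put the members back in their declaration order (m_destinationNode, m_didInitializeContextGraphMutex, and m_contextState move to the front of the list). A minimal standalone C++ sketch of why the two orders are normally kept in sync follows; the Context type and member names are illustrative only, not taken from this CL.

```cpp
#include <iostream>

// Minimal sketch, not Blink code: data members are constructed in
// declaration order, regardless of the order they appear in the
// mem-initializer list. Keeping the two orders identical avoids
// -Wreorder warnings and accidental reads of not-yet-initialized members.
struct Context {
    bool m_didInitializeGraphMutex;  // declared first, constructed first
    int m_contextState;              // declared second, constructed second

    // The list names m_contextState first, but m_didInitializeGraphMutex
    // is still initialized first; GCC/Clang emit -Wreorder here.
    Context() : m_contextState(1), m_didInitializeGraphMutex(false) {}
};

int main() {
    Context context;
    std::cout << context.m_didInitializeGraphMutex << " "
              << context.m_contextState << "\n";  // prints "0 1"
    return 0;
}
```
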
| 143 | 144 |
| 144 AudioContext::~AudioContext() | 145 AudioContext::~AudioContext() |
| 145 { | 146 { |
| (...skipping 580 matching lines...) | |
| 726 } | 727 } |
| 727 | 728 |
| 728 if (newState == m_contextState) { | 729 if (newState == m_contextState) { |
| 729 // ASSERTs above failed; just return. | 730 // ASSERTs above failed; just return. |
| 730 return; | 731 return; |
| 731 } | 732 } |
| 732 | 733 |
| 733 m_contextState = newState; | 734 m_contextState = newState; |
| 734 | 735 |
| 735 // Notify context that state changed | 736 // Notify context that state changed |
| 736 if (executionContext()) | 737 if (executionContext()) { |
| 737 executionContext()->postTask(FROM_HERE, createSameThreadTask(&AudioContext::notifyStateChange, this)); | 738 executionContext()->postTask(FROM_HERE, |
| | 739 createSameThreadTask(&AudioContext::notifyStateChange, this)); |
| | 740 } |

Raymond Toy (2015/05/13 17:16:08): Why?

hongchan (2015/05/13 17:30:53): Added some line breaks to make the code read better.

| 738 } | 741 } |
| 739 | 742 |
| 740 void AudioContext::notifyStateChange() | 743 void AudioContext::notifyStateChange() |
| 741 { | 744 { |
| 742 dispatchEvent(Event::create(EventTypeNames::statechange)); | 745 dispatchEvent(Event::create(EventTypeNames::statechange)); |
| 743 } | 746 } |
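
The change above only reformats the call to executionContext()->postTask(FROM_HERE, createSameThreadTask(...)), which defers the statechange notification to a posted task instead of dispatching it synchronously inside setContextState(). Below is a rough, self-contained sketch of that pattern only; the TaskQueue and Context types here are stand-ins invented for illustration, not Blink's ExecutionContext/postTask API.

```cpp
#include <functional>
#include <iostream>
#include <queue>
#include <string>

// Illustrative task queue: update state now, deliver the "statechange"
// notification later on the same thread.
class TaskQueue {
public:
    void post(std::function<void()> task) { m_tasks.push(std::move(task)); }
    void runPending() {
        while (!m_tasks.empty()) {
            m_tasks.front()();
            m_tasks.pop();
        }
    }
private:
    std::queue<std::function<void()>> m_tasks;
};

class Context {
public:
    explicit Context(TaskQueue& queue) : m_queue(queue) {}

    void setState(const std::string& newState) {
        if (newState == m_state)
            return;
        m_state = newState;
        // Notify asynchronously, mirroring postTask(createSameThreadTask(...)).
        m_queue.post([this] { notifyStateChange(); });
    }

    void notifyStateChange() { std::cout << "statechange: " << m_state << "\n"; }

private:
    TaskQueue& m_queue;
    std::string m_state = "suspended";
};

int main() {
    TaskQueue queue;
    Context context(queue);
    context.setState("running");  // state updated immediately...
    queue.runPending();           // ...listener notified afterwards
    return 0;
}
```
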
| 744 | 747 |
| 745 ScriptPromise AudioContext::suspendContext(ScriptState* scriptState) | 748 ScriptPromise AudioContext::suspendContext(ScriptState* scriptState) |
| 746 { | 749 { |
| 747 ASSERT(isMainThread()); | 750 ASSERT(isMainThread()); |
| (...skipping 268 matching lines...) | |
| 1016 ASSERT(m_destinationNode); | 1019 ASSERT(m_destinationNode); |
| 1017 ASSERT(!isOfflineContext()); | 1020 ASSERT(!isOfflineContext()); |
| 1018 | 1021 |
| 1019 if (m_contextState == Running) { | 1022 if (m_contextState == Running) { |
| 1020 destination()->audioDestinationHandler().stopRendering(); | 1023 destination()->audioDestinationHandler().stopRendering(); |
| 1021 setContextState(Suspended); | 1024 setContextState(Suspended); |
| 1022 deferredTaskHandler().clearHandlersToBeDeleted(); | 1025 deferredTaskHandler().clearHandlersToBeDeleted(); |
| 1023 } | 1026 } |
| 1024 } | 1027 } |
| 1025 | 1028 |
| | 1029 void AudioContext::fireSuspendEvent() |
| | 1030 { |
| | 1031 ASSERT(isMainThread()); |
| | 1032 if (!isMainThread()) |
| | 1033 return; |
| | 1034 |
| | 1035 // AudioBuffer* renderedBuffer = m_renderTarget.get(); |
| | 1036 |
| | 1037 setContextState(Suspended); |
| | 1038 |
| | 1039 // ASSERT(renderedBuffer); |
| | 1040 // if (!renderedBuffer) |
| | 1041 // return; |
| | 1042 |
| | 1043 // Avoid firing the event if the document has already gone away. |
| | 1044 // if (executionContext()) { |
| | 1045 // dispatchEvent(Event::create(EventTypeNames::statechange)); |
| | 1046 // // m_offlineResolver->resolve(renderedBuffer); |
| | 1047 // } |
| | 1048 } |
| | 1049 |
| 1026 void AudioContext::fireCompletionEvent() | 1050 void AudioContext::fireCompletionEvent() |
| 1027 { | 1051 { |
| 1028 ASSERT(isMainThread()); | 1052 ASSERT(isMainThread()); |
| 1029 if (!isMainThread()) | 1053 if (!isMainThread()) |
| 1030 return; | 1054 return; |
| 1031 | 1055 |
| 1032 AudioBuffer* renderedBuffer = m_renderTarget.get(); | 1056 AudioBuffer* renderedBuffer = m_renderTarget.get(); |
| 1033 | 1057 |
| 1034 // For an offline context, we set the state to closed here so that the oncomplete handler sees | 1058 // For an offline context, we set the state to closed here so that the oncomplete handler sees |
| 1035 // that the context has been closed. | 1059 // that the context has been closed. |
| (...skipping 62 matching lines...) | |
| 1098 | 1122 |
| 1099 // Stop the audio context. This will stop the destination node from pulling audio anymore. And | 1123 // Stop the audio context. This will stop the destination node from pulling audio anymore. And |
| 1100 // since we have disconnected the destination from the audio graph, and thus has no references, | 1124 // since we have disconnected the destination from the audio graph, and thus has no references, |
| 1101 // the destination node can GCed if JS has no references. stop() will also resolve the Promise | 1125 // the destination node can GCed if JS has no references. stop() will also resolve the Promise |
| 1102 // created here. | 1126 // created here. |
| 1103 stop(); | 1127 stop(); |
| 1104 | 1128 |
| 1105 return promise; | 1129 return promise; |
| 1106 } | 1130 } |
| 1107 | 1131 |
| | 1132 bool AudioContext::suspendIfNecessary() |
| | 1133 { |
| | 1134 ASSERT_WITH_MESSAGE(1, "suspendIfNecessary() only valid for offline audio context"); |
| | 1135 return false; |
| | 1136 } |
| | 1137 |
| 1108 } // namespace blink | 1138 } // namespace blink |
| 1109 | 1139 |
| 1110 #endif // ENABLE(WEB_AUDIO) | 1140 #endif // ENABLE(WEB_AUDIO) |