Index: media/base/android/java/src/org/chromium/media/AudioRecordInput.java
diff --git a/media/base/android/java/src/org/chromium/media/AudioRecordInput.java b/media/base/android/java/src/org/chromium/media/AudioRecordInput.java
index 0752731f80200d6a13f1d0124da84a81d3d9e2ec..b476009aab55b577a21bbdb2b8a9be0676240072 100644
--- a/media/base/android/java/src/org/chromium/media/AudioRecordInput.java
+++ b/media/base/android/java/src/org/chromium/media/AudioRecordInput.java
@@ -5,6 +5,9 @@
 package org.chromium.media;
 
 import android.content.Context;
+import android.media.audiofx.AcousticEchoCanceler;
+import android.media.audiofx.AudioEffect;
+import android.media.audiofx.AudioEffect.Descriptor;
 import android.media.AudioFormat;
 import android.media.AudioRecord;
 import android.media.MediaRecorder.AudioSource;
@@ -14,6 +17,7 @@ import java.nio.ByteBuffer;
 
 import org.chromium.base.CalledByNative;
 import org.chromium.base.JNINamespace;
+import org.chromium.media.AudioManagerAndroid;
 
 // Owned by its native counterpart declared in audio_record_input.h. Refer to
 // that class for general comments.
@@ -30,9 +34,11 @@ class AudioRecordInput {
     private final int mChannels;
     private final int mBitsPerSample;
     private final int mHardwareDelayBytes;
+    private final boolean mUsePlatformAEC;
     private ByteBuffer mBuffer;
     private AudioRecord mAudioRecord;
     private AudioRecordThread mAudioRecordThread;
+    private AcousticEchoCanceler mAEC;
 
     private class AudioRecordThread extends Thread {
         // The "volatile" synchronization technique is discussed here:
@@ -88,18 +94,20 @@ class AudioRecordInput {
 
     @CalledByNative
     private static AudioRecordInput createAudioRecordInput(long nativeAudioRecordInputStream,
-            int sampleRate, int channels, int bitsPerSample, int bytesPerBuffer) {
+            int sampleRate, int channels, int bitsPerSample, int bytesPerBuffer,
+            boolean usePlatformAEC) {
         return new AudioRecordInput(nativeAudioRecordInputStream, sampleRate, channels,
-                bitsPerSample, bytesPerBuffer);
+                bitsPerSample, bytesPerBuffer, usePlatformAEC);
     }
 
     private AudioRecordInput(long nativeAudioRecordInputStream, int sampleRate, int channels,
-            int bitsPerSample, int bytesPerBuffer) {
+            int bitsPerSample, int bytesPerBuffer, boolean usePlatformAEC) {
         mNativeAudioRecordInputStream = nativeAudioRecordInputStream;
         mSampleRate = sampleRate;
         mChannels = channels;
         mBitsPerSample = bitsPerSample;
         mHardwareDelayBytes = HARDWARE_DELAY_MS * sampleRate / 1000 * bitsPerSample / 8;
+        mUsePlatformAEC = usePlatformAEC;
 
         // We use a direct buffer so that the native class can have access to
         // the underlying memory address. This avoids the need to copy from a
@@ -170,6 +178,35 @@
             return false;
         }
 
+        // Only try to modify the platform AEC state if it's supported.
+        if (AudioManagerAndroid.isPlatformAECSupported()) {
+            mAEC = AcousticEchoCanceler.create(mAudioRecord.getAudioSessionId());
+            if (mAEC == null) {
+                Log.wtf(TAG, "AcousticEchoCanceler.create failed");
+                return false;
+            }
+            try {
+                Descriptor descriptor = mAEC.getDescriptor();
+                Log.d(TAG, "AcousticEchoCanceler " +
+                        "name: " + descriptor.name + ", " +
+                        "implementor: " + descriptor.implementor + ", " +
+                        "uuid: " + descriptor.uuid);
+            } catch (IllegalStateException e) {
+                Log.w(TAG, "getDescriptor failed", e);
+                // Not fatal.
+            }
+            try {
+                int ret = mAEC.setEnabled(mUsePlatformAEC);
+                if (ret != AudioEffect.SUCCESS) {
+                    Log.wtf(TAG, "setEnabled error: " + ret);
+                    return false;
+                }
+            } catch (IllegalStateException e) {
+                Log.wtf(TAG, "setEnabled failed", e);
+                return false;
+            }
+            Log.d(TAG, "AcousticEchoCanceler.setEnabled(" + mUsePlatformAEC + ")");
+        }
         return true;
     }
 
@@ -209,6 +246,11 @@
         }
         mAudioRecord.release();
         mAudioRecord = null;
+
+        if (mAEC != null) {
+            mAEC.release();
+            mAEC = null;
+        }
     }
 
     private native void nativeCacheDirectBufferAddress(long nativeAudioRecordInputStream,