Index: media/base/android/java/src/org/chromium/media/AudioRecordInput.java
diff --git a/media/base/android/java/src/org/chromium/media/AudioRecordInput.java b/media/base/android/java/src/org/chromium/media/AudioRecordInput.java
index 0752731f80200d6a13f1d0124da84a81d3d9e2ec..9ec7e158754746cf4d228a76aeca9e72a9ca79d0 100644
--- a/media/base/android/java/src/org/chromium/media/AudioRecordInput.java
+++ b/media/base/android/java/src/org/chromium/media/AudioRecordInput.java
@@ -5,6 +5,9 @@
 package org.chromium.media;
 
 import android.content.Context;
+import android.media.audiofx.AcousticEchoCanceler;
+import android.media.audiofx.AudioEffect;
+import android.media.audiofx.AudioEffect.Descriptor;
 import android.media.AudioFormat;
 import android.media.AudioRecord;
 import android.media.MediaRecorder.AudioSource;
@@ -14,6 +17,7 @@ import java.nio.ByteBuffer;
 
 import org.chromium.base.CalledByNative;
 import org.chromium.base.JNINamespace;
+import org.chromium.media.AudioManagerAndroid;
 
 // Owned by its native counterpart declared in audio_record_input.h. Refer to
 // that class for general comments.
@@ -30,9 +34,11 @@ class AudioRecordInput {
     private final int mChannels;
     private final int mBitsPerSample;
     private final int mHardwareDelayBytes;
+    private final boolean mUsePlatformAEC;
     private ByteBuffer mBuffer;
     private AudioRecord mAudioRecord;
     private AudioRecordThread mAudioRecordThread;
+    private AcousticEchoCanceler mAEC;
 
     private class AudioRecordThread extends Thread {
         // The "volatile" synchronization technique is discussed here:
@@ -88,18 +94,20 @@ class AudioRecordInput {
 
     @CalledByNative
     private static AudioRecordInput createAudioRecordInput(long nativeAudioRecordInputStream,
-            int sampleRate, int channels, int bitsPerSample, int bytesPerBuffer) {
+            int sampleRate, int channels, int bitsPerSample, int bytesPerBuffer,
+            boolean usePlatformAEC) {
         return new AudioRecordInput(nativeAudioRecordInputStream, sampleRate, channels,
-                bitsPerSample, bytesPerBuffer);
+                bitsPerSample, bytesPerBuffer, usePlatformAEC);
     }
 
     private AudioRecordInput(long nativeAudioRecordInputStream, int sampleRate, int channels,
-            int bitsPerSample, int bytesPerBuffer) {
+            int bitsPerSample, int bytesPerBuffer, boolean usePlatformAEC) {
         mNativeAudioRecordInputStream = nativeAudioRecordInputStream;
         mSampleRate = sampleRate;
         mChannels = channels;
         mBitsPerSample = bitsPerSample;
         mHardwareDelayBytes = HARDWARE_DELAY_MS * sampleRate / 1000 * bitsPerSample / 8;
+        mUsePlatformAEC = usePlatformAEC;
 
         // We use a direct buffer so that the native class can have access to
         // the underlying memory address. This avoids the need to copy from a
@@ -170,6 +178,46 @@ class AudioRecordInput {
             return false;
         }
 
+        // Only try to modify the platform AEC state if it's supported.
+        if (AudioManagerAndroid.isPlatformAECSupported()) {
henrika (OOO until Aug 14)
2013/12/06 12:52:21
What happens if we create two streams and enable t
ajm
2013/12/06 19:06:43
AFAIU it's only possible to open a single recordin
Ami GONE FROM CHROMIUM
2013/12/06 19:22:53
This function was already consulted in the setting
ajm
2013/12/10 06:37:16
When we add other effects it will be possible to d
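An AcousticEchoCanceler instance is bound to the audio session of a single AudioRecord, which is why the single-recording-stream assumption matters here. A minimal sketch of that relationship using only the public Android APIs (AecSessionExample and attachAec are illustrative names, not part of this patch):

    import android.media.AudioRecord;
    import android.media.audiofx.AcousticEchoCanceler;
    import android.media.audiofx.AudioEffect;

    final class AecSessionExample {
        // Attaches an echo canceler to the session owned by one AudioRecord.
        // Returns null if the platform has no AEC or configuration fails.
        static AcousticEchoCanceler attachAec(AudioRecord record, boolean enable) {
            if (!AcousticEchoCanceler.isAvailable()) return null;
            AcousticEchoCanceler aec = AcousticEchoCanceler.create(record.getAudioSessionId());
            if (aec == null) return null;
            if (aec.setEnabled(enable) != AudioEffect.SUCCESS) {
                aec.release();  // free the effect if it cannot be configured
                return null;
            }
            return aec;
        }
    }

A second recording stream would need its own instance created from its own session id; the instance created below only affects mAudioRecord's session.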
+            mAEC = AcousticEchoCanceler.create(mAudioRecord.getAudioSessionId());
+            if (mAEC == null) {
+                Log.wtf(TAG, "AcousticEchoCanceler.create failed");
henrika (OOO until Aug 14)
2013/12/06 12:52:21
What does .wtf do?
ajm
2013/12/06 19:06:43
Intended for an error that should never happen (i.
Ami GONE FROM CHROMIUM
2013/12/06 19:22:53
Crashes the process: http://developer.android.com/
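For reference, the difference being discussed: Log.w() records a warning and always returns, while Log.wtf() ("What a Terrible Failure") reports a condition that should never happen and, depending on system settings, may send an error report and terminate the process. A hypothetical helper, not part of this patch, that makes the choice explicit:

    import android.util.Log;

    final class LogChoiceExample {
        // shouldNeverHappen == true mirrors the Log.wtf() path discussed above.
        static void report(String tag, String msg, boolean shouldNeverHappen) {
            if (shouldNeverHappen) {
                Log.wtf(tag, msg);  // may report the error and terminate the process
            } else {
                Log.w(tag, msg);    // plain warning; execution always continues
            }
        }
    }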
+                return false;
+            }
+            try {
henrika (OOO until Aug 14)
2013/12/06 12:52:21
Am I the only one who feels that this try-catch st
ajm
2013/12/06 19:06:43
Yes, according to Ami, my friendly Java reviewer,
Ami GONE FROM CHROMIUM
2013/12/06 19:22:53
No. It seems crazy to me, too.
I suggest all the
ajm
2013/12/10 06:37:16
I've attempted to clean this up appropriately thro
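One possible shape for that consolidation, sketched against the fields in this class (configureAec is a hypothetical helper; this is not the follow-up patch itself): a single try block around the whole configuration sequence, with IllegalStateException funneled into one failure path.

    private boolean configureAec() {
        try {
            Descriptor descriptor = mAEC.getDescriptor();
            Log.d(TAG, "AcousticEchoCanceler name: " + descriptor.name
                    + ", implementor: " + descriptor.implementor
                    + ", uuid: " + descriptor.uuid);
            if (mAEC.setEnabled(mUsePlatformAEC) != AudioEffect.SUCCESS) {
                Log.e(TAG, "setEnabled failed");
                return false;
            }
            return true;
        } catch (IllegalStateException e) {
            Log.e(TAG, "AEC configuration failed", e);
            return false;
        }
    }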
+                Descriptor descriptor = mAEC.getDescriptor();
+                Log.d(TAG, "AcousticEchoCanceler " +
+                        "name: " + descriptor.name + ", " +
+                        "implementor: " + descriptor.implementor + ", " +
+                        "uuid: " + descriptor.uuid);
+            } catch (IllegalStateException e) {
+                Log.wtf(TAG, "getDescriptor failed", e);
+                // Not fatal.
Ami GONE FROM CHROMIUM
2013/12/06 19:22:53
lol that's not what the wtf() on the previous line
ajm
2013/12/06 19:31:24
Yeah noticed that myself :(
Changed to Log.w in t
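In other words, the non-fatal branch ends up as a plain warning, roughly (a sketch of the revised catch clause, not the verbatim next patch set):

    } catch (IllegalStateException e) {
        // Non-fatal: the descriptor is only used for the debug log above.
        Log.w(TAG, "getDescriptor failed", e);
    }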
+            }
+            try {
+                int ret = mAEC.setEnabled(mUsePlatformAEC);
+                if (ret != AudioEffect.SUCCESS) {
+                    Log.wtf(TAG, "setEnabled error: " + ret);
+                    return false;
Ami GONE FROM CHROMIUM
2013/12/06 19:22:53
unreachable
ajm
2013/12/10 06:37:16
Done.
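Because Log.wtf() may terminate the process, the return after it is not guaranteed to run, which is what the "unreachable" comment refers to. One way to keep an early return meaningful (a sketch, not the actual follow-up) is to log the failure with a call that never kills the process:

    if (ret != AudioEffect.SUCCESS) {
        // Log.e() always returns, so this early exit is reachable and meaningful.
        Log.e(TAG, "setEnabled error: " + ret);
        return false;
    }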
+                }
+            } catch (IllegalStateException e) {
+                Log.wtf(TAG, "setEnabled failed", e);
+                return false;
Ami GONE FROM CHROMIUM
2013/12/06 19:22:53
unreachable
ajm
2013/12/10 06:37:16
Done.
+            }
+            try {
+                boolean isEnabled = mAEC.getEnabled();
tommi (sloooow) - chröme
2013/12/06 12:11:34
Can you add some comments for why we need to check
ajm
2013/12/06 19:06:43
Right, there's really no need for this. I was doin
+                Log.d(TAG, "AcousticEchoCanceler.getEnabled: " + isEnabled);
+                if (isEnabled != mUsePlatformAEC) {
+                    Log.wtf(TAG, "mUsePlatformAEC: " + mUsePlatformAEC + " != " + "isEnabled: " +
+                            isEnabled);
+                    return false;
+                }
+            } catch (IllegalStateException e) {
+                Log.wtf(TAG, "getEnabled failed", e);
+                // Not fatal.
+            }
+        }
         return true;
     }
 
@@ -209,6 +257,11 @@ class AudioRecordInput {
         }
         mAudioRecord.release();
         mAudioRecord = null;
+
+        if (mAEC != null) {
+            mAEC.release();
Ami GONE FROM CHROMIUM
2013/12/06 19:22:53
mAEC refers to the mAudioRecord, so it should be r
ajm
2013/12/10 06:37:16
Good point, done.
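The agreed ordering releases the effect before the AudioRecord that owns its audio session; the tail of close() would then read roughly as follows (a sketch, not the verbatim follow-up patch):

    // Release the AEC first: it is attached to mAudioRecord's audio session.
    if (mAEC != null) {
        mAEC.release();
        mAEC = null;
    }
    mAudioRecord.release();
    mAudioRecord = null;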
+            mAEC = null;
+        }
     }
 
     private native void nativeCacheDirectBufferAddress(long nativeAudioRecordInputStream,