Chromium Code Reviews

Unified Diff: media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java

Issue 2466463005: Support (E)AC3 passthrough
Patch Set: Add unit tests Created 3 years, 6 months ago
Index: media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java
diff --git a/media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java b/media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java
new file mode 100644
index 0000000000000000000000000000000000000000..1071ccdce4a4d1548f7c7d328bd6b5762835cddc
--- /dev/null
+++ b/media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java
@@ -0,0 +1,241 @@
+// Copyright 2017 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.media;
+
+import android.annotation.SuppressLint;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
+
+import org.chromium.base.Log;
+import org.chromium.base.VisibleForTesting;
+import org.chromium.base.annotations.CalledByNative;
+import org.chromium.base.annotations.JNINamespace;
+
+import java.nio.ByteBuffer;
+
+@JNINamespace("media")
+class AudioTrackOutputStream {
+ // Provide dependency injection points for unit tests.
+ interface Callback {
+ int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat);
+ AudioTrack createAudioTrack(int streamType, int sampleRateInHz, int channelConfig,
+ int audioFormat, int bufferSizeInBytes, int mode);
+ int onMoreData(ByteBuffer audioData, int totalPlayedFrames);
+ }
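As a rough illustration of how this injection point can be exercised by the "Add unit tests" patch set (a sketch only; the fixed buffer size and the use of Mockito are assumptions, not taken from this CL), a test can hand in a Callback that returns a mocked AudioTrack and canned data:

    Callback fakeCallback = new Callback() {
        @Override
        public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
            return 4096;  // Arbitrary fixed size so no real audio HAL is queried.
        }

        @Override
        public AudioTrack createAudioTrack(int streamType, int sampleRateInHz,
                int channelConfig, int audioFormat, int bufferSizeInBytes, int mode) {
            return org.mockito.Mockito.mock(AudioTrack.class);  // No real device needed.
        }

        @Override
        public int onMoreData(ByteBuffer audioData, int totalPlayedFrames) {
            int bytes = audioData.remaining();
            audioData.put(new byte[bytes]);  // Feed silence instead of decoder output.
            return bytes;
        }
    };
    AudioTrackOutputStream stream = AudioTrackOutputStream.create(fakeCallback);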
+
+ private static final String TAG = "AudioTrackOutput";
+ private Callback mCallback;
+ private AudioTrack mAudioTrack;
+ private long mNativeAudioTrackOutputStream;
+ private int mBufferSizeInBytes;
+
+ private ByteBuffer mAudioBuffer;
+ private WorkerThread mWorkerThread;
+
+ class WorkerThread extends Thread {
DaleCurtis 2017/06/15 21:46:32 I think instead of doing the threading in Java you
AndyWu 2017/08/02 01:43:39 TBD Will look into that. However, from test point
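For reference, a minimal sketch of the alternative being suggested here (hypothetical, not part of this patch): if the native side owns the timing thread, Java only needs a method it can call to push one buffer, and WorkerThread goes away. The method name writeBuffer is made up for illustration, and the ByteBuffer overload of AudioTrack.write() requires API 21.

    @CalledByNative
    private int writeBuffer(ByteBuffer audioData, int sizeInBytes) {
        // Called from the native audio thread; non-blocking so that thread never stalls.
        return mAudioTrack.write(audioData, sizeInBytes, AudioTrack.WRITE_NON_BLOCKING);
    }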
+ private volatile boolean mDone = false;
+
+ public void finish() {
+ mDone = true;
+ }
+
+ public void run() {
+ while (!mDone) {
+ if (!readMoreData()) {
+ msleep(10);
+ }
+ }
+ }
+
+ private void msleep(int msec) {
+ try {
+ Thread.sleep(msec);
+ } catch (InterruptedException e) {
+ // Ignored; the loop re-checks mDone on the next iteration.
+ }
+ }
+ }
+
+ @CalledByNative
+ private static AudioTrackOutputStream create() {
+ return new AudioTrackOutputStream(null);
+ }
+
+ @VisibleForTesting
+ static AudioTrackOutputStream create(Callback callback) {
+ return new AudioTrackOutputStream(callback);
+ }
+
+ private AudioTrackOutputStream(Callback callback) {
+ mCallback = callback;
+ if (mCallback == null) {
+ mCallback = new Callback() {
+ @Override
+ public int getMinBufferSize(
+ int sampleRateInHz, int channelConfig, int audioFormat) {
+ return AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
+ }
+
+ @Override
+ public AudioTrack createAudioTrack(int streamType, int sampleRateInHz,
+ int channelConfig, int audioFormat, int bufferSizeInBytes, int mode) {
+ return new AudioTrack(streamType, sampleRateInHz, channelConfig, audioFormat,
+ bufferSizeInBytes, mode);
+ }
+
+ @Override
+ public int onMoreData(ByteBuffer audioData, int totalPlayedFrames) {
+ return nativeOnMoreData(
+ mNativeAudioTrackOutputStream, audioData, totalPlayedFrames);
+ }
+ };
+ }
+ }
+
+ @SuppressWarnings("deprecation")
DaleCurtis 2017/06/15 21:46:32 What's deprecated here?
AndyWu 2017/08/02 01:43:39 AudioFormat.CHANNEL_OUT_7POINT1
+ private int getChannelConfig(int channelCount) {
+ switch (channelCount) {
+ case 1:
+ return AudioFormat.CHANNEL_OUT_MONO;
+ case 2:
+ return AudioFormat.CHANNEL_OUT_STEREO;
+ case 4:
+ return AudioFormat.CHANNEL_OUT_QUAD;
+ case 6:
+ return AudioFormat.CHANNEL_OUT_5POINT1;
+ case 8:
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+ return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
+ } else {
+ return AudioFormat.CHANNEL_OUT_7POINT1;
+ }
+ default:
+ return AudioFormat.CHANNEL_OUT_DEFAULT;
+ }
+ }
+
+ @CalledByNative
+ boolean open(int channelCount, int sampleRate, int sampleFormat) {
+ int channelConfig = getChannelConfig(channelCount);
+ mBufferSizeInBytes =
DaleCurtis 2017/06/15 21:46:32 You might consider 3x buffers here to handle momen
AndyWu 2017/08/02 01:43:39 Done.
+ 2 * mCallback.getMinBufferSize(sampleRate, channelConfig, sampleFormat);
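(The "Done" above presumably means the multiplier was raised in a later patch set, roughly as below; the exact factor is an assumption.)

    // Triple the minimum so brief scheduling hiccups on the worker thread do not underrun.
    mBufferSizeInBytes =
            3 * mCallback.getMinBufferSize(sampleRate, channelConfig, sampleFormat);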
+
+ if (mAudioTrack != null) mAudioTrack.release();
DaleCurtis 2017/06/15 21:46:32 Assert instead, this should not happen.
AndyWu 2017/08/02 01:43:39 Done.
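(Likewise, the follow-up presumably swaps the release() call for an assertion along these lines; sketch only.)

    // open() must not be called while a track already exists.
    assert mAudioTrack == null;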
+
+ try {
+ Log.d(TAG, "Crate AudioTrack with sample rate:%d, channel:%d, format:%d ", sampleRate,
+ channelConfig, sampleFormat);
+
+ mAudioTrack = mCallback.createAudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
+ channelConfig, sampleFormat, mBufferSizeInBytes, AudioTrack.MODE_STREAM);
+ assert mAudioTrack != null;
+ } catch (IllegalArgumentException ile) {
+ Log.e(TAG, "Exception creating AudioTrack for playback: ", ile);
+ return false;
+ }
+
+ if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) {
+ Log.e(TAG, "Cannot create AudioTrack");
+ mAudioTrack = null;
+ return false;
+ }
+
+ return true;
+ }
+
+ @CalledByNative
+ void start(long nativeAudioTrackOutputStream) {
+ Log.d(TAG, "AudioTrackOutputStream.start()");
+ if (mWorkerThread != null) return;
+
+ mNativeAudioTrackOutputStream = nativeAudioTrackOutputStream;
+
+ mAudioBuffer = ByteBuffer.allocateDirect(mBufferSizeInBytes);
+ mAudioTrack.play();
+
+ mWorkerThread = new WorkerThread();
+ mWorkerThread.start();
+ }
+
+ @CalledByNative
+ void stop() {
+ Log.d(TAG, "AudioTrackOutputStream.stop()");
+ if (mWorkerThread != null) {
+ mWorkerThread.finish();
+ try {
+ mWorkerThread.interrupt();
+ mWorkerThread.join();
+ } catch (SecurityException e) {
+ Log.e(TAG, "Exception while waiting for AudioTrack worker thread finished: ", e);
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Exception while waiting for AudioTrack worker thread finished: ", e);
+ }
+ mWorkerThread = null;
+ }
+
+ mAudioTrack.pause();
+ mAudioTrack.flush();
+ mNativeAudioTrackOutputStream = 0;
+ }
+
+ @SuppressWarnings("deprecation")
+ @CalledByNative
+ void setVolume(double volume) {
DaleCurtis 2017/06/15 21:46:32 Is this going to work at all for bitstream output?
AndyWu 2017/08/02 01:43:39 Good catch! It does not work for bitstream output.
+ // Chrome sends the volume in the range [0, 1.0], whereas Android
+ // expects the volume to be within [0, getMaxVolume()].
+ float scaledVolume = (float) (volume * mAudioTrack.getMaxVolume());
+ mAudioTrack.setStereoVolume(scaledVolume, scaledVolume);
+ }
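One plausible shape for the fix discussed above (a sketch; mIsCompressedFormat is an assumed field that open() would set, not something in this CL): volume scaling applies only to PCM, since attenuating IEC61937 bitstream frames would corrupt them before they reach the receiver.

    @CalledByNative
    void setVolume(double volume) {
        // Compressed passthrough data must reach the sink bit-exact; skip scaling.
        if (mIsCompressedFormat) return;
        float scaledVolume = (float) (volume * mAudioTrack.getMaxVolume());
        mAudioTrack.setStereoVolume(scaledVolume, scaledVolume);
    }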
+
+ @CalledByNative
+ void close() {
+ Log.d(TAG, "AudioTrackOutputStream.close()");
+ if (mAudioTrack != null) mAudioTrack.release();
+ }
DaleCurtis 2017/06/15 21:46:32 mAudioTrack = nullptr; note once this is called th
AndyWu 2017/08/02 01:43:39 Done.
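(Per the comment, the later patch set presumably also drops the reference so a closed stream cannot be written to again; sketch.)

    if (mAudioTrack != null) {
        mAudioTrack.release();
        mAudioTrack = null;
    }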
+
+ private boolean readMoreData() {
chcunningham 2017/06/14 20:32:19 IIUC, the way this is structured you will lose dat
AndyWu 2017/08/02 01:43:39 You are right. However, I am using blocking versio
chcunningham 2017/08/04 19:26:40 From my read of the docs, it seems even the blocki
AndyWu 2017/08/04 21:45:52 Yes, you are right. See AudioTrackOutputStream.clo
AndyWu 2017/08/05 07:17:10 Done, thanks. media/base/android/java/src/test/org
+ if (mNativeAudioTrackOutputStream == 0) return false;
+
+ int position = mAudioTrack.getPlaybackHeadPosition();
chcunningham 2017/06/14 20:27:21 The documentation mentions this is secretly an uns
AndyWu 2017/08/02 01:43:39 Done.
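A minimal sketch of the wrap handling this thread refers to, assuming the raw value is reinterpreted as an unsigned 32-bit counter (the native signature would then need to carry a long rather than an int):

    // getPlaybackHeadPosition() wraps as an unsigned int; mask before widening.
    long position = 0xFFFFFFFFL & mAudioTrack.getPlaybackHeadPosition();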
+ int size = mCallback.onMoreData(mAudioBuffer, position);
+ if (size <= 0) {
+ return false;
+ }
+
+ ByteBuffer readOnlyBuffer = mAudioBuffer.asReadOnlyBuffer();
+ int result = writeAudio(readOnlyBuffer, size);
+
+ if (result < 0) {
+ Log.e(TAG, "AudioTrack.write() failed. Error:" + result);
+ return false;
+ } else if (result != size) {
+ Log.e(TAG, "AudioTrack.write() incomplete. Data size: %d, written size: %d", size,
+ result);
+ return false;
+ }
+
+ return true;
+ }
+
+ @SuppressLint("NewApi")
+ private int writeAudio(ByteBuffer buffer, int size) {
+ if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+ return mAudioTrack.write(buffer, size, AudioTrack.WRITE_BLOCKING);
+ } else {
+ if (buffer.hasArray()) {
+ return mAudioTrack.write(buffer.array(), buffer.arrayOffset(), size);
+ } else {
DaleCurtis 2017/06/15 21:46:32 How common is this? This seems like a high frequen
AndyWu 2017/08/02 01:43:40 Done.
+ byte[] array = new byte[size];
+ buffer.get(array);
+ return mAudioTrack.write(array, 0, size);
+ }
+ }
+ }
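A hedged sketch of the allocation fix implied by the "Done" above (mScratchBuffer and writeAudioCompat are made-up names): grow a member array once and reuse it, instead of allocating a fresh byte[] on every pre-Lollipop write.

    private byte[] mScratchBuffer;  // Lazily grown; avoids a per-write allocation.

    private int writeAudioCompat(ByteBuffer buffer, int size) {
        if (buffer.hasArray()) {
            return mAudioTrack.write(buffer.array(), buffer.arrayOffset(), size);
        }
        if (mScratchBuffer == null || mScratchBuffer.length < size) {
            mScratchBuffer = new byte[size];
        }
        buffer.get(mScratchBuffer, 0, size);
        return mAudioTrack.write(mScratchBuffer, 0, size);
    }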
+
+ private native int nativeOnMoreData(
+ long nativeAudioTrackOutputStream, ByteBuffer audioData, int totalPlayedFrames);
+ private native void nativeOnError(long nativeAudioTrackOutputStream);
+}
