Index: media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java
diff --git a/media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java b/media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java
new file mode 100644
index 0000000000000000000000000000000000000000..45acc9a078459a679d2fb62fd62b47e7ea4a4294
--- /dev/null
+++ b/media/base/android/java/src/org/chromium/media/AudioTrackOutputStream.java
@@ -0,0 +1,201 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.media;
+
+import android.annotation.SuppressLint;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioTrack;
+import android.os.Build;
+
+import org.chromium.base.Log;
+import org.chromium.base.annotations.CalledByNative;
+import org.chromium.base.annotations.JNINamespace;
+
+import java.nio.ByteBuffer;
+
+@JNINamespace("media")
+class AudioTrackOutputStream {
+    private static final String TAG = "AudioTrackOutput";
+
+    class WorkerThread extends Thread {
+        private volatile boolean mDone = false;
+
+        public void finish() {
+            mDone = true;
+        }
+
+        public void run() {
+            while (!mDone) {
+                if (!readMoreData()) {
+                    msleep(10);
+                }
+            }
+        }
+
+        private void msleep(int msec) {
+            try {
+                Thread.sleep(msec);
+            } catch (InterruptedException e) {
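+                // Ignored; run() re-checks the mDone flag after an interrupt.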
+            }
+        }
+    }
+
+    private AudioTrack mAudioTrack;
+    private long mNativeAudioTrackOutputStream;
+    private int mBufferSizeInBytes;
+
+    private ByteBuffer mAudioBuffer;
+    private WorkerThread mWorkerThread;
[Inline review thread on mWorkerThread; see the callback-based sketch after the diff.]
DaleCurtis 2016/11/01 23:05:13:
    What's the point of this? Using a thread instead o
AndyWu 2016/11/04 18:04:24:
    I did try to use callback, but it failed. Please s
DaleCurtis 2016/11/04 19:48:29:
    Ugh, no we wouldn't want to use a non-callback bas
chcunningham 2017/06/14 20:03:08:
    This is still unaddressed. Not sure what to do. Au
chcunningham 2017/06/14 20:27:21:
    I think minimally you'll want to keep track of the
+
+    @CalledByNative
+    private static AudioTrackOutputStream create() {
+        return new AudioTrackOutputStream();
+    }
+
+    @SuppressWarnings("deprecation")
+    private int getChannelConfig(int channelCount) {
+        switch (channelCount) {
+            case 1:
+                return AudioFormat.CHANNEL_OUT_MONO;
+            case 2:
+                return AudioFormat.CHANNEL_OUT_STEREO;
+            case 4:
+                return AudioFormat.CHANNEL_OUT_QUAD;
+            case 6:
+                return AudioFormat.CHANNEL_OUT_5POINT1;
+            case 8:
+                if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+                    return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND;
+                } else {
+                    return AudioFormat.CHANNEL_OUT_7POINT1;
+                }
+            default:
+                return AudioFormat.CHANNEL_OUT_DEFAULT;
+        }
+    }
+
+    @CalledByNative
+    private boolean open(int channelCount, int sampleRate, int sampleFormat) {
+        int channelConfig = getChannelConfig(channelCount);
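+        // Read in chunks of twice the minimum buffer size; the track itself is
+        // created below with a still larger internal buffer for underrun headroom.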
+        mBufferSizeInBytes =
+                2 * AudioTrack.getMinBufferSize(sampleRate, channelConfig, sampleFormat);
+
+        if (mAudioTrack != null) mAudioTrack.release();
+
+        try {
+            Log.d(TAG, String.format("Create AudioTrack with sample rate:%d, channel:%d, format:%d",
+                    sampleRate, channelConfig, sampleFormat));
+
+            mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
+                    sampleFormat, 4 * mBufferSizeInBytes, AudioTrack.MODE_STREAM);
+        } catch (IllegalArgumentException ile) {
+            Log.e(TAG, "Exception creating AudioTrack for playback: ", ile);
+            return false;
+        }
+
+        if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) {
+            Log.e(TAG, "Cannot create AudioTrack");
+            mAudioTrack.release();
+            mAudioTrack = null;
+            return false;
+        }
+
+        return true;
+    }
+
+    @CalledByNative
+    private void start(long nativeAudioTrackOutputStream) {
+        Log.i(TAG, "AudioTrackOutputStream.start()");
+        if (mWorkerThread != null) return;
+
+        mNativeAudioTrackOutputStream = nativeAudioTrackOutputStream;
+
+        mAudioBuffer = ByteBuffer.allocateDirect(mBufferSizeInBytes);
+        mAudioTrack.play();
+
+        mWorkerThread = new WorkerThread();
+        mWorkerThread.start();
+    }
+
+    @CalledByNative
+    private void stop() {
+        Log.w(TAG, "AudioTrackOutputStream.stop()");
+        if (mWorkerThread != null) {
+            mWorkerThread.finish();
+            try {
+                mWorkerThread.interrupt();
+                mWorkerThread.join();
+            } catch (SecurityException e) {
+                Log.e(TAG, "Exception while waiting for AudioTrack worker thread to finish: ", e);
+            } catch (InterruptedException e) {
+                Log.e(TAG, "Exception while waiting for AudioTrack worker thread to finish: ", e);
+            }
+            mWorkerThread = null;
+        }
+
+        mAudioTrack.pause();
+        mAudioTrack.flush();
+        mNativeAudioTrackOutputStream = 0;
+    }
+
+    @SuppressWarnings("deprecation")
+    @CalledByNative
+    private void setVolume(double volume) {
+        Log.w(TAG, "AudioTrackOutputStream.setVolume()");
+        // Chrome sends the volume in the range [0, 1.0], whereas Android
+        // expects the volume to be within [0, getMaxVolume()].
+        float scaledVolume = (float) (volume * mAudioTrack.getMaxVolume());
+        mAudioTrack.setStereoVolume(scaledVolume, scaledVolume);
+    }
+
+    @CalledByNative
+    private void close() {
+        Log.w(TAG, "AudioTrackOutputStream.close()");
+        if (mAudioTrack != null) mAudioTrack.release();
+    }
+
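+    // Runs on the worker thread: pulls up to mBufferSizeInBytes of PCM from the
+    // native producer and writes it to the AudioTrack (blocking on API >= 21).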
+    public boolean readMoreData() {
+        if (mNativeAudioTrackOutputStream == 0) return false;
+
+        int position = mAudioTrack.getPlaybackHeadPosition();
+        int size = nativeOnMoreData(mNativeAudioTrackOutputStream, mAudioBuffer, position);
+        if (size <= 0) {
+            return false;
+        }
+
+        ByteBuffer readOnlyBuffer = mAudioBuffer.asReadOnlyBuffer();
+        int result = writeAudio(readOnlyBuffer, size);
+
+        if (result < 0) {
+            Log.e(TAG, "AudioTrack.write() failed. Error:" + result);
+            return false;
+        } else if (result != size) {
+            Log.e(TAG, "AudioTrack.write() incomplete. Data size:" + size + ", written size:"
+                    + result);
+            return false;
+        }
+
+        return true;
+    }
+
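+    // The ByteBuffer overload of AudioTrack.write() requires API 21 (Lollipop);
+    // older releases only accept byte[], so fall back to the backing array or a copy.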
+    @SuppressLint("NewApi")
+    private int writeAudio(ByteBuffer buffer, int size) {
+        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
+            return mAudioTrack.write(buffer, size, AudioTrack.WRITE_BLOCKING);
+        } else {
+            if (buffer.hasArray()) {
+                return mAudioTrack.write(buffer.array(), buffer.arrayOffset(), size);
+            } else {
+                byte[] array = new byte[size];
+                buffer.get(array);
+                return mAudioTrack.write(array, 0, size);
+            }
+        }
+    }
+
+    private native int nativeOnMoreData(
+            long nativeAudioTrackOutputStream, ByteBuffer audioData, int totalPlayedFrames);
+    private native void nativeOnError(long nativeAudioTrackOutputStream);
+}
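
The review thread attached to mWorkerThread above debates polling from a
dedicated thread versus AudioTrack's built-in callback mechanism. For
reference, here is a minimal sketch of the callback route using the stock
AudioTrack.OnPlaybackPositionUpdateListener API. It is not part of the patch;
refill() is a hypothetical stand-in for readMoreData().

// Sketch only: periodic-notification callbacks instead of a polling thread.
// Assumes an already-initialized AudioTrack in MODE_STREAM.
import android.media.AudioTrack;
import android.os.Handler;
import android.os.HandlerThread;

class CallbackRefillSketch {
    private final AudioTrack mTrack;

    CallbackRefillSketch(AudioTrack track, int periodInFrames) {
        mTrack = track;
        // Deliver callbacks on a dedicated thread so refills never block the
        // thread that created the track.
        HandlerThread callbackThread = new HandlerThread("AudioRefill");
        callbackThread.start();
        mTrack.setPositionNotificationPeriod(periodInFrames);
        mTrack.setPlaybackPositionUpdateListener(
                new AudioTrack.OnPlaybackPositionUpdateListener() {
                    @Override
                    public void onMarkerReached(AudioTrack track) {}

                    @Override
                    public void onPeriodicNotification(AudioTrack track) {
                        refill(); // hypothetical: pull PCM and write() it
                    }
                },
                new Handler(callbackThread.getLooper()));
    }

    private void refill() {
        // Would call nativeOnMoreData() and then AudioTrack.write() here,
        // mirroring readMoreData() in the patch above.
    }
}

A known caveat of this route: periodic notifications are driven by playback
head progress, so if the track underruns and the head stops advancing, the
callbacks (and thus the refills) stop too. The polling thread in the patch
does not have that failure mode.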