Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright 2016 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 package org.chromium.media; | |
| 6 | |
| 7 import android.annotation.SuppressLint; | |
| 8 import android.media.AudioFormat; | |
| 9 import android.media.AudioManager; | |
| 10 import android.media.AudioTrack; | |
| 11 import android.os.Build; | |
| 12 | |
| 13 import org.chromium.base.Log; | |
| 14 import org.chromium.base.annotations.CalledByNative; | |
| 15 import org.chromium.base.annotations.JNINamespace; | |
| 16 | |
| 17 import java.nio.ByteBuffer; | |
| 18 | |
| 19 @JNINamespace("media") | |
| 20 class AudioTrackOutputStream { | |
| 21 private static final String TAG = "AudioTrackOutput"; | |
| 22 | |
| 23 class WorkerThread extends Thread { | |
| 24 private boolean mDone = false; | |
| 25 | |
| 26 public void finish() { | |
| 27 mDone = true; | |
| 28 } | |
| 29 | |
| 30 public void run() { | |
| 31 while (!mDone) { | |
| 32 if (!readMoreData()) { | |
| 33 msleep(10); | |
| 34 } | |
| 35 } | |
| 36 } | |
| 37 | |
| 38 private void msleep(int msec) { | |
| 39 try { | |
| 40 Thread.sleep(msec); | |
| 41 } catch (InterruptedException e) { | |
| 42 } | |
| 43 } | |
| 44 } | |
| 45 | |
    // The underlying Android audio sink; created in open(), released in close().
    private AudioTrack mAudioTrack;
    // Opaque pointer to the native media object; set in start(), cleared (0)
    // in stop(). readMoreData() treats 0 as "stream stopped".
    private long mNativeAudioTrackOutputStream;
    // Twice AudioTrack.getMinBufferSize() for the configured format; set in open().
    private int mBufferSizeInBytes;

    // Direct buffer the native side fills with PCM data; allocated in start().
    private ByteBuffer mAudioBuffer;
    // Thread pumping audio from native code into mAudioTrack; see start()/stop().
    private WorkerThread mWorkerThread;
|
DaleCurtis
2016/11/01 23:05:13
What's the point of this? Using a thread instead o
AndyWu
2016/11/04 18:04:24
I did try to use callback, but it failed. Please s
DaleCurtis
2016/11/04 19:48:29
Ugh, no we wouldn't want to use a non-callback bas
chcunningham
2017/06/14 20:03:08
This is still unaddressed. Not sure what to do. Au
chcunningham
2017/06/14 20:27:21
I think minimally you'll want to keep track of the
| |
| 52 | |
    // Factory invoked from native code to construct the Java-side stream object.
    @CalledByNative
    private static AudioTrackOutputStream create() {
        return new AudioTrackOutputStream();
    }
| 57 | |
| 58 @SuppressWarnings("deprecation") | |
| 59 private int getChannelConfig(int channelCount) { | |
| 60 switch (channelCount) { | |
| 61 case 1: | |
| 62 return AudioFormat.CHANNEL_OUT_MONO; | |
| 63 case 2: | |
| 64 return AudioFormat.CHANNEL_OUT_STEREO; | |
| 65 case 4: | |
| 66 return AudioFormat.CHANNEL_OUT_QUAD; | |
| 67 case 6: | |
| 68 return AudioFormat.CHANNEL_OUT_5POINT1; | |
| 69 case 8: | |
| 70 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { | |
| 71 return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND; | |
| 72 } else { | |
| 73 return AudioFormat.CHANNEL_OUT_7POINT1; | |
| 74 } | |
| 75 default: | |
| 76 return AudioFormat.CHANNEL_OUT_DEFAULT; | |
| 77 } | |
| 78 } | |
| 79 | |
| 80 @CalledByNative | |
| 81 private boolean open(int channelCount, int sampleRate, int sampleFormat) { | |
| 82 int channelConfig = getChannelConfig(channelCount); | |
| 83 mBufferSizeInBytes = | |
| 84 2 * AudioTrack.getMinBufferSize(sampleRate, channelConfig, sampl eFormat); | |
| 85 | |
| 86 if (mAudioTrack != null) mAudioTrack.release(); | |
| 87 | |
| 88 try { | |
| 89 Log.d(TAG, String.format("Crate AudioTrack with sample rate:%d, chan nel:%d, format:%d ", | |
| 90 sampleRate, channelConfig, sampleFormat)); | |
| 91 | |
| 92 mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, | |
| 93 sampleFormat, 4 * mBufferSizeInBytes, AudioTrack.MODE_STREAM ); | |
| 94 } catch (IllegalArgumentException ile) { | |
| 95 Log.e(TAG, "Exception creating AudioTrack for playback: ", ile); | |
| 96 return false; | |
| 97 } | |
| 98 | |
| 99 if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) { | |
| 100 Log.e(TAG, "Cannot create AudioTrack"); | |
| 101 mAudioTrack = null; | |
| 102 return false; | |
| 103 } | |
| 104 | |
| 105 return true; | |
| 106 } | |
| 107 | |
    // Starts playback: stores the native pointer, allocates the shared
    // buffer, starts the AudioTrack, then spawns the worker thread that
    // feeds it. No-op if the worker thread is already running.
    @CalledByNative
    private void start(long nativeAudioTrackOutputStream) {
        Log.i(TAG, "AudioTrackOutputStream.start()");
        if (mWorkerThread != null) return;

        mNativeAudioTrackOutputStream = nativeAudioTrackOutputStream;

        // Direct buffer so the native side can fill it without an extra copy.
        mAudioBuffer = ByteBuffer.allocateDirect(mBufferSizeInBytes);
        mAudioTrack.play();

        mWorkerThread = new WorkerThread();
        mWorkerThread.start();
    }
| 121 | |
| 122 @CalledByNative | |
| 123 private void stop() { | |
| 124 Log.w(TAG, "AudioTrackOutputStream.stop()"); | |
| 125 if (mWorkerThread != null) { | |
| 126 mWorkerThread.finish(); | |
| 127 try { | |
| 128 mWorkerThread.interrupt(); | |
| 129 mWorkerThread.join(); | |
| 130 } catch (SecurityException e) { | |
| 131 Log.e(TAG, "Exception while waiting for AudioTrack worker thread finished: ", e); | |
| 132 } catch (InterruptedException e) { | |
| 133 Log.e(TAG, "Exception while waiting for AudioTrack worker thread finished: ", e); | |
| 134 } | |
| 135 mWorkerThread = null; | |
| 136 } | |
| 137 | |
| 138 mAudioTrack.pause(); | |
| 139 mAudioTrack.flush(); | |
| 140 mNativeAudioTrackOutputStream = 0; | |
| 141 } | |
| 142 | |
    // Sets the playback volume on both channels. Deliberately uses the
    // deprecated setStereoVolume() (hence @SuppressWarnings) — presumably for
    // pre-API-21 compatibility; confirm against the supported SDK floor.
    @SuppressWarnings("deprecation")
    @CalledByNative
    private void setVolume(double volume) {
        Log.w(TAG, "AudioTrackOutputStream.setVolume()");
        // Chrome sends the volume in the range [0, 1.0], whereas Android
        // expects the volume to be within [0, getMaxVolume()].
        float scaledVolume = (float) (volume * mAudioTrack.getMaxVolume());
        mAudioTrack.setStereoVolume(scaledVolume, scaledVolume);
    }
| 152 | |
| 153 @CalledByNative | |
| 154 private void close() { | |
| 155 Log.w(TAG, "AudioTrackOutputStream.close()"); | |
| 156 if (mAudioTrack != null) mAudioTrack.release(); | |
| 157 } | |
| 158 | |
    // Pulls one chunk of decoded audio from the native stream and writes it
    // to the AudioTrack. Returns true only when a full chunk was written;
    // returns false when the stream is stopped, no data is available (caller
    // should back off), or the write failed or was short.
    public boolean readMoreData() {
        // 0 means stop() has run; the native object must not be touched.
        if (mNativeAudioTrackOutputStream == 0) return false;

        int position = mAudioTrack.getPlaybackHeadPosition();
        int size = nativeOnMoreData(mNativeAudioTrackOutputStream, mAudioBuffer, position);
        if (size <= 0) {
            return false;
        }

        // Hand a read-only view to writeAudio() so it cannot disturb
        // mAudioBuffer's position/limit for the next native fill.
        ByteBuffer readOnlyBuffer = mAudioBuffer.asReadOnlyBuffer();
        int result = writeAudio(readOnlyBuffer, size);

        if (result < 0) {
            Log.e(TAG, "AudioTrack.write() failed. Error:" + result);
            return false;
        } else if (result != size) {
            Log.e(TAG, "AudioTrack.write() incomplete. Data size:" + size + ", written size:"
                    + result);
            return false;
        }

        return true;
    }
| 182 | |
    // Writes |size| bytes from |buffer| to the AudioTrack, blocking until
    // accepted. Returns the number of bytes written, or a negative
    // AudioTrack error code.
    @SuppressLint("NewApi")
    private int writeAudio(ByteBuffer buffer, int size) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // The ByteBuffer overload only exists since API 21 (hence the
            // NewApi lint suppression).
            return mAudioTrack.write(buffer, size, AudioTrack.WRITE_BLOCKING);
        } else {
            if (buffer.hasArray()) {
                // NOTE(review): this ignores buffer.position() — assumes the
                // caller always passes a buffer positioned at 0; confirm.
                return mAudioTrack.write(buffer.array(), buffer.arrayOffset(), size);
            } else {
                // Direct or read-only buffers (the asReadOnlyBuffer() view
                // from readMoreData()) expose no backing array; copy out.
                byte[] array = new byte[size];
                buffer.get(array);
                return mAudioTrack.write(array, 0, size);
            }
        }
    }
| 197 | |
    // Asks the native stream for more audio, passing the current playback
    // head position; returns the number of bytes written into |audioData|,
    // with <= 0 meaning no data is available (see readMoreData()).
    private native int nativeOnMoreData(
            long nativeAudioTrackOutputStream, ByteBuffer audioData, int totalPlayedFrames);
    // NOTE(review): declared but never invoked from the Java code visible
    // here — presumably intended for write-failure reporting; confirm.
    private native void nativeOnError(long nativeAudioTrackOutputStream);
| 201 } | |
| OLD | NEW |