Index: media/base/android/java/src/test/org/chromium/media/AudioTrackOutputStreamTest.java |
diff --git a/media/base/android/java/src/test/org/chromium/media/AudioTrackOutputStreamTest.java b/media/base/android/java/src/test/org/chromium/media/AudioTrackOutputStreamTest.java |
new file mode 100644 |
index 0000000000000000000000000000000000000000..93dbeb8356c8c60d4d9728cf7b91518fbca336bf |
--- /dev/null |
+++ b/media/base/android/java/src/test/org/chromium/media/AudioTrackOutputStreamTest.java |
@@ -0,0 +1,162 @@ |
+// Copyright 2017 The Chromium Authors. All rights reserved. |
DaleCurtis 2017/06/15 21:46:32: Tests should exist for the C++ variant too; althou
AndyWu 2017/08/02 01:43:40: TBD.
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
+package org.chromium.media; |
+ |
+import static org.junit.Assert.assertArrayEquals; |
+ |
+import android.media.AudioFormat; |
+import android.media.AudioTrack; |
+ |
+import org.junit.Test; |
+import org.junit.runner.RunWith; |
+import org.robolectric.annotation.Config; |
+ |
+import org.chromium.testing.local.LocalRobolectricTestRunner; |
+ |
+import java.nio.ByteBuffer; |
+import java.util.ArrayList; |
+import java.util.List; |
+import java.util.concurrent.CountDownLatch; |
+import java.util.concurrent.TimeUnit; |
+ |
+@RunWith(LocalRobolectricTestRunner.class) |
+@Config(manifest = Config.NONE) |
+public class AudioTrackOutputStreamTest { |
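+    // AudioTrack subclass that records every byte successfully written to it, so tests |
+    // can compare the bytes received against the bytes generated by the data provider. |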
+ static class ObservableAudioTrack extends AudioTrack { |
+ private List<Byte> mReceivedData = new ArrayList<Byte>(); |
+ |
+ public ObservableAudioTrack(int streamType, int sampleRateInHz, int channelConfig, |
+ int audioFormat, int bufferSizeInBytes, int mode) { |
+ super(streamType, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes, mode); |
+ } |
+ |
+ @Override |
+ public int write(ByteBuffer audioData, int sizeInBytes, int writeMode) { |
+            int writtenSize = super.write(audioData, sizeInBytes, writeMode); |
+ |
+            if (writtenSize > 0) { |
+                byte[] array = new byte[writtenSize]; |
+                audioData.asReadOnlyBuffer().get(array); |
+                recordData(array, 0, writtenSize); |
+            } |
+ |
+            return writtenSize; |
+ } |
+ |
+ @Override |
+ public int write(byte[] audioData, int offsetInBytes, int sizeInBytes) { |
+            int writtenSize = super.write(audioData, offsetInBytes, sizeInBytes); |
+ |
+            if (writtenSize > 0) recordData(audioData, offsetInBytes, writtenSize); |
+ |
+            return writtenSize; |
+ } |
+ |
+ private void recordData(byte[] audioData, int offsetInBytes, int sizeInBytes) { |
+ for (; sizeInBytes > 0; --sizeInBytes) mReceivedData.add(audioData[offsetInBytes++]); |
+ } |
+ |
+ public List<Byte> getReceivedData() { |
+ return mReceivedData; |
+ } |
+ } |
+ |
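+    // AudioTrackOutputStream.Callback that supplies MIN_BUFFER_SIZE-byte buffers of |
+    // sequential data and uses a CountDownLatch so tests can wait until it runs out of data. |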
+ static class DataProvider implements AudioTrackOutputStream.Callback { |
+ private static final int MIN_BUFFER_SIZE = 1024; |
+ private List<Byte> mGeneratedData = new ArrayList<Byte>(); |
+ private CountDownLatch mDoneSignal; |
+ private ObservableAudioTrack mAudioTrack; |
+ |
+ public DataProvider(int bufferCount) { |
+ assert bufferCount > 0; |
+ mDoneSignal = new CountDownLatch(bufferCount + 1); |
+ } |
+ |
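+        // Re-arms the latch so the provider will supply another burst of buffers. |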
+ public void updateBufferCount(int bufferCount) { |
+ assert bufferCount > 0; |
+ mDoneSignal = new CountDownLatch(bufferCount + 1); |
+ } |
+ |
+ @Override |
+ public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) { |
+ return MIN_BUFFER_SIZE; |
+ } |
+ |
+ @Override |
+ public AudioTrack createAudioTrack(int streamType, int sampleRateInHz, int channelConfig, |
+ int audioFormat, int bufferSizeInBytes, int mode) { |
+ mAudioTrack = new ObservableAudioTrack(streamType, sampleRateInHz, channelConfig, |
+ audioFormat, bufferSizeInBytes, mode); |
+ return mAudioTrack; |
+ } |
+ |
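+        // Supplies MIN_BUFFER_SIZE bytes of sequential data per call until the latch |
+        // counts down to one, after which it reports that no more data is available. |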
+ @Override |
+ public int onMoreData(ByteBuffer audioData, int totalPlayedFrames) { |
+ mDoneSignal.countDown(); |
+ if (mDoneSignal.getCount() <= 1) { |
+ return 0; |
+ } |
+ |
+ final int dataSize = MIN_BUFFER_SIZE; |
+ audioData.rewind(); |
+ for (int i = 0; i < dataSize; ++i) { |
+ byte data = (byte) i; |
+ audioData.put(data); |
+ mGeneratedData.add(data); |
+ } |
+ return dataSize; |
+ } |
+ |
+ public void waitForOutOfData() throws InterruptedException { |
+ mDoneSignal.await(300, TimeUnit.MILLISECONDS); |
+ } |
+ |
+ public List<Byte> getGeneratedData() { |
+ return mGeneratedData; |
+ } |
+ |
+ public List<Byte> getReceivedData() { |
+ return mAudioTrack.getReceivedData(); |
+ } |
+    } |
+ |
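+    // Plays a single burst of buffers and verifies that the bytes received by the |
+    // AudioTrack match the bytes generated by the provider. |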
+ @Test |
+ public void playSimpleBitstream() throws InterruptedException { |
+ DataProvider provider = new DataProvider(3); |
+ |
+ AudioTrackOutputStream stream = AudioTrackOutputStream.create(provider); |
+ stream.open(2, 44100, AudioFormat.ENCODING_E_AC3); |
+ stream.start(0); |
+ |
+ provider.waitForOutOfData(); |
+ List<Byte> generatedData = provider.getGeneratedData(); |
+ List<Byte> receivedData = provider.getReceivedData(); |
+ |
+ assertArrayEquals(generatedData.toArray(), receivedData.toArray()); |
+ |
+ stream.stop(); |
+ } |
+ |
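+    // Plays one burst, waits for the stream to run dry, then plays a second burst and |
+    // verifies that all generated bytes were received. |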
+ @Test |
+ public void playPiecewiseBitstream() throws InterruptedException { |
+ DataProvider provider = new DataProvider(3); |
+ |
+ AudioTrackOutputStream stream = AudioTrackOutputStream.create(provider); |
+ stream.open(2, 44100, AudioFormat.ENCODING_E_AC3); |
+ stream.start(0); |
+ |
+ provider.waitForOutOfData(); |
+ |
+ provider.updateBufferCount(3); |
+ provider.waitForOutOfData(); |
+ |
+ List<Byte> generatedData = provider.getGeneratedData(); |
+ List<Byte> receivedData = provider.getReceivedData(); |
+ |
+ assertArrayEquals(generatedData.toArray(), receivedData.toArray()); |
+ |
+ stream.stop(); |
+ } |
+} |