OLD | NEW |
---|---|
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 package org.chromium.media; | 5 package org.chromium.media; |
6 | 6 |
7 import android.media.AudioFormat; | 7 import android.media.AudioFormat; |
8 import android.media.AudioManager; | 8 import android.media.AudioManager; |
9 import android.media.AudioTrack; | 9 import android.media.AudioTrack; |
10 import android.media.MediaCodec; | 10 import android.media.MediaCodec; |
(...skipping 59 matching lines...) | |
70 | 70 |
71 private ByteBuffer[] mInputBuffers; | 71 private ByteBuffer[] mInputBuffers; |
72 private ByteBuffer[] mOutputBuffers; | 72 private ByteBuffer[] mOutputBuffers; |
73 | 73 |
74 private MediaCodec mMediaCodec; | 74 private MediaCodec mMediaCodec; |
75 private AudioTrack mAudioTrack; | 75 private AudioTrack mAudioTrack; |
76 private boolean mFlushed; | 76 private boolean mFlushed; |
77 private long mLastPresentationTimeUs; | 77 private long mLastPresentationTimeUs; |
78 private String mMime; | 78 private String mMime; |
79 private boolean mAdaptivePlaybackSupported; | 79 private boolean mAdaptivePlaybackSupported; |
80 private int mSampleRate; | |
80 | 81 |
81 private static class DequeueInputResult { | 82 private static class DequeueInputResult { |
82 private final int mStatus; | 83 private final int mStatus; |
83 private final int mIndex; | 84 private final int mIndex; |
84 | 85 |
85 private DequeueInputResult(int status, int index) { | 86 private DequeueInputResult(int status, int index) { |
86 mStatus = status; | 87 mStatus = status; |
87 mIndex = index; | 88 mIndex = index; |
88 } | 89 } |
89 | 90 |
(...skipping 468 matching lines...) | |
558 mLastPresentationTimeUs = info.presentationTimeUs; | 559 mLastPresentationTimeUs = info.presentationTimeUs; |
559 | 560 |
560 if (indexOrStatus >= 0) { // index! | 561 if (indexOrStatus >= 0) { // index! |
561 status = MEDIA_CODEC_OK; | 562 status = MEDIA_CODEC_OK; |
562 index = indexOrStatus; | 563 index = indexOrStatus; |
563 } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { | 564 } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { |
564 mOutputBuffers = mMediaCodec.getOutputBuffers(); | 565 mOutputBuffers = mMediaCodec.getOutputBuffers(); |
565 status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED; | 566 status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED; |
566 } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { | 567 } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { |
567 status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED; | 568 status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED; |
569 MediaFormat newFormat = mMediaCodec.getOutputFormat(); | |
570 if (newFormat.containsKey(MediaFormat.KEY_SAMPLE_RATE)) { | |
571 int newSampleRate = newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE); | |
572 if (newSampleRate != mSampleRate) { | |
573 if (!reconfigureAudioTrack(newFormat)) { | |
qinmin
2014/12/17 18:12:54
nit: simply combine this if statement with the above.
gunsch
2014/12/17 19:48:12
Done.
574 status = MEDIA_CODEC_ERROR; | |
575 } | |
576 } | |
577 } | |
568 } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { | 578 } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { |
569 status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER; | 579 status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER; |
570 } else { | 580 } else { |
571 Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus); | 581 Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus); |
572 assert false; | 582 assert false; |
573 } | 583 } |
574 } catch (IllegalStateException e) { | 584 } catch (IllegalStateException e) { |
575 Log.e(TAG, "Failed to dequeue output buffer", e); | 585 Log.e(TAG, "Failed to dequeue output buffer", e); |
576 } | 586 } |
577 | 587 |
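The inline comment above asks for the sample-rate check and the reconfigureAudioTrack() call to be folded into a single if. The revised patch set is not shown on this page, but a minimal sketch of what the combined guard presumably looks like (relying on short-circuit evaluation so the track is only rebuilt when the rate actually changes) is:

    // Sketch only; the actual follow-up patch set is not part of this page.
    if (newFormat.containsKey(MediaFormat.KEY_SAMPLE_RATE)) {
        int newSampleRate = newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        if (newSampleRate != mSampleRate && !reconfigureAudioTrack(newFormat)) {
            status = MEDIA_CODEC_ERROR;
        }
    }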
(...skipping 79 matching lines...) | |
657 @CalledByNative | 667 @CalledByNative |
658 private static void setFrameHasADTSHeader(MediaFormat format) { | 668 private static void setFrameHasADTSHeader(MediaFormat format) { |
659 format.setInteger(MediaFormat.KEY_IS_ADTS, 1); | 669 format.setInteger(MediaFormat.KEY_IS_ADTS, 1); |
660 } | 670 } |
661 | 671 |
662 @CalledByNative | 672 @CalledByNative |
663 private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int f lags, | 673 private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int f lags, |
664 boolean playAudio) { | 674 boolean playAudio) { |
665 try { | 675 try { |
666 mMediaCodec.configure(format, null, crypto, flags); | 676 mMediaCodec.configure(format, null, crypto, flags); |
667 if (playAudio) { | 677 if (playAudio && !reconfigureAudioTrack(format)) { |
668 int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); | 678 return false; |
669 int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); | |
670 int channelConfig = getAudioFormat(channelCount); | |
671 // Using 16bit PCM for output. Keep this value in sync with | |
672 // kBytesPerAudioOutputSample in media_codec_bridge.cc. | |
673 int minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, | |
674 AudioFormat.ENCODING_PCM_16BIT); | |
675 mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, | |
676 AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM); | |
677 if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) { | |
678 mAudioTrack = null; | |
679 return false; | |
680 } | |
681 } | 679 } |
682 return true; | 680 return true; |
683 } catch (IllegalStateException e) { | 681 } catch (IllegalStateException e) { |
684 Log.e(TAG, "Cannot configure the audio codec", e); | 682 Log.e(TAG, "Cannot configure the audio codec", e); |
685 } | 683 } |
686 return false; | 684 return false; |
687 } | 685 } |
688 | 686 |
689 /** | 687 /** |
688 * Resets the AudioTrack instance, configured according to the given format. | |
689 * If a previous AudioTrack instance already exists, release it. | |
690 * | |
691 * @param format The format from which to get sample rate and channel count. | |
692 * @return Whether or not creating the AudioTrack succeeded. | |
693 */ | |
694 private boolean reconfigureAudioTrack(MediaFormat format) { | |
695 if (mAudioTrack != null) { | |
696 mAudioTrack.release(); | |
697 } | |
698 | |
699 mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); | |
700 int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); | |
701 int channelConfig = getAudioFormat(channelCount); | |
702 // Using 16bit PCM for output. Keep this value in sync with | |
703 // kBytesPerAudioOutputSample in media_codec_bridge.cc. | |
704 int minBufferSize = AudioTrack.getMinBufferSize(mSampleRate, channelConfig, | |
705 AudioFormat.ENCODING_PCM_16BIT); | |
706 mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, mSampleRate, channelConfig, | |
707 AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM); | |
708 if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) { | |
709 mAudioTrack = null; | |
710 Log.e(TAG, "Failed to initialize AudioTrack"); | |
711 return false; | |
712 } | |
713 return true; | |
714 } | |
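A side note on the STATE_UNINITIALIZED guard in reconfigureAudioTrack(): AudioTrack does not always throw on an unsupported configuration, so the code has to probe the resulting state. The defensive pattern is sketched below, pulled out into a hypothetical helper purely for illustration (tryCreateAudioTrack is not part of this patch; it only uses the android.media classes already imported by this file):

    // Illustration only. getMinBufferSize() reports AudioTrack.ERROR_BAD_VALUE for
    // parameter combinations the device rejects, and a constructed track can stay in
    // STATE_UNINITIALIZED rather than throw, which is what the getState() check in
    // reconfigureAudioTrack() catches.
    private static AudioTrack tryCreateAudioTrack(int sampleRate, int channelConfig) {
        int minBufferSize = AudioTrack.getMinBufferSize(
                sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        if (minBufferSize == AudioTrack.ERROR_BAD_VALUE || minBufferSize == AudioTrack.ERROR) {
            return null;  // Unsupported format; do not construct the track at all.
        }
        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
                channelConfig, AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
                AudioTrack.MODE_STREAM);
        if (track.getState() == AudioTrack.STATE_UNINITIALIZED) {
            track.release();
            return null;
        }
        return track;
    }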
715 | |
716 /** | |
690 * Play the audio buffer that is passed in. | 717 * Play the audio buffer that is passed in. |
691 * | 718 * |
692 * @param buf Audio buffer to be rendered. | 719 * @param buf Audio buffer to be rendered. |
693 * @return The number of frames that have already been consumed by the | 720 * @return The number of frames that have already been consumed by the |
694 * hardware. This number resets to 0 after each flush call. | 721 * hardware. This number resets to 0 after each flush call. |
695 */ | 722 */ |
696 @CalledByNative | 723 @CalledByNative |
697 private long playOutputBuffer(byte[] buf) { | 724 private long playOutputBuffer(byte[] buf) { |
698 if (mAudioTrack == null) { | 725 if (mAudioTrack == null) { |
699 return 0; | 726 return 0; |
(...skipping 44 matching lines...) | |
744 return AudioFormat.CHANNEL_OUT_QUAD; | 771 return AudioFormat.CHANNEL_OUT_QUAD; |
745 case 6: | 772 case 6: |
746 return AudioFormat.CHANNEL_OUT_5POINT1; | 773 return AudioFormat.CHANNEL_OUT_5POINT1; |
747 case 8: | 774 case 8: |
748 return AudioFormat.CHANNEL_OUT_7POINT1; | 775 return AudioFormat.CHANNEL_OUT_7POINT1; |
749 default: | 776 default: |
750 return AudioFormat.CHANNEL_OUT_DEFAULT; | 777 return AudioFormat.CHANNEL_OUT_DEFAULT; |
751 } | 778 } |
752 } | 779 } |
753 } | 780 } |