OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 package org.chromium.media; | 5 package org.chromium.media; |
6 | 6 |
7 import android.annotation.TargetApi; | 7 import android.annotation.TargetApi; |
8 import android.media.AudioFormat; | 8 import android.media.AudioFormat; |
9 import android.media.AudioManager; | |
10 import android.media.AudioTrack; | |
11 import android.media.MediaCodec; | 9 import android.media.MediaCodec; |
12 import android.media.MediaCrypto; | 10 import android.media.MediaCrypto; |
13 import android.media.MediaFormat; | 11 import android.media.MediaFormat; |
14 import android.os.Build; | 12 import android.os.Build; |
15 import android.os.Bundle; | 13 import android.os.Bundle; |
16 import android.view.Surface; | 14 import android.view.Surface; |
17 | 15 |
18 import org.chromium.base.Log; | 16 import org.chromium.base.Log; |
19 import org.chromium.base.annotations.CalledByNative; | 17 import org.chromium.base.annotations.CalledByNative; |
20 import org.chromium.base.annotations.JNINamespace; | 18 import org.chromium.base.annotations.JNINamespace; |
(...skipping 37 matching lines...)
58 // TODO(qinmin): Use MediaFormat constants when part of the public API. | 56 // TODO(qinmin): Use MediaFormat constants when part of the public API. |
59 private static final String KEY_CROP_LEFT = "crop-left"; | 57 private static final String KEY_CROP_LEFT = "crop-left"; |
60 private static final String KEY_CROP_RIGHT = "crop-right"; | 58 private static final String KEY_CROP_RIGHT = "crop-right"; |
61 private static final String KEY_CROP_BOTTOM = "crop-bottom"; | 59 private static final String KEY_CROP_BOTTOM = "crop-bottom"; |
62 private static final String KEY_CROP_TOP = "crop-top"; | 60 private static final String KEY_CROP_TOP = "crop-top"; |
63 | 61 |
64 private ByteBuffer[] mInputBuffers; | 62 private ByteBuffer[] mInputBuffers; |
65 private ByteBuffer[] mOutputBuffers; | 63 private ByteBuffer[] mOutputBuffers; |
66 | 64 |
67 private MediaCodec mMediaCodec; | 65 private MediaCodec mMediaCodec; |
68 private AudioTrack mAudioTrack; | |
69 private byte[] mPendingAudioBuffer; | |
70 private boolean mFlushed; | 66 private boolean mFlushed; |
71 private long mLastPresentationTimeUs; | 67 private long mLastPresentationTimeUs; |
72 private String mMime; | 68 private String mMime; |
73 private boolean mAdaptivePlaybackSupported; | 69 private boolean mAdaptivePlaybackSupported; |
74 | 70 |
75 @MainDex | 71 @MainDex |
76 private static class DequeueInputResult { | 72 private static class DequeueInputResult { |
77 private final int mStatus; | 73 private final int mStatus; |
78 private final int mIndex; | 74 private final int mIndex; |
79 | 75 |
(...skipping 107 matching lines...)
187 @CalledByNative("GetOutputFormatResult") | 183 @CalledByNative("GetOutputFormatResult") |
188 private int channelCount() { | 184 private int channelCount() { |
189 return mFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); | 185 return mFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); |
190 } | 186 } |
191 } | 187 } |
192 | 188 |
193 private MediaCodecBridge( | 189 private MediaCodecBridge( |
194 MediaCodec mediaCodec, String mime, boolean adaptivePlaybackSupported) { | 190 MediaCodec mediaCodec, String mime, boolean adaptivePlaybackSupported) { |
195 assert mediaCodec != null; | 191 assert mediaCodec != null; |
196 mMediaCodec = mediaCodec; | 192 mMediaCodec = mediaCodec; |
197 mPendingAudioBuffer = null; | |
198 mMime = mime; | 193 mMime = mime; |
199 mLastPresentationTimeUs = 0; | 194 mLastPresentationTimeUs = 0; |
200 mFlushed = true; | 195 mFlushed = true; |
201 mAdaptivePlaybackSupported = adaptivePlaybackSupported; | 196 mAdaptivePlaybackSupported = adaptivePlaybackSupported; |
202 } | 197 } |
203 | 198 |
204 @CalledByNative | 199 @CalledByNative |
205 private static MediaCodecBridge create( | 200 private static MediaCodecBridge create( |
206 String mime, boolean isSecure, int direction, boolean requireSoftwareCodec) { | 201 String mime, boolean isSecure, int direction, boolean requireSoftwareCodec) { |
207 MediaCodecUtil.CodecCreationInfo info = new MediaCodecUtil.CodecCreationInfo(); | 202 MediaCodecUtil.CodecCreationInfo info = new MediaCodecUtil.CodecCreationInfo(); |
(...skipping 23 matching lines...)
231 codecName = mMediaCodec.getName(); | 226 codecName = mMediaCodec.getName(); |
232 } | 227 } |
233 Log.w(TAG, "calling MediaCodec.release() on " + codecName); | 228 Log.w(TAG, "calling MediaCodec.release() on " + codecName); |
234 mMediaCodec.release(); | 229 mMediaCodec.release(); |
235 } catch (IllegalStateException e) { | 230 } catch (IllegalStateException e) { |
236 // The MediaCodec is stuck in a bad state, possibly due to losing | 231 // The MediaCodec is stuck in a bad state, possibly due to losing |
237 // the surface. | 232 // the surface. |
238 Log.e(TAG, "Cannot release media codec", e); | 233 Log.e(TAG, "Cannot release media codec", e); |
239 } | 234 } |
240 mMediaCodec = null; | 235 mMediaCodec = null; |
241 if (mAudioTrack != null) { | |
242 mAudioTrack.release(); | |
243 } | |
244 mPendingAudioBuffer = null; | |
245 } | 236 } |
246 | 237 |
247 @SuppressWarnings("deprecation") | 238 @SuppressWarnings("deprecation") |
248 @CalledByNative | 239 @CalledByNative |
249 private boolean start() { | 240 private boolean start() { |
250 try { | 241 try { |
251 mMediaCodec.start(); | 242 mMediaCodec.start(); |
252 if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT) { | 243 if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT) { |
253 mInputBuffers = mMediaCodec.getInputBuffers(); | 244 mInputBuffers = mMediaCodec.getInputBuffers(); |
254 mOutputBuffers = mMediaCodec.getOutputBuffers(); | 245 mOutputBuffers = mMediaCodec.getOutputBuffers(); |
(...skipping 26 matching lines...)
281 } catch (Exception e) { | 272 } catch (Exception e) { |
282 Log.e(TAG, "Failed to dequeue input buffer", e); | 273 Log.e(TAG, "Failed to dequeue input buffer", e); |
283 } | 274 } |
284 return new DequeueInputResult(status, index); | 275 return new DequeueInputResult(status, index); |
285 } | 276 } |
286 | 277 |
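Context for the KITKAT branch in start() above: on API level 19 and below, a codec's input/output ByteBuffers are only reachable through the deprecated getInputBuffers()/getOutputBuffers() arrays, cached after start() and refreshed on INFO_OUTPUT_BUFFERS_CHANGED, while API 21+ exposes a per-index getter. A minimal sketch of the access pattern, using a hypothetical helper that is not part of this CL:

    // Sketch: resolve an input buffer in an API-level-aware way.
    // MediaCodec.getInputBuffer(int) exists only on API 21+; older
    // releases must index into the cached, deprecated buffer arrays.
    @SuppressWarnings("deprecation")
    private ByteBuffer getInputBufferCompat(int index) {
        if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT) {
            return mInputBuffers[index]; // cached in start() via getInputBuffers()
        }
        return mMediaCodec.getInputBuffer(index); // per-index getter, API 21+
    }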
287 @CalledByNative | 278 @CalledByNative |
288 private int flush() { | 279 private int flush() { |
289 try { | 280 try { |
290 mFlushed = true; | 281 mFlushed = true; |
291 if (mAudioTrack != null) { | |
292 // Need to call pause() here, or otherwise flush() is a no-op. | |
293 mAudioTrack.pause(); | |
294 mAudioTrack.flush(); | |
295 mPendingAudioBuffer = null; | |
296 } | |
297 mMediaCodec.flush(); | 282 mMediaCodec.flush(); |
298 } catch (IllegalStateException e) { | 283 } catch (IllegalStateException e) { |
299 Log.e(TAG, "Failed to flush MediaCodec", e); | 284 Log.e(TAG, "Failed to flush MediaCodec", e); |
300 return MEDIA_CODEC_ERROR; | 285 return MEDIA_CODEC_ERROR; |
301 } | 286 } |
302 return MEDIA_CODEC_OK; | 287 return MEDIA_CODEC_OK; |
303 } | 288 } |
304 | 289 |
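The AudioTrack lines deleted from flush() above encoded a real platform quirk: AudioTrack.flush() only discards queued data when the track is stopped or paused, so the pause() call was required for the flush to take effect. A condensed sketch of the removed behavior, assuming a non-null track:

    // Sketch of the removed audio flush path. Per the AudioTrack docs,
    // flush() is a no-op on a track that is still playing.
    private static void flushAudioTrack(AudioTrack track) {
        track.pause(); // without this, flush() would be a no-op
        track.flush(); // drop PCM data that was written but not yet played
    }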
305 @CalledByNative | 290 @CalledByNative |
306 private void stop() { | 291 private void stop() { |
307 mMediaCodec.stop(); | 292 mMediaCodec.stop(); |
308 if (mAudioTrack != null) { | |
309 mAudioTrack.pause(); | |
310 } | |
311 } | 293 } |
312 | 294 |
313 @TargetApi(Build.VERSION_CODES.KITKAT) | 295 @TargetApi(Build.VERSION_CODES.KITKAT) |
314 @CalledByNative | 296 @CalledByNative |
315 private String getName() { | 297 private String getName() { |
316 return mMediaCodec.getName(); | 298 return mMediaCodec.getName(); |
317 } | 299 } |
318 | 300 |
319 @CalledByNative | 301 @CalledByNative |
320 private GetOutputFormatResult getOutputFormat() { | 302 private GetOutputFormatResult getOutputFormat() { |
(...skipping 119 matching lines...)
440 if (indexOrStatus >= 0) { // index! | 422 if (indexOrStatus >= 0) { // index! |
441 status = MEDIA_CODEC_OK; | 423 status = MEDIA_CODEC_OK; |
442 index = indexOrStatus; | 424 index = indexOrStatus; |
443 } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { | 425 } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { |
444 assert Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT; | 426 assert Build.VERSION.SDK_INT <= Build.VERSION_CODES.KITKAT; |
445 mOutputBuffers = mMediaCodec.getOutputBuffers(); | 427 mOutputBuffers = mMediaCodec.getOutputBuffers(); |
446 status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED; | 428 status = MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED; |
447 } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { | 429 } else if (indexOrStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { |
448 status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED; | 430 status = MEDIA_CODEC_OUTPUT_FORMAT_CHANGED; |
449 MediaFormat newFormat = mMediaCodec.getOutputFormat(); | 431 MediaFormat newFormat = mMediaCodec.getOutputFormat(); |
450 if (mAudioTrack != null && newFormat.containsKey(MediaFormat.KEY_SAMPLE_RATE)) { | |
451 int newSampleRate = newFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE); | |
452 if (mAudioTrack.setPlaybackRate(newSampleRate) != AudioTrack.SUCCESS) { | |
453 status = MEDIA_CODEC_ERROR; | |
454 } | |
455 } | |
456 } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { | 432 } else if (indexOrStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { |
457 status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER; | 433 status = MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER; |
458 } else { | 434 } else { |
459 Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus); | 435 Log.e(TAG, "Unexpected index_or_status: " + indexOrStatus); |
460 assert false; | 436 assert false; |
461 } | 437 } |
462 } catch (IllegalStateException e) { | 438 } catch (IllegalStateException e) { |
463 status = MEDIA_CODEC_ERROR; | 439 status = MEDIA_CODEC_ERROR; |
464 Log.e(TAG, "Failed to dequeue output buffer", e); | 440 Log.e(TAG, "Failed to dequeue output buffer", e); |
465 } | 441 } |
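For context on the mapping above: MediaCodec.dequeueOutputBuffer() returns either a buffer index (>= 0) or one of the INFO_* constants, which this method translates into MEDIA_CODEC_* codes for the native side. A standalone usage sketch of the underlying API, assuming a started MediaCodec named codec (illustrative only, not Chromium code):

    // Sketch: one drain step over MediaCodec.dequeueOutputBuffer().
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int result = codec.dequeueOutputBuffer(info, 10000 /* timeoutUs */);
    if (result >= 0) {
        // A valid output buffer index: render, then release the buffer.
        codec.releaseOutputBuffer(result, true /* render */);
    } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat newFormat = codec.getOutputFormat();
    } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
        // No output available within the timeout; retry later.
    }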
(...skipping 137 matching lines...)
603 format.setByteBuffer(name, ByteBuffer.wrap(bytes)); | 579 format.setByteBuffer(name, ByteBuffer.wrap(bytes)); |
604 } | 580 } |
605 } | 581 } |
606 | 582 |
607 @CalledByNative | 583 @CalledByNative |
608 private static void setFrameHasADTSHeader(MediaFormat format) { | 584 private static void setFrameHasADTSHeader(MediaFormat format) { |
609 format.setInteger(MediaFormat.KEY_IS_ADTS, 1); | 585 format.setInteger(MediaFormat.KEY_IS_ADTS, 1); |
610 } | 586 } |
611 | 587 |
612 @CalledByNative | 588 @CalledByNative |
613 private boolean configureAudio( | 589 private boolean configureAudio(MediaFormat format, MediaCrypto crypto, int flags) { |
614 MediaFormat format, MediaCrypto crypto, int flags, boolean playAudio) { | |
615 try { | 590 try { |
616 mMediaCodec.configure(format, null, crypto, flags); | 591 mMediaCodec.configure(format, null, crypto, flags); |
617 if (playAudio) { | |
618 int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); | |
619 int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); | |
620 if (!createAudioTrack(sampleRate, channelCount)) return false; | |
621 } | |
622 return true; | 592 return true; |
623 } catch (IllegalArgumentException e) { | 593 } catch (IllegalArgumentException e) { |
624 Log.e(TAG, "Cannot configure the audio codec", e); | 594 Log.e(TAG, "Cannot configure the audio codec", e); |
625 } catch (IllegalStateException e) { | 595 } catch (IllegalStateException e) { |
626 Log.e(TAG, "Cannot configure the audio codec", e); | 596 Log.e(TAG, "Cannot configure the audio codec", e); |
627 } catch (MediaCodec.CryptoException e) { | 597 } catch (MediaCodec.CryptoException e) { |
628 Log.e(TAG, "Cannot configure the audio codec: DRM error", e); | 598 Log.e(TAG, "Cannot configure the audio codec: DRM error", e); |
629 } catch (Exception e) { | 599 } catch (Exception e) { |
630 Log.e(TAG, "Cannot configure the audio codec", e); | 600 Log.e(TAG, "Cannot configure the audio codec", e); |
631 } | 601 } |
632 return false; | 602 return false; |
633 } | 603 } |
634 | 604 |
635 @CalledByNative | |
636 private boolean createAudioTrack(int sampleRate, int channelCount) { | |
637 Log.v(TAG, "createAudioTrack: sampleRate:" + sampleRate + " channelCount:" + channelCount); | |
638 | |
639 int channelConfig = getAudioFormat(channelCount); | |
640 | |
641 // Using 16bit PCM for output. Keep this value in sync with | |
642 // kBytesPerAudioOutputSample in media_codec_bridge.cc. | |
643 int minBufferSize = AudioTrack.getMinBufferSize( | |
644 sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT); | |
645 | |
646 // Set buffer size to be at least 1.5 times the minimum buffer size | |
647 // (see http://crbug.com/589269). | |
648 // TODO(timav, qinmin): For MediaSourcePlayer, we start both audio and | |
649 // video decoders once we get a valid presentation timestamp from the decoder | |
650 // (prerolling_==false). However, this doesn't guarantee that the AudioTrack | |
651 // starts outputting samples, especially with a larger buffer size. | |
652 // The best solution would be to have a large buffer size in AudioTrack, and | |
653 // to sync audio/video start when the AudioTrack starts outputting samples | |
654 // (head position starts progressing). | |
655 int minBufferSizeInFrames = minBufferSize / PCM16_BYTES_PER_SAMPLE / channelCount; | |
656 int bufferSize = | |
657 (int) (1.5 * minBufferSizeInFrames) * PCM16_BYTES_PER_SAMPLE * channelCount; | |
658 | |
659 if (mAudioTrack != null) mAudioTrack.release(); | |
660 | |
661 mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, | |
662 AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM); | |
663 if (mAudioTrack.getState() == AudioTrack.STATE_UNINITIALIZED) { | |
664 Log.e(TAG, "Cannot create AudioTrack"); | |
665 mAudioTrack = null; | |
666 return false; | |
667 } | |
668 return true; | |
669 } | |
670 | |
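To make the removed 1.5x sizing arithmetic concrete, here is a worked example with assumed inputs (the getMinBufferSize() result is hypothetical; PCM16_BYTES_PER_SAMPLE is 2, i.e. two bytes per 16-bit sample):

    // Worked example of the removed buffer sizing, with assumed numbers.
    int channelCount = 2;      // stereo (assumption)
    int minBufferSize = 14336; // hypothetical getMinBufferSize() result, in bytes
    int bytesPerFrame = 2 /* PCM16_BYTES_PER_SAMPLE */ * channelCount;     // 4
    int minBufferSizeInFrames = minBufferSize / bytesPerFrame;             // 3584
    int bufferSize = (int) (1.5 * minBufferSizeInFrames) * bytesPerFrame;  // 21504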
671 /** | |
672 * Play the audio buffer that is passed in. | |
673 * | |
674 * @param buf Audio buffer to be rendered. | |
675 * @param postpone If true, save audio buffer for playback with the next | |
676 * audio buffer. Must be followed by playOutputBuffer() without postpone, | |
677 * flush() or release(). | |
678 * @return The number of frames that have already been consumed by the | |
679 * hardware. This number resets to 0 after each flush call. | |
680 */ | |
681 @CalledByNative | |
682 private long playOutputBuffer(byte[] buf, boolean postpone) { | |
683 if (mAudioTrack == null) { | |
684 return 0; | |
685 } | |
686 | |
687 if (postpone) { | |
688 assert mPendingAudioBuffer == null; | |
689 mPendingAudioBuffer = buf; | |
690 return 0; | |
691 } | |
692 | |
693 if (AudioTrack.PLAYSTATE_PLAYING != mAudioTrack.getPlayState()) { | |
694 mAudioTrack.play(); | |
695 } | |
696 | |
697 int size = 0; | |
698 if (mPendingAudioBuffer != null) { | |
699 size = mAudioTrack.write(mPendingAudioBuffer, 0, mPendingAudioBuffer.length); | |
700 if (mPendingAudioBuffer.length != size) { | |
701 Log.i(TAG, "Failed to send all data to audio output, expected size: " | |
702 + mPendingAudioBuffer.length + ", actual size: " + size); | |
703 } | |
704 mPendingAudioBuffer = null; | |
705 } | |
706 | |
707 size = mAudioTrack.write(buf, 0, buf.length); | |
708 if (buf.length != size) { | |
709 Log.i(TAG, "Failed to send all data to audio output, expected size: " | |
710 + buf.length + ", actual size: " + size); | |
711 } | |
712 // TODO(qinmin): Returning the head position allows us to estimate | |
713 // the current presentation time in native code. However, it would be | |
714 // better to use AudioTrack.getCurrentTimestamp() to get the last | |
715 // known time when a frame was played, though that requires converting | |
716 // the Java nano time to a C++ timestamp. | |
717 // If the stream runs too long, getPlaybackHeadPosition() could | |
718 // overflow. AudioTimestampHelper in MediaSourcePlayer has the same | |
719 // issue. See http://crbug.com/358801. | |
720 | |
721 // AudioTrack.getPlaybackHeadPosition() returns an int that should be | |
722 // interpreted as an unsigned 32-bit value. Convert the return value of | |
723 // getPlaybackHeadPosition() into an unsigned int using the long mask. | |
724 return 0xFFFFFFFFL & mAudioTrack.getPlaybackHeadPosition(); | |
725 } | |
726 | |
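The 0xFFFFFFFFL mask in the return statement above is what keeps the frame count usable after wraparound: getPlaybackHeadPosition() returns a signed int that is documented to be reinterpreted as unsigned. A tiny demonstration with an assumed wrapped reading:

    // Sketch: why the long mask matters. Past 2^31 frames the int head
    // position goes negative; masking recovers the unsigned count.
    int rawHeadPosition = -2147483000;           // assumed wrapped reading
    long frames = 0xFFFFFFFFL & rawHeadPosition; // 2147484296, still monotonic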
727 @SuppressWarnings("deprecation") | |
728 @CalledByNative | |
729 private void setVolume(double volume) { | |
730 if (mAudioTrack != null) { | |
731 mAudioTrack.setStereoVolume((float) volume, (float) volume); | |
732 } | |
733 } | |
734 | |
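setStereoVolume() in the deleted method above is the pre-API-21 call, hence the @SuppressWarnings("deprecation"); from API 21 on, the replacement is the single-argument AudioTrack.setVolume(). An API-aware sketch, using a hypothetical helper that is not part of this CL:

    // Sketch: API-level-aware volume setter.
    @SuppressWarnings("deprecation")
    private static void setTrackVolume(AudioTrack track, double volume) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            track.setVolume((float) volume); // API 21+
        } else {
            track.setStereoVolume((float) volume, (float) volume);
        }
    }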
735 private void resetLastPresentationTimeIfNeeded(long presentationTimeUs) { | 605 private void resetLastPresentationTimeIfNeeded(long presentationTimeUs) { |
736 if (mFlushed) { | 606 if (mFlushed) { |
737 mLastPresentationTimeUs = | 607 mLastPresentationTimeUs = |
738 Math.max(presentationTimeUs - MAX_PRESENTATION_TIMESTAMP_SHIFT_US, 0); | 608 Math.max(presentationTimeUs - MAX_PRESENTATION_TIMESTAMP_SHIFT_US, 0); |
739 mFlushed = false; | 609 mFlushed = false; |
740 } | 610 } |
741 } | 611 } |
742 | 612 |
743 @SuppressWarnings("deprecation") | 613 @SuppressWarnings("deprecation") |
744 private int getAudioFormat(int channelCount) { | 614 private int getAudioFormat(int channelCount) { |
(...skipping 10 matching lines...)
755 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { | 625 if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { |
756 return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND; | 626 return AudioFormat.CHANNEL_OUT_7POINT1_SURROUND; |
757 } else { | 627 } else { |
758 return AudioFormat.CHANNEL_OUT_7POINT1; | 628 return AudioFormat.CHANNEL_OUT_7POINT1; |
759 } | 629 } |
760 default: | 630 default: |
761 return AudioFormat.CHANNEL_OUT_DEFAULT; | 631 return AudioFormat.CHANNEL_OUT_DEFAULT; |
762 } | 632 } |
763 } | 633 } |
764 } | 634 } |