OLD | NEW |
---|---|
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "media/base/android/media_source_player.h" | 5 #include "media/base/android/media_source_player.h" |
6 | 6 |
7 #include "base/android/jni_android.h" | 7 #include "base/android/jni_android.h" |
8 #include "base/android/jni_string.h" | 8 #include "base/android/jni_string.h" |
9 #include "base/basictypes.h" | 9 #include "base/basictypes.h" |
10 #include "base/bind.h" | 10 #include "base/bind.h" |
(...skipping 73 matching lines...) | |
84 media_codec_bridge_->QueueEOS(input_buf_index); | 84 media_codec_bridge_->QueueEOS(input_buf_index); |
85 } else { | 85 } else { |
86 media_codec_bridge_->QueueInputBuffer( | 86 media_codec_bridge_->QueueInputBuffer( |
87 input_buf_index, &unit.data[0], unit.data.size(), unit.timestamp); | 87 input_buf_index, &unit.data[0], unit.data.size(), unit.timestamp); |
88 } | 88 } |
89 } | 89 } |
90 size_t offset = 0; | 90 size_t offset = 0; |
91 size_t size = 0; | 91 size_t size = 0; |
92 base::TimeDelta presentation_timestamp; | 92 base::TimeDelta presentation_timestamp; |
93 bool end_of_stream = false; | 93 bool end_of_stream = false; |
| 94 bool decode_succeeded = true; |
94 | 95 |
95 int outputBufferIndex = media_codec_bridge_->DequeueOutputBuffer( | 96 int outputBufferIndex = media_codec_bridge_->DequeueOutputBuffer( |
96 timeout, &offset, &size, &presentation_timestamp, &end_of_stream); | 97 timeout, &offset, &size, &presentation_timestamp, &end_of_stream); |
97 switch (outputBufferIndex) { | 98 switch (outputBufferIndex) { |
98 case MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED: | 99 case MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED: |
99 media_codec_bridge_->GetOutputBuffers(); | 100 media_codec_bridge_->GetOutputBuffers(); |
100 break; | 101 break; |
101 case MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: | 102 case MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: |
102 // TODO(qinmin): figure out what we should do if format changes. | 103 // TODO(qinmin): figure out what we should do if format changes. |
acolwell GONE FROM CHROMIUM 2013/05/28 20:35:20: nit: For now, you could make this case act like a
qinmin 2013/05/28 22:34:10: Android MediaCodec will report INFO_OUTPUT_FORMAT_
acolwell GONE FROM CHROMIUM 2013/05/28 22:39:38: ok. I didn't realize this happens on the first fra
| |
103 break; | 104 break; |
104 case MediaCodecBridge::INFO_TRY_AGAIN_LATER: | 105 case MediaCodecBridge::INFO_TRY_AGAIN_LATER: |
105 break; | 106 break; |
| 107 case MediaCodecBridge::INFO_MEDIA_CODEC_ERROR: |
| 108 decode_succeeded = false; |
| 109 break; |
106 default: | 110 default: |
107 DCHECK_LE(0, outputBufferIndex); | 111 DCHECK_LE(0, outputBufferIndex); |
108 if (size == 0 && end_of_stream) | 112 if (size == 0 && end_of_stream) |
109 break; | 113 break; |
110 base::TimeDelta time_to_render; | 114 base::TimeDelta time_to_render; |
111 if (!start_wallclock_time.is_null()) { | 115 if (!start_wallclock_time.is_null()) { |
112 time_to_render = presentation_timestamp - (base::Time::Now() - | 116 time_to_render = presentation_timestamp - (base::Time::Now() - |
113 start_wallclock_time + start_presentation_timestamp); | 117 start_wallclock_time + start_presentation_timestamp); |
114 } | 118 } |
115 if (time_to_render >= base::TimeDelta()) { | 119 if (time_to_render >= base::TimeDelta()) { |
116 MessageLoop::current()->PostDelayedTask( | 120 MessageLoop::current()->PostDelayedTask( |
117 FROM_HERE, | 121 FROM_HERE, |
118 base::Bind(&MediaDecoderJob::ReleaseOutputBuffer, | 122 base::Bind(&MediaDecoderJob::ReleaseOutputBuffer, |
119 weak_this_.GetWeakPtr(), outputBufferIndex, size, | 123 weak_this_.GetWeakPtr(), outputBufferIndex, size, |
120 presentation_timestamp, end_of_stream, callback), | 124 presentation_timestamp, end_of_stream, callback), |
121 time_to_render); | 125 time_to_render); |
122 } else { | 126 } else { |
123 // TODO(qinmin): The codec is lagging behind, need to recalculate the | 127 // TODO(qinmin): The codec is lagging behind, need to recalculate the |
124 // |start_presentation_timestamp_| and |start_wallclock_time_|. | 128 // |start_presentation_timestamp_| and |start_wallclock_time_|. |
125 DVLOG(1) << (is_audio_ ? "audio " : "video ") | 129 DVLOG(1) << (is_audio_ ? "audio " : "video ") |
126 << "codec is lagging behind :" << time_to_render.InMicroseconds(); | 130 << "codec is lagging behind :" << time_to_render.InMicroseconds(); |
127 ReleaseOutputBuffer(outputBufferIndex, size, presentation_timestamp, | 131 ReleaseOutputBuffer(outputBufferIndex, size, presentation_timestamp, |
128 end_of_stream, callback); | 132 end_of_stream, callback); |
129 } | 133 } |
130 return; | 134 return; |
131 } | 135 } |
132 message_loop_->PostTask(FROM_HERE, base::Bind( | 136 message_loop_->PostTask(FROM_HERE, base::Bind( |
133 callback, start_presentation_timestamp, start_wallclock_time, | 137 callback, decode_succeeded, start_presentation_timestamp, |
134 end_of_stream)); | 138 start_wallclock_time, end_of_stream)); |
135 } | 139 } |
136 | 140 |
137 void MediaDecoderJob::ReleaseOutputBuffer( | 141 void MediaDecoderJob::ReleaseOutputBuffer( |
138 int outputBufferIndex, size_t size, | 142 int outputBufferIndex, size_t size, |
139 const base::TimeDelta& presentation_timestamp, | 143 const base::TimeDelta& presentation_timestamp, |
140 bool end_of_stream, const MediaDecoderJob::DecoderCallback& callback) { | 144 bool end_of_stream, const MediaDecoderJob::DecoderCallback& callback) { |
141 // TODO(qinmin): Refactor this function. Maybe AudioDecoderJob should provide | 145 // TODO(qinmin): Refactor this function. Maybe AudioDecoderJob should provide |
142 // its own ReleaseOutputBuffer(). | 146 // its own ReleaseOutputBuffer(). |
143 if (is_audio_) { | 147 if (is_audio_) { |
144 static_cast<AudioCodecBridge*>(media_codec_bridge_.get())->PlayOutputBuffer( | 148 static_cast<AudioCodecBridge*>(media_codec_bridge_.get())->PlayOutputBuffer( |
145 outputBufferIndex, size); | 149 outputBufferIndex, size); |
146 } | 150 } |
147 media_codec_bridge_->ReleaseOutputBuffer(outputBufferIndex, !is_audio_); | 151 media_codec_bridge_->ReleaseOutputBuffer(outputBufferIndex, !is_audio_); |
148 message_loop_->PostTask(FROM_HERE, base::Bind( | 152 message_loop_->PostTask(FROM_HERE, base::Bind( |
149 callback, presentation_timestamp, base::Time::Now(), end_of_stream)); | 153 callback, true, presentation_timestamp, base::Time::Now(), |
| 154 end_of_stream)); |
150 } | 155 } |
151 | 156 |
152 void MediaDecoderJob::Flush() { | 157 void MediaDecoderJob::Flush() { |
153 // Do nothing, flush when the next Decode() happens. | 158 // Do nothing, flush when the next Decode() happens. |
154 needs_flush_ = true; | 159 needs_flush_ = true; |
155 } | 160 } |
156 | 161 |
157 void MediaDecoderJob::Release() { | 162 void MediaDecoderJob::Release() { |
158 if (thread_->IsRunning() && | 163 if (thread_->IsRunning() && |
159 thread_->message_loop() != base::MessageLoop::current()) { | 164 thread_->message_loop() != base::MessageLoop::current()) { |
(...skipping 254 matching lines...) | |
414 pending_event_ &= ~SURFACE_CHANGE_EVENT_PENDING; | 419 pending_event_ &= ~SURFACE_CHANGE_EVENT_PENDING; |
415 } | 420 } |
416 | 421 |
417 ClearDecodingData(); | 422 ClearDecodingData(); |
418 manager()->OnMediaSeekRequest(player_id(), | 423 manager()->OnMediaSeekRequest(player_id(), |
419 last_presentation_timestamp_, | 424 last_presentation_timestamp_, |
420 pending_event_ & SURFACE_CHANGE_EVENT_PENDING); | 425 pending_event_ & SURFACE_CHANGE_EVENT_PENDING); |
421 } | 426 } |
422 | 427 |
423 void MediaSourcePlayer::MediaDecoderCallback( | 428 void MediaSourcePlayer::MediaDecoderCallback( |
424 bool is_audio, const base::TimeDelta& presentation_timestamp, | 429 bool is_audio, bool decode_succeeded, |
| 430 const base::TimeDelta& presentation_timestamp, |
425 const base::Time& wallclock_time, bool end_of_stream) { | 431 const base::Time& wallclock_time, bool end_of_stream) { |
426 if (active_decoding_tasks_ > 0) | 432 if (active_decoding_tasks_ > 0) |
427 active_decoding_tasks_--; | 433 active_decoding_tasks_--; |
428 | 434 |
| 435 if (!decode_succeeded) { |
| 436 Release(); |
| 437 OnMediaError(MEDIA_ERROR_DECODE); |
| 438 return; |
| 439 } |
| 440 |
429 if (pending_event_ != NO_EVENT_PENDING) { | 441 if (pending_event_ != NO_EVENT_PENDING) { |
430 ProcessPendingEvents(); | 442 ProcessPendingEvents(); |
431 return; | 443 return; |
432 } | 444 } |
433 | 445 |
434 if (is_audio || !HasAudio()) | 446 if (is_audio || !HasAudio()) |
435 UpdateTimestamps(presentation_timestamp, wallclock_time); | 447 UpdateTimestamps(presentation_timestamp, wallclock_time); |
436 | 448 |
437 if (end_of_stream) { | 449 if (end_of_stream) { |
438 PlaybackCompleted(is_audio); | 450 PlaybackCompleted(is_audio); |
(...skipping 77 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
516 | 528 |
517 bool MediaSourcePlayer::HasVideo() { | 529 bool MediaSourcePlayer::HasVideo() { |
518 return kUnknownVideoCodec != video_codec_; | 530 return kUnknownVideoCodec != video_codec_; |
519 } | 531 } |
520 | 532 |
521 bool MediaSourcePlayer::HasAudio() { | 533 bool MediaSourcePlayer::HasAudio() { |
522 return kUnknownAudioCodec != audio_codec_; | 534 return kUnknownAudioCodec != audio_codec_; |
523 } | 535 } |
524 | 536 |
525 } // namespace media | 537 } // namespace media |
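The central change in this patch is that a codec failure from DequeueOutputBuffer (the new INFO_MEDIA_CODEC_ERROR case) is threaded through the decoder callback as a leading decode_succeeded flag, and MediaSourcePlayer releases the decoder and reports MEDIA_ERROR_DECODE when that flag is false. Below is a minimal, self-contained sketch of that control flow, not Chromium code: DequeueResult, DecodeOnce, and OnDecodeDone are hypothetical stand-ins for MediaCodecBridge's dequeue result, MediaDecoderJob's decode loop, and MediaSourcePlayer::MediaDecoderCallback.

```cpp
// Sketch of the decode_succeeded propagation pattern introduced by this patch.
// All names here are illustrative; only the shape of the flow mirrors the CL.
#include <cstdio>
#include <functional>

enum class DequeueResult {
  kOk,
  kTryAgainLater,
  kOutputFormatChanged,
  kOutputBuffersChanged,
  kMediaCodecError,  // analogous to MediaCodecBridge::INFO_MEDIA_CODEC_ERROR
};

// Callback shape mirroring the new DecoderCallback: the leading bool is the
// decode_succeeded flag added by this change.
using DecoderCallback =
    std::function<void(bool decode_succeeded, bool end_of_stream)>;

void DecodeOnce(DequeueResult result, bool end_of_stream,
                const DecoderCallback& callback) {
  bool decode_succeeded = true;
  switch (result) {
    case DequeueResult::kOutputBuffersChanged:
    case DequeueResult::kOutputFormatChanged:
    case DequeueResult::kTryAgainLater:
      break;  // Benign; nothing to render this iteration.
    case DequeueResult::kMediaCodecError:
      decode_succeeded = false;  // Propagate failure instead of ignoring it.
      break;
    case DequeueResult::kOk:
      // The real code renders/releases the output buffer here.
      break;
  }
  callback(decode_succeeded, end_of_stream);
}

// Player side: on failure, release the decoder and surface a decode error,
// mirroring Release() + OnMediaError(MEDIA_ERROR_DECODE) in the patch.
void OnDecodeDone(bool decode_succeeded, bool end_of_stream) {
  if (!decode_succeeded) {
    std::puts("decode failed: release codec, report MEDIA_ERROR_DECODE");
    return;
  }
  if (end_of_stream)
    std::puts("playback completed");
}

int main() {
  DecodeOnce(DequeueResult::kOk, /*end_of_stream=*/false, OnDecodeDone);
  DecodeOnce(DequeueResult::kMediaCodecError, /*end_of_stream=*/false,
             OnDecodeDone);
}
```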
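Separately, the default case in the decode loop above computes time_to_render by comparing the output buffer's presentation timestamp with a media clock derived from start_wallclock_time and start_presentation_timestamp, posting a delayed ReleaseOutputBuffer when the frame is early and releasing immediately (with a DVLOG) when the codec is lagging. Here is a rough illustration of that arithmetic under the same idea, using standard C++ types instead of base::Time/base::TimeDelta; TimeToRender is a hypothetical helper, not a Chromium API.

```cpp
// Illustrative only: how far in the future (or past) a frame's presentation
// timestamp lies relative to the running media clock.
#include <chrono>
#include <cstdio>

using Clock = std::chrono::steady_clock;
using std::chrono::duration_cast;
using std::chrono::microseconds;

// Returns how long to wait before rendering a frame. A negative value means
// the codec is already behind the playback clock.
microseconds TimeToRender(microseconds presentation_timestamp,
                          microseconds start_presentation_timestamp,
                          Clock::time_point start_wallclock_time,
                          Clock::time_point now) {
  // Wallclock time elapsed since playback (re)started, mapped onto media time.
  microseconds media_clock =
      start_presentation_timestamp +
      duration_cast<microseconds>(now - start_wallclock_time);
  return presentation_timestamp - media_clock;
}

int main() {
  Clock::time_point start = Clock::now();
  // Pretend 40 ms of wallclock time elapsed and the next frame is due at 50 ms.
  microseconds delay = TimeToRender(microseconds(50000), microseconds(0),
                                    start, start + microseconds(40000));
  if (delay >= microseconds(0))
    std::printf("post delayed release in %lld us\n", (long long)delay.count());
  else
    std::printf("codec is lagging behind by %lld us\n",
                (long long)(-delay.count()));
}
```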