| OLD | NEW |
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/base/android/media_codec_video_decoder.h" | 5 #include "media/base/android/media_codec_video_decoder.h" |
| 6 | 6 |
| 7 #include "base/bind.h" | 7 #include "base/bind.h" |
| 8 #include "base/logging.h" | 8 #include "base/logging.h" |
| 9 #include "media/base/android/media_codec_bridge.h" | 9 #include "media/base/android/media_codec_bridge.h" |
| 10 #include "media/base/buffers.h" | 10 #include "media/base/buffers.h" |
| (...skipping 19 matching lines...) |
| 30 starvation_cb, | 30 starvation_cb, |
| 31 stop_done_cb, | 31 stop_done_cb, |
| 32 error_cb, | 32 error_cb, |
| 33 "VideoDecoder"), | 33 "VideoDecoder"), |
| 34 update_current_time_cb_(update_current_time_cb), | 34 update_current_time_cb_(update_current_time_cb), |
| 35 video_size_changed_cb_(video_size_changed_cb), | 35 video_size_changed_cb_(video_size_changed_cb), |
| 36 codec_created_cb_(codec_created_cb) { | 36 codec_created_cb_(codec_created_cb) { |
| 37 } | 37 } |
| 38 | 38 |
| 39 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { | 39 MediaCodecVideoDecoder::~MediaCodecVideoDecoder() { |
| 40 DCHECK(media_task_runner_->BelongsToCurrentThread()); |
| 40 DVLOG(1) << "VideoDecoder::~VideoDecoder()"; | 41 DVLOG(1) << "VideoDecoder::~VideoDecoder()"; |
| 41 ReleaseDecoderResources(); | 42 ReleaseDecoderResources(); |
| 42 } | 43 } |
| 43 | 44 |
| 44 const char* MediaCodecVideoDecoder::class_name() const { | 45 const char* MediaCodecVideoDecoder::class_name() const { |
| 45 return "VideoDecoder"; | 46 return "VideoDecoder"; |
| 46 } | 47 } |
| 47 | 48 |
| 48 bool MediaCodecVideoDecoder::HasStream() const { | 49 bool MediaCodecVideoDecoder::HasStream() const { |
| 49 DCHECK(media_task_runner_->BelongsToCurrentThread()); | 50 DCHECK(media_task_runner_->BelongsToCurrentThread()); |
| (...skipping 10 matching lines...) |
| 60 | 61 |
| 61 if (video_size_.IsEmpty()) { | 62 if (video_size_.IsEmpty()) { |
| 62 video_size_ = configs_.video_size; | 63 video_size_ = configs_.video_size; |
| 63 media_task_runner_->PostTask( | 64 media_task_runner_->PostTask( |
| 64 FROM_HERE, base::Bind(video_size_changed_cb_, video_size_)); | 65 FROM_HERE, base::Bind(video_size_changed_cb_, video_size_)); |
| 65 } | 66 } |
| 66 } | 67 } |
| 67 | 68 |
| 68 void MediaCodecVideoDecoder::ReleaseDecoderResources() { | 69 void MediaCodecVideoDecoder::ReleaseDecoderResources() { |
| 69 DCHECK(media_task_runner_->BelongsToCurrentThread()); | 70 DCHECK(media_task_runner_->BelongsToCurrentThread()); |
| 70 | |
| 71 DVLOG(1) << class_name() << "::" << __FUNCTION__; | 71 DVLOG(1) << class_name() << "::" << __FUNCTION__; |
| 72 | 72 |
| 73 MediaCodecDecoder::ReleaseDecoderResources(); | 73 DoEmergencyStop(); |
| 74 |
| 75 ReleaseMediaCodec(); |
| 76 |
| 74 surface_ = gfx::ScopedJavaSurface(); | 77 surface_ = gfx::ScopedJavaSurface(); |
| 78 } |
| 79 |
| 80 void MediaCodecVideoDecoder::ReleaseMediaCodec() { |
| 81 DCHECK(media_task_runner_->BelongsToCurrentThread()); |
| 82 |
| 83 MediaCodecDecoder::ReleaseMediaCodec(); |
| 75 delayed_buffers_.clear(); | 84 delayed_buffers_.clear(); |
| 76 } | 85 } |
| 77 | 86 |
| 78 void MediaCodecVideoDecoder::SetVideoSurface(gfx::ScopedJavaSurface surface) { | 87 void MediaCodecVideoDecoder::SetVideoSurface(gfx::ScopedJavaSurface surface) { |
| 79 DCHECK(media_task_runner_->BelongsToCurrentThread()); | 88 DCHECK(media_task_runner_->BelongsToCurrentThread()); |
| 80 | 89 |
| 81 DVLOG(1) << class_name() << "::" << __FUNCTION__ | 90 DVLOG(1) << class_name() << "::" << __FUNCTION__ |
| 82 << (surface.IsEmpty() ? " empty" : " non-empty"); | 91 << (surface.IsEmpty() ? " empty" : " non-empty"); |
| 83 | 92 |
| 84 surface_ = surface.Pass(); | 93 surface_ = surface.Pass(); |
| (...skipping 68 matching lines...) |
| 153 return kConfigFailure; | 162 return kConfigFailure; |
| 154 } | 163 } |
| 155 | 164 |
| 156 DVLOG(0) << class_name() << "::" << __FUNCTION__ << " succeeded"; | 165 DVLOG(0) << class_name() << "::" << __FUNCTION__ << " succeeded"; |
| 157 | 166 |
| 158 media_task_runner_->PostTask(FROM_HERE, codec_created_cb_); | 167 media_task_runner_->PostTask(FROM_HERE, codec_created_cb_); |
| 159 | 168 |
| 160 return kConfigOk; | 169 return kConfigOk; |
| 161 } | 170 } |
| 162 | 171 |
| 163 void MediaCodecVideoDecoder::SynchronizePTSWithTime( | 172 void MediaCodecVideoDecoder::AssociateCurrentTimeWithPTS(base::TimeDelta pts) { |
| 164 base::TimeDelta current_time) { | |
| 165 DCHECK(media_task_runner_->BelongsToCurrentThread()); | 173 DCHECK(media_task_runner_->BelongsToCurrentThread()); |
| 166 | 174 |
| 175 DVLOG(1) << class_name() << "::" << __FUNCTION__ << " pts:" << pts; |
| 176 |
| 167 start_time_ticks_ = base::TimeTicks::Now(); | 177 start_time_ticks_ = base::TimeTicks::Now(); |
| 168 start_pts_ = current_time; | 178 start_pts_ = pts; |
| 169 last_seen_pts_ = current_time; | 179 last_seen_pts_ = pts; |
| 180 } |
| 181 |
| 182 void MediaCodecVideoDecoder::DissociatePTSFromTime() { |
| 183 DCHECK(media_task_runner_->BelongsToCurrentThread()); |
| 184 |
| 185 start_pts_ = last_seen_pts_ = kNoTimestamp(); |
| 170 } | 186 } |
| 171 | 187 |
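Note on the renamed association above: AssociateCurrentTimeWithPTS() pins the wall clock (start_time_ticks_) to a presentation timestamp (start_pts_), and DissociatePTSFromTime() resets that pin; every later render deadline is derived from this pair. A minimal standalone sketch of the same bookkeeping, using std::chrono instead of base::TimeTicks (the type and method names below are illustrative, not part of this change):

    #include <chrono>

    // Illustrative stand-in for the decoder's PTS <-> wall-clock association.
    struct PtsClock {
      std::chrono::steady_clock::time_point start_ticks;  // ~ start_time_ticks_
      std::chrono::milliseconds start_pts{-1};             // ~ start_pts_

      // Rough analogue of AssociateCurrentTimeWithPTS(pts).
      void Associate(std::chrono::milliseconds pts) {
        start_ticks = std::chrono::steady_clock::now();
        start_pts = pts;
      }

      // How long from now until a frame with |pts| should be rendered.
      // Mirrors: pts - (base::TimeTicks::Now() - start_time_ticks_ + start_pts_).
      std::chrono::milliseconds TimeToRender(std::chrono::milliseconds pts) const {
        auto elapsed = std::chrono::duration_cast<std::chrono::milliseconds>(
            std::chrono::steady_clock::now() - start_ticks);
        return pts - (elapsed + start_pts);
      }
    };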
| 172 void MediaCodecVideoDecoder::OnOutputFormatChanged() { | 188 void MediaCodecVideoDecoder::OnOutputFormatChanged() { |
| 173 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 189 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
| 174 | 190 |
| 175 gfx::Size prev_size = video_size_; | 191 gfx::Size prev_size = video_size_; |
| 176 | 192 |
| 177 // See b/18224769. The values reported from MediaCodecBridge::GetOutputFormat | 193 // See b/18224769. The values reported from MediaCodecBridge::GetOutputFormat |
| 178 // correspond to the actual video frame size, but this is not necessarily the | 194 // correspond to the actual video frame size, but this is not necessarily the |
| 179 // size that should be output. | 195 // size that should be output. |
| 180 video_size_ = configs_.video_size; | 196 video_size_ = configs_.video_size; |
| 181 if (video_size_ != prev_size) { | 197 if (video_size_ != prev_size) { |
| 182 media_task_runner_->PostTask( | 198 media_task_runner_->PostTask( |
| 183 FROM_HERE, base::Bind(video_size_changed_cb_, video_size_)); | 199 FROM_HERE, base::Bind(video_size_changed_cb_, video_size_)); |
| 184 } | 200 } |
| 185 } | 201 } |
| 186 | 202 |
| 187 void MediaCodecVideoDecoder::Render(int buffer_index, | 203 void MediaCodecVideoDecoder::Render(int buffer_index, |
| 188 size_t offset, | 204 size_t offset, |
| 189 size_t size, | 205 size_t size, |
| 190 bool render_output, | 206 RenderMode render_mode, |
| 191 base::TimeDelta pts, | 207 base::TimeDelta pts, |
| 192 bool eos_encountered) { | 208 bool eos_encountered) { |
| 193 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 209 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
| 194 | 210 |
| 195 DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts | 211 DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts |
| 196 << " index:" << buffer_index << " size:" << size | 212 << " index:" << buffer_index << " size:" << size |
| 197 << (eos_encountered ? " EOS" : ""); | 213 << (eos_encountered ? " EOS " : " ") << AsString(render_mode); |
| 198 | 214 |
| 199 // Normally EOS comes as a separate access unit that does not have data; | 215 // Normally EOS comes as a separate access unit that does not have data; |
| 200 // the corresponding |size| will be 0. | 216 // the corresponding |size| will be 0. |
| 201 if (!size && eos_encountered) { | 217 if (!size && eos_encountered) { |
| 202 // Stand-alone EOS | 218 // Stand-alone EOS |
| 203 // Discard the PTS that comes with it and ensure it is released last. | 219 // Discard the PTS that comes with it and ensure it is released last. |
| 204 pts = last_seen_pts_ + | 220 pts = last_seen_pts_ + |
| 205 base::TimeDelta::FromMilliseconds(kDelayForStandAloneEOS); | 221 base::TimeDelta::FromMilliseconds(kDelayForStandAloneEOS); |
| 206 } else { | 222 } else { |
| 207 // Keep track of last seen PTS | 223 // Keep track of last seen PTS |
| 208 last_seen_pts_ = pts; | 224 last_seen_pts_ = pts; |
| 209 } | 225 } |
| 210 | 226 |
| 211 if (!render_output) { | 227 // Do not update time for stand-alone EOS. |
| 212 ReleaseOutputBuffer(buffer_index, pts, size, false, eos_encountered); | 228 const bool update_time = !(eos_encountered && size == 0u); |
| 213 return; | 229 |
| 230 // For video we simplify the preroll operation and render the first frame |
| 231 // after preroll during the preroll phase, i.e. without waiting for the audio |
| 232 // stream to finish prerolling. |
| 233 switch (render_mode) { |
| 234 case kRenderSkip: |
| 235 ReleaseOutputBuffer(buffer_index, pts, false, false, eos_encountered); |
| 236 return; |
| 237 case kRenderAfterPreroll: |
| 238 // We get here in the preroll phase. Render now as explained above. |
| 239 // |start_pts_| is not set yet, so we cannot calculate |time_to_render|. |
| 240 ReleaseOutputBuffer(buffer_index, pts, (size > 0), update_time, |
| 241 eos_encountered); |
| 242 return; |
| 243 case kRenderNow: |
| 244 break; |
| 214 } | 245 } |
| 215 | 246 |
| 247 DCHECK_EQ(kRenderNow, render_mode); |
| 248 DCHECK_NE(kNoTimestamp(), start_pts_); // start_pts_ must be set |
| 249 |
| 216 base::TimeDelta time_to_render = | 250 base::TimeDelta time_to_render = |
| 217 pts - (base::TimeTicks::Now() - start_time_ticks_ + start_pts_); | 251 pts - (base::TimeTicks::Now() - start_time_ticks_ + start_pts_); |
| 218 | 252 |
| 253 DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts |
| 254 << " ticks delta:" << (base::TimeTicks::Now() - start_time_ticks_) |
| 255 << " time_to_render:" << time_to_render; |
| 256 |
| 219 if (time_to_render < base::TimeDelta()) { | 257 if (time_to_render < base::TimeDelta()) { |
| 220 // Skip late frames | 258 // Skip late frames |
| 221 ReleaseOutputBuffer(buffer_index, pts, size, false, eos_encountered); | 259 ReleaseOutputBuffer(buffer_index, pts, false, update_time, eos_encountered); |
| 222 return; | 260 return; |
| 223 } | 261 } |
| 224 | 262 |
| 225 delayed_buffers_.insert(buffer_index); | 263 delayed_buffers_.insert(buffer_index); |
| 226 | 264 |
| 227 bool do_render = size > 0; | 265 const bool render = (size > 0); |
| 228 decoder_thread_.task_runner()->PostDelayedTask( | 266 decoder_thread_.task_runner()->PostDelayedTask( |
| 229 FROM_HERE, base::Bind(&MediaCodecVideoDecoder::ReleaseOutputBuffer, | 267 FROM_HERE, base::Bind(&MediaCodecVideoDecoder::ReleaseOutputBuffer, |
| 230 base::Unretained(this), buffer_index, pts, | 268 base::Unretained(this), buffer_index, pts, render, |
| 231 size, do_render, eos_encountered), | 269 update_time, eos_encountered), |
| 232 time_to_render); | 270 time_to_render); |
| 233 } | 271 } |
| 234 | 272 |
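To make the kRenderNow path above concrete: frames whose deadline is still in the future are handed back via a delayed ReleaseOutputBuffer task, while frames that are already late are released immediately without rendering. A tiny self-contained illustration of that decision under made-up numbers (nothing below comes from the change itself):

    #include <iostream>

    int main() {
      // Hypothetical association: AssociateCurrentTimeWithPTS(100 ms) at wall-clock T0.
      const int start_pts_ms = 100;
      // A frame with pts = 300 ms reaches Render() 150 ms after T0.
      const int elapsed_ms = 150;
      const int pts_ms = 300;
      // Mirrors: pts - (Now() - start_time_ticks_ + start_pts_).
      const int time_to_render_ms = pts_ms - (elapsed_ms + start_pts_ms);  // 50 ms
      if (time_to_render_ms < 0)
        std::cout << "late frame: release buffer without rendering\n";
      else
        std::cout << "post delayed release in " << time_to_render_ms << " ms\n";
      return 0;
    }

Had the same frame arrived 250 ms after T0, time_to_render would be -50 ms and the buffer would be released right away with render == false.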
| 235 int MediaCodecVideoDecoder::NumDelayedRenderTasks() const { | 273 int MediaCodecVideoDecoder::NumDelayedRenderTasks() const { |
| 236 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 274 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
| 237 | 275 |
| 238 return delayed_buffers_.size(); | 276 return delayed_buffers_.size(); |
| 239 } | 277 } |
| 240 | 278 |
| 241 void MediaCodecVideoDecoder::ClearDelayedBuffers(bool release) { | 279 void MediaCodecVideoDecoder::ReleaseDelayedBuffers() { |
| 242 // Media thread | 280 // Media thread |
| 243 // Called when there is no decoder thread | 281 // Called when there is no decoder thread |
| 244 if (release) { | 282 for (int index : delayed_buffers_) |
| 245 for (int index : delayed_buffers_) | 283 media_codec_bridge_->ReleaseOutputBuffer(index, false); |
| 246 media_codec_bridge_->ReleaseOutputBuffer(index, false); | |
| 247 } | |
| 248 | 284 |
| 249 delayed_buffers_.clear(); | 285 delayed_buffers_.clear(); |
| 250 } | 286 } |
| 251 | 287 |
| 252 #ifndef NDEBUG | 288 #ifndef NDEBUG |
| 253 void MediaCodecVideoDecoder::VerifyUnitIsKeyFrame( | 289 void MediaCodecVideoDecoder::VerifyUnitIsKeyFrame( |
| 254 const AccessUnit* unit) const { | 290 const AccessUnit* unit) const { |
| 255 // The first video frame in a sequence must be a key frame or stand-alone EOS. | 291 // The first video frame in a sequence must be a key frame or stand-alone EOS. |
| 256 DCHECK(unit); | 292 DCHECK(unit); |
| 257 bool stand_alone_eos = unit->is_end_of_stream && unit->data.empty(); | 293 bool stand_alone_eos = unit->is_end_of_stream && unit->data.empty(); |
| 258 DCHECK(stand_alone_eos || unit->is_key_frame); | 294 DCHECK(stand_alone_eos || unit->is_key_frame); |
| 259 } | 295 } |
| 260 #endif | 296 #endif |
| 261 | 297 |
| 262 void MediaCodecVideoDecoder::ReleaseOutputBuffer(int buffer_index, | 298 void MediaCodecVideoDecoder::ReleaseOutputBuffer(int buffer_index, |
| 263 base::TimeDelta pts, | 299 base::TimeDelta pts, |
| 264 size_t size, | |
| 265 bool render, | 300 bool render, |
| 301 bool update_time, |
| 266 bool eos_encountered) { | 302 bool eos_encountered) { |
| 267 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); | 303 DCHECK(decoder_thread_.task_runner()->BelongsToCurrentThread()); |
| 268 | 304 |
| 269 DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts; | 305 DVLOG(2) << class_name() << "::" << __FUNCTION__ << " pts:" << pts; |
| 270 | 306 |
| 271 // Do not render if we are in emergency stop; there might be no surface. | 307 // Do not render if we are in emergency stop; there might be no surface. |
| 272 if (InEmergencyStop()) | 308 if (InEmergencyStop()) |
| 273 render = false; | 309 render = false; |
| 274 | 310 |
| 275 media_codec_bridge_->ReleaseOutputBuffer(buffer_index, render); | 311 media_codec_bridge_->ReleaseOutputBuffer(buffer_index, render); |
| 276 | 312 |
| 277 delayed_buffers_.erase(buffer_index); | 313 delayed_buffers_.erase(buffer_index); |
| 278 | 314 |
| 279 CheckLastFrame(eos_encountered, !delayed_buffers_.empty()); | 315 CheckLastFrame(eos_encountered, !delayed_buffers_.empty()); |
| 280 | 316 |
| 281 // |update_current_time_cb_| might be null if there is an audio stream. | 317 // |update_current_time_cb_| might be null if there is an audio stream. |
| 282 // Do not update current time for stand-alone EOS frames. | 318 // Do not update current time for stand-alone EOS frames. |
| 283 if (!update_current_time_cb_.is_null() && !(eos_encountered && !size)) { | 319 if (!update_current_time_cb_.is_null() && update_time) { |
| 284 media_task_runner_->PostTask(FROM_HERE, | 320 media_task_runner_->PostTask(FROM_HERE, |
| 285 base::Bind(update_current_time_cb_, pts, pts)); | 321 base::Bind(update_current_time_cb_, pts, pts)); |
| 286 } | 322 } |
| 287 } | 323 } |
| 288 | 324 |
| 289 } // namespace media | 325 } // namespace media |