Chromium Code Reviews| OLD | NEW |
|---|---|
| (Empty) | |
| 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | |
| 6 | |
| 7 #include <jni.h> | |
| 8 | |
| 9 #include "base/android/jni_android.h" | |
| 10 #include "base/android/scoped_java_ref.h" | |
| 11 #include "base/bind.h" | |
| 12 #include "base/logging.h" | |
| 13 #include "base/message_loop.h" | |
| 14 #include "content/common/android/surface_callback.h" | |
| 15 #include "content/common/gpu/gpu_channel.h" | |
| 16 #include "content/common/gpu/media/gles2_external_texture_copier.h" | |
| 17 #include "media/base/bitstream_buffer.h" | |
| 18 #include "media/base/limits.h" | |
| 19 #include "media/video/picture.h" | |
| 20 #include "ui/gl/gl_bindings.h" | |
| 21 | |
| 22 using base::android::MethodID; | |
| 23 using base::android::ScopedJavaLocalRef; | |
| 24 | |
| 25 namespace content { | |
| 26 | |
// TODO: Remove the LOG_LINE() debugging macro below, and all of its call
// sites, before submitting this CL.
| 28 #define LOG_LINE() LOG(INFO) << __FUNCTION__ | |
| 29 | |
| 30 // Helper macros for dealing with failure. If |result| evaluates false, emit | |
| 31 // |log| to ERROR, register |error| with the decoder, and return. | |
| 32 #define RETURN_ON_FAILURE(result, log, error) \ | |
| 33 do { \ | |
| 34 if (!(result)) { \ | |
| 35 DLOG(ERROR) << log; \ | |
| 36 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \ | |
| 37 &AndroidVideoDecodeAccelerator::NotifyError, \ | |
| 38 base::AsWeakPtr(this), error)); \ | |
| 39 state_ = ERROR; \ | |
| 40 return; \ | |
| 41 } \ | |
| 42 } while (0) | |
| 43 | |
| 44 // TODO(dwkang): We only need kMaxVideoFrames to pass media stack's prerolling | |
| 45 // phase, but 1 is added due to crbug.com/176036. This should be tuned when we | |
| 46 // have actual use case. | |
| 47 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | |
| 48 | |
| 49 // Max number of bitstreams notified to the client with | |
| 50 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. | |
| 51 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; | |
| 52 | |
| 53 // static | |
| 54 const base::TimeDelta AndroidVideoDecodeAccelerator::kDecodePollDelay = | |
| 55 base::TimeDelta::FromMilliseconds(10); | |
| 56 | |
| 57 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | |
| 58 media::VideoDecodeAccelerator::Client* client, | |
| 59 const base::Callback<bool(void)>& make_context_current) | |
| 60 : client_(client), | |
| 61 make_context_current_(make_context_current), | |
| 62 codec_(media::MediaCodecBridge::VIDEO_H264), | |
| 63 state_(NO_ERROR), | |
| 64 surface_texture_id_(-1), | |
| 65 picturebuffers_requested_(false), | |
| 66 io_task_is_posted_(false), | |
| 67 decoder_met_eos_(false), | |
| 68 num_bytes_used_in_the_pending_buffer_(0) { | |
| 69 LOG_LINE(); | |
| 70 } | |
| 71 | |
| 72 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | |
| 73 LOG_LINE(); | |
| 74 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 75 } | |
| 76 | |
| 77 bool AndroidVideoDecodeAccelerator::Initialize( | |
| 78 media::VideoCodecProfile profile) { | |
| 79 LOG_LINE(); | |
| 80 DCHECK(!media_codec_); | |
| 81 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 82 | |
| 83 if (profile == media::VP8PROFILE_MAIN) { | |
| 84 codec_ = media::MediaCodecBridge::VIDEO_VP8; | |
| 85 } else if (profile >= media::H264PROFILE_MIN && | |
| 86 profile <= media::H264PROFILE_MAX) { | |
| 87 codec_ = media::MediaCodecBridge::VIDEO_H264; | |
| 88 } else { | |
| 89 LOG(ERROR) << "Unsupported profile: " << profile; | |
| 90 return false; | |
| 91 } | |
| 92 | |
| 93 if (!make_context_current_.Run()) { | |
| 94 LOG(ERROR) << "Failed to make this decoder's GL context current."; | |
| 95 return false; | |
| 96 } | |
| 97 | |
| 98 glGenTextures(1, &surface_texture_id_); | |
| 99 glActiveTexture(GL_TEXTURE0); | |
| 100 glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); | |
| 101 | |
| 102 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); | |
| 103 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); | |
| 104 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
| 105 GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
| 106 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
| 107 GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
| 108 | |
| 109 surface_texture_ = new SurfaceTextureBridge(surface_texture_id_); | |
| 110 | |
| 111 ConfigureMediaCodec(); | |
| 112 | |
| 113 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
| 114 &AndroidVideoDecodeAccelerator::NotifyInitializeDone, | |
| 115 base::AsWeakPtr(this))); | |
| 116 return true; | |
| 117 } | |
| 118 | |
| 119 void AndroidVideoDecodeAccelerator::DoIOTask() { | |
| 120 io_task_is_posted_ = false; | |
| 121 if (state_ == ERROR) { | |
| 122 return; | |
| 123 } | |
| 124 | |
| 125 DequeueOutput(); | |
| 126 QueueInput(); | |
| 127 | |
| 128 if (!pending_bitstream_buffers_.empty() || | |
| 129 !free_picture_ids_.empty()) { | |
| 130 io_task_is_posted_ = true; | |
| 131 MessageLoop::current()->PostDelayedTask( | |
| 132 FROM_HERE, | |
| 133 base::Bind( | |
| 134 &AndroidVideoDecodeAccelerator::DoIOTask, base::AsWeakPtr(this)), | |
| 135 kDecodePollDelay); | |
| 136 } | |
| 137 } | |
| 138 | |
| 139 void AndroidVideoDecodeAccelerator::QueueInput() { | |
| 140 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) | |
|
Ami GONE FROM CHROMIUM
2013/02/18 17:46:48
Why do you need this business?
Isn't it best to sa
dwkang1
2013/02/19 02:14:55
Yes. To be specific, internal buffer lived in the
Ami GONE FROM CHROMIUM
2013/02/19 05:22:49
Hmm. That reading of the spec makes NEOBB pretty
| |
| 141 return; | |
| 142 if (pending_bitstream_buffers_.empty()) | |
| 143 return; | |
| 144 | |
| 145 int input_buf_index = media_codec_->DequeueInputBuffer( | |
| 146 media::MediaCodecBridge::kTimeOutNoWait); | |
| 147 if (input_buf_index < 0) { | |
| 148 DCHECK_EQ(input_buf_index, media::MediaCodecBridge::INFO_TRY_AGAIN_LATER); | |
| 149 return; | |
| 150 } | |
| 151 media::BitstreamBuffer& bitstream_buffer = | |
| 152 pending_bitstream_buffers_.front(); | |
| 153 | |
| 154 if (bitstream_buffer.id() == -1) { | |
| 155 media_codec_->QueueEOS(input_buf_index); | |
| 156 pending_bitstream_buffers_.pop(); | |
| 157 return; | |
| 158 } | |
| 159 // Abuse the presentation time argument to propagate the bitstream | |
| 160 // buffer ID to the output, so we can report it back to the client in | |
| 161 // PictureReady(). | |
| 162 base::TimeDelta timestamp = | |
| 163 base::TimeDelta::FromMicroseconds(bitstream_buffer.id()); | |
| 164 | |
| 165 int bytes_written = 0; | |
| 166 scoped_ptr<base::SharedMemory> shm( | |
| 167 new base::SharedMemory(bitstream_buffer.handle(), true)); | |
| 168 | |
| 169 RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()), | |
| 170 "Failed to SharedMemory::Map()", | |
| 171 UNREADABLE_INPUT); | |
| 172 | |
| 173 const size_t offset = num_bytes_used_in_the_pending_buffer_; | |
| 174 bytes_written = media_codec_->QueueInputBuffer( | |
| 175 input_buf_index, | |
| 176 static_cast<const uint8*>(shm->memory()) + offset, | |
| 177 bitstream_buffer.size() - offset, timestamp); | |
| 178 num_bytes_used_in_the_pending_buffer_ += bytes_written; | |
| 179 CHECK_LE(num_bytes_used_in_the_pending_buffer_, bitstream_buffer.size()); | |
| 180 | |
| 181 if (num_bytes_used_in_the_pending_buffer_ == bitstream_buffer.size()) { | |
| 182 num_bytes_used_in_the_pending_buffer_ = 0; | |
| 183 pending_bitstream_buffers_.pop(); | |
| 184 | |
| 185 // We should call NotifyEndOfBitstreamBuffer(), when no more decoded output | |
| 186 // will be returned from the bitstream buffer. However, MediaCodec API is | |
| 187 // not enough to guarantee it. | |
| 188 // So, here, we calls NotifyEndOfBitstreamBuffer() in advance in order to | |
| 189 // keep getting more bitstreams from the client, and throttle them by using | |
| 190 // |bitstreams_notified_in_advance_|. | |
| 191 // TODO(dwkang): check if there is a way to remove this workaround. | |
| 192 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
| 193 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
| 194 base::AsWeakPtr(this), bitstream_buffer.id())); | |
| 195 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); | |
| 196 } | |
| 197 } | |
| 198 | |
| 199 void AndroidVideoDecodeAccelerator::DequeueOutput() { | |
| 200 if (picturebuffers_requested_ && output_picture_buffers_.empty()) | |
| 201 return; | |
| 202 | |
| 203 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { | |
| 204 // Don't have any picture buffer to send. Need to wait more. | |
| 205 return; | |
| 206 } | |
| 207 | |
| 208 bool eos = false; | |
| 209 base::TimeDelta timestamp; | |
| 210 int32 buf_index = 0; | |
| 211 do { | |
| 212 int32 offset = 0; | |
| 213 int32 size = 0; | |
| 214 buf_index = media_codec_->DequeueOutputBuffer( | |
| 215 media::MediaCodecBridge::kTimeOutNoWait, | |
| 216 &offset, &size, ×tamp, &eos); | |
| 217 switch (buf_index) { | |
| 218 case media::MediaCodecBridge::INFO_TRY_AGAIN_LATER: | |
| 219 return; | |
| 220 | |
| 221 case media::MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: { | |
| 222 int32 width, height; | |
| 223 media_codec_->GetOutputFormat(&width, &height); | |
| 224 | |
| 225 if (!picturebuffers_requested_) { | |
| 226 picturebuffers_requested_ = true; | |
| 227 size_ = gfx::Size(width, height); | |
| 228 texture_copier_.reset(new Gles2ExternalTextureCopier()); | |
| 229 texture_copier_->Init(width, height); | |
| 230 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
| 231 &AndroidVideoDecodeAccelerator::RequestPictureBuffers, | |
| 232 base::AsWeakPtr(this))); | |
| 233 } else { | |
| 234 // TODO(dwkang): support the dynamic resolution change. | |
| 235 // Currently, we assume that there is no resolution change in the | |
| 236 // input stream. So, INFO_OUTPUT_FORMAT_CHANGED should not happen | |
| 237 // more than once. However, we allows it if resolution is the same | |
| 238 // as the previous one because |media_codec_| can be reset in Reset(). | |
| 239 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), | |
| 240 "Dynamic resolution change is not supported.", | |
| 241 PLATFORM_FAILURE); | |
| 242 } | |
| 243 return; | |
| 244 } | |
| 245 | |
| 246 case media::MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED: | |
| 247 media_codec_->GetOutputBuffers(); | |
| 248 break; | |
| 249 } | |
| 250 } while (buf_index < 0); | |
| 251 | |
| 252 media_codec_->ReleaseOutputBuffer(buf_index, true); | |
| 253 | |
| 254 if (eos) { | |
| 255 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
| 256 &AndroidVideoDecodeAccelerator::NotifyFlushDone, | |
| 257 base::AsWeakPtr(this))); | |
| 258 decoder_met_eos_ = true; | |
| 259 } else { | |
| 260 int64 bitstream_buffer_id = timestamp.InMicroseconds(); | |
| 261 SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id)); | |
| 262 | |
| 263 // Removes ids former or equal than the id from decoder. Note that | |
| 264 // |bitstreams_notified_in_advance_| does not mean bitstream ids in decoder | |
| 265 // because of frame reordering issue. We just maintain this roughly and use | |
| 266 // for the throttling purpose. | |
| 267 std::list<int32>::iterator it; | |
| 268 for (it = bitstreams_notified_in_advance_.begin(); | |
| 269 it != bitstreams_notified_in_advance_.end(); | |
| 270 ++it) { | |
| 271 if (*it == bitstream_buffer_id) { | |
| 272 bitstreams_notified_in_advance_.erase( | |
| 273 bitstreams_notified_in_advance_.begin(), ++it); | |
| 274 break; | |
| 275 } | |
| 276 } | |
| 277 } | |
| 278 } | |
| 279 | |
| 280 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( | |
| 281 int32 bitstream_id) { | |
| 282 LOG_LINE(); | |
| 283 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 284 DCHECK_NE(bitstream_id, -1); | |
| 285 DCHECK(!free_picture_ids_.empty()); | |
| 286 | |
| 287 RETURN_ON_FAILURE(make_context_current_.Run(), | |
| 288 "Failed to make this decoder's GL context current.", | |
| 289 PLATFORM_FAILURE); | |
| 290 | |
| 291 int32 picture_buffer_id = free_picture_ids_.front(); | |
| 292 free_picture_ids_.pop(); | |
| 293 | |
| 294 float transfrom_matrix[16]; | |
| 295 surface_texture_->UpdateTexImage(); | |
| 296 surface_texture_->GetTransformMatrix(transfrom_matrix); | |
| 297 | |
| 298 OutputBufferMap::const_iterator i = | |
| 299 output_picture_buffers_.find(picture_buffer_id); | |
| 300 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), | |
| 301 "Can't find a PictureBuffer for " << picture_buffer_id, | |
| 302 PLATFORM_FAILURE); | |
| 303 uint32 picture_buffer_texture_id = i->second.texture_id(); | |
| 304 | |
| 305 // Here, we copy |surface_texture_id_| to the picture buffer instead of | |
| 306 // setting new texture to |surface_texture_| by calling attachToGLContext() | |
| 307 // because: | |
| 308 // 1. Once we call detachFrameGLContext(), it deletes the texture previous | |
| 309 // attached. | |
| 310 // 2. SurfaceTexture requires us to apply a transform matrix when we show | |
| 311 // the texture. | |
| 312 texture_copier_->Copy( | |
| 313 surface_texture_id_, picture_buffer_texture_id, transfrom_matrix); | |
| 314 | |
| 315 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
| 316 &AndroidVideoDecodeAccelerator::NotifyPictureReady, | |
| 317 base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id))); | |
| 318 } | |
| 319 | |
| 320 void AndroidVideoDecodeAccelerator::Decode( | |
| 321 const media::BitstreamBuffer& bitstream_buffer) { | |
| 322 LOG_LINE(); | |
| 323 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 324 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { | |
|
Ami GONE FROM CHROMIUM
2013/02/18 17:46:48
why would id be -1 here?
dwkang1
2013/02/19 02:14:55
I added this test because Flush() calls this.
| |
| 325 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
| 326 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
| 327 base::AsWeakPtr(this), bitstream_buffer.id())); | |
| 328 return; | |
| 329 } | |
| 330 | |
| 331 pending_bitstream_buffers_.push(bitstream_buffer); | |
| 332 | |
| 333 if (!io_task_is_posted_) | |
| 334 DoIOTask(); | |
| 335 } | |
| 336 | |
| 337 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | |
| 338 const std::vector<media::PictureBuffer>& buffers) { | |
| 339 LOG_LINE(); | |
| 340 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 341 DCHECK(output_picture_buffers_.empty()); | |
| 342 | |
| 343 for (size_t i = 0; i < buffers.size(); ++i) { | |
| 344 output_picture_buffers_.insert(std::make_pair(buffers[i].id(), buffers[i])); | |
| 345 free_picture_ids_.push(buffers[i].id()); | |
| 346 } | |
| 347 | |
| 348 RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers, | |
| 349 "Invalid picture buffers were passed.", | |
| 350 INVALID_ARGUMENT); | |
| 351 | |
| 352 if (!io_task_is_posted_) | |
| 353 DoIOTask(); | |
| 354 } | |
| 355 | |
| 356 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | |
| 357 int32 picture_buffer_id) { | |
| 358 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 359 free_picture_ids_.push(picture_buffer_id); | |
| 360 | |
| 361 if (!io_task_is_posted_) | |
| 362 DoIOTask(); | |
| 363 } | |
| 364 | |
| 365 void AndroidVideoDecodeAccelerator::Flush() { | |
| 366 LOG_LINE(); | |
| 367 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 368 | |
| 369 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | |
| 370 } | |
| 371 | |
| 372 void AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { | |
| 373 DCHECK(surface_texture_.get()); | |
| 374 | |
| 375 media_codec_.reset(new media::MediaCodecBridge(codec_)); | |
| 376 | |
| 377 JNIEnv* env = base::android::AttachCurrentThread(); | |
| 378 CHECK(env); | |
| 379 ScopedJavaLocalRef<jclass> cls( | |
| 380 base::android::GetClass(env, "android/view/Surface")); | |
| 381 jmethodID constructor = MethodID::Get<MethodID::TYPE_INSTANCE>( | |
| 382 env, cls.obj(), "<init>", "(Landroid/graphics/SurfaceTexture;)V"); | |
| 383 ScopedJavaLocalRef<jobject> j_surface( | |
| 384 env, env->NewObject( | |
| 385 cls.obj(), constructor, | |
| 386 surface_texture_->j_surface_texture().obj())); | |
| 387 | |
| 388 // VDA does not pass the container indicated resolution in the initialization | |
| 389 // phase. Here, we set 720p by default. | |
| 390 // TODO(dwkang): find out a way to remove the following hard-coded value. | |
| 391 media_codec_->StartVideo(codec_, gfx::Size(1280, 720), j_surface.obj()); | |
| 392 content::ReleaseSurface(j_surface.obj()); | |
| 393 media_codec_->GetOutputBuffers(); | |
| 394 } | |
| 395 | |
| 396 void AndroidVideoDecodeAccelerator::Reset() { | |
| 397 LOG_LINE(); | |
| 398 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 399 | |
| 400 while(!pending_bitstream_buffers_.empty()) { | |
| 401 media::BitstreamBuffer& bitstream_buffer = | |
| 402 pending_bitstream_buffers_.front(); | |
| 403 pending_bitstream_buffers_.pop(); | |
| 404 | |
| 405 if (bitstream_buffer.id() != -1) { | |
| 406 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
| 407 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
| 408 base::AsWeakPtr(this), bitstream_buffer.id())); | |
| 409 } | |
| 410 } | |
| 411 bitstreams_notified_in_advance_.clear(); | |
| 412 | |
| 413 if (!decoder_met_eos_) { | |
| 414 media_codec_->Reset(); | |
| 415 } else { | |
| 416 // MediaCodec should be usable after meeting EOS, but it is not on some | |
| 417 // devices. b/8125974 To avoid the case, we recreate a new one. | |
| 418 media_codec_->Stop(); | |
| 419 ConfigureMediaCodec(); | |
| 420 } | |
| 421 decoder_met_eos_ = false; | |
| 422 num_bytes_used_in_the_pending_buffer_ = 0; | |
| 423 state_ = NO_ERROR; | |
| 424 | |
| 425 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
| 426 &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this))); | |
| 427 } | |
| 428 | |
| 429 void AndroidVideoDecodeAccelerator::Destroy() { | |
| 430 LOG_LINE(); | |
| 431 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 432 | |
| 433 if (media_codec_) media_codec_->Stop(); | |
| 434 delete this; | |
| 435 } | |
| 436 | |
| 437 void AndroidVideoDecodeAccelerator::NotifyInitializeDone() { | |
| 438 client_->NotifyInitializeDone(); | |
| 439 } | |
| 440 | |
| 441 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | |
| 442 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D); | |
| 443 } | |
| 444 | |
| 445 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | |
| 446 const media::Picture& picture) { | |
| 447 client_->PictureReady(picture); | |
| 448 } | |
| 449 | |
| 450 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | |
| 451 int input_buffer_id) { | |
| 452 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | |
| 453 } | |
| 454 | |
| 455 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { | |
| 456 client_->NotifyFlushDone(); | |
| 457 } | |
| 458 | |
| 459 void AndroidVideoDecodeAccelerator::NotifyResetDone() { | |
| 460 client_->NotifyResetDone(); | |
| 461 } | |
| 462 | |
| 463 void AndroidVideoDecodeAccelerator::NotifyError( | |
| 464 media::VideoDecodeAccelerator::Error error) { | |
| 465 client_->NotifyError(error); | |
| 466 } | |
| 467 | |
| 468 } // namespace content | |
| OLD | NEW |