1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | |
6 | |
7 #include <jni.h> | |
8 | |
9 #include "base/android/jni_android.h" | |
10 #include "base/android/scoped_java_ref.h" | |
11 #include "base/bind.h" | |
12 #include "base/logging.h" | |
13 #include "base/message_loop.h" | |
14 #include "content/common/android/surface_callback.h" | |
15 #include "content/common/gpu/gpu_channel.h" | |
16 #include "gpu/command_buffer/service/gles2_cmd_decoder.h" | |
17 #include "media/base/bitstream_buffer.h" | |
18 #include "media/base/limits.h" | |
19 #include "media/video/picture.h" | |
20 #include "ui/gl/gl_bindings.h" | |
21 | |
22 using base::android::MethodID; | |
23 using base::android::ScopedJavaLocalRef; | |
24 | |
25 namespace content { | |
26 | |
27 // Helper macro for dealing with failure. If |result| evaluates to false, emit | |
28 // |log| to ERROR, register |error| with the decoder, and return. | |
29 #define RETURN_ON_FAILURE(result, log, error) \ | |
30 do { \ | |
31 if (!(result)) { \ | |
32 DLOG(ERROR) << log; \ | |
33 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \ | |
34 &AndroidVideoDecodeAccelerator::NotifyError, \ | |
35 base::AsWeakPtr(this), error)); \ | |
36 state_ = ERROR; \ | |
37 return; \ | |
38 } \ | |
39 } while (0) | |
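For reference, a typical call site appears later in this file, in QueueInput(): the first argument is the fallible expression, the second the log message, and the third the media::VideoDecodeAccelerator::Error reported to the client via NotifyError().

    RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                      "Failed to SharedMemory::Map()",
                      UNREADABLE_INPUT);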
40 | |
41 // TODO(dwkang): We only need kMaxVideoFrames to pass the media stack's | |
42 // prerolling phase, but 1 is added due to crbug.com/176036. This should be | |
43 // tuned when we have an actual use case. | |
44 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | |
45 | |
46 // Max number of bitstreams notified to the client with | |
47 // NotifyEndOfBitstreamBuffer() before getting output from the bitstream. | |
48 enum { kMaxBitstreamsNotifiedInAdvance = 32 }; | |
49 | |
50 // static | |
51 const base::TimeDelta AndroidVideoDecodeAccelerator::kDecodePollDelay = | |
52 base::TimeDelta::FromMilliseconds(10); | |
53 | |
54 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | |
55 media::VideoDecodeAccelerator::Client* client, | |
56 const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder, | |
57 const base::Callback<bool(void)>& make_context_current) | |
58 : client_(client), | |
59 make_context_current_(make_context_current), | |
60 codec_(media::MediaCodecBridge::VIDEO_H264), | |
61 state_(NO_ERROR), | |
62 surface_texture_id_(0), | |
63 picturebuffers_requested_(false), | |
64 io_task_is_posted_(false), | |
65 decoder_met_eos_(false), | |
66 num_bytes_used_in_the_pending_buffer_(0), | |
67 gl_decoder_(decoder) { | |
68 } | |
69 | |
70 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | |
71 DCHECK(thread_checker_.CalledOnValidThread()); | |
72 } | |
73 | |
74 bool AndroidVideoDecodeAccelerator::Initialize( | |
75 media::VideoCodecProfile profile) { | |
76 DCHECK(!media_codec_); | |
77 DCHECK(thread_checker_.CalledOnValidThread()); | |
78 | |
79 if (profile == media::VP8PROFILE_MAIN) { | |
80 codec_ = media::MediaCodecBridge::VIDEO_VP8; | |
81 } else { | |
82 // TODO(dwkang): enable H264 once b/8125974 is fixed. | |
83 LOG(ERROR) << "Unsupported profile: " << profile; | |
84 return false; | |
85 } | |
86 | |
87 if (!make_context_current_.Run()) { | |
88 LOG(ERROR) << "Failed to make this decoder's GL context current."; | |
89 return false; | |
90 } | |
91 | |
92 if (!gl_decoder_.get()) { | |
93 LOG(ERROR) << "Failed to get gles2 decoder instance."; | |
94 return false; | |
95 } | |
96 glGenTextures(1, &surface_texture_id_); | |
97 glActiveTexture(GL_TEXTURE0); | |
98 glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); | |
99 | |
100 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); | |
101 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); | |
102 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
103 GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
104 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
105 GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
106 gl_decoder_->RestoreTextureUnitBindings(0); | |
107 gl_decoder_->RestoreActiveTexture(); | |
108 | |
109 surface_texture_ = new SurfaceTextureBridge(surface_texture_id_); | |
110 | |
111 ConfigureMediaCodec(); | |
112 | |
113 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
114 &AndroidVideoDecodeAccelerator::NotifyInitializeDone, | |
115 base::AsWeakPtr(this))); | |
116 return true; | |
117 } | |
118 | |
119 void AndroidVideoDecodeAccelerator::DoIOTask() { | |
120 io_task_is_posted_ = false; | |
121 if (state_ == ERROR) { | |
122 return; | |
123 } | |
124 | |
125 DequeueOutput(); | |
126 QueueInput(); | |
127 | |
128 if (!pending_bitstream_buffers_.empty() || | |
129 !free_picture_ids_.empty()) { | |
130 io_task_is_posted_ = true; | |
131 // TODO(dwkang): PostDelayedTask() does not guarantee the task will run | |
132 // at the exact time. Need a better way of polling. | |
133 MessageLoop::current()->PostDelayedTask( | |
134 FROM_HERE, | |
135 base::Bind( | |
136 &AndroidVideoDecodeAccelerator::DoIOTask, base::AsWeakPtr(this)), | |
137 kDecodePollDelay); | |
138 } | |
139 } | |
140 | |
141 void AndroidVideoDecodeAccelerator::QueueInput() { | |
142 if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance) | |
143 return; | |
144 if (pending_bitstream_buffers_.empty()) | |
145 return; | |
146 | |
147 int input_buf_index = media_codec_->DequeueInputBuffer( | |
148 media::MediaCodecBridge::kTimeOutNoWait); | |
149 if (input_buf_index < 0) { | |
150 DCHECK_EQ(input_buf_index, media::MediaCodecBridge::INFO_TRY_AGAIN_LATER); | |
151 return; | |
152 } | |
153 media::BitstreamBuffer& bitstream_buffer = | |
154 pending_bitstream_buffers_.front(); | |
155 | |
156 if (bitstream_buffer.id() == -1) { | |
157 media_codec_->QueueEOS(input_buf_index); | |
158 pending_bitstream_buffers_.pop(); | |
159 return; | |
160 } | |
161 // Abuse the presentation time argument to propagate the bitstream | |
162 // buffer ID to the output, so we can report it back to the client in | |
163 // PictureReady(). | |
164 base::TimeDelta timestamp = | |
165 base::TimeDelta::FromMicroseconds(bitstream_buffer.id()); | |
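A minimal sketch of the round trip (values illustrative): the id is encoded as the presentation time when the input buffer is queued, and recovered from the timestamp of the matching output buffer in DequeueOutput() below.

    int32 id = bitstream_buffer.id();                            // e.g. 42
    base::TimeDelta ts = base::TimeDelta::FromMicroseconds(id);  // queued with the input buffer
    int32 recovered = static_cast<int32>(ts.InMicroseconds());   // == 42 on the output side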
166 | |
167 int bytes_written = 0; | |
168 scoped_ptr<base::SharedMemory> shm( | |
169 new base::SharedMemory(bitstream_buffer.handle(), true)); | |
170 | |
171 RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()), | |
172 "Failed to SharedMemory::Map()", | |
173 UNREADABLE_INPUT); | |
174 | |
175 const size_t offset = num_bytes_used_in_the_pending_buffer_; | |
176 bytes_written = media_codec_->QueueInputBuffer( | |
177 input_buf_index, | |
178 static_cast<const uint8*>(shm->memory()) + offset, | |
179 bitstream_buffer.size() - offset, timestamp); | |
180 num_bytes_used_in_the_pending_buffer_ += bytes_written; | |
181 CHECK_LE(num_bytes_used_in_the_pending_buffer_, bitstream_buffer.size()); | |
182 | |
183 if (num_bytes_used_in_the_pending_buffer_ == bitstream_buffer.size()) { | |
184 num_bytes_used_in_the_pending_buffer_ = 0; | |
185 pending_bitstream_buffers_.pop(); | |
186 | |
187 // We should call NotifyEndOfBitstreamBuffer() when no more decoded output | |
188 // will be returned from the bitstream buffer. However, the MediaCodec API | |
189 // does not give us a way to guarantee this. | |
190 // So, here, we call NotifyEndOfBitstreamBuffer() in advance in order to | |
191 // keep getting more bitstreams from the client, and throttle them by using | |
192 // |bitstreams_notified_in_advance_|. | |
193 // TODO(dwkang): check if there is a way to remove this workaround. | |
194 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
195 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
196 base::AsWeakPtr(this), bitstream_buffer.id())); | |
197 bitstreams_notified_in_advance_.push_back(bitstream_buffer.id()); | |
198 } | |
199 } | |
200 | |
201 void AndroidVideoDecodeAccelerator::DequeueOutput() { | |
202 if (picturebuffers_requested_ && output_picture_buffers_.empty()) | |
203 return; | |
204 | |
205 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { | |
206 // We don't have any free picture buffers to send; need to wait for more. | |
207 return; | |
208 } | |
209 | |
210 bool eos = false; | |
211 base::TimeDelta timestamp; | |
212 int32 buf_index = 0; | |
213 do { | |
214 int32 offset = 0; | |
215 int32 size = 0; | |
216 buf_index = media_codec_->DequeueOutputBuffer( | |
217 media::MediaCodecBridge::kTimeOutNoWait, | |
218 &offset, &size, ×tamp, &eos); | |
219 switch (buf_index) { | |
220 case media::MediaCodecBridge::INFO_TRY_AGAIN_LATER: | |
221 return; | |
222 | |
223 case media::MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: { | |
224 int32 width, height; | |
225 media_codec_->GetOutputFormat(&width, &height); | |
226 | |
227 if (!picturebuffers_requested_) { | |
228 picturebuffers_requested_ = true; | |
229 size_ = gfx::Size(width, height); | |
230 // texture_copier_.reset(new Gles2ExternalTextureCopier(width, height)); | |
231 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
232 &AndroidVideoDecodeAccelerator::RequestPictureBuffers, | |
233 base::AsWeakPtr(this))); | |
234 } else { | |
235 // TODO(dwkang): support dynamic resolution changes. | |
236 // Currently, we assume that there is no resolution change in the | |
237 // input stream. So, INFO_OUTPUT_FORMAT_CHANGED should not happen | |
238 // more than once. However, we allow it if the resolution is the same | |
239 // as the previous one because |media_codec_| can be reset in Reset(). | |
240 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), | |
241 "Dynamic resolution change is not supported.", | |
242 PLATFORM_FAILURE); | |
243 } | |
244 return; | |
245 } | |
246 | |
247 case media::MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED: | |
248 media_codec_->GetOutputBuffers(); | |
249 break; | |
250 } | |
251 } while (buf_index < 0); | |
252 | |
253 media_codec_->ReleaseOutputBuffer(buf_index, true); | |
254 | |
255 if (eos) { | |
256 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
257 &AndroidVideoDecodeAccelerator::NotifyFlushDone, | |
258 base::AsWeakPtr(this))); | |
259 decoder_met_eos_ = true; | |
260 } else { | |
261 int64 bitstream_buffer_id = timestamp.InMicroseconds(); | |
262 SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id)); | |
263 | |
264 // Remove the ids up to and including |bitstream_buffer_id|. Note that | |
265 // |bitstreams_notified_in_advance_| does not exactly track the bitstream | |
266 // ids in the decoder because of frame reordering; we only maintain it | |
267 // roughly and use it for throttling. | |
268 std::list<int32>::iterator it; | |
269 for (it = bitstreams_notified_in_advance_.begin(); | |
270 it != bitstreams_notified_in_advance_.end(); | |
271 ++it) { | |
272 if (*it == bitstream_buffer_id) { | |
273 bitstreams_notified_in_advance_.erase( | |
274 bitstreams_notified_in_advance_.begin(), ++it); | |
275 break; | |
276 } | |
277 } | |
278 } | |
279 } | |
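A compact equivalent of the erase loop above in DequeueOutput() (illustrative only; it would require including <algorithm>):

    std::list<int32>::iterator it =
        std::find(bitstreams_notified_in_advance_.begin(),
                  bitstreams_notified_in_advance_.end(),
                  bitstream_buffer_id);
    if (it != bitstreams_notified_in_advance_.end()) {
      bitstreams_notified_in_advance_.erase(
          bitstreams_notified_in_advance_.begin(), ++it);
    }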
280 | |
281 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( | |
282 int32 bitstream_id) { | |
283 DCHECK(thread_checker_.CalledOnValidThread()); | |
284 DCHECK_NE(bitstream_id, -1); | |
285 DCHECK(!free_picture_ids_.empty()); | |
286 | |
287 RETURN_ON_FAILURE(make_context_current_.Run(), | |
288 "Failed to make this decoder's GL context current.", | |
289 PLATFORM_FAILURE); | |
290 | |
291 int32 picture_buffer_id = free_picture_ids_.front(); | |
292 free_picture_ids_.pop(); | |
293 | |
294 float transform_matrix[16]; | |
295 surface_texture_->UpdateTexImage(); | |
296 surface_texture_->GetTransformMatrix(transform_matrix); | |
297 | |
298 OutputBufferMap::const_iterator i = | |
299 output_picture_buffers_.find(picture_buffer_id); | |
300 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), | |
301 "Can't find a PictureBuffer for " << picture_buffer_id, | |
302 PLATFORM_FAILURE); | |
303 uint32 picture_buffer_texture_id = i->second.texture_id(); | |
304 | |
305 RETURN_ON_FAILURE(gl_decoder_.get(), | |
306 "Failed to get gles2 decoder instance.", | |
307 ILLEGAL_STATE); | |
308 // Defer initializing the CopyTextureCHROMIUMResourceManager until it is | |
309 // needed because it takes tens of milliseconds to initialize. | |
310 if (!copier_.get()) { | |
Ami GONE FROM CHROMIUM (2013/02/28 03:23:05): Doesn't scoped_ptr<>::operator*() mean you don't n
dwkang1 (2013/02/28 04:27:49): No. Fixed.
311 copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager()); | |
312 copier_->Initialize(); | |
313 gl_decoder_->RestoreFramebufferBindings(); | |
314 } | |
315 | |
316 // Here, we copy |surface_texture_id_| to the picture buffer instead of | |
317 // setting a new texture on |surface_texture_| by calling attachToGLContext() | |
318 // because: | |
319 // 1. Once we call detachFromGLContext(), it deletes the previously | |
320 // attached texture. | |
321 // 2. SurfaceTexture requires us to apply a transform matrix when we show | |
322 // the texture. | |
323 copier_->DoCopyTexture(gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES, | |
324 GL_TEXTURE_2D, surface_texture_id_, | |
325 picture_buffer_texture_id, 0, size_.width(), | |
326 size_.height(), false, false, false); | |
327 | |
328 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
329 &AndroidVideoDecodeAccelerator::NotifyPictureReady, | |
330 base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id))); | |
331 } | |
332 | |
333 void AndroidVideoDecodeAccelerator::Decode( | |
334 const media::BitstreamBuffer& bitstream_buffer) { | |
335 DCHECK(thread_checker_.CalledOnValidThread()); | |
336 if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) { | |
337 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
338 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
339 base::AsWeakPtr(this), bitstream_buffer.id())); | |
340 return; | |
341 } | |
342 | |
343 pending_bitstream_buffers_.push(bitstream_buffer); | |
344 | |
345 if (!io_task_is_posted_) | |
346 DoIOTask(); | |
347 } | |
348 | |
349 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | |
350 const std::vector<media::PictureBuffer>& buffers) { | |
351 DCHECK(thread_checker_.CalledOnValidThread()); | |
352 DCHECK(output_picture_buffers_.empty()); | |
353 | |
354 for (size_t i = 0; i < buffers.size(); ++i) { | |
355 output_picture_buffers_.insert(std::make_pair(buffers[i].id(), buffers[i])); | |
356 free_picture_ids_.push(buffers[i].id()); | |
357 } | |
358 | |
359 RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers, | |
360 "Invalid picture buffers were passed.", | |
361 INVALID_ARGUMENT); | |
362 | |
363 if (!io_task_is_posted_) | |
364 DoIOTask(); | |
365 } | |
366 | |
367 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | |
368 int32 picture_buffer_id) { | |
369 DCHECK(thread_checker_.CalledOnValidThread()); | |
370 free_picture_ids_.push(picture_buffer_id); | |
371 | |
372 if (!io_task_is_posted_) | |
373 DoIOTask(); | |
374 } | |
375 | |
376 void AndroidVideoDecodeAccelerator::Flush() { | |
377 DCHECK(thread_checker_.CalledOnValidThread()); | |
378 | |
379 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | |
380 } | |
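Flush() reuses the decode path with a sentinel buffer: id -1 and an empty shared-memory handle. A minimal sketch of how the sentinel flows (it mirrors the id() == -1 checks above):

    // Pushed by Flush() via Decode():
    media::BitstreamBuffer eos_marker(-1, base::SharedMemoryHandle(), 0);
    // QueueInput(): bitstream_buffer.id() == -1  ->  media_codec_->QueueEOS(input_buf_index)
    // DequeueOutput(): eos flag set on the output buffer  ->  NotifyFlushDone() is posted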
381 | |
382 void AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { | |
383 DCHECK(surface_texture_.get()); | |
384 | |
385 media_codec_.reset(new media::MediaCodecBridge(codec_)); | |
386 | |
387 JNIEnv* env = base::android::AttachCurrentThread(); | |
388 CHECK(env); | |
389 ScopedJavaLocalRef<jclass> cls( | |
390 base::android::GetClass(env, "android/view/Surface")); | |
391 jmethodID constructor = MethodID::Get<MethodID::TYPE_INSTANCE>( | |
392 env, cls.obj(), "<init>", "(Landroid/graphics/SurfaceTexture;)V"); | |
393 ScopedJavaLocalRef<jobject> j_surface( | |
394 env, env->NewObject( | |
395 cls.obj(), constructor, | |
396 surface_texture_->j_surface_texture().obj())); | |
397 | |
398 // VDA does not pass the container-indicated resolution in the initialization | |
399 // phase. Here, we default to 720p. | |
400 // TODO(dwkang): find a way to remove the following hard-coded value. | |
401 media_codec_->StartVideo(codec_, gfx::Size(1280, 720), j_surface.obj()); | |
402 content::ReleaseSurface(j_surface.obj()); | |
403 media_codec_->GetOutputBuffers(); | |
404 } | |
405 | |
406 void AndroidVideoDecodeAccelerator::Reset() { | |
407 DCHECK(thread_checker_.CalledOnValidThread()); | |
408 | |
409 while (!pending_bitstream_buffers_.empty()) { | |
410 media::BitstreamBuffer& bitstream_buffer = | |
411 pending_bitstream_buffers_.front(); | |
412 pending_bitstream_buffers_.pop(); | |
413 | |
414 if (bitstream_buffer.id() != -1) { | |
415 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
416 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
417 base::AsWeakPtr(this), bitstream_buffer.id())); | |
418 } | |
419 } | |
420 bitstreams_notified_in_advance_.clear(); | |
421 | |
422 if (!decoder_met_eos_) { | |
423 media_codec_->Reset(); | |
424 } else { | |
425 // MediaCodec should be usable after reaching EOS, but it is not on some | |
426 // devices (b/8125974). To avoid this, we create a new one. | |
427 media_codec_->Stop(); | |
428 ConfigureMediaCodec(); | |
429 } | |
430 decoder_met_eos_ = false; | |
431 num_bytes_used_in_the_pending_buffer_ = 0; | |
432 state_ = NO_ERROR; | |
433 | |
434 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
435 &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this))); | |
436 } | |
437 | |
438 void AndroidVideoDecodeAccelerator::Destroy() { | |
439 DCHECK(thread_checker_.CalledOnValidThread()); | |
440 | |
441 if (media_codec_) | |
442 media_codec_->Stop(); | |
443 if (surface_texture_id_) | |
444 glDeleteTextures(1, &surface_texture_id_); | |
445 if (copier_.get()) | |
Ami GONE FROM CHROMIUM (2013/02/28 03:23:05): ditto
dwkang1 (2013/02/28 04:27:49): Done.
446 copier_->Destroy(); | |
447 delete this; | |
448 } | |
449 | |
450 void AndroidVideoDecodeAccelerator::NotifyInitializeDone() { | |
451 client_->NotifyInitializeDone(); | |
452 } | |
453 | |
454 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | |
455 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D); | |
456 } | |
457 | |
458 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | |
459 const media::Picture& picture) { | |
460 client_->PictureReady(picture); | |
461 } | |
462 | |
463 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | |
464 int input_buffer_id) { | |
465 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | |
466 } | |
467 | |
468 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { | |
469 client_->NotifyFlushDone(); | |
470 } | |
471 | |
472 void AndroidVideoDecodeAccelerator::NotifyResetDone() { | |
473 client_->NotifyResetDone(); | |
474 } | |
475 | |
476 void AndroidVideoDecodeAccelerator::NotifyError( | |
477 media::VideoDecodeAccelerator::Error error) { | |
478 client_->NotifyError(error); | |
479 } | |
480 | |
481 } // namespace content | |