// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/android_video_decode_accelerator.h"

#include <jni.h>

#include "base/android/jni_android.h"
#include "base/android/scoped_java_ref.h"
#include "base/bind.h"
#include "base/logging.h"
#include "base/message_loop.h"
#include "content/common/android/surface_callback.h"
#include "content/common/gpu/gpu_channel.h"
#include "content/common/gpu/media/gles2_external_texture_copier.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/limits.h"
#include "media/video/picture.h"
#include "ui/gl/gl_bindings.h"

using base::android::MethodID;
using base::android::ScopedJavaLocalRef;

namespace content {

// XXX: drop the below before submitting.
#define LOG_LINE() LOG(INFO) << __FUNCTION__

// Helper macro for dealing with failure. If |result| evaluates to false,
// emit |log| to ERROR, register |error| with the decoder, and return.
#define RETURN_ON_FAILURE(result, log, error)                     \
  do {                                                            \
    if (!(result)) {                                              \
      DLOG(ERROR) << log;                                         \
      MessageLoop::current()->PostTask(FROM_HERE, base::Bind(     \
          &AndroidVideoDecodeAccelerator::NotifyError,            \
          base::AsWeakPtr(this), error));                         \
      state_ = ERROR;                                             \
      return;                                                     \
    }                                                             \
  } while (0)
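// Note: because this macro returns from the calling function and touches
// |state_| and |this|, it is only meant for use inside void member functions
// of AndroidVideoDecodeAccelerator, e.g.:
//   RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
//                     "Failed to SharedMemory::Map()",
//                     UNREADABLE_INPUT);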

// TODO(dwkang): We only need kMaxVideoFrames to pass the media stack's
// prerolling phase, but 1 is added due to crbug.com/176036. This should be
// tuned when we have an actual use case.
enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };

// static
const base::TimeDelta AndroidVideoDecodeAccelerator::kDecodePollDelay =
    base::TimeDelta::FromMilliseconds(10);

AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
    media::VideoDecodeAccelerator::Client* client,
    const base::Callback<bool(void)>& make_context_current)
    : client_(client),
      make_context_current_(make_context_current),
      codec_(media::MediaCodecBridge::VIDEO_H264),
      state_(NO_ERROR),
      surface_texture_id_(-1),
      picturebuffers_requested_(false),
      io_task_is_posted_(false),
      decoder_met_eos_(false),
      num_bytes_used_in_the_pending_buffer_(0) {
  LOG_LINE();
}

AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
  LOG_LINE();
  DCHECK(thread_checker_.CalledOnValidThread());
}

bool AndroidVideoDecodeAccelerator::Initialize(
    media::VideoCodecProfile profile) {
  LOG_LINE();
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  if (profile == media::VP8PROFILE_MAIN) {
    codec_ = media::MediaCodecBridge::VIDEO_VP8;
  } else if (profile >= media::H264PROFILE_MIN &&
             profile <= media::H264PROFILE_MAX) {
    codec_ = media::MediaCodecBridge::VIDEO_H264;
  } else {
    LOG(ERROR) << "Unsupported profile: " << profile;
    return false;
  }

  if (!make_context_current_.Run()) {
    LOG(ERROR) << "Failed to make this decoder's GL context current.";
    return false;
  }

  glGenTextures(1, &surface_texture_id_);
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);

  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  surface_texture_ = new SurfaceTextureBridge(surface_texture_id_);

  ConfigureMediaCodec();

  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyInitializeDone,
      base::AsWeakPtr(this)));
  return true;
}

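// DoIOTask() drives the decode loop: it is kicked whenever the client gives
// us work (Decode(), AssignPictureBuffers(), ReusePictureBuffer()) and
// re-posts itself every |kDecodePollDelay| while there is still pending
// input or output to service.  A rough sketch of the resulting flow, with
// client calls on the left and client callbacks on the right:
//
//   Decode(buffer)          -> QueueInput()    -> NotifyEndOfBitstreamBuffer()
//   AssignPictureBuffers()  -> DequeueOutput() -> PictureReady()
//   Flush() (id == -1)      -> QueueEOS()      -> NotifyFlushDone()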
void AndroidVideoDecodeAccelerator::DoIOTask() {
  io_task_is_posted_ = false;
  if (state_ == ERROR) {
    return;
  }

  DequeueOutput();
  QueueInput();

  if (!pending_bitstream_buffers_.empty() ||
      !free_picture_ids_.empty()) {
    io_task_is_posted_ = true;
    MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(
            &AndroidVideoDecodeAccelerator::DoIOTask, base::AsWeakPtr(this)),
        kDecodePollDelay);
  }
}

void AndroidVideoDecodeAccelerator::QueueInput() {
  if (pending_bitstream_buffers_.empty())
    return;

  int input_buf_index = media_codec_->DequeueInputBuffer(
      media::MediaCodecBridge::kTimeOutNoWait);
  if (input_buf_index < 0) {
    DCHECK_EQ(input_buf_index, media::MediaCodecBridge::INFO_TRY_AGAIN_LATER);
    return;
  }
  media::BitstreamBuffer& bitstream_buffer =
      pending_bitstream_buffers_.front();

  if (bitstream_buffer.id() == -1) {
    media_codec_->QueueEOS(input_buf_index);
    pending_bitstream_buffers_.pop();
    return;
  }
  // Abuse the presentation time argument to propagate the bitstream
  // buffer ID to the output, so we can report it back to the client in
  // PictureReady().
  base::TimeDelta timestamp =
      base::TimeDelta::FromMicroseconds(bitstream_buffer.id());

  int bytes_written = 0;
  if (bitstream_buffer.size() > 0) {
Ami GONE FROM CHROMIUM  2013/02/17 00:12:44
  nit: since you moved the .pop for EOS buffers above ...
dwkang1  2013/02/18 14:13:23
  Good idea. Thanks!
    scoped_ptr<base::SharedMemory> shm(
        new base::SharedMemory(bitstream_buffer.handle(), true));

    RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                      "Failed to SharedMemory::Map()",
                      UNREADABLE_INPUT);

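    // MediaCodec may accept only part of the input in one call:
    // QueueInputBuffer() returns the number of bytes it actually consumed,
    // and |num_bytes_used_in_the_pending_buffer_| carries the resulting
    // offset across DoIOTask() passes until the buffer is fully drained.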
    const size_t offset = num_bytes_used_in_the_pending_buffer_;
    bytes_written = media_codec_->QueueInputBuffer(
        input_buf_index,
        static_cast<const uint8*>(shm->memory()) + offset,
        bitstream_buffer.size() - offset, timestamp);
    num_bytes_used_in_the_pending_buffer_ += bytes_written;
    CHECK_LE(num_bytes_used_in_the_pending_buffer_, bitstream_buffer.size());
  }

  if (num_bytes_used_in_the_pending_buffer_ == bitstream_buffer.size()) {
    num_bytes_used_in_the_pending_buffer_ = 0;
    pending_bitstream_buffers_.pop();

    // TODO(dwkang): We should call NotifyEndOfBitstreamBuffer() when no
    // more decoded output will be returned from the bitstream buffer.
    // Need to find a way to ensure this.
    MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
        base::AsWeakPtr(this), bitstream_buffer.id()));
  }
}

void AndroidVideoDecodeAccelerator::DequeueOutput() {
  if (picturebuffers_requested_ && output_picture_buffers_.empty())
    return;

  if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
    // We don't have any free picture buffers to send; need to wait for more.
    return;
  }

  bool eos = false;
  base::TimeDelta timestamp;
  int32 buf_index = 0;
  do {
    int32 offset = 0;
    int32 size = 0;
    buf_index = media_codec_->DequeueOutputBuffer(
        media::MediaCodecBridge::kTimeOutNoWait,
        &offset, &size, &timestamp, &eos);
    switch (buf_index) {
      case media::MediaCodecBridge::INFO_TRY_AGAIN_LATER:
        return;

      case media::MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: {
        int32 width, height;
        media_codec_->GetOutputFormat(&width, &height);

        if (!picturebuffers_requested_) {
          picturebuffers_requested_ = true;
          size_ = gfx::Size(width, height);
          texture_copier_.reset(new Gles2ExternalTextureCopier());
          texture_copier_->Init(width, height);
          MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
              &AndroidVideoDecodeAccelerator::RequestPictureBuffers,
              base::AsWeakPtr(this)));
        } else {
          // TODO(dwkang): support dynamic resolution changes.
          // Currently, we assume that there is no resolution change in the
          // input stream, so INFO_OUTPUT_FORMAT_CHANGED should not happen
          // more than once. However, we allow it if the resolution is the
          // same as the previous one, because |media_codec_| can be reset
          // in Reset().
          RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
                            "Dynamic resolution change is not supported.",
                            PLATFORM_FAILURE);
        }
        return;
      }

      case media::MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED:
        media_codec_->GetOutputBuffers();
        break;
    }
  } while (buf_index < 0);

  media_codec_->ReleaseOutputBuffer(buf_index, true);

  if (eos) {
    MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &AndroidVideoDecodeAccelerator::NotifyFlushDone,
        base::AsWeakPtr(this)));
    decoder_met_eos_ = true;
  } else {
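    // Recover the bitstream buffer id that QueueInput() smuggled through the
    // presentation timestamp.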
    int64 bitstream_buffer_id = timestamp.InMicroseconds();
    SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));
  }
}

void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
    int32 bitstream_id) {
  LOG_LINE();
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_NE(bitstream_id, -1);
  DCHECK(!free_picture_ids_.empty());

  RETURN_ON_FAILURE(make_context_current_.Run(),
                    "Failed to make this decoder's GL context current.",
                    PLATFORM_FAILURE);

  int32 picture_buffer_id = free_picture_ids_.front();
  free_picture_ids_.pop();

  float transform_matrix[16];
  surface_texture_->UpdateTexImage();
  surface_texture_->GetTransformMatrix(transform_matrix);

  OutputBufferMap::const_iterator i =
      output_picture_buffers_.find(picture_buffer_id);
  RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
                    "Can't find a PictureBuffer for " << picture_buffer_id,
                    PLATFORM_FAILURE);
  uint32 picture_buffer_texture_id = i->second.texture_id();

  // Here, we copy |surface_texture_id_| to the picture buffer instead of
  // attaching a new texture to |surface_texture_| via attachToGLContext(),
  // because:
  // 1. Once we call detachFromGLContext(), it deletes the previously
  //    attached texture.
  // 2. SurfaceTexture requires us to apply a transform matrix when we show
  //    the texture.
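  // Gles2ExternalTextureCopier is defined elsewhere; presumably it draws the
  // GL_TEXTURE_EXTERNAL_OES texture into the client's GL_TEXTURE_2D texture,
  // applying |transform_matrix| in the process.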
  texture_copier_->Copy(
      surface_texture_id_, picture_buffer_texture_id, transform_matrix);

  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyPictureReady,
      base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id)));
}

void AndroidVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  LOG_LINE();
  DCHECK(thread_checker_.CalledOnValidThread());
  pending_bitstream_buffers_.push(bitstream_buffer);

  if (!io_task_is_posted_)
    DoIOTask();
}

void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  LOG_LINE();
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(output_picture_buffers_.empty());

  for (size_t i = 0; i < buffers.size(); ++i) {
    output_picture_buffers_.insert(std::make_pair(buffers[i].id(), buffers[i]));
    free_picture_ids_.push(buffers[i].id());
  }

  RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers,
                    "Invalid picture buffers were passed.",
                    INVALID_ARGUMENT);

  if (!io_task_is_posted_)
    DoIOTask();
}

void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  free_picture_ids_.push(picture_buffer_id);

  if (!io_task_is_posted_)
    DoIOTask();
}

void AndroidVideoDecodeAccelerator::Flush() {
  LOG_LINE();
  DCHECK(thread_checker_.CalledOnValidThread());

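  // A flush is signalled by queueing a pretend bitstream buffer with id -1:
  // QueueInput() turns it into QueueEOS(), and NotifyFlushDone() is posted
  // once DequeueOutput() sees the EOS flag come back.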
  Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
}

void AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
  DCHECK(surface_texture_.get());

  media_codec_.reset(new media::MediaCodecBridge(codec_));

  JNIEnv* env = base::android::AttachCurrentThread();
  CHECK(env);
  ScopedJavaLocalRef<jclass> cls(
      base::android::GetClass(env, "android/view/Surface"));
  jmethodID constructor = MethodID::Get<MethodID::TYPE_INSTANCE>(
      env, cls.obj(), "<init>", "(Landroid/graphics/SurfaceTexture;)V");
  ScopedJavaLocalRef<jobject> j_surface(
      env, env->NewObject(
          cls.obj(), constructor,
          surface_texture_->j_surface_texture().obj()));
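  // The JNI calls above amount to the Java expression
  // `new android.view.Surface(surfaceTexture)`.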

  // VDA does not pass the container-indicated resolution during the
  // initialization phase. Here, we default to 720p.
  // TODO(dwkang): find a way to remove the following hard-coded value.
  media_codec_->StartVideo(codec_, gfx::Size(1280, 720), j_surface.obj());
  content::ReleaseSurface(j_surface.obj());
  media_codec_->GetOutputBuffers();
}

void AndroidVideoDecodeAccelerator::Reset() {
  LOG_LINE();
  DCHECK(thread_checker_.CalledOnValidThread());

  while (!pending_bitstream_buffers_.empty()) {
    media::BitstreamBuffer& bitstream_buffer =
        pending_bitstream_buffers_.front();
    pending_bitstream_buffers_.pop();

    if (bitstream_buffer.id() != -1) {
      MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
          &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
          base::AsWeakPtr(this), bitstream_buffer.id()));
    }
  }

  if (!decoder_met_eos_) {
    media_codec_->Reset();
  } else {
    // MediaCodec should be usable after reaching EOS, but it is not on some
    // devices (b/8125974). To avoid that case, we create a new one.
    media_codec_->Stop();
    ConfigureMediaCodec();
  }
  decoder_met_eos_ = false;
  num_bytes_used_in_the_pending_buffer_ = 0;
  state_ = NO_ERROR;

  MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this)));
}

void AndroidVideoDecodeAccelerator::Destroy() {
  LOG_LINE();
  DCHECK(thread_checker_.CalledOnValidThread());

  if (media_codec_) media_codec_->Stop();
  delete this;
}

void AndroidVideoDecodeAccelerator::NotifyInitializeDone() {
  client_->NotifyInitializeDone();
}

void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
}

void AndroidVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  client_->PictureReady(picture);
}

void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
    int input_buffer_id) {
  client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}

void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
  client_->NotifyFlushDone();
}

void AndroidVideoDecodeAccelerator::NotifyResetDone() {
  client_->NotifyResetDone();
}

void AndroidVideoDecodeAccelerator::NotifyError(
    media::VideoDecodeAccelerator::Error error) {
  client_->NotifyError(error);
}

}  // namespace content