1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | |
6 | |
7 #include <jni.h> | |
8 | |
9 #include "base/android/jni_android.h" | |
10 #include "base/android/scoped_java_ref.h" | |
11 #include "base/bind.h" | |
12 #include "base/logging.h" | |
13 #include "base/message_loop.h" | |
14 #include "content/common/android/surface_callback.h" | |
15 #include "content/common/gpu/gpu_channel.h" | |
16 #include "content/common/gpu/media/gles2_external_texture_copier.h" | |
17 #include "media/base/bitstream_buffer.h" | |
18 #include "media/base/limits.h" | |
19 #include "media/video/picture.h" | |
20 #include "ui/gl/gl_bindings.h" | |
21 | |
22 using base::android::MethodID; | |
23 using base::android::ScopedJavaLocalRef; | |
24 | |
25 namespace content { | |
26 | |
27 // XXX: drop the below before submitting. | |
Ami GONE FROM CHROMIUM
2013/02/13 18:07:11
Time to drop this?
dwkang1
2013/02/16 11:30:31
Let me do this once we have a conclusion on gl copy.
| |
28 #define LOG_LINE() LOG(INFO) << __FUNCTION__ | |
29 | |
30 // Helper macro for dealing with failure. If |result| evaluates to false, emit | |
31 // |log| via DLOG(ERROR), report |error| to the client, and return. | |
32 #define RETURN_ON_FAILURE(result, log, error) \ | |
33 do { \ | |
34 if (!(result)) { \ | |
35 DLOG(ERROR) << log; \ | |
36 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \ | |
37 &AndroidVideoDecodeAccelerator::NotifyError, \ | |
38 base::AsWeakPtr(this), error)); \ | |
39 state_ = ERROR; \ | |
40 return; \ | |
41 } \ | |
42 } while (0) | |
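For reference, a minimal sketch of what a RETURN_ON_FAILURE call site expands to, using the SharedMemory::Map() call site that appears later in this file; the do/while(0) wrapper is what lets the macro behave as a single statement inside an unbraced if/else:

  // Illustrative expansion of
  //   RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
  //                     "Failed to SharedMemory::Map()", UNREADABLE_INPUT);
  do {
    if (!(shm->Map(bitstream_buffer.size()))) {
      DLOG(ERROR) << "Failed to SharedMemory::Map()";
      MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
          &AndroidVideoDecodeAccelerator::NotifyError,
          base::AsWeakPtr(this), UNREADABLE_INPUT));
      state_ = ERROR;
      return;
    }
  } while (0);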
43 | |
44 // TODO(dwkang): For now, we use kMaxVideoFrames + 1 in order to provide | |
45 // enough pictures to pass the media stack's prerolling phase, but this | |
46 // should be tuned when we have an actual use case. | |
Ami GONE FROM CHROMIUM
2013/02/13 18:07:11
Point at http://crbug.com/176036 (which I just filed).
dwkang1
2013/02/16 11:30:31
Done. Thanks for filing.
| |
47 enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 }; | |
48 | |
49 // static | |
50 const base::TimeDelta AndroidVideoDecodeAccelerator::kDecodePollDelay = | |
51 base::TimeDelta::FromMilliseconds(10); | |
52 | |
53 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | |
54 media::VideoDecodeAccelerator::Client* client, | |
55 const base::Callback<bool(void)>& make_context_current) | |
56 : client_(client), | |
57 make_context_current_(make_context_current), | |
58 codec_(media::MediaCodecBridge::VIDEO_H264), | |
59 state_(NO_ERROR), | |
60 surface_texture_id_(-1), | |
61 picturebuffers_requested_(false), | |
62 io_task_is_posted_(false), | |
63 decoder_met_eos_(false), | |
64 num_bytes_used_in_the_pending_buffer_(0) { | |
65 LOG_LINE(); | |
66 } | |
67 | |
68 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | |
69 LOG_LINE(); | |
70 DCHECK(thread_checker_.CalledOnValidThread()); | |
71 } | |
72 | |
73 bool AndroidVideoDecodeAccelerator::Initialize( | |
74 media::VideoCodecProfile profile) { | |
75 LOG_LINE(); | |
76 DCHECK(!media_codec_); | |
77 DCHECK(thread_checker_.CalledOnValidThread()); | |
78 | |
79 if (profile == media::VP8PROFILE_MAIN) { | |
80 codec_ = media::MediaCodecBridge::VIDEO_VP8; | |
81 } else if (profile >= media::H264PROFILE_MIN && | |
82 profile <= media::H264PROFILE_MAX) { | |
83 codec_ = media::MediaCodecBridge::VIDEO_H264; | |
84 } else { | |
85 LOG(ERROR) << "Unsupported profile: " << profile; | |
86 return false; | |
87 } | |
88 | |
89 if (!make_context_current_.Run()) { | |
90 LOG(ERROR) << "Failed to make this decoder's GL context current."; | |
91 return false; | |
92 } | |
93 | |
94 glGenTextures(1, &surface_texture_id_); | |
95 glActiveTexture(GL_TEXTURE0); | |
96 glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); | |
97 | |
98 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); | |
99 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); | |
100 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
101 GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
102 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
103 GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
104 | |
105 surface_texture_ = new SurfaceTextureBridge(surface_texture_id_); | |
106 | |
107 ConfigureMediaCodec(); | |
108 | |
109 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
110 &AndroidVideoDecodeAccelerator::NotifyInitializeDone, | |
111 base::AsWeakPtr(this))); | |
112 return true; | |
113 } | |
114 | |
115 void AndroidVideoDecodeAccelerator::DoIOTask() { | |
116 io_task_is_posted_ = false; | |
117 if (state_ == ERROR) { | |
118 return; | |
119 } | |
120 | |
121 DequeueOutput(); | |
122 QueueInput(); | |
123 | |
124 if (!pending_bitstream_buffers_.empty() || | |
125 !free_picture_ids_.empty()) { | |
126 io_task_is_posted_ = true; | |
127 MessageLoop::current()->PostDelayedTask( | |
128 FROM_HERE, | |
129 base::Bind( | |
130 &AndroidVideoDecodeAccelerator::DoIOTask, base::AsWeakPtr(this)), | |
131 kDecodePollDelay); | |
132 } | |
133 } | |
134 | |
135 void AndroidVideoDecodeAccelerator::QueueInput() { | |
136 if (pending_bitstream_buffers_.empty()) | |
137 return; | |
138 | |
139 int input_buf_index = media_codec_->DequeueInputBuffer( | |
140 media::MediaCodecBridge::kTimeOutNoWait); | |
141 if (input_buf_index < 0) { | |
142 DCHECK_EQ(input_buf_index, media::MediaCodecBridge::INFO_TRY_AGAIN_LATER); | |
143 return; | |
144 } | |
145 media::BitstreamBuffer& bitstream_buffer = | |
146 pending_bitstream_buffers_.front(); | |
147 | |
148 if (bitstream_buffer.id() == -1) { | |
149 media_codec_->QueueEOS(input_buf_index); | |
150 return; | |
151 } | |
152 // Abuse the presentation time argument to propagate the bitstream | |
153 // buffer ID to the output, so we can report it back to the client in | |
154 // PictureReady(). | |
155 base::TimeDelta timestamp = | |
156 base::TimeDelta::FromMicroseconds(bitstream_buffer.id()); | |
157 | |
158 int bytes_written = 0; | |
159 if (bitstream_buffer.size() > 0) { | |
160 scoped_ptr<base::SharedMemory> shm( | |
161 new base::SharedMemory(bitstream_buffer.handle(), true)); | |
162 | |
163 RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()), | |
164 "Failed to SharedMemory::Map()", | |
165 UNREADABLE_INPUT); | |
166 | |
167 const size_t offset = num_bytes_used_in_the_pending_buffer_; | |
168 bytes_written = media_codec_->QueueInputBuffer( | |
169 input_buf_index, | |
170 static_cast<const uint8*>(shm->memory()) + offset, | |
171 bitstream_buffer.size() - offset, timestamp); | |
172 num_bytes_used_in_the_pending_buffer_ += bytes_written; | |
173 CHECK_LE(num_bytes_used_in_the_pending_buffer_, bitstream_buffer.size()); | |
174 } | |
175 | |
176 if (num_bytes_used_in_the_pending_buffer_ == bitstream_buffer.size()) { | |
177 num_bytes_used_in_the_pending_buffer_ = 0; | |
178 pending_bitstream_buffers_.pop(); | |
179 | |
180 if (bitstream_buffer.id() != -1) { | |
181 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
182 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
183 base::AsWeakPtr(this), bitstream_buffer.id())); | |
184 } | |
185 } | |
186 } | |
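For clarity, the ID-in-timestamp trick described in the comment above round-trips as follows (a restatement of code already in this file, not new behavior):

  // QueueInput():    timestamp = base::TimeDelta::FromMicroseconds(bitstream_buffer.id());
  //                  media_codec_->QueueInputBuffer(..., timestamp);
  // DequeueOutput(): media_codec_->DequeueOutputBuffer(..., &timestamp, &eos);
  //                  int32 bitstream_buffer_id = timestamp.InMicroseconds();
  //                  SendCurrentSurfaceToClient(bitstream_buffer_id);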
187 | |
188 void AndroidVideoDecodeAccelerator::DequeueOutput() { | |
189 if (picturebuffers_requested_ && output_picture_buffers_.empty()) | |
190 return; | |
191 | |
192 if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) { | |
193 // No free picture buffer to send the output to; wait for one to be returned. | |
194 return; | |
195 } | |
196 | |
197 bool eos = false; | |
198 base::TimeDelta timestamp; | |
199 int32 buf_index = 0; | |
200 do { | |
201 int32 offset = 0; | |
202 int32 size = 0; | |
203 buf_index = media_codec_->DequeueOutputBuffer( | |
204 media::MediaCodecBridge::kTimeOutNoWait, | |
205 &offset, &size, ×tamp, &eos); | |
206 switch (buf_index) { | |
207 case media::MediaCodecBridge::INFO_TRY_AGAIN_LATER: | |
208 return; | |
209 | |
210 case media::MediaCodecBridge::INFO_OUTPUT_FORMAT_CHANGED: { | |
211 int32 width, height; | |
212 media_codec_->GetOutputFormat(&width, &height); | |
213 | |
214 if (!picturebuffers_requested_) { | |
215 picturebuffers_requested_ = true; | |
216 size_ = gfx::Size(width, height); | |
217 texture_copier_.reset(new Gles2ExternalTextureCopier()); | |
218 texture_copier_->Init(width, height); | |
219 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
220 &AndroidVideoDecodeAccelerator::RequestPictureBuffers, | |
221 base::AsWeakPtr(this))); | |
222 } else { | |
223 // TODO(dwkang): support dynamic resolution changes. | |
224 // Currently, we assume that there is no resolution change in the | |
225 // input stream, so INFO_OUTPUT_FORMAT_CHANGED should not happen | |
226 // more than once. However, we allow it if the resolution matches | |
227 // the previous one, because |media_codec_| can be reset in Reset(). | |
228 RETURN_ON_FAILURE(size_ == gfx::Size(width, height), | |
229 "Dynamic resolution change is not supported.", | |
230 PLATFORM_FAILURE); | |
231 } | |
232 return; | |
233 } | |
234 | |
235 case media::MediaCodecBridge::INFO_OUTPUT_BUFFERS_CHANGED: | |
236 media_codec_->GetOutputBuffers(); | |
237 break; | |
238 } | |
239 } while (buf_index < 0); | |
240 | |
241 media_codec_->ReleaseOutputBuffer(buf_index, true); | |
242 | |
243 if (eos) { | |
244 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
245 &AndroidVideoDecodeAccelerator::NotifyFlushDone, | |
246 base::AsWeakPtr(this))); | |
247 decoder_met_eos_ = true; | |
248 } else { | |
249 int64 bitstream_buffer_id = timestamp.InMicroseconds(); | |
250 SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id)); | |
251 } | |
252 } | |
253 | |
254 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient( | |
255 int32 bitstream_id) { | |
256 LOG_LINE(); | |
257 DCHECK(thread_checker_.CalledOnValidThread()); | |
258 DCHECK_NE(bitstream_id, -1); | |
259 DCHECK(!free_picture_ids_.empty()); | |
260 | |
261 RETURN_ON_FAILURE(make_context_current_.Run(), | |
262 "Failed to make this decoder's GL context current.", | |
263 PLATFORM_FAILURE); | |
264 | |
265 int32 picture_buffer_id = free_picture_ids_.front(); | |
266 free_picture_ids_.pop(); | |
267 | |
268 float transform_matrix[16]; | |
269 surface_texture_->UpdateTexImage(); | |
270 surface_texture_->GetTransformMatrix(transform_matrix); | |
271 | |
272 OutputBufferMap::const_iterator i = | |
273 output_picture_buffers_.find(picture_buffer_id); | |
274 RETURN_ON_FAILURE(i != output_picture_buffers_.end(), | |
275 "Can't find a PictureBuffer for " << picture_buffer_id, | |
276 PLATFORM_FAILURE); | |
277 uint32 picture_buffer_texture_id = i->second.texture_id(); | |
278 | |
279 // Here, we copy |surface_texture_id_| to the picture buffer's texture | |
280 // instead of attaching a new texture to |surface_texture_| via | |
281 // attachToGLContext() because: | |
282 // 1. Once we call detachFromGLContext(), it deletes the previously | |
283 //    attached texture. | |
284 // 2. SurfaceTexture requires us to apply a transform matrix when we show | |
285 //    the texture. | |
286 texture_copier_->Copy( | |
287 surface_texture_id_, picture_buffer_texture_id, transfrom_matrix); | |
288 | |
289 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
290 &AndroidVideoDecodeAccelerator::NotifyPictureReady, | |
291 base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id))); | |
292 } | |
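The copier itself lives in gles2_external_texture_copier.h and is not part of this file. As a hedged sketch only, a copy of this kind typically renders the external texture into the target texture with a fragment shader along these lines, applying the SurfaceTexture transform matrix to the texture coordinates (names below are illustrative, not the actual copier's):

  const char kIllustrativeCopyFragmentShader[] =
      "#extension GL_OES_EGL_image_external : require\n"
      "precision mediump float;\n"
      "uniform samplerExternalOES u_texture;  // |surface_texture_id_|\n"
      "uniform mat4 u_tex_matrix;             // from GetTransformMatrix()\n"
      "varying vec2 v_texcoord;\n"
      "void main() {\n"
      "  gl_FragColor = texture2D(\n"
      "      u_texture, (u_tex_matrix * vec4(v_texcoord, 0.0, 1.0)).xy);\n"
      "}\n";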
293 | |
294 void AndroidVideoDecodeAccelerator::Decode( | |
295 const media::BitstreamBuffer& bitstream_buffer) { | |
296 LOG_LINE(); | |
297 DCHECK(thread_checker_.CalledOnValidThread()); | |
298 pending_bitstream_buffers_.push(bitstream_buffer); | |
299 | |
300 if (!io_task_is_posted_) | |
301 DoIOTask(); | |
302 } | |
303 | |
304 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | |
305 const std::vector<media::PictureBuffer>& buffers) { | |
306 LOG_LINE(); | |
307 DCHECK(thread_checker_.CalledOnValidThread()); | |
308 DCHECK(output_picture_buffers_.empty()); | |
309 | |
310 for (size_t i = 0; i < buffers.size(); ++i) { | |
311 output_picture_buffers_.insert(std::make_pair(buffers[i].id(), buffers[i])); | |
312 free_picture_ids_.push(buffers[i].id()); | |
313 } | |
314 | |
315 RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers, | |
316 "Invalid picture buffers were passed.", | |
317 INVALID_ARGUMENT); | |
318 | |
319 if (!io_task_is_posted_) | |
320 DoIOTask(); | |
321 } | |
322 | |
323 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | |
324 int32 picture_buffer_id) { | |
325 DCHECK(thread_checker_.CalledOnValidThread()); | |
326 free_picture_ids_.push(picture_buffer_id); | |
327 | |
328 if (!io_task_is_posted_) | |
329 DoIOTask(); | |
330 } | |
331 | |
332 void AndroidVideoDecodeAccelerator::Flush() { | |
333 LOG_LINE(); | |
334 DCHECK(thread_checker_.CalledOnValidThread()); | |
335 | |
336 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | |
337 } | |
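A cross-reference for the sentinel used above, summarizing QueueInput() and DequeueOutput() earlier in this file:

  // Flush() is a Decode() of a BitstreamBuffer with id -1:
  //   QueueInput():    id == -1    ->  media_codec_->QueueEOS(input_buf_index)
  //   DequeueOutput(): eos == true ->  NotifyFlushDone(), decoder_met_eos_ = true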
338 | |
339 void AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { | |
340 DCHECK(surface_texture_.get()); | |
341 | |
342 media_codec_.reset(new media::MediaCodecBridge(codec_)); | |
343 | |
344 JNIEnv* env = base::android::AttachCurrentThread(); | |
345 CHECK(env); | |
346 ScopedJavaLocalRef<jclass> cls( | |
347 base::android::GetClass(env, "android/view/Surface")); | |
348 jmethodID constructor = MethodID::Get<MethodID::TYPE_INSTANCE>( | |
349 env, cls.obj(), "<init>", "(Landroid/graphics/SurfaceTexture;)V"); | |
350 ScopedJavaLocalRef<jobject> j_surface( | |
351 env, env->NewObject( | |
352 cls.obj(), constructor, | |
353 surface_texture_->j_surface_texture().obj())); | |
354 | |
355 // The VDA API does not pass the container-indicated resolution during | |
356 // initialization, so we default to 720p here. | |
357 // TODO(dwkang): find a way to remove the following hard-coded value. | |
358 media_codec_->StartVideo(codec_, gfx::Size(1280, 720), j_surface.obj()); | |
359 content::ReleaseSurface(j_surface.obj()); | |
360 media_codec_->GetOutputBuffers(); | |
361 } | |
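For readers less familiar with JNI, the block above is simply the Java constructor Surface(SurfaceTexture); a hedged restatement:

  // Equivalent Java (reference only):
  //   Surface surface = new Surface(surfaceTexture);
  // MediaCodec renders decoded frames into this Surface, which in turn feeds
  // the GL_TEXTURE_EXTERNAL_OES texture |surface_texture_id_| created in
  // Initialize().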
362 | |
363 void AndroidVideoDecodeAccelerator::Reset() { | |
364 LOG_LINE(); | |
365 DCHECK(thread_checker_.CalledOnValidThread()); | |
366 | |
367 while (!pending_bitstream_buffers_.empty()) { | |
368 media::BitstreamBuffer& bitstream_buffer = | |
369 pending_bitstream_buffers_.front(); | |
370 pending_bitstream_buffers_.pop(); | |
371 | |
372 if (bitstream_buffer.id() != -1) { | |
373 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
374 &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer, | |
375 base::AsWeakPtr(this), bitstream_buffer.id())); | |
376 } | |
377 } | |
378 | |
379 if (!decoder_met_eos_) { | |
380 media_codec_->Reset(); | |
381 } else { | |
382 // MediaCodec should be usable after reaching EOS, but it is not on some | |
383 // devices (b/8125974). To avoid that case, we recreate the codec. | |
384 media_codec_->Stop(); | |
385 ConfigureMediaCodec(); | |
386 } | |
387 decoder_met_eos_ = false; | |
388 num_bytes_used_in_the_pending_buffer_ = 0; | |
389 state_ = NO_ERROR; | |
390 | |
391 MessageLoop::current()->PostTask(FROM_HERE, base::Bind( | |
392 &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this))); | |
393 } | |
394 | |
395 void AndroidVideoDecodeAccelerator::Destroy() { | |
396 LOG_LINE(); | |
397 DCHECK(thread_checker_.CalledOnValidThread()); | |
398 | |
399 if (media_codec_) media_codec_->Stop(); | |
400 delete this; | |
401 } | |
402 | |
403 void AndroidVideoDecodeAccelerator::NotifyInitializeDone() { | |
404 client_->NotifyInitializeDone(); | |
405 } | |
406 | |
407 void AndroidVideoDecodeAccelerator::RequestPictureBuffers() { | |
408 client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D); | |
409 } | |
410 | |
411 void AndroidVideoDecodeAccelerator::NotifyPictureReady( | |
412 const media::Picture& picture) { | |
413 client_->PictureReady(picture); | |
414 } | |
415 | |
416 void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer( | |
417 int input_buffer_id) { | |
418 client_->NotifyEndOfBitstreamBuffer(input_buffer_id); | |
419 } | |
420 | |
421 void AndroidVideoDecodeAccelerator::NotifyFlushDone() { | |
422 client_->NotifyFlushDone(); | |
423 } | |
424 | |
425 void AndroidVideoDecodeAccelerator::NotifyResetDone() { | |
426 client_->NotifyResetDone(); | |
427 } | |
428 | |
429 void AndroidVideoDecodeAccelerator::NotifyError( | |
430 media::VideoDecodeAccelerator::Error error) { | |
431 client_->NotifyError(error); | |
432 } | |
433 | |
434 } // namespace content | |
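For orientation, a hedged sketch of the client/decoder handshake implied by the methods above; the ordering is inferred from this file only, and the client side is whichever media::VideoDecodeAccelerator::Client was passed to the constructor:

  // client                          AndroidVideoDecodeAccelerator
  // ------                          -----------------------------
  // Initialize(profile)        -->  GL setup + ConfigureMediaCodec()
  //                            <--  NotifyInitializeDone()
  // Decode(bitstream_buffer)   -->  queued; drained by DoIOTask() polling
  //                            <--  NotifyEndOfBitstreamBuffer(id)
  //                            <--  ProvidePictureBuffers(kNumPictureBuffers,
  //                                                       size, GL_TEXTURE_2D)
  // AssignPictureBuffers(bufs) -->
  //                            <--  PictureReady(picture)
  // ReusePictureBuffer(id)     -->  returns the buffer to |free_picture_ids_|
  // Flush()                    -->  queues EOS (bitstream id -1)
  //                            <--  NotifyFlushDone()
  // Reset()                    -->  drops pending input, resets the codec
  //                            <--  NotifyResetDone()
  // Destroy()                  -->  stops MediaCodec and deletes the decoder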