OLD | NEW |
---|---|
(Empty) | |
1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include "content/common/gpu/media/android_video_decode_accelerator.h" | |
6 | |
7 #include <jni.h> | |
8 | |
9 #include "base/android/jni_android.h" | |
10 #include "base/android/scoped_java_ref.h" | |
11 #include "base/bind.h" | |
12 #include "base/debug/trace_event.h" | |
13 #include "base/logging.h" | |
14 #include "base/stl_util.h" | |
15 #include "base/string_util.h" | |
16 #include "content/common/android/surface_callback.h" | |
17 #include "content/common/gpu/gpu_channel.h" | |
18 #include "content/common/gpu/media/gles2_external_texture_copier.h" | |
19 #include "media/base/android/media_codec_bridge.h" | |
20 #include "media/base/bitstream_buffer.h" | |
21 #include "media/video/picture.h" | |
22 #include "third_party/angle/include/GLES2/gl2.h" | |
23 #include "third_party/angle/include/GLES2/gl2ext.h" | |
24 | |
25 using base::android::MethodID; | |
26 using base::android::ScopedJavaLocalRef; | |
27 | |
28 namespace content { | |
29 | |
#define LOG_LINE() VLOG(1) << __FUNCTION__

// Number of output picture buffers requested from the client.
enum { kNumPictureBuffers = 4 };
// Delay, in milliseconds, between successive DoDecode() polling cycles.
enum { kDecodePollDelayMs = 10 };

// Timeouts, in microseconds, for the MediaCodec dequeue calls. They are kept
// very short so a single poll never blocks the GPU child message loop for
// long; if no buffer is available yet, DoDecode() simply retries on the next
// polling cycle.
enum { kDequeueInputBufferTimeOutUs = 10 };
enum { kDequeueOutputBufferTimeOutUs = 10 };
37 | |
38 AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator( | |
39 media::VideoDecodeAccelerator::Client* client, | |
40 const base::Callback<bool(void)>& make_context_current) | |
41 : message_loop_(MessageLoop::current()), | |
42 client_(client), | |
43 make_context_current_(make_context_current), | |
44 codec_(UNKNOWN), | |
45 surface_texture_id_(0), | |
46 picturebuffer_requested_(false), | |
47 color_format_(0), | |
48 width_(0), | |
49 height_(0), | |
50 current_bitstream_id_(-1) { | |
51 LOG_LINE(); | |
52 } | |
53 | |
54 AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() { | |
55 LOG_LINE(); | |
56 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
57 } | |
58 | |
59 bool AndroidVideoDecodeAccelerator::Initialize( | |
60 media::VideoCodecProfile profile) { | |
61 LOG_LINE(); | |
62 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
63 | |
64 if (profile == media::VP8PROFILE_MAIN) { | |
65 codec_ = VP8; | |
66 } else if (profile >= media::H264PROFILE_MIN | |
67 && profile <= media::H264PROFILE_MAX) { | |
68 codec_ = H264; | |
69 }else { | |
70 LOG(ERROR) << "Unsupported profile: " << profile; | |
71 return false; | |
72 } | |
73 | |
74 if (media_codec_ == NULL) { | |
75 if (!make_context_current_.Run()) { | |
76 LOG(ERROR) << "Failed to make this decoder's GL context current."; | |
77 return false; | |
78 } | |
79 glGenTextures(1, &surface_texture_id_); | |
80 glActiveTexture(GL_TEXTURE0); | |
81 glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_); | |
82 | |
83 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST); | |
84 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST); | |
85 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
86 GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); | |
87 glTexParameteri(GL_TEXTURE_EXTERNAL_OES, | |
88 GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); | |
89 | |
90 surface_texture_ = new SurfaceTextureBridge(surface_texture_id_); | |
91 | |
92 ConfigureMediaCodec(); | |
93 } | |
94 | |
95 message_loop_->PostTask( | |
96 FROM_HERE, | |
97 base::Bind( | |
98 &AndroidVideoDecodeAccelerator::DoDecode, base::Unretained(this))); | |
99 | |
100 if (client_) | |
101 client_->NotifyInitializeDone(); | |
102 return true; | |
103 } | |
104 | |
105 void AndroidVideoDecodeAccelerator::DoDecode() { | |
106 QueueInput(); | |
107 DequeueOutput(); | |
108 | |
109 message_loop_->PostDelayedTask( | |
110 FROM_HERE, | |
111 base::Bind( | |
112 &AndroidVideoDecodeAccelerator::DoDecode, base::Unretained(this)), | |
113 base::TimeDelta::FromMilliseconds(kDecodePollDelayMs)); | |
ycheo (away)
2013/01/17 08:34:44
Why don't you make the whole TimeDelta as a consta
dwkang1
2013/01/18 07:14:08
Done.
| |
114 } | |
115 | |
116 void AndroidVideoDecodeAccelerator::QueueInput() { | |
117 if (!pending_bitstream_buffers_.empty()) { | |
118 int input_buf_index = | |
119 media_codec_->DequeueInputBuffer(kDequeueInputBufferTimeOutUs); | |
120 if (input_buf_index < 0) { | |
121 return; | |
122 } | |
123 media::BitstreamBuffer& bitstream_buffer = | |
124 pending_bitstream_buffers_.front(); | |
125 pending_bitstream_buffers_.pop(); | |
126 | |
127 int flags = 0; | |
128 if (bitstream_buffer.id() == -1) { | |
129 flags |= 4; // BUFFER_FLAG_END_OF_STREAM | |
130 } | |
131 if (bitstream_buffer.size() > 0) { | |
132 scoped_ptr<base::SharedMemory> shm( | |
133 new base::SharedMemory(bitstream_buffer.handle(), true)); | |
134 if (!shm->Map(bitstream_buffer.size())) { | |
135 LOG(ERROR) << "Failed to SharedMemory::Map()"; | |
136 if (bitstream_buffer.id() != -1) { | |
137 client_->NotifyEndOfBitstreamBuffer(bitstream_buffer.id()); | |
138 } | |
139 return; | |
ycheo (away)
2013/01/17 08:34:44
What can we do for the dequeued input_buf_index?
dwkang1
2013/01/18 07:14:08
Not much. I've noticed that other vda implementati
| |
140 } | |
141 media_codec_->PutToInputBuffer( | |
142 input_buf_index, | |
143 static_cast<const uint8*>(shm->memory()), | |
144 bitstream_buffer.size()); | |
145 } | |
146 // Abuse the presentation time argument to propagate the bitstream | |
147 // buffer ID to the output, so we can report it back to the client in | |
148 // PictureReady(). | |
149 int64 timestamp = bitstream_buffer.id(); | |
150 media_codec_->QueueInputBuffer( | |
151 input_buf_index, 0, bitstream_buffer.size(), timestamp, flags); | |
152 | |
153 if (bitstream_buffer.id() != -1) { | |
154 client_->NotifyEndOfBitstreamBuffer(bitstream_buffer.id()); | |
155 } | |
156 } | |
157 } | |
158 | |
159 void AndroidVideoDecodeAccelerator::DequeueOutput() { | |
160 if (picturebuffer_requested_ && picture_map_.empty()) { | |
161 DLOG(INFO) << "Picture buffers are not ready."; | |
162 return; | |
163 } | |
164 if (!picture_map_.empty() && free_picture_ids_.empty()) { | |
165 // Don't have any picture buffer to send. Need to wait more. | |
166 return; | |
167 } | |
168 | |
169 int32 output_offset = 0; | |
170 int32 output_size = 0; | |
171 int32 output_flag = 0; | |
172 int64 timestamp = 0; | |
173 int32 output_buf_index = 0; | |
174 do { | |
175 output_buf_index = media_codec_->DequeueOutputBuffer( | |
176 kDequeueOutputBufferTimeOutUs, &output_offset, &output_size, | |
177 ×tamp, &output_flag); | |
178 switch (output_buf_index) { | |
179 case -1: // INFO_TRY_AGAIN_LATER | |
ycheo (away)
2013/01/17 08:34:44
Sorry, this should be my work. please define these
dwkang1
2013/01/18 07:14:08
Done.
| |
180 return; | |
181 break; | |
ycheo (away)
2013/01/17 08:34:44
redundant?
dwkang1
2013/01/18 07:14:08
Removed.
| |
182 | |
183 case -2: // INFO_OUTPUT_FORMAT_CHANGED | |
184 media_codec_->GetOutputFormat(&color_format_, &width_, &height_); | |
185 DLOG(INFO) << "Output color format: " << color_format_; | |
186 DLOG(INFO) << "Output size: " << width_ << "x" << height_; | |
187 if (!picturebuffer_requested_) { | |
188 picturebuffer_requested_ = true; | |
189 texture_copier_.reset(new Gles2ExternalTextureCopier()); | |
190 texture_copier_->Init(width_, height_); | |
191 client_->ProvidePictureBuffers( | |
192 kNumPictureBuffers, | |
193 gfx::Size(width_, height_), | |
194 GL_TEXTURE_2D); | |
195 } | |
196 // TODO(dwkang): support the dynamic resolution change. | |
197 return; | |
198 break; | |
199 | |
200 case -3: // INFO_OUTPUT_BUFFERS_CHANGED | |
201 media_codec_->GetOutputBuffers(); | |
202 break; | |
203 } | |
204 } while (output_buf_index < 0); | |
205 | |
206 if (output_flag & 4) { // BUFFER_FLAG_END_OF_STREAM | |
207 if (client_) { | |
208 client_->NotifyFlushDone(); | |
209 } | |
210 } | |
211 | |
212 media_codec_->ReleaseOutputBuffer(output_buf_index, true); | |
213 current_bitstream_id_ = static_cast<int32>(timestamp); | |
214 if (current_bitstream_id_ != -1) { | |
215 SendCurrentSurfaceToClient(); | |
216 } | |
217 } | |
218 | |
219 void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient() { | |
220 LOG_LINE(); | |
221 | |
222 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
223 DCHECK_NE(current_bitstream_id_, -1); | |
224 DCHECK(!free_picture_ids_.empty()); | |
225 | |
226 int32 picture_buffer_id = free_picture_ids_.front(); | |
227 free_picture_ids_.pop(); | |
228 | |
229 if (!make_context_current_.Run()) { | |
230 LOG(ERROR) << "Failed to make this decoder's GL context current."; | |
231 return; | |
232 } | |
233 | |
234 float mtx[16]; | |
235 surface_texture_->UpdateTexImage(); | |
236 surface_texture_->GetTransformMatrix(mtx); | |
237 CopyCurrentFrameToPictureBuffer(picture_buffer_id, mtx); | |
238 | |
239 client_->PictureReady( | |
240 media::Picture(picture_buffer_id, current_bitstream_id_)); | |
241 current_bitstream_id_ = -1; | |
242 } | |
243 | |
244 void AndroidVideoDecodeAccelerator::CopyCurrentFrameToPictureBuffer( | |
245 int32 picture_buffer_id, float transfrom_matrix[16]) { | |
246 PictureMap::const_iterator i = picture_map_.find(picture_buffer_id); | |
247 if (i == picture_map_.end()) { | |
248 LOG(ERROR) << "Can't find a PuctureBuffer for " << picture_buffer_id; | |
249 return; | |
250 } | |
251 uint32 picture_buffer_texture_id = i->second.texture_id(); | |
252 texture_copier_->Copy(surface_texture_id_, GL_TEXTURE_EXTERNAL_OES, | |
253 transfrom_matrix, | |
254 picture_buffer_texture_id, GL_TEXTURE_2D); | |
255 } | |
256 | |
257 void AndroidVideoDecodeAccelerator::Decode( | |
258 const media::BitstreamBuffer& bitstream_buffer) { | |
259 LOG_LINE(); | |
260 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
261 if (!client_) { | |
262 return; | |
263 } | |
264 pending_bitstream_buffers_.push(bitstream_buffer); | |
265 } | |
266 | |
267 void AndroidVideoDecodeAccelerator::AssignPictureBuffers( | |
268 const std::vector<media::PictureBuffer>& buffers) { | |
269 LOG_LINE(); | |
270 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
271 DCHECK(picture_map_.empty()); | |
272 | |
273 for (size_t i = 0; i < buffers.size(); ++i) { | |
274 picture_map_.insert(std::make_pair(buffers[i].id(), buffers[i])); | |
275 free_picture_ids_.push(buffers[i].id()); | |
276 } | |
277 } | |
278 | |
279 void AndroidVideoDecodeAccelerator::ReusePictureBuffer( | |
280 int32 picture_buffer_id) { | |
281 LOG_LINE(); | |
282 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
283 free_picture_ids_.push(picture_buffer_id); | |
284 } | |
285 | |
286 void AndroidVideoDecodeAccelerator::Flush() { | |
287 LOG_LINE(); | |
288 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
289 | |
290 Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0)); | |
291 } | |
292 | |
293 void AndroidVideoDecodeAccelerator::ConfigureMediaCodec() { | |
294 DCHECK(surface_texture_.get()); | |
295 DCHECK(codec_ == H264 || codec_ == VP8); | |
296 | |
297 std::string mime; | |
298 if (codec_ == VP8) { | |
299 mime = "video/x-vnd.on2.vp8"; | |
300 } else if (codec_ == H264) { | |
301 mime = "video/avc"; | |
302 } else { | |
303 LOG(ERROR) << "Unsupported codec type " << codec_; | |
304 NOTREACHED(); | |
305 } | |
306 media_codec_.reset(new media::MediaCodecBridge(mime)); | |
307 | |
308 JNIEnv* env = base::android::AttachCurrentThread(); | |
309 CHECK(env); | |
310 ScopedJavaLocalRef<jclass> cls( | |
311 base::android::GetClass(env, "android/view/Surface")); | |
312 jmethodID constructor = MethodID::Get<MethodID::TYPE_INSTANCE>( | |
313 env, cls.obj(), "<init>", "(Landroid/graphics/SurfaceTexture;)V"); | |
314 ScopedJavaLocalRef<jobject> j_surface( | |
315 env, env->NewObject( | |
316 cls.obj(), constructor, | |
317 surface_texture_->j_surface_texture().obj())); | |
318 | |
319 // VDA does not pass the container indicated resolution in the initialization | |
320 // phase. Here, we set 1080p by default. | |
321 media_codec_->ConfigureVideo( | |
322 mime, 1920, 1080, NULL, 0, NULL, 0, j_surface.obj()); | |
323 content::ReleaseSurface(j_surface.obj()); | |
324 | |
325 media_codec_->Start(); | |
326 media_codec_->GetInputBuffers(); | |
327 media_codec_->GetOutputBuffers(); | |
328 } | |
329 | |
330 void AndroidVideoDecodeAccelerator::Reset() { | |
331 LOG_LINE(); | |
332 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
333 | |
334 while(!pending_bitstream_buffers_.empty()) { | |
335 media::BitstreamBuffer& bitstream_buffer = | |
336 pending_bitstream_buffers_.front(); | |
337 pending_bitstream_buffers_.pop(); | |
338 | |
339 if (bitstream_buffer.id() != -1) { | |
340 client_->NotifyEndOfBitstreamBuffer(bitstream_buffer.id()); | |
341 } | |
342 } | |
343 media_codec_->Flush(); | |
344 media_codec_->Stop(); | |
ycheo (away)
2013/01/17 08:34:44
Why not Release()?
dwkang1
2013/01/18 07:14:08
Done.
| |
345 ConfigureMediaCodec(); | |
346 | |
347 if (client_) { | |
348 client_->NotifyResetDone(); | |
349 } | |
350 } | |
351 | |
352 void AndroidVideoDecodeAccelerator::Destroy() { | |
353 LOG_LINE(); | |
354 DCHECK_EQ(message_loop_, MessageLoop::current()); | |
355 } | |
356 | |
357 } // namespace content | |
OLD | NEW |