OLD | NEW |
---|---|
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/common/gpu/media/gpu_video_encode_accelerator.h" | 5 #include "content/common/gpu/media/gpu_video_encode_accelerator.h" |
6 | 6 |
7 #include "base/callback.h" | 7 #include "base/callback.h" |
8 #include "base/command_line.h" | 8 #include "base/command_line.h" |
9 #include "base/logging.h" | 9 #include "base/logging.h" |
10 #include "base/memory/shared_memory.h" | 10 #include "base/memory/shared_memory.h" |
11 #include "base/message_loop/message_loop_proxy.h" | 11 #include "base/message_loop/message_loop_proxy.h" |
12 #include "build/build_config.h" | 12 #include "build/build_config.h" |
13 #include "content/common/gpu/gpu_channel.h" | 13 #include "content/common/gpu/gpu_channel.h" |
14 #include "content/common/gpu/gpu_messages.h" | 14 #include "content/common/gpu/gpu_messages.h" |
15 #include "content/public/common/content_switches.h" | 15 #include "content/public/common/content_switches.h" |
16 #include "ipc/ipc_message_macros.h" | 16 #include "ipc/ipc_message_macros.h" |
17 #include "media/base/limits.h" | 17 #include "media/base/limits.h" |
18 #include "media/base/video_frame.h" | 18 #include "media/base/video_frame.h" |
19 | 19 |
20 #if defined(OS_CHROMEOS) | 20 #if defined(OS_CHROMEOS) |
21 | 21 #if defined(USE_OZONE) || defined(ARCH_CPU_ARMEL) |
22 #if defined(ARCH_CPU_ARMEL) && defined(USE_X11) | |
23 #include "content/common/gpu/media/v4l2_video_encode_accelerator.h" | 22 #include "content/common/gpu/media/v4l2_video_encode_accelerator.h" |
24 #elif defined(ARCH_CPU_X86_FAMILY) | 23 #endif // defined(USE_OZONE) || defined(ARCH_CPU_ARMEL) |
24 #if defined(ARCH_CPU_X86_FAMILY) | |
25 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" | 25 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" |
26 #endif | 26 #endif // defined(ARCH_CPU_X86_FAMILY) |
27 | |
28 #elif defined(OS_ANDROID) && defined(ENABLE_WEBRTC) | 27 #elif defined(OS_ANDROID) && defined(ENABLE_WEBRTC) |
29 #include "content/common/gpu/media/android_video_encode_accelerator.h" | 28 #include "content/common/gpu/media/android_video_encode_accelerator.h" |
30 #endif | 29 #endif |
31 | 30 |
32 namespace content { | 31 namespace content { |
33 | 32 |
34 static bool MakeDecoderContextCurrent( | 33 static bool MakeDecoderContextCurrent( |
35 const base::WeakPtr<GpuCommandBufferStub> stub) { | 34 const base::WeakPtr<GpuCommandBufferStub> stub) { |
36 if (!stub) { | 35 if (!stub) { |
37 DLOG(ERROR) << "Stub is gone; won't MakeCurrent()."; | 36 DLOG(ERROR) << "Stub is gone; won't MakeCurrent()."; |
(...skipping 49 matching lines...) |
87 if (input_visible_size.width() > media::limits::kMaxDimension || | 86 if (input_visible_size.width() > media::limits::kMaxDimension || |
88 input_visible_size.height() > media::limits::kMaxDimension || | 87 input_visible_size.height() > media::limits::kMaxDimension || |
89 input_visible_size.GetArea() > media::limits::kMaxCanvas) { | 88 input_visible_size.GetArea() > media::limits::kMaxCanvas) { |
90 DLOG(ERROR) << "GpuVideoEncodeAccelerator::Initialize(): " | 89 DLOG(ERROR) << "GpuVideoEncodeAccelerator::Initialize(): " |
91 "input_visible_size " << input_visible_size.ToString() | 90 "input_visible_size " << input_visible_size.ToString() |
92 << " too large"; | 91 << " too large"; |
93 SendCreateEncoderReply(init_done_msg, false); | 92 SendCreateEncoderReply(init_done_msg, false); |
94 return; | 93 return; |
95 } | 94 } |
96 | 95 |
97 encoder_ = CreateEncoder(); | 96 std::vector<GpuVideoEncodeAccelerator::CreateVEACb> |
98 if (!encoder_) { | 97 create_vea_cbs = CreateVEACbs(); |
99 DLOG(ERROR) | 98 // Try all possible encoders and use the first successful encoder. |
100 << "GpuVideoEncodeAccelerator::Initialize(): VEA creation failed"; | 99 for (size_t i = 0; i < create_vea_cbs.size(); ++i) { |
101 SendCreateEncoderReply(init_done_msg, false); | 100 encoder_ = create_vea_cbs[i].Run(); |
102 return; | 101 if (encoder_ && encoder_->Initialize(input_format, |
102 input_visible_size, | |
103 output_profile, | |
104 initial_bitrate, | |
105 this)) { | |
106 input_format_ = input_format; | |
107 input_visible_size_ = input_visible_size; | |
108 SendCreateEncoderReply(init_done_msg, true); | |
109 return; | |
110 } | |
103 } | 111 } |
104 if (!encoder_->Initialize(input_format, | 112 encoder_.reset(); |
105 input_visible_size, | 113 DLOG(ERROR) |
106 output_profile, | 114 << "GpuVideoEncodeAccelerator::Initialize(): VEA initialization failed"; |
107 initial_bitrate, | 115 SendCreateEncoderReply(init_done_msg, false); |
108 this)) { | |
109 DLOG(ERROR) | |
110 << "GpuVideoEncodeAccelerator::Initialize(): VEA initialization failed"; | |
111 SendCreateEncoderReply(init_done_msg, false); | |
112 return; | |
113 } | |
114 input_format_ = input_format; | |
115 input_visible_size_ = input_visible_size; | |
116 SendCreateEncoderReply(init_done_msg, true); | |
117 } | 116 } |
118 | 117 |
119 bool GpuVideoEncodeAccelerator::OnMessageReceived(const IPC::Message& message) { | 118 bool GpuVideoEncodeAccelerator::OnMessageReceived(const IPC::Message& message) { |
120 bool handled = true; | 119 bool handled = true; |
121 IPC_BEGIN_MESSAGE_MAP(GpuVideoEncodeAccelerator, message) | 120 IPC_BEGIN_MESSAGE_MAP(GpuVideoEncodeAccelerator, message) |
122 IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderMsg_Encode, OnEncode) | 121 IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderMsg_Encode, OnEncode) |
123 IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderMsg_UseOutputBitstreamBuffer, | 122 IPC_MESSAGE_HANDLER(AcceleratedVideoEncoderMsg_UseOutputBitstreamBuffer, |
124 OnUseOutputBitstreamBuffer) | 123 OnUseOutputBitstreamBuffer) |
125 IPC_MESSAGE_HANDLER( | 124 IPC_MESSAGE_HANDLER( |
126 AcceleratedVideoEncoderMsg_RequestEncodingParametersChange, | 125 AcceleratedVideoEncoderMsg_RequestEncodingParametersChange, |
(...skipping 30 matching lines...) |
157 DCHECK(stub_); | 156 DCHECK(stub_); |
158 stub_->channel()->RemoveRoute(host_route_id_); | 157 stub_->channel()->RemoveRoute(host_route_id_); |
159 stub_->RemoveDestructionObserver(this); | 158 stub_->RemoveDestructionObserver(this); |
160 encoder_.reset(); | 159 encoder_.reset(); |
161 delete this; | 160 delete this; |
162 } | 161 } |
163 | 162 |
164 // static | 163 // static |
165 std::vector<gpu::VideoEncodeAcceleratorSupportedProfile> | 164 std::vector<gpu::VideoEncodeAcceleratorSupportedProfile> |
166 GpuVideoEncodeAccelerator::GetSupportedProfiles() { | 165 GpuVideoEncodeAccelerator::GetSupportedProfiles() { |
167 scoped_ptr<media::VideoEncodeAccelerator> encoder = CreateEncoder(); | 166 std::vector<media::VideoEncodeAccelerator::SupportedProfile> profiles; |
168 if (!encoder) | 167 std::vector<GpuVideoEncodeAccelerator::CreateVEACb> |
169 return std::vector<gpu::VideoEncodeAcceleratorSupportedProfile>(); | 168 create_vea_cbs = CreateVEACbs(); |
170 return ConvertMediaToGpuProfiles(encoder->GetSupportedProfiles()); | 169 |
170 for (size_t i = 0; i < create_vea_cbs.size(); ++i) { | |
171 scoped_ptr<media::VideoEncodeAccelerator> encoder = create_vea_cbs[i].Run(); | |
172 if (!encoder) | |
173 continue; | |
174 std::vector<media::VideoEncodeAccelerator::SupportedProfile> | |
175 vea_profiles = encoder->GetSupportedProfiles(); | |
176 profiles.insert(profiles.end(), vea_profiles.begin(), vea_profiles.end()); | |
177 } | |
178 return ConvertMediaToGpuProfiles(profiles); | |
171 } | 179 } |
172 | 180 |
181 // static | |
173 std::vector<gpu::VideoEncodeAcceleratorSupportedProfile> | 182 std::vector<gpu::VideoEncodeAcceleratorSupportedProfile> |
174 GpuVideoEncodeAccelerator::ConvertMediaToGpuProfiles(const std::vector< | 183 GpuVideoEncodeAccelerator::ConvertMediaToGpuProfiles(const std::vector< |
175 media::VideoEncodeAccelerator::SupportedProfile>& media_profiles) { | 184 media::VideoEncodeAccelerator::SupportedProfile>& media_profiles) { |
176 std::vector<gpu::VideoEncodeAcceleratorSupportedProfile> profiles; | 185 std::vector<gpu::VideoEncodeAcceleratorSupportedProfile> profiles; |
177 for (size_t i = 0; i < media_profiles.size(); i++) { | 186 for (size_t i = 0; i < media_profiles.size(); i++) { |
178 gpu::VideoEncodeAcceleratorSupportedProfile profile; | 187 gpu::VideoEncodeAcceleratorSupportedProfile profile; |
179 profile.profile = | 188 profile.profile = |
180 static_cast<gpu::VideoCodecProfile>(media_profiles[i].profile); | 189 static_cast<gpu::VideoCodecProfile>(media_profiles[i].profile); |
181 profile.max_resolution = media_profiles[i].max_resolution; | 190 profile.max_resolution = media_profiles[i].max_resolution; |
182 profile.max_framerate_numerator = media_profiles[i].max_framerate_numerator; | 191 profile.max_framerate_numerator = media_profiles[i].max_framerate_numerator; |
183 profile.max_framerate_denominator = | 192 profile.max_framerate_denominator = |
184 media_profiles[i].max_framerate_denominator; | 193 media_profiles[i].max_framerate_denominator; |
185 profiles.push_back(profile); | 194 profiles.push_back(profile); |
186 } | 195 } |
187 return profiles; | 196 return profiles; |
188 } | 197 } |
189 | 198 |
199 // static | |
200 std::vector<GpuVideoEncodeAccelerator::CreateVEACb> | |
201 GpuVideoEncodeAccelerator::CreateVEACbs() { | |
202 std::vector<GpuVideoEncodeAccelerator::CreateVEACb> create_vea_cbs; | |
piman (2015/01/05 23:50:06): Same wrt static array. They're all static too, so …
henryhsu (2015/01/06 08:13:40): For consistency, I remove callback function and re…
(A stand-alone sketch of the factory-callback pattern under discussion follows the diff.)
203 create_vea_cbs.push_back(base::Bind( | |
204 &GpuVideoEncodeAccelerator::CreateV4L2VEA)); | |
205 create_vea_cbs.push_back(base::Bind( | |
206 &GpuVideoEncodeAccelerator::CreateVaapiVEA)); | |
207 create_vea_cbs.push_back(base::Bind( | |
208 &GpuVideoEncodeAccelerator::CreateAndroidVEA)); | |
209 return create_vea_cbs; | |
210 } | |
211 | |
212 // static | |
190 scoped_ptr<media::VideoEncodeAccelerator> | 213 scoped_ptr<media::VideoEncodeAccelerator> |
191 GpuVideoEncodeAccelerator::CreateEncoder() { | 214 GpuVideoEncodeAccelerator::CreateV4L2VEA() { |
192 scoped_ptr<media::VideoEncodeAccelerator> encoder; | 215 scoped_ptr<media::VideoEncodeAccelerator> encoder; |
193 #if defined(OS_CHROMEOS) | 216 #if defined(OS_CHROMEOS) && (defined(USE_OZONE) || defined(ARCH_CPU_ARMEL)) |
194 #if defined(ARCH_CPU_ARMEL) && defined(USE_X11) | |
195 scoped_ptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kEncoder); | 217 scoped_ptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kEncoder); |
196 if (device) | 218 if (device) |
197 encoder.reset(new V4L2VideoEncodeAccelerator(device.Pass())); | 219 encoder.reset(new V4L2VideoEncodeAccelerator(device.Pass())); |
198 #elif defined(ARCH_CPU_X86_FAMILY) | 220 #endif |
221 return encoder.Pass(); | |
222 } | |
223 | |
224 // static | |
225 scoped_ptr<media::VideoEncodeAccelerator> | |
226 GpuVideoEncodeAccelerator::CreateVaapiVEA() { | |
227 scoped_ptr<media::VideoEncodeAccelerator> encoder; | |
228 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY) | |
199 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess(); | 229 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess(); |
200 if (!cmd_line->HasSwitch(switches::kDisableVaapiAcceleratedVideoEncode)) | 230 if (!cmd_line->HasSwitch(switches::kDisableVaapiAcceleratedVideoEncode)) |
201 encoder.reset(new VaapiVideoEncodeAccelerator()); | 231 encoder.reset(new VaapiVideoEncodeAccelerator()); |
202 #endif | 232 #endif |
203 #elif defined(OS_ANDROID) && defined(ENABLE_WEBRTC) | 233 return encoder.Pass(); |
234 } | |
235 | |
236 // static | |
237 scoped_ptr<media::VideoEncodeAccelerator> | |
238 GpuVideoEncodeAccelerator::CreateAndroidVEA() { | |
239 scoped_ptr<media::VideoEncodeAccelerator> encoder; | |
240 #if defined(OS_ANDROID) && defined(ENABLE_WEBRTC) | |
204 encoder.reset(new AndroidVideoEncodeAccelerator()); | 241 encoder.reset(new AndroidVideoEncodeAccelerator()); |
205 #endif | 242 #endif |
206 return encoder.Pass(); | 243 return encoder.Pass(); |
207 } | 244 } |
208 | 245 |
209 void GpuVideoEncodeAccelerator::OnEncode(int32 frame_id, | 246 void GpuVideoEncodeAccelerator::OnEncode(int32 frame_id, |
210 base::SharedMemoryHandle buffer_handle, | 247 base::SharedMemoryHandle buffer_handle, |
211 uint32 buffer_size, | 248 uint32 buffer_size, |
212 bool force_keyframe) { | 249 bool force_keyframe) { |
213 DVLOG(3) << "GpuVideoEncodeAccelerator::OnEncode(): frame_id=" << frame_id | 250 DVLOG(3) << "GpuVideoEncodeAccelerator::OnEncode(): frame_id=" << frame_id |
(...skipping 100 matching lines...) |
314 stub_->channel()->Send(message); | 351 stub_->channel()->Send(message); |
315 } | 352 } |
316 | 353 |
317 void GpuVideoEncodeAccelerator::SendCreateEncoderReply(IPC::Message* message, | 354 void GpuVideoEncodeAccelerator::SendCreateEncoderReply(IPC::Message* message, |
318 bool succeeded) { | 355 bool succeeded) { |
319 GpuCommandBufferMsg_CreateVideoEncoder::WriteReplyParams(message, succeeded); | 356 GpuCommandBufferMsg_CreateVideoEncoder::WriteReplyParams(message, succeeded); |
320 Send(message); | 357 Send(message); |
321 } | 358 } |
322 | 359 |
323 } // namespace content | 360 } // namespace content |
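
The change above replaces the single CreateEncoder() with CreateVEACbs(), a list of per-platform factory callbacks that Initialize() walks until one encoder both constructs and initializes. The sketch below is a minimal stand-alone illustration of that pattern only: the types VideoEncoder, V4L2Encoder, VaapiEncoder and their Initialize() signature are hypothetical simplifications, and std::function/std::unique_ptr/std::make_unique stand in for Chromium's base::Callback, base::Bind and scoped_ptr. It is not the media::VideoEncodeAccelerator API.

```cpp
// Sketch of "try each encoder factory until one initializes" (assumed
// simplification of the CL's CreateVEACbs() + Initialize() loop).
#include <functional>
#include <memory>
#include <vector>

struct VideoEncoder {
  virtual ~VideoEncoder() = default;
  // The real VEA takes input format, visible size, output profile and
  // initial bitrate; omitted here to keep the sketch self-contained.
  virtual bool Initialize() = 0;
};

struct V4L2Encoder : VideoEncoder {
  bool Initialize() override { return false; }  // e.g. no V4L2 device present.
};

struct VaapiEncoder : VideoEncoder {
  bool Initialize() override { return true; }   // e.g. VA-API available.
};

using CreateVEACb = std::function<std::unique_ptr<VideoEncoder>()>;

// Analogue of CreateVEACbs(): each factory may return null on platforms
// where that encoder is compiled out or its device is missing.
static std::vector<CreateVEACb> CreateVEACbs() {
  return {
      []() -> std::unique_ptr<VideoEncoder> {
        return std::make_unique<V4L2Encoder>();
      },
      []() -> std::unique_ptr<VideoEncoder> {
        return std::make_unique<VaapiEncoder>();
      },
  };
}

// Analogue of the new Initialize() loop: keep the first encoder that both
// constructs and initializes; callers treat null as overall failure.
std::unique_ptr<VideoEncoder> CreateFirstWorkingEncoder() {
  for (const CreateVEACb& create : CreateVEACbs()) {
    std::unique_ptr<VideoEncoder> encoder = create();
    if (encoder && encoder->Initialize())
      return encoder;
  }
  return nullptr;  // Mirrors encoder_.reset() plus the failure reply in the CL.
}

int main() {
  std::unique_ptr<VideoEncoder> encoder = CreateFirstWorkingEncoder();
  return encoder ? 0 : 1;
}
```

The CL applies the same iteration in GetSupportedProfiles(), running every factory and merging the profiles reported by each encoder that can be created, instead of querying only a single encoder.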