Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(213)

Side by Side Diff: content/common/gpu/media/video_decode_accelerator_unittest.cc

Issue 1882373004: Migrate content/common/gpu/media code to media/gpu (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Squash and rebase Created 4 years, 7 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // The bulk of this file is support code; sorry about that. Here's an overview
6 // to hopefully help readers of this code:
7 // - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or
8 // Win/EGL.
9 // - ClientState is an enum for the state of the decode client used by the test.
10 // - ClientStateNotification is a barrier abstraction that allows the test code
11 // to be written sequentially and wait for the decode client to see certain
12 // state transitions.
13 // - GLRenderingVDAClient is a VideoDecodeAccelerator::Client implementation
14 // - Finally actual TEST cases are at the bottom of this file, using the above
15 // infrastructure.
16
17 #include <fcntl.h>
18 #include <stddef.h>
19 #include <stdint.h>
20 #include <sys/stat.h>
21 #include <sys/types.h>
22
23 #include <algorithm>
24 #include <deque>
25 #include <map>
26 #include <memory>
27 #include <utility>
28
29 #include "base/at_exit.h"
30 #include "base/bind.h"
31 #include "base/callback_helpers.h"
32 #include "base/command_line.h"
33 #include "base/files/file.h"
34 #include "base/files/file_util.h"
35 #include "base/format_macros.h"
36 #include "base/macros.h"
37 #include "base/md5.h"
38 #include "base/process/process_handle.h"
39 #include "base/stl_util.h"
40 #include "base/strings/string_number_conversions.h"
41 #include "base/strings/string_split.h"
42 #include "base/strings/stringize_macros.h"
43 #include "base/strings/stringprintf.h"
44 #include "base/strings/utf_string_conversions.h"
45 #include "base/synchronization/condition_variable.h"
46 #include "base/synchronization/lock.h"
47 #include "base/synchronization/waitable_event.h"
48 #include "base/thread_task_runner_handle.h"
49 #include "base/threading/thread.h"
50 #include "build/build_config.h"
51 #include "content/common/gpu/media/fake_video_decode_accelerator.h"
52 #include "content/common/gpu/media/gpu_video_decode_accelerator_factory_impl.h"
53 #include "content/common/gpu/media/rendering_helper.h"
54 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
55 #include "gpu/command_buffer/service/gpu_preferences.h"
56 #include "media/filters/h264_parser.h"
57 #include "testing/gtest/include/gtest/gtest.h"
58 #include "ui/gfx/codec/png_codec.h"
59 #include "ui/gl/gl_image.h"
60
61 #if defined(OS_WIN)
62 #include "base/win/windows_version.h"
63 #include "content/common/gpu/media/dxva_video_decode_accelerator_win.h"
64 #elif defined(OS_CHROMEOS)
65 #if defined(USE_V4L2_CODEC)
66 #include "content/common/gpu/media/v4l2_device.h"
67 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h"
68 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
69 #endif
70 #if defined(ARCH_CPU_X86_FAMILY)
71 #include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
72 #include "content/common/gpu/media/vaapi_wrapper.h"
73 #endif // defined(ARCH_CPU_X86_FAMILY)
74 #else
75 #error The VideoAccelerator tests are not supported on this platform.
76 #endif // OS_WIN
77
78 #if defined(USE_OZONE)
79 #include "ui/ozone/public/native_pixmap.h"
80 #include "ui/ozone/public/ozone_gpu_test_helper.h"
81 #include "ui/ozone/public/ozone_platform.h"
82 #include "ui/ozone/public/surface_factory_ozone.h"
83 #endif // defined(USE_OZONE)
84
85 using media::VideoDecodeAccelerator;
86
87 namespace content {
88 namespace {
89
90 using base::MakeTuple;
91
// Values optionally filled in from flags; see main() below.
// The syntax of multiple test videos is:
//  test-video1;test-video2;test-video3
// where only the first video is required and other optional videos would be
// decoded by concurrent decoders.
// The syntax of each test-video is:
//  filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender:profile
// where only the first field is required. Value details:
// - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8/9 stream.
// - |width| and |height| are in pixels.
// - |numframes| is the number of picture frames in the file.
// - |numfragments| NALU (h264) or frame (VP8/9) count in the stream.
// - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds
//   expected to be achieved with and without rendering to the screen, resp.
//   (the latter tests just decode speed).
// - |profile| is the media::VideoCodecProfile set during Initialization.
// An empty value for a numeric field means "ignore".
const base::FilePath::CharType* g_test_video_data =
    // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11");
    FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");

// The file path of the test output log. This is used to communicate the test
// results to CrOS autotests. We can enable the log and specify the filename by
// the "--output_log" switch.
const base::FilePath::CharType* g_output_log = NULL;

// Target rendering rate. The value is set by the switch "--rendering_fps".
double g_rendering_fps = 60;

// The value is set by the switch "--rendering_warm_up".
int g_rendering_warm_up = 0;

// The value is set by the switch "--num_play_throughs". The video will play
// the specified number of times. In different test cases, we have different
// values for |num_play_throughs|. This setting will override the value. A
// special value "0" means no override.
int g_num_play_throughs = 0;

// Fake decode: when non-zero, FakeVideoDecodeAccelerator is used instead of a
// real hardware VDA (presumably plumbed into the client's |fake_decoder|
// parameter; see GLRenderingVDAClient::CreateAndStartDecoder).
int g_fake_decoder = 0;

// Test buffer import into VDA, providing buffers allocated by us, instead of
// requesting the VDA itself to allocate buffers.
bool g_test_import = false;

// Environment to store rendering thread.
class VideoDecodeAcceleratorTestEnvironment;
VideoDecodeAcceleratorTestEnvironment* g_env;

// Magic constants for differentiating the reasons for NotifyResetDone being
// called.
enum ResetPoint {
  // Reset() just after calling Decode() with a fragment containing config info.
  RESET_AFTER_FIRST_CONFIG_INFO = -4,
  // Reset() issued before any fragment has been decoded.
  START_OF_STREAM_RESET = -3,
  // Reset() issued mid-stream, after some frames were delivered.
  MID_STREAM_RESET = -2,
  // No mid-stream Reset(); only the normal end-of-stream reset (default).
  END_OF_STREAM_RESET = -1
};

const int kMaxResetAfterFrameNum = 100;
const int kMaxFramesToDelayReuse = 64;
// How long delayed ReusePictureBuffer() calls are deferred (see
// GLRenderingVDAClient::ReturnPicture).
const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
// Simulate WebRTC and call VDA::Decode 30 times per second.
const int kWebRtcDecodeCallsPerSecond = 30;
// Simulate an adjustment to a larger number of pictures to make sure the
// decoder supports an upwards adjustment.
const int kExtraPictureBuffers = 2;
158
159 struct TestVideoFile {
160 explicit TestVideoFile(base::FilePath::StringType file_name)
161 : file_name(file_name),
162 width(-1),
163 height(-1),
164 num_frames(-1),
165 num_fragments(-1),
166 min_fps_render(-1),
167 min_fps_no_render(-1),
168 profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
169 reset_after_frame_num(END_OF_STREAM_RESET) {
170 }
171
172 base::FilePath::StringType file_name;
173 int width;
174 int height;
175 int num_frames;
176 int num_fragments;
177 int min_fps_render;
178 int min_fps_no_render;
179 media::VideoCodecProfile profile;
180 int reset_after_frame_num;
181 std::string data_str;
182 };
183
// Dimensions of the page onto which decoded thumbnails are composited for
// MD5 comparison, and of each individual thumbnail on that page.
const gfx::Size kThumbnailsPageSize(1600, 1200);
const gfx::Size kThumbnailSize(160, 120);
// Length of a hex-encoded MD5 digest.
const int kMD5StringLength = 32;
187
188 // Read in golden MD5s for the thumbnailed rendering of this video
189 void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
190 std::vector<std::string>* md5_strings) {
191 base::FilePath filepath(video_file->file_name);
192 filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
193 std::string all_md5s;
194 base::ReadFileToString(filepath, &all_md5s);
195 *md5_strings = base::SplitString(
196 all_md5s, "\n", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
197 // Check these are legitimate MD5s.
198 for (const std::string& md5_string : *md5_strings) {
199 // Ignore the empty string added by SplitString
200 if (!md5_string.length())
201 continue;
202 // Ignore comments
203 if (md5_string.at(0) == '#')
204 continue;
205
206 LOG_ASSERT(static_cast<int>(md5_string.length()) ==
207 kMD5StringLength) << md5_string;
208 bool hex_only = std::count_if(md5_string.begin(),
209 md5_string.end(), isxdigit) ==
210 kMD5StringLength;
211 LOG_ASSERT(hex_only) << md5_string;
212 }
213 LOG_ASSERT(md5_strings->size() >= 1U) << " MD5 checksum file ("
214 << filepath.MaybeAsASCII()
215 << ") missing or empty.";
216 }
217
// State of the GLRenderingVDAClient below. Order matters here as the test
// makes assumptions about it: e.g. PictureReady() asserts state_ < CS_RESET,
// and DeleteDecoder() cascades upward through every remaining state.
enum ClientState {
  CS_CREATED = 0,
  CS_DECODER_SET = 1,
  CS_INITIALIZED = 2,
  CS_FLUSHING = 3,
  CS_FLUSHED = 4,
  CS_RESETTING = 5,
  CS_RESET = 6,
  CS_ERROR = 7,
  CS_DESTROYED = 8,
  CS_MAX,  // Must be last entry.
};
232
// Initialize the GPU thread for rendering. We only need to setup once
// for all test cases.
class VideoDecodeAcceleratorTestEnvironment : public ::testing::Environment {
 public:
  VideoDecodeAcceleratorTestEnvironment()
      : rendering_thread_("GLRenderingVDAClientThread") {}

  // Starts the rendering thread and performs the one-off rendering setup on
  // it, blocking until that setup has finished.
  void SetUp() override {
    rendering_thread_.Start();

    base::WaitableEvent done(false, false);
    rendering_thread_.task_runner()->PostTask(
        FROM_HERE, base::Bind(&RenderingHelper::InitializeOneOff, &done));
    done.Wait();

#if defined(USE_OZONE)
    gpu_helper_.reset(new ui::OzoneGpuTestHelper());
    // Need to initialize after the rendering side since the rendering side
    // initializes the "GPU" parts of Ozone.
    //
    // This also needs to be done in the test environment since this shouldn't
    // be initialized multiple times for the same Ozone platform.
    gpu_helper_->Initialize(base::ThreadTaskRunnerHandle::Get(),
                            GetRenderingTaskRunner());
#endif
  }

  // Tears down in reverse order of SetUp(): the Ozone helper first (if any),
  // then the rendering thread.
  void TearDown() override {
#if defined(USE_OZONE)
    gpu_helper_.reset();
#endif
    rendering_thread_.Stop();
  }

  // Task runner for work that must run on the rendering thread.
  scoped_refptr<base::SingleThreadTaskRunner> GetRenderingTaskRunner() const {
    return rendering_thread_.task_runner();
  }

 private:
  base::Thread rendering_thread_;
#if defined(USE_OZONE)
  std::unique_ptr<ui::OzoneGpuTestHelper> gpu_helper_;
#endif

  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTestEnvironment);
};
279
// A helper class used to manage the lifetime of a Texture. Can be backed by
// either a buffer allocated by the VDA, or by a preallocated pixmap.
class TextureRef : public base::RefCounted<TextureRef> {
 public:
  // Wraps an existing texture; |no_longer_needed_cb| runs when the last
  // reference to this TextureRef is dropped.
  static scoped_refptr<TextureRef> Create(
      uint32_t texture_id,
      const base::Closure& no_longer_needed_cb);

  // As Create(), but additionally allocates a native pixmap of |size| and
  // |pixel_format| backing the texture. Ozone-only: on other platforms this
  // returns a null ref (callers LOG_ASSERT on the result).
  static scoped_refptr<TextureRef> CreatePreallocated(
      uint32_t texture_id,
      const base::Closure& no_longer_needed_cb,
      media::VideoPixelFormat pixel_format,
      const gfx::Size& size);

  // Duplicates the backing pixmap's dmabuf fd into GpuMemoryBufferHandles
  // suitable for VDA buffer import; empty when there is no pixmap backing.
  std::vector<gfx::GpuMemoryBufferHandle> ExportGpuMemoryBufferHandles() const;

  int32_t texture_id() const { return texture_id_; }

 private:
  friend class base::RefCounted<TextureRef>;

  TextureRef(uint32_t texture_id, const base::Closure& no_longer_needed_cb)
      : texture_id_(texture_id), no_longer_needed_cb_(no_longer_needed_cb) {}

  // Runs |no_longer_needed_cb_| exactly once.
  ~TextureRef();

  uint32_t texture_id_;
  base::Closure no_longer_needed_cb_;
#if defined(USE_OZONE)
  // Optional pixmap backing; only set by CreatePreallocated().
  scoped_refptr<ui::NativePixmap> pixmap_;
#endif
};
312
313 TextureRef::~TextureRef() {
314 base::ResetAndReturn(&no_longer_needed_cb_).Run();
315 }
316
317 // static
318 scoped_refptr<TextureRef> TextureRef::Create(
319 uint32_t texture_id,
320 const base::Closure& no_longer_needed_cb) {
321 return make_scoped_refptr(new TextureRef(texture_id, no_longer_needed_cb));
322 }
323
#if defined(USE_OZONE)
// Translates a media pixel format into the equivalent gfx buffer format used
// when allocating native pixmaps. Any format the test does not understand is
// a fatal error (the fallback return only silences the compiler).
gfx::BufferFormat VideoPixelFormatToGfxBufferFormat(
    media::VideoPixelFormat pixel_format) {
  if (pixel_format == media::VideoPixelFormat::PIXEL_FORMAT_ARGB)
    return gfx::BufferFormat::BGRA_8888;
  if (pixel_format == media::VideoPixelFormat::PIXEL_FORMAT_XRGB)
    return gfx::BufferFormat::BGRX_8888;
  if (pixel_format == media::VideoPixelFormat::PIXEL_FORMAT_NV12)
    return gfx::BufferFormat::YUV_420_BIPLANAR;
  LOG_ASSERT(false) << "Unknown VideoPixelFormat";
  return gfx::BufferFormat::BGRX_8888;
}
#endif
340
// static
scoped_refptr<TextureRef> TextureRef::CreatePreallocated(
    uint32_t texture_id,
    const base::Closure& no_longer_needed_cb,
    media::VideoPixelFormat pixel_format,
    const gfx::Size& size) {
  scoped_refptr<TextureRef> texture_ref;
#if defined(USE_OZONE)
  texture_ref = TextureRef::Create(texture_id, no_longer_needed_cb);
  LOG_ASSERT(texture_ref);

  // Allocate a SCANOUT-capable native pixmap to back the texture so the
  // buffer can later be exported and imported into the VDA.
  ui::OzonePlatform* platform = ui::OzonePlatform::GetInstance();
  ui::SurfaceFactoryOzone* factory = platform->GetSurfaceFactoryOzone();
  gfx::BufferFormat buffer_format =
      VideoPixelFormatToGfxBufferFormat(pixel_format);
  texture_ref->pixmap_ =
      factory->CreateNativePixmap(gfx::kNullAcceleratedWidget, size,
                                  buffer_format, gfx::BufferUsage::SCANOUT);
  LOG_ASSERT(texture_ref->pixmap_);
#endif

  // Note: on non-Ozone builds |texture_ref| is still null here; callers are
  // expected to assert on the returned ref.
  return texture_ref;
}
364
std::vector<gfx::GpuMemoryBufferHandle>
TextureRef::ExportGpuMemoryBufferHandles() const {
  std::vector<gfx::GpuMemoryBufferHandle> handles;
#if defined(USE_OZONE)
  CHECK(pixmap_);
  // dup() so the exported handle owns a file descriptor independent of the
  // pixmap's lifetime; the |true| passed to base::FileDescriptor presumably
  // marks the duplicate as auto-close, transferring ownership to the handle's
  // consumer -- verify against base::FileDescriptor if this changes.
  int duped_fd = HANDLE_EINTR(dup(pixmap_->GetDmaBufFd()));
  LOG_ASSERT(duped_fd != -1) << "Failed duplicating dmabuf fd";
  gfx::GpuMemoryBufferHandle handle;
  handle.type = gfx::OZONE_NATIVE_PIXMAP;
  handle.native_pixmap_handle.fd = base::FileDescriptor(duped_fd, true);
  handles.push_back(handle);
#endif
  // Empty on non-Ozone builds.
  return handles;
}
379
// Client that can accept callbacks from a VideoDecodeAccelerator and is used by
// the TESTs below.
class GLRenderingVDAClient
    : public VideoDecodeAccelerator::Client,
      public base::SupportsWeakPtr<GLRenderingVDAClient> {
 public:
  // |window_id| the window_id of the client, which is used to identify the
  // rendering area in the |rendering_helper|.
  // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
  // |*this|.
  // |num_play_throughs| indicates how many times to play through the video.
  // |reset_after_frame_num| can be a frame number >=0 indicating a mid-stream
  // Reset() should be done after that frame number is delivered, or
  // END_OF_STREAM_RESET to indicate no mid-stream Reset().
  // |delete_decoder_state| indicates when the underlying decoder should be
  // Destroy()'d and deleted and can take values: N<0: delete after -N Decode()
  // calls have been made, N>=0 means interpret as ClientState.
  // Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
  // last play-through (governed by |num_play_throughs|).
  // |suppress_rendering| indicates GL rendering is suppressed or not.
  // After |delay_reuse_after_frame_num| frame has been delivered, the client
  // will start delaying the call to ReusePictureBuffer() for kReuseDelay.
  // |decode_calls_per_second| is the number of VDA::Decode calls per second.
  // If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
  GLRenderingVDAClient(size_t window_id,
                       RenderingHelper* rendering_helper,
                       ClientStateNotification<ClientState>* note,
                       const std::string& encoded_data,
                       int num_in_flight_decodes,
                       int num_play_throughs,
                       int reset_after_frame_num,
                       int delete_decoder_state,
                       int frame_width,
                       int frame_height,
                       media::VideoCodecProfile profile,
                       int fake_decoder,
                       bool suppress_rendering,
                       int delay_reuse_after_frame_num,
                       int decode_calls_per_second,
                       bool render_as_thumbnails);
  ~GLRenderingVDAClient() override;
  // Creates the decoder (fake or via the VDA factory) and kicks off decoding.
  void CreateAndStartDecoder();

  // VideoDecodeAccelerator::Client implementation.
  // The heart of the Client.
  void ProvidePictureBuffers(uint32_t requested_num_of_buffers,
                             uint32_t textures_per_buffer,
                             const gfx::Size& dimensions,
                             uint32_t texture_target) override;
  void DismissPictureBuffer(int32_t picture_buffer_id) override;
  void PictureReady(const media::Picture& picture) override;
  // Simple state changes.
  void NotifyEndOfBitstreamBuffer(int32_t bitstream_buffer_id) override;
  void NotifyFlushDone() override;
  void NotifyResetDone() override;
  void NotifyError(VideoDecodeAccelerator::Error error) override;

  // Dumps per-frame delivery timing (deltas between frames) to |output|.
  void OutputFrameDeliveryTimes(base::File* output);

  // Simple getters for inspecting the state of the Client.
  int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
  int num_skipped_fragments() { return num_skipped_fragments_; }
  int num_queued_fragments() { return num_queued_fragments_; }
  int num_decoded_frames() { return num_decoded_frames_; }
  double frames_per_second();
  // Return the median of the decode time of all decoded frames.
  base::TimeDelta decode_time_median();
  bool decoder_deleted() { return !decoder_.get(); }

 private:
  // Maps picture buffer id -> the texture backing that buffer.
  typedef std::map<int32_t, scoped_refptr<TextureRef>> TextureRefMap;

  void SetState(ClientState new_state);
  void FinishInitialization();
  void ReturnPicture(int32_t picture_buffer_id);

  // Delete the associated decoder helper.
  void DeleteDecoder();

  // Compute & return the first encoded bytes (including a start frame) to send
  // to the decoder, starting at |start_pos| and returning one fragment. Skips
  // to the first decodable position.
  std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
  // Compute & return the encoded bytes of next fragment to send to the decoder
  // (based on |start_pos|).
  std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
  // Helpers for GetBytesForNextFragment above.
  void GetBytesForNextNALU(size_t start_pos, size_t* end_pos);  // For h.264.
  std::string GetBytesForNextFrame(
      size_t start_pos, size_t* end_pos);  // For VP8/9.

  // Request decode of the next fragment in the encoded data.
  void DecodeNextFragment();

  size_t window_id_;
  RenderingHelper* rendering_helper_;
  gfx::Size frame_size_;
  // Full encoded stream; cleared when the decoder is deleted.
  std::string encoded_data_;
  const int num_in_flight_decodes_;
  // Number of Decode() calls issued but not yet acknowledged via
  // NotifyEndOfBitstreamBuffer().
  int outstanding_decodes_;
  size_t encoded_data_next_pos_to_decode_;
  int next_bitstream_buffer_id_;
  ClientStateNotification<ClientState>* note_;
  std::unique_ptr<VideoDecodeAccelerator> decoder_;
  // Weak pointer to |decoder_| for delayed tasks; invalidated on deletion so
  // in-flight tasks become no-ops.
  base::WeakPtr<VideoDecodeAccelerator> weak_vda_;
  std::unique_ptr<base::WeakPtrFactory<VideoDecodeAccelerator>>
      weak_vda_ptr_factory_;
  std::unique_ptr<GpuVideoDecodeAcceleratorFactoryImpl> vda_factory_;
  int remaining_play_throughs_;
  int reset_after_frame_num_;
  int delete_decoder_state_;
  ClientState state_;
  int num_skipped_fragments_;
  int num_queued_fragments_;
  int num_decoded_frames_;
  int num_done_bitstream_buffers_;
  base::TimeTicks initialize_done_ticks_;
  media::VideoCodecProfile profile_;
  int fake_decoder_;
  GLenum texture_target_;
  bool suppress_rendering_;
  std::vector<base::TimeTicks> frame_delivery_times_;
  int delay_reuse_after_frame_num_;
  // A map from bitstream buffer id to the decode start time of the buffer.
  std::map<int, base::TimeTicks> decode_start_time_;
  // The decode time of all decoded frames.
  std::vector<base::TimeDelta> decode_time_;
  // The number of VDA::Decode calls per second. This is to simulate webrtc.
  int decode_calls_per_second_;
  bool render_as_thumbnails_;

  // A map of the textures that are currently active for the decoder, i.e.,
  // have been created via AssignPictureBuffers() and not dismissed via
  // DismissPictureBuffer(). The keys in the map are the IDs of the
  // corresponding picture buffers, and the values are TextureRefs to the
  // textures.
  TextureRefMap active_textures_;

  // A map of the textures that are still pending in the renderer.
  // We check this to ensure all frames are rendered before entering the
  // CS_RESET_State.
  TextureRefMap pending_textures_;

  int32_t next_picture_buffer_id_;

  base::WeakPtr<GLRenderingVDAClient> weak_this_;
  base::WeakPtrFactory<GLRenderingVDAClient> weak_this_factory_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
};
530
// Stand-in for bool-returning callbacks the test has no real work for
// (e.g. the factory's make-context-current callback); always succeeds.
static bool DoNothingReturnTrue() { return true; }
534
535 static bool DummyBindImage(uint32_t client_texture_id,
536 uint32_t texture_target,
537 const scoped_refptr<gl::GLImage>& image,
538 bool can_bind_to_sampler) {
539 return true;
540 }
541
// See the class declaration for the meaning of each parameter. Members are
// initialized in declaration order; the init-list must stay in sync with the
// member order in the class.
GLRenderingVDAClient::GLRenderingVDAClient(
    size_t window_id,
    RenderingHelper* rendering_helper,
    ClientStateNotification<ClientState>* note,
    const std::string& encoded_data,
    int num_in_flight_decodes,
    int num_play_throughs,
    int reset_after_frame_num,
    int delete_decoder_state,
    int frame_width,
    int frame_height,
    media::VideoCodecProfile profile,
    int fake_decoder,
    bool suppress_rendering,
    int delay_reuse_after_frame_num,
    int decode_calls_per_second,
    bool render_as_thumbnails)
    : window_id_(window_id),
      rendering_helper_(rendering_helper),
      frame_size_(frame_width, frame_height),
      encoded_data_(encoded_data),
      num_in_flight_decodes_(num_in_flight_decodes),
      outstanding_decodes_(0),
      encoded_data_next_pos_to_decode_(0),
      next_bitstream_buffer_id_(0),
      note_(note),
      remaining_play_throughs_(num_play_throughs),
      reset_after_frame_num_(reset_after_frame_num),
      delete_decoder_state_(delete_decoder_state),
      state_(CS_CREATED),
      num_skipped_fragments_(0),
      num_queued_fragments_(0),
      num_decoded_frames_(0),
      num_done_bitstream_buffers_(0),
      fake_decoder_(fake_decoder),
      texture_target_(0),
      suppress_rendering_(suppress_rendering),
      delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
      decode_calls_per_second_(decode_calls_per_second),
      render_as_thumbnails_(render_as_thumbnails),
      next_picture_buffer_id_(1),
      weak_this_factory_(this) {
  LOG_ASSERT(num_in_flight_decodes > 0);
  LOG_ASSERT(num_play_throughs > 0);
  // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0.
  if (decode_calls_per_second_ > 0)
    LOG_ASSERT(1 == num_in_flight_decodes_);

  // Default to H264 baseline if no profile provided.
  profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
                  ? profile
                  : media::H264PROFILE_BASELINE);

  weak_this_ = weak_this_factory_.GetWeakPtr();
}
597
GLRenderingVDAClient::~GLRenderingVDAClient() {
  DeleteDecoder();  // Clean up in case of expected error.
  LOG_ASSERT(decoder_deleted());
  // Publish the terminal state so tests waiting on |note_| can observe it.
  SetState(CS_DESTROYED);
}
603
// Instantiates either the fake decoder or a real VDA via the factory, wires
// up weak pointers for safe delayed tasks, and starts the first decode pass.
void GLRenderingVDAClient::CreateAndStartDecoder() {
  LOG_ASSERT(decoder_deleted());
  LOG_ASSERT(!decoder_.get());

  if (fake_decoder_) {
    // Fake path: no hardware involved; the fake VDA fabricates output.
    decoder_.reset(new FakeVideoDecodeAccelerator(
        frame_size_, base::Bind(&DoNothingReturnTrue)));
    LOG_ASSERT(decoder_->Initialize(profile_, this));
  } else {
    if (!vda_factory_) {
      // Lazily create the factory. It needs a get-GL-context callback plus
      // make-current and bind-image callbacks, which this test stubs out.
      vda_factory_ = GpuVideoDecodeAcceleratorFactoryImpl::Create(
          base::Bind(&RenderingHelper::GetGLContext,
                     base::Unretained(rendering_helper_)),
          base::Bind(&DoNothingReturnTrue), base::Bind(&DummyBindImage));
      LOG_ASSERT(vda_factory_);
    }

    VideoDecodeAccelerator::Config config(profile_);
    if (g_test_import) {
      // Exercise the buffer-import path: the test allocates output buffers
      // and hands them to the VDA instead of the VDA allocating its own.
      config.output_mode =
          media::VideoDecodeAccelerator::Config::OutputMode::IMPORT;
    }
    gpu::GpuPreferences gpu_preferences;
    decoder_ = vda_factory_->CreateVDA(this, config, gpu_preferences);
  }

  LOG_ASSERT(decoder_) << "Failed creating a VDA";

  decoder_->TryToSetupDecodeOnSeparateThread(
      weak_this_, base::ThreadTaskRunnerHandle::Get());

  // Weak pointers to the VDA let delayed ReusePictureBuffer() tasks become
  // no-ops if the decoder is destroyed before they run.
  weak_vda_ptr_factory_.reset(
      new base::WeakPtrFactory<VideoDecodeAccelerator>(decoder_.get()));
  weak_vda_ = weak_vda_ptr_factory_->GetWeakPtr();

  SetState(CS_DECODER_SET);
  FinishInitialization();
}
642
// Allocates textures (and, in import mode, backing pixmaps) for the number of
// buffers the decoder asked for plus kExtraPictureBuffers, then assigns them
// to the decoder.
void GLRenderingVDAClient::ProvidePictureBuffers(
    uint32_t requested_num_of_buffers,
    uint32_t textures_per_buffer,
    const gfx::Size& dimensions,
    uint32_t texture_target) {
  if (decoder_deleted())
    return;
  LOG_ASSERT(textures_per_buffer == 1u);
  std::vector<media::PictureBuffer> buffers;

  // Provide more buffers than requested to make sure the decoder copes with
  // an upwards adjustment (see kExtraPictureBuffers).
  requested_num_of_buffers += kExtraPictureBuffers;

  texture_target_ = texture_target;
  for (uint32_t i = 0; i < requested_num_of_buffers; ++i) {
    uint32_t texture_id;
    // Texture creation happens on the rendering thread; block until done.
    base::WaitableEvent done(false, false);
    rendering_helper_->CreateTexture(
        texture_target_, &texture_id, dimensions, &done);
    done.Wait();

    scoped_refptr<TextureRef> texture_ref;
    base::Closure delete_texture_cb =
        base::Bind(&RenderingHelper::DeleteTexture,
                   base::Unretained(rendering_helper_), texture_id);

    if (g_test_import) {
      // Import mode: we allocate the backing storage ourselves. Fall back to
      // ARGB if the decoder has not yet determined an output format.
      media::VideoPixelFormat pixel_format = decoder_->GetOutputFormat();
      if (pixel_format == media::PIXEL_FORMAT_UNKNOWN)
        pixel_format = media::PIXEL_FORMAT_ARGB;
      texture_ref = TextureRef::CreatePreallocated(
          texture_id, delete_texture_cb, pixel_format, dimensions);
    } else {
      texture_ref = TextureRef::Create(texture_id, delete_texture_cb);
    }

    LOG_ASSERT(texture_ref);

    int32_t picture_buffer_id = next_picture_buffer_id_++;
    LOG_ASSERT(
        active_textures_.insert(std::make_pair(picture_buffer_id, texture_ref))
            .second);

    media::PictureBuffer::TextureIds ids;
    ids.push_back(texture_id);
    buffers.push_back(media::PictureBuffer(picture_buffer_id, dimensions, ids));
  }
  decoder_->AssignPictureBuffers(buffers);

  if (g_test_import) {
    // In import mode, also hand the decoder the dmabuf handles backing each
    // assigned picture buffer.
    for (const auto& buffer : buffers) {
      TextureRefMap::iterator texture_it = active_textures_.find(buffer.id());
      ASSERT_NE(active_textures_.end(), texture_it);

      std::vector<gfx::GpuMemoryBufferHandle> handles =
          texture_it->second->ExportGpuMemoryBufferHandles();
      LOG_ASSERT(!handles.empty()) << "Failed producing GMB handles";

      decoder_->ImportBufferForPicture(buffer.id(), handles);
    }
  }
}
704
705 void GLRenderingVDAClient::DismissPictureBuffer(int32_t picture_buffer_id) {
706 LOG_ASSERT(1U == active_textures_.erase(picture_buffer_id));
707 }
708
// Records timing for the delivered picture, optionally triggers the
// mid-stream reset, and forwards the frame to the renderer (or thumbnailer).
void GLRenderingVDAClient::PictureReady(const media::Picture& picture) {
  // We shouldn't be getting pictures delivered after Reset has completed.
  LOG_ASSERT(state_ < CS_RESET);

  if (decoder_deleted())
    return;

  base::TimeTicks now = base::TimeTicks::Now();

  frame_delivery_times_.push_back(now);

  // Save the decode time of this picture.
  std::map<int, base::TimeTicks>::iterator it =
      decode_start_time_.find(picture.bitstream_buffer_id());
  ASSERT_NE(decode_start_time_.end(), it);
  decode_time_.push_back(now - it->second);
  decode_start_time_.erase(it);

  LOG_ASSERT(picture.bitstream_buffer_id() <= next_bitstream_buffer_id_);
  ++num_decoded_frames_;

  // Mid-stream reset applies only to the last play-through per constructor
  // comment.
  if (remaining_play_throughs_ == 1 &&
      reset_after_frame_num_ == num_decoded_frames_) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    // Re-start decoding from the beginning of the stream to avoid needing to
    // know how to find I-frames and so on in this test.
    encoded_data_next_pos_to_decode_ = 0;
  }

  TextureRefMap::iterator texture_it =
      active_textures_.find(picture.picture_buffer_id());
  ASSERT_NE(active_textures_.end(), texture_it);

  // The bound ReturnPicture callback hands the picture buffer back to the
  // decoder (presumably run once the renderer is done with the frame).
  scoped_refptr<VideoFrameTexture> video_frame = new VideoFrameTexture(
      texture_target_, texture_it->second->texture_id(),
      base::Bind(&GLRenderingVDAClient::ReturnPicture, AsWeakPtr(),
                 picture.picture_buffer_id()));
  ASSERT_TRUE(pending_textures_.insert(*texture_it).second);

  if (render_as_thumbnails_) {
    rendering_helper_->RenderThumbnail(video_frame->texture_target(),
                                       video_frame->texture_id());
  } else if (!suppress_rendering_) {
    rendering_helper_->QueueVideoFrame(window_id_, video_frame);
  }
}
758
void GLRenderingVDAClient::ReturnPicture(int32_t picture_buffer_id) {
  if (decoder_deleted())
    return;
  LOG_ASSERT(1U == pending_textures_.erase(picture_buffer_id));

  // Once the renderer has returned every frame after a Reset(), the client
  // is fully reset and the decoder can be torn down.
  if (pending_textures_.empty() && state_ == CS_RESETTING) {
    SetState(CS_RESET);
    DeleteDecoder();
    return;
  }

  if (num_decoded_frames_ > delay_reuse_after_frame_num_) {
    // Exercise the delayed-reuse path. Posting through |weak_vda_| makes the
    // task a no-op if the decoder is destroyed before it fires.
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE, base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
                              weak_vda_, picture_buffer_id),
        kReuseDelay);
  } else {
    decoder_->ReusePictureBuffer(picture_buffer_id);
  }
}
779
void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
    int32_t bitstream_buffer_id) {
  // TODO(fischman): this test currently relies on this notification to make
  // forward progress during a Reset(). But the VDA::Reset() API doesn't
  // guarantee this, so stop relying on it (and remove the notifications from
  // VaapiVideoDecodeAccelerator::FinishReset()).
  ++num_done_bitstream_buffers_;
  --outstanding_decodes_;

  // Flush decoder after all BitstreamBuffers are processed.
  if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
    // TODO(owenlin): We should not have to check the number of
    // |outstanding_decodes_|. |decoder_| should be able to accept Flush()
    // before it's done with outstanding decodes. (crbug.com/528183)
    if (outstanding_decodes_ == 0) {
      decoder_->Flush();
      SetState(CS_FLUSHING);
    }
  } else if (decode_calls_per_second_ == 0) {
    // Non-rate-limited mode: keep the pipeline full by immediately decoding
    // the next fragment. (When |decode_calls_per_second_| > 0, decodes are
    // driven elsewhere at the configured rate.)
    DecodeNextFragment();
  }
}
802
void GLRenderingVDAClient::NotifyFlushDone() {
  if (decoder_deleted())
    return;

  SetState(CS_FLUSHED);
  --remaining_play_throughs_;
  DCHECK_GE(remaining_play_throughs_, 0);
  // SetState() above may itself have deleted the decoder (when the test
  // requested deletion at CS_FLUSHED), so re-check before touching
  // |decoder_|.
  if (decoder_deleted())
    return;
  decoder_->Reset();
  SetState(CS_RESETTING);
}
815
void GLRenderingVDAClient::NotifyResetDone() {
  if (decoder_deleted())
    return;

  if (reset_after_frame_num_ == MID_STREAM_RESET) {
    // Mid-stream reset finished: resume decoding with a single fragment
    // (subsequent ones are issued as bitstream buffers complete).
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    DecodeNextFragment();
    return;
  } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    // Start-of-stream reset finished: launch the full set of in-flight
    // decodes.
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    for (int i = 0; i < num_in_flight_decodes_; ++i)
      DecodeNextFragment();
    return;
  }

  if (remaining_play_throughs_) {
    // More play-throughs to go: rewind the stream and start over.
    encoded_data_next_pos_to_decode_ = 0;
    FinishInitialization();
    return;
  }

  rendering_helper_->Flush(window_id_);

  // Only finish the reset once the renderer has returned all frames;
  // otherwise ReturnPicture() completes it when the last frame comes back.
  if (pending_textures_.empty()) {
    SetState(CS_RESET);
    DeleteDecoder();
  }
}
844
void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
  // Any decoder error moves the client to CS_ERROR; tests observe it via
  // |note_|. The specific |error| value is not inspected here.
  SetState(CS_ERROR);
}
848
// Writes the delivered-frame count followed by one line per frame containing
// the delivery delta in microseconds: the first delta is measured from
// initialization completion, each subsequent one from the previous frame.
void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
  std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
                                     frame_delivery_times_.size());
  output->WriteAtCurrentPos(s.data(), s.length());
  base::TimeTicks t0 = initialize_done_ticks_;
  for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
    s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
                           i,
                           (frame_delivery_times_[i] - t0).InMicroseconds());
    t0 = frame_delivery_times_[i];
    output->WriteAtCurrentPos(s.data(), s.length());
  }
}
862
// Returns true iff |encoded| contains an Annex-B NAL start code
// (00 00 00 01) beginning at byte offset |pos|.
static bool LookingAtNAL(const std::string& encoded, size_t pos) {
  for (size_t i = 0; i < 3; ++i) {
    if (encoded[pos + i] != 0)
      return false;
  }
  return encoded[pos + 3] == 1;
}
867
void GLRenderingVDAClient::SetState(ClientState new_state) {
  // Publish the transition to the waiting test thread first, then record it.
  note_->Notify(new_state);
  state_ = new_state;
  // Tear the decoder down mid-test when this run is parameterized to delete
  // it at |delete_decoder_state_| (only done on the final play-through).
  // Note this makes SetState() potentially reentrant via DeleteDecoder().
  if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
    LOG_ASSERT(!decoder_deleted());
    DeleteDecoder();
  }
}
876
void GLRenderingVDAClient::FinishInitialization() {
  SetState(CS_INITIALIZED);
  initialize_done_ticks_ = base::TimeTicks::Now();

  // When testing Reset() before the first Decode(), issue the Reset() now;
  // NotifyResetDone() will kick off the actual decoding.
  if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    return;
  }

  // Prime the decoder with the configured number of concurrent decodes.
  for (int i = 0; i < num_in_flight_decodes_; ++i)
    DecodeNextFragment();
  DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
}
891
void GLRenderingVDAClient::DeleteDecoder() {
  if (decoder_deleted())
    return;
  // Invalidate weak pointers first so no in-flight VDA callbacks run against
  // a destroyed client.
  weak_vda_ptr_factory_->InvalidateWeakPtrs();
  decoder_.reset();
  STLClearObject(&encoded_data_);
  active_textures_.clear();

  // Cascade through the rest of the states to simplify test code below.
  for (int i = state_ + 1; i < CS_MAX; ++i)
    SetState(static_cast<ClientState>(i));
}
904
std::string GLRenderingVDAClient::GetBytesForFirstFragment(
    size_t start_pos, size_t* end_pos) {
  // For H.264, scan forward to the first SPS NALU so decoding starts on a
  // fragment carrying config info; every NALU skipped on the way is counted
  // in |num_skipped_fragments_|. VP8/VP9 streams can start on any frame.
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    while (*end_pos + 4 < encoded_data_.size()) {
      // nal_unit_type lives in the low 5 bits of the byte after the 4-byte
      // start code; 0x7 is an SPS.
      if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7)  // SPS start frame
        return GetBytesForNextFragment(*end_pos, end_pos);
      GetBytesForNextNALU(*end_pos, end_pos);
      num_skipped_fragments_++;
    }
    // No SPS found: return an empty fragment and restore the position.
    *end_pos = start_pos;
    return std::string();
  }
  DCHECK_LE(profile_, media::VP9PROFILE_MAX);
  return GetBytesForNextFragment(start_pos, end_pos);
}
921
std::string GLRenderingVDAClient::GetBytesForNextFragment(
    size_t start_pos, size_t* end_pos) {
  // Returns the bytes of the next decodable fragment: one NALU for H.264, or
  // one IVF frame payload for VP8/VP9. Sets |*end_pos| just past it.
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    GetBytesForNextNALU(*end_pos, end_pos);
    // An empty range means end-of-stream; don't count it as queued.
    if (start_pos != *end_pos) {
      num_queued_fragments_++;
    }
    return encoded_data_.substr(start_pos, *end_pos - start_pos);
  }
  DCHECK_LE(profile_, media::VP9PROFILE_MAX);
  return GetBytesForNextFrame(start_pos, end_pos);
}
935
void GLRenderingVDAClient::GetBytesForNextNALU(
    size_t start_pos, size_t* end_pos) {
  // Advances |*end_pos| past the Annex-B NALU beginning at |start_pos|,
  // which must start with a 00 00 00 01 start code. Leaves
  // |*end_pos| == |start_pos| when fewer than 4 bytes remain (end of stream).
  *end_pos = start_pos;
  if (*end_pos + 4 > encoded_data_.size())
    return;
  LOG_ASSERT(LookingAtNAL(encoded_data_, start_pos));
  *end_pos += 4;
  // Scan forward until the next start code or the end of the data.
  while (*end_pos + 4 <= encoded_data_.size() &&
         !LookingAtNAL(encoded_data_, *end_pos)) {
    ++*end_pos;
  }
  // If fewer than a full start code's worth of bytes remain, this NALU runs
  // to the end of the stream.
  if (*end_pos + 3 >= encoded_data_.size())
    *end_pos = encoded_data_.size();
}
950
951 std::string GLRenderingVDAClient::GetBytesForNextFrame(
952 size_t start_pos, size_t* end_pos) {
953 // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
954 std::string bytes;
955 if (start_pos == 0)
956 start_pos = 32; // Skip IVF header.
957 *end_pos = start_pos;
958 uint32_t frame_size = *reinterpret_cast<uint32_t*>(&encoded_data_[*end_pos]);
959 *end_pos += 12; // Skip frame header.
960 bytes.append(encoded_data_.substr(*end_pos, frame_size));
961 *end_pos += frame_size;
962 num_queued_fragments_++;
963 return bytes;
964 }
965
// Returns true iff the fragment in |data| carries decoder config info: an
// SPS NALU for H.264 profiles, or a keyframe for VP8/VP9 (lowest bit of the
// first payload byte cleared).
static bool FragmentHasConfigInfo(const uint8_t* data,
                                  size_t size,
                                  media::VideoCodecProfile profile) {
  if (profile >= media::H264PROFILE_MIN &&
      profile <= media::H264PROFILE_MAX) {
    media::H264Parser parser;
    parser.SetStream(data, size);
    media::H264NALU nalu;
    media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
    if (result != media::H264Parser::kOk) {
      // Let the VDA figure out there's something wrong with the stream.
      return false;
    }

    return nalu.nal_unit_type == media::H264NALU::kSPS;
  } else if (profile >= media::VP8PROFILE_MIN &&
             profile <= media::VP9PROFILE_MAX) {
    return (size > 0 && !(data[0] & 0x01));
  }
  // Shouldn't happen at this point.
  LOG(FATAL) << "Invalid profile: " << profile;
  return false;
}
989
void GLRenderingVDAClient::DecodeNextFragment() {
  if (decoder_deleted())
    return;
  // Nothing left to feed; NotifyEndOfBitstreamBuffer() triggers the flush.
  if (encoded_data_next_pos_to_decode_ == encoded_data_.size())
    return;
  size_t end_pos;
  std::string next_fragment_bytes;
  if (encoded_data_next_pos_to_decode_ == 0) {
    next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
  } else {
    next_fragment_bytes =
        GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
  }
  size_t next_fragment_size = next_fragment_bytes.size();

  // Call Reset() just after Decode() if the fragment contains config info.
  // This tests how the VDA behaves when it gets a reset request before it has
  // a chance to ProvidePictureBuffers().
  bool reset_here = false;
  if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
    reset_here = FragmentHasConfigInfo(
        reinterpret_cast<const uint8_t*>(next_fragment_bytes.data()),
        next_fragment_size, profile_);
    if (reset_here)
      reset_after_frame_num_ = END_OF_STREAM_RESET;
  }

  // Populate the shared memory buffer w/ the fragment, duplicate its handle,
  // and hand it off to the decoder.
  base::SharedMemory shm;
  LOG_ASSERT(shm.CreateAndMapAnonymous(next_fragment_size));
  memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
  base::SharedMemoryHandle dup_handle;
  bool result = shm.ShareToProcess(base::GetCurrentProcessHandle(),
                                   &dup_handle);
  LOG_ASSERT(result);
  media::BitstreamBuffer bitstream_buffer(
      next_bitstream_buffer_id_, dup_handle, next_fragment_size);
  decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
  decoder_->Decode(bitstream_buffer);
  ++outstanding_decodes_;
  // A negative |delete_decoder_state_| encodes "delete the decoder after the
  // |-delete_decoder_state_|-th Decode() call" (see ctor comment).
  if (!remaining_play_throughs_ &&
      -delete_decoder_state_ == next_bitstream_buffer_id_) {
    DeleteDecoder();
  }

  if (reset_here) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    // Restart from the beginning to re-Decode() the SPS we just sent.
    encoded_data_next_pos_to_decode_ = 0;
  } else {
    encoded_data_next_pos_to_decode_ = end_pos;
  }

  // In WebRTC-style mode, Decode() calls are paced at a fixed rate instead
  // of being chained off NotifyEndOfBitstreamBuffer().
  if (decode_calls_per_second_ > 0) {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
        base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
  }
}
1054
1055 double GLRenderingVDAClient::frames_per_second() {
1056 base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
1057 return num_decoded_frames_ / delta.InSecondsF();
1058 }
1059
1060 base::TimeDelta GLRenderingVDAClient::decode_time_median() {
1061 if (decode_time_.size() == 0)
1062 return base::TimeDelta();
1063 std::sort(decode_time_.begin(), decode_time_.end());
1064 int index = decode_time_.size() / 2;
1065 if (decode_time_.size() % 2 != 0)
1066 return decode_time_[index];
1067
1068 return (decode_time_[index] + decode_time_[index - 1]) / 2;
1069 }
1070
// Test fixture shared by all VDA tests: owns the test video files (parsed
// from the command line in SetUp()) and the RenderingHelper, and provides
// utilities for hopping work onto the rendering thread.
class VideoDecodeAcceleratorTest : public ::testing::Test {
 protected:
  VideoDecodeAcceleratorTest();
  void SetUp() override;
  void TearDown() override;

  // Parse |data| into its constituent parts, set the various output fields
  // accordingly, and read in video stream. CHECK-fails on unexpected or
  // missing required data. Unspecified optional fields are set to -1.
  void ParseAndReadTestVideoData(base::FilePath::StringType data,
                                 std::vector<TestVideoFile*>* test_video_files);

  // Update the parameters of |test_video_files| according to
  // |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
  // frames should be adjusted if decoder is reset in the middle of the stream.
  void UpdateTestVideoFileParams(
      size_t num_concurrent_decoders,
      int reset_point,
      std::vector<TestVideoFile*>* test_video_files);

  // Set up the RenderingHelper on the rendering thread; blocks until done.
  void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
  // Create |client|'s decoder on the rendering thread and wait for it to
  // report CS_DECODER_SET.
  void CreateAndStartDecoder(GLRenderingVDAClient* client,
                             ClientStateNotification<ClientState>* note);
  // Drain |note| until the client reports CS_DESTROYED.
  void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
  // Block until all tasks already posted to the rendering thread have run.
  void WaitUntilIdle();
  // Write |content| to |log_path|, truncating any existing file.
  void OutputLogFile(const base::FilePath::CharType* log_path,
                     const std::string& content);

  std::vector<TestVideoFile*> test_video_files_;
  RenderingHelper rendering_helper_;

 private:
  // Required for Thread to work. Not used otherwise.
  base::ShadowingAtExitManager at_exit_manager_;

  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
};
1108
1109 VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest() {
1110 }
1111
void VideoDecodeAcceleratorTest::SetUp() {
  // Populate |test_video_files_| from the --test_video_data switch (parsed
  // into g_test_video_data in main()).
  ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
}
1115
void VideoDecodeAcceleratorTest::TearDown() {
  // Delete the test video files on the rendering thread, where they are used.
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&STLDeleteElements<std::vector<TestVideoFile*>>,
                            &test_video_files_));

  // Uninitialize the helper on the rendering thread and block until it
  // completes, so no GL work races with the final TearDown() below.
  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&RenderingHelper::UnInitialize,
                            base::Unretained(&rendering_helper_), &done));
  done.Wait();

  rendering_helper_.TearDown();
}
1129
1130 void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
1131 base::FilePath::StringType data,
1132 std::vector<TestVideoFile*>* test_video_files) {
1133 std::vector<base::FilePath::StringType> entries = base::SplitString(
1134 data, base::FilePath::StringType(1, ';'),
1135 base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
1136 LOG_ASSERT(entries.size() >= 1U) << data;
1137 for (size_t index = 0; index < entries.size(); ++index) {
1138 std::vector<base::FilePath::StringType> fields = base::SplitString(
1139 entries[index], base::FilePath::StringType(1, ':'),
1140 base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
1141 LOG_ASSERT(fields.size() >= 1U) << entries[index];
1142 LOG_ASSERT(fields.size() <= 8U) << entries[index];
1143 TestVideoFile* video_file = new TestVideoFile(fields[0]);
1144 if (!fields[1].empty())
1145 LOG_ASSERT(base::StringToInt(fields[1], &video_file->width));
1146 if (!fields[2].empty())
1147 LOG_ASSERT(base::StringToInt(fields[2], &video_file->height));
1148 if (!fields[3].empty())
1149 LOG_ASSERT(base::StringToInt(fields[3], &video_file->num_frames));
1150 if (!fields[4].empty())
1151 LOG_ASSERT(base::StringToInt(fields[4], &video_file->num_fragments));
1152 if (!fields[5].empty())
1153 LOG_ASSERT(base::StringToInt(fields[5], &video_file->min_fps_render));
1154 if (!fields[6].empty())
1155 LOG_ASSERT(base::StringToInt(fields[6], &video_file->min_fps_no_render));
1156 int profile = -1;
1157 if (!fields[7].empty())
1158 LOG_ASSERT(base::StringToInt(fields[7], &profile));
1159 video_file->profile = static_cast<media::VideoCodecProfile>(profile);
1160
1161 // Read in the video data.
1162 base::FilePath filepath(video_file->file_name);
1163 LOG_ASSERT(base::ReadFileToString(filepath, &video_file->data_str))
1164 << "test_video_file: " << filepath.MaybeAsASCII();
1165
1166 test_video_files->push_back(video_file);
1167 }
1168 }
1169
1170 void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
1171 size_t num_concurrent_decoders,
1172 int reset_point,
1173 std::vector<TestVideoFile*>* test_video_files) {
1174 for (size_t i = 0; i < test_video_files->size(); i++) {
1175 TestVideoFile* video_file = (*test_video_files)[i];
1176 if (reset_point == MID_STREAM_RESET) {
1177 // Reset should not go beyond the last frame;
1178 // reset in the middle of the stream for short videos.
1179 video_file->reset_after_frame_num = kMaxResetAfterFrameNum;
1180 if (video_file->num_frames <= video_file->reset_after_frame_num)
1181 video_file->reset_after_frame_num = video_file->num_frames / 2;
1182
1183 video_file->num_frames += video_file->reset_after_frame_num;
1184 } else {
1185 video_file->reset_after_frame_num = reset_point;
1186 }
1187
1188 if (video_file->min_fps_render != -1)
1189 video_file->min_fps_render /= num_concurrent_decoders;
1190 if (video_file->min_fps_no_render != -1)
1191 video_file->min_fps_no_render /= num_concurrent_decoders;
1192 }
1193 }
1194
void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
    const RenderingHelperParams& helper_params) {
  rendering_helper_.Setup();

  // Initialization must happen on the rendering thread; block until done so
  // callers can immediately start decoders against the helper.
  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RenderingHelper::Initialize,
                 base::Unretained(&rendering_helper_), helper_params, &done));
  done.Wait();
}
1206
void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
    GLRenderingVDAClient* client,
    ClientStateNotification<ClientState>* note) {
  // The decoder must be created on the rendering thread; the client reports
  // CS_DECODER_SET through |note| once creation has completed.
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
                            base::Unretained(client)));
  ASSERT_EQ(note->Wait(), CS_DECODER_SET);
}
1215
1216 void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
1217 ClientStateNotification<ClientState>* note) {
1218 for (int i = 0; i < CS_MAX; i++) {
1219 if (note->Wait() == CS_DESTROYED)
1220 break;
1221 }
1222 }
1223
void VideoDecodeAcceleratorTest::WaitUntilIdle() {
  // Post a signal task behind everything already queued on the rendering
  // thread and wait for it; when it runs, all prior tasks have completed.
  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done)));
  done.Wait();
}
1231
void VideoDecodeAcceleratorTest::OutputLogFile(
    const base::FilePath::CharType* log_path,
    const std::string& content) {
  // FLAG_CREATE_ALWAYS truncates any existing log at |log_path|.
  base::File file(base::FilePath(log_path),
                  base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
  file.WriteAtCurrentPos(content.data(), content.length());
}
1239
// Test parameters (in tuple order, matching the WithParamInterface below):
// - Number of concurrent decoders. The value takes effect when there is only
//   one input stream; otherwise, one decoder per input stream will be
//   instantiated.
// - Number of concurrent in-flight Decode() calls per decoder.
// - Number of play-throughs.
// - reset_after_frame_num: see GLRenderingVDAClient ctor.
// - delete_decoder_phase: see GLRenderingVDAClient ctor.
// - whether to test slow rendering by delaying ReusePictureBuffer().
// - whether the video frames are rendered as thumbnails.
class VideoDecodeAcceleratorParamTest
    : public VideoDecodeAcceleratorTest,
      public ::testing::WithParamInterface<
        base::Tuple<int, int, int, ResetPoint, ClientState, bool, bool> > {
};
1255
// Wait for |note| to report a state and if it's not |expected_state| then
// assert |client| has deleted its decoder. (Early decoder deletion makes the
// client cascade through all remaining states, so any state other than the
// expected one is only legal if the decoder is already gone.)
static void AssertWaitForStateOrDeleted(
    ClientStateNotification<ClientState>* note,
    GLRenderingVDAClient* client,
    ClientState expected_state) {
  ClientState state = note->Wait();
  if (state == expected_state) return;
  ASSERT_TRUE(client->decoder_deleted())
      << "Decoder not deleted but Wait() returned " << state
      << ", instead of " << expected_state;
}
1268
// We assert a minimal number of concurrent decoders we expect to succeed.
// Different platforms can support more concurrent decoders, so we don't assert
// failure above this. (Exercised by the ResourceExhaustion instantiation.)
enum { kMinSupportedNumConcurrentDecoders = 3 };
1273
// Test the most straightforward case possible: data is decoded from a single
// chunk and rendered to the screen.
TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
  // Unpack the parameter tuple; see VideoDecodeAcceleratorParamTest for the
  // meaning of each field.
  size_t num_concurrent_decoders = base::get<0>(GetParam());
  const size_t num_in_flight_decodes = base::get<1>(GetParam());
  int num_play_throughs = base::get<2>(GetParam());
  const int reset_point = base::get<3>(GetParam());
  const int delete_decoder_state = base::get<4>(GetParam());
  bool test_reuse_delay = base::get<5>(GetParam());
  const bool render_as_thumbnails = base::get<6>(GetParam());

  // With multiple input streams, run one decoder per stream instead of the
  // parameterized count.
  if (test_video_files_.size() > 1)
    num_concurrent_decoders = test_video_files_.size();

  // Command-line override of the parameterized play-through count.
  if (g_num_play_throughs > 0)
    num_play_throughs = g_num_play_throughs;

  UpdateTestVideoFileParams(
      num_concurrent_decoders, reset_point, &test_video_files_);

  // Suppress GL rendering for all tests when the "--rendering_fps" is 0.
  const bool suppress_rendering = g_rendering_fps == 0;

  std::vector<ClientStateNotification<ClientState>*>
      notes(num_concurrent_decoders, NULL);
  std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);

  RenderingHelperParams helper_params;
  helper_params.rendering_fps = g_rendering_fps;
  helper_params.warm_up_iterations = g_rendering_warm_up;
  helper_params.render_as_thumbnails = render_as_thumbnails;
  if (render_as_thumbnails) {
    // Only one decoder is supported with thumbnail rendering
    LOG_ASSERT(num_concurrent_decoders == 1U);
    helper_params.thumbnails_page_size = kThumbnailsPageSize;
    helper_params.thumbnail_size = kThumbnailSize;
  }

  // First kick off all the decoders.
  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    TestVideoFile* video_file =
        test_video_files_[index % test_video_files_.size()];
    ClientStateNotification<ClientState>* note =
        new ClientStateNotification<ClientState>();
    notes[index] = note;

    // Delay ReusePictureBuffer() near the end of the stream when testing
    // slow rendering, but only if the video is long enough to absorb it.
    int delay_after_frame_num = std::numeric_limits<int>::max();
    if (test_reuse_delay &&
        kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
      delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
    }

    GLRenderingVDAClient* client =
        new GLRenderingVDAClient(index,
                                 &rendering_helper_,
                                 note,
                                 video_file->data_str,
                                 num_in_flight_decodes,
                                 num_play_throughs,
                                 video_file->reset_after_frame_num,
                                 delete_decoder_state,
                                 video_file->width,
                                 video_file->height,
                                 video_file->profile,
                                 g_fake_decoder,
                                 suppress_rendering,
                                 delay_after_frame_num,
                                 0,
                                 render_as_thumbnails);

    clients[index] = client;
    helper_params.window_sizes.push_back(
        render_as_thumbnails
            ? kThumbnailsPageSize
            : gfx::Size(video_file->width, video_file->height));
  }

  InitializeRenderingHelper(helper_params);

  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    CreateAndStartDecoder(clients[index], notes[index]);
  }

  // Then wait for all the decodes to finish.
  // Only check performance & correctness later if we play through only once.
  bool skip_performance_and_correctness_checks = num_play_throughs > 1;
  for (size_t i = 0; i < num_concurrent_decoders; ++i) {
    ClientStateNotification<ClientState>* note = notes[i];
    ClientState state = note->Wait();
    if (state != CS_INITIALIZED) {
      skip_performance_and_correctness_checks = true;
      // We expect initialization to fail only when more than the supported
      // number of decoders is instantiated. Assert here that something else
      // didn't trigger failure.
      ASSERT_GT(num_concurrent_decoders,
                static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
      continue;
    }
    ASSERT_EQ(state, CS_INITIALIZED);
    for (int n = 0; n < num_play_throughs; ++n) {
      // For play-throughs other than the first, we expect initialization to
      // succeed unconditionally.
      if (n > 0) {
        ASSERT_NO_FATAL_FAILURE(
            AssertWaitForStateOrDeleted(note, clients[i], CS_INITIALIZED));
      }
      // InitializeDone kicks off decoding inside the client, so we just need to
      // wait for Flush.
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHING));
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHED));
      // FlushDone requests Reset().
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_RESETTING));
    }
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_RESET));
    // ResetDone requests Destroy().
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
  }
  // Finally assert that decoding went as expected.
  for (size_t i = 0; i < num_concurrent_decoders &&
       !skip_performance_and_correctness_checks; ++i) {
    // We can only make performance/correctness assertions if the decoder was
    // allowed to finish.
    if (delete_decoder_state < CS_FLUSHED)
      continue;
    GLRenderingVDAClient* client = clients[i];
    TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
    if (video_file->num_frames > 0) {
      // Expect the decoded frames may be more than the video frames as frames
      // could still be returned until resetting done.
      if (video_file->reset_after_frame_num > 0)
        EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
      else
        EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
    }
    if (reset_point == END_OF_STREAM_RESET) {
      EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
                client->num_queued_fragments());
      EXPECT_EQ(client->num_done_bitstream_buffers(),
                client->num_queued_fragments());
    }
    LOG(INFO) << "Decoder " << i << " fps: " << client->frames_per_second();
    if (!render_as_thumbnails) {
      int min_fps = suppress_rendering ?
          video_file->min_fps_no_render : video_file->min_fps_render;
      if (min_fps > 0 && !test_reuse_delay)
        EXPECT_GT(client->frames_per_second(), min_fps);
    }
  }

  // Thumbnail mode: compare the rendered thumbnail page against golden MD5s;
  // on mismatch, dump a PNG next to the input file for debugging.
  if (render_as_thumbnails) {
    std::vector<unsigned char> rgb;
    bool alpha_solid;
    base::WaitableEvent done(false, false);
    g_env->GetRenderingTaskRunner()->PostTask(
        FROM_HERE, base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
                              base::Unretained(&rendering_helper_), &rgb,
                              &alpha_solid, &done));
    done.Wait();

    std::vector<std::string> golden_md5s;
    std::string md5_string = base::MD5String(
        base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
    ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
    std::vector<std::string>::iterator match =
        find(golden_md5s.begin(), golden_md5s.end(), md5_string);
    if (match == golden_md5s.end()) {
      // Convert raw RGB into PNG for export.
      std::vector<unsigned char> png;
      gfx::PNGCodec::Encode(&rgb[0],
                            gfx::PNGCodec::FORMAT_RGB,
                            kThumbnailsPageSize,
                            kThumbnailsPageSize.width() * 3,
                            true,
                            std::vector<gfx::PNGCodec::Comment>(),
                            &png);

      LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;

      base::FilePath filepath(test_video_files_[0]->file_name);
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
      int num_bytes = base::WriteFile(filepath,
                                      reinterpret_cast<char*>(&png[0]),
                                      png.size());
      ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
    }
    ASSERT_NE(match, golden_md5s.end());
    EXPECT_EQ(alpha_solid, true) << "RGBA frame had incorrect alpha";
  }

  // Output the frame delivery time to file
  // We can only make performance/correctness assertions if the decoder was
  // allowed to finish.
  if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
    base::File output_file(
        base::FilePath(g_output_log),
        base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
    for (size_t i = 0; i < num_concurrent_decoders; ++i) {
      clients[i]->OutputFrameDeliveryTimes(&output_file);
    }
  }

  // Clients and notifications are destroyed on the rendering thread where
  // they were used; WaitUntilIdle() ensures that has happened before the
  // fixture is torn down.
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*>>,
                 &clients));
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<
                     std::vector<ClientStateNotification<ClientState>*>>,
                 &notes));
  WaitUntilIdle();
};
1492
// Test that replay after EOS works fine.
INSTANTIATE_TEST_CASE_P(
    ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Test that Reset() before the first Decode() works fine.
INSTANTIATE_TEST_CASE_P(
    ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));

// Test Reset() immediately after Decode() containing config info.
INSTANTIATE_TEST_CASE_P(
    ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(
            1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));

// Test that Reset() mid-stream works fine and doesn't affect decoding even when
// Decode() calls are made during the reset.
INSTANTIATE_TEST_CASE_P(
    MidStreamReset, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));

// Test that delayed ReusePictureBuffer() calls (slow rendering) don't break
// decoding.
INSTANTIATE_TEST_CASE_P(
    SlowRendering, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));

// Test that Destroy() mid-stream works fine (primarily this is testing that no
// crashes occur).
INSTANTIATE_TEST_CASE_P(
    TearDownTiming, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        // Negative values mean "delete the decoder after the |-N|th Decode()
        // call" (see GLRenderingVDAClient ctor / DecodeNextFragment()).
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-1), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-10), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-100), false, false)));

// Test that decoding various variation works with multiple in-flight decodes.
INSTANTIATE_TEST_CASE_P(
    DecodeVariations, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        // Tests queuing.
        MakeTuple(1, 15, 1, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Find out how many concurrent decoders can go before we exhaust system
// resources.
INSTANTIATE_TEST_CASE_P(
    ResourceExhaustion, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        // +0 hack below to promote enum to int.
        MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false)));

// Thumbnailing test
INSTANTIATE_TEST_CASE_P(
    Thumbnail, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
1567
// Measure the median of the decode time when VDA::Decode is called 30 times per
// second.
TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
  RenderingHelperParams helper_params;

  // Disable rendering by setting the rendering_fps = 0.
  helper_params.rendering_fps = 0;
  helper_params.warm_up_iterations = 0;
  helper_params.render_as_thumbnails = false;

  // |note| and |client| are destroyed on the rendering thread via
  // DeleteSoon() at the end of the test, after all their posted work.
  ClientStateNotification<ClientState>* note =
      new ClientStateNotification<ClientState>();
  GLRenderingVDAClient* client =
      new GLRenderingVDAClient(0,
                               &rendering_helper_,
                               note,
                               test_video_files_[0]->data_str,
                               1,
                               1,
                               test_video_files_[0]->reset_after_frame_num,
                               CS_RESET,
                               test_video_files_[0]->width,
                               test_video_files_[0]->height,
                               test_video_files_[0]->profile,
                               g_fake_decoder,
                               true,
                               std::numeric_limits<int>::max(),
                               kWebRtcDecodeCallsPerSecond,
                               false /* render_as_thumbnail */);
  helper_params.window_sizes.push_back(
      gfx::Size(test_video_files_[0]->width, test_video_files_[0]->height));
  InitializeRenderingHelper(helper_params);
  CreateAndStartDecoder(client, note);
  WaitUntilDecodeFinish(note);

  base::TimeDelta decode_time_median = client->decode_time_median();
  std::string output_string =
      base::StringPrintf("Decode time median: %" PRId64 " us",
                         decode_time_median.InMicroseconds());
  LOG(INFO) << output_string;

  if (g_output_log != NULL)
    OutputLogFile(g_output_log, output_string);

  g_env->GetRenderingTaskRunner()->DeleteSoon(FROM_HERE, client);
  g_env->GetRenderingTaskRunner()->DeleteSoon(FROM_HERE, note);
  WaitUntilIdle();
};
1616
1617 // TODO(fischman, vrk): add more tests! In particular:
1618 // - Test life-cycle: Seek/Stop/Pause/Play for a single decoder.
1619 // - Test alternate configurations
1620 // - Test failure conditions.
1621 // - Test frame size changes mid-stream
1622
1623 } // namespace
1624 } // namespace content
1625
1626 int main(int argc, char **argv) {
1627 testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args.
1628 base::CommandLine::Init(argc, argv);
1629
1630 // Needed to enable DVLOG through --vmodule.
1631 logging::LoggingSettings settings;
1632 settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
1633 LOG_ASSERT(logging::InitLogging(settings));
1634
1635 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
1636 DCHECK(cmd_line);
1637
1638 base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
1639 for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
1640 it != switches.end(); ++it) {
1641 if (it->first == "test_video_data") {
1642 content::g_test_video_data = it->second.c_str();
1643 continue;
1644 }
1645 // The output log for VDA performance test.
1646 if (it->first == "output_log") {
1647 content::g_output_log = it->second.c_str();
1648 continue;
1649 }
1650 if (it->first == "rendering_fps") {
1651 // On Windows, CommandLine::StringType is wstring. We need to convert
1652 // it to std::string first
1653 std::string input(it->second.begin(), it->second.end());
1654 LOG_ASSERT(base::StringToDouble(input, &content::g_rendering_fps));
1655 continue;
1656 }
1657 if (it->first == "rendering_warm_up") {
1658 std::string input(it->second.begin(), it->second.end());
1659 LOG_ASSERT(base::StringToInt(input, &content::g_rendering_warm_up));
1660 continue;
1661 }
1662 // TODO(owenlin): Remove this flag once it is not used in autotest.
1663 if (it->first == "disable_rendering") {
1664 content::g_rendering_fps = 0;
1665 continue;
1666 }
1667
1668 if (it->first == "num_play_throughs") {
1669 std::string input(it->second.begin(), it->second.end());
1670 LOG_ASSERT(base::StringToInt(input, &content::g_num_play_throughs));
1671 continue;
1672 }
1673 if (it->first == "fake_decoder") {
1674 content::g_fake_decoder = 1;
1675 continue;
1676 }
1677 if (it->first == "v" || it->first == "vmodule")
1678 continue;
1679 if (it->first == "ozone-platform" || it->first == "ozone-use-surfaceless")
1680 continue;
1681 if (it->first == "test_import") {
1682 content::g_test_import = true;
1683 continue;
1684 }
1685 LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
1686 }
1687
1688 base::ShadowingAtExitManager at_exit_manager;
1689 #if defined(OS_WIN) || defined(USE_OZONE)
1690 // For windows the decoding thread initializes the media foundation decoder
1691 // which uses COM. We need the thread to be a UI thread.
1692 // On Ozone, the backend initializes the event system using a UI
1693 // thread.
1694 base::MessageLoopForUI main_loop;
1695 #else
1696 base::MessageLoop main_loop;
1697 #endif // OS_WIN || USE_OZONE
1698
1699 #if defined(USE_OZONE)
1700 ui::OzonePlatform::InitializeForUI();
1701 #endif
1702
1703 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
1704 content::VaapiWrapper::PreSandboxInitialization();
1705 #endif
1706
1707 content::g_env =
1708 reinterpret_cast<content::VideoDecodeAcceleratorTestEnvironment*>(
1709 testing::AddGlobalTestEnvironment(
1710 new content::VideoDecodeAcceleratorTestEnvironment()));
1711
1712 return RUN_ALL_TESTS();
1713 }
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698