Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(159)

Side by Side Diff: content/common/gpu/media/video_decode_accelerator_unittest.cc

Issue 1882373004: Migrate content/common/gpu/media code to media/gpu (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix several more bot-identified build issues Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
(Empty)
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // The bulk of this file is support code; sorry about that. Here's an overview
6 // to hopefully help readers of this code:
7 // - RenderingHelper is charged with interacting with X11/{EGL/GLES2,GLX/GL} or
8 // Win/EGL.
9 // - ClientState is an enum for the state of the decode client used by the test.
10 // - ClientStateNotification is a barrier abstraction that allows the test code
11 // to be written sequentially and wait for the decode client to see certain
12 // state transitions.
13 // - GLRenderingVDAClient is a VideoDecodeAccelerator::Client implementation
14 // - Finally actual TEST cases are at the bottom of this file, using the above
15 // infrastructure.
16
17 #include <fcntl.h>
18 #include <stddef.h>
19 #include <stdint.h>
20 #include <sys/stat.h>
21 #include <sys/types.h>
#include <algorithm>
#include <cstring>
#include <deque>
#include <map>
#include <utility>
26
27 #include "base/at_exit.h"
28 #include "base/bind.h"
29 #include "base/callback_helpers.h"
30 #include "base/command_line.h"
31 #include "base/files/file.h"
32 #include "base/files/file_util.h"
33 #include "base/format_macros.h"
34 #include "base/macros.h"
35 #include "base/md5.h"
36 #include "base/process/process_handle.h"
37 #include "base/stl_util.h"
38 #include "base/strings/string_number_conversions.h"
39 #include "base/strings/string_split.h"
40 #include "base/strings/stringize_macros.h"
41 #include "base/strings/stringprintf.h"
42 #include "base/strings/utf_string_conversions.h"
43 #include "base/synchronization/condition_variable.h"
44 #include "base/synchronization/lock.h"
45 #include "base/synchronization/waitable_event.h"
46 #include "base/thread_task_runner_handle.h"
47 #include "base/threading/thread.h"
48 #include "build/build_config.h"
49 #include "content/common/gpu/media/fake_video_decode_accelerator.h"
50 #include "content/common/gpu/media/gpu_video_decode_accelerator_factory_impl.h"
51 #include "content/common/gpu/media/rendering_helper.h"
52 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
53 #include "gpu/command_buffer/service/gpu_preferences.h"
54 #include "media/filters/h264_parser.h"
55 #include "testing/gtest/include/gtest/gtest.h"
56 #include "ui/gfx/codec/png_codec.h"
57 #include "ui/gl/gl_image.h"
58
59 #if defined(OS_WIN)
60 #include "base/win/windows_version.h"
61 #include "content/common/gpu/media/dxva_video_decode_accelerator_win.h"
62 #elif defined(OS_CHROMEOS)
63 #if defined(USE_V4L2_CODEC)
64 #include "content/common/gpu/media/v4l2_device.h"
65 #include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h"
66 #include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
67 #endif
68 #if defined(ARCH_CPU_X86_FAMILY)
69 #include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
70 #include "content/common/gpu/media/vaapi_wrapper.h"
71 #endif // defined(ARCH_CPU_X86_FAMILY)
72 #else
73 #error The VideoAccelerator tests are not supported on this platform.
74 #endif // OS_WIN
75
76 #if defined(USE_OZONE)
77 #include "ui/ozone/public/ozone_gpu_test_helper.h"
78 #include "ui/ozone/public/ozone_platform.h"
79 #endif // defined(USE_OZONE)
80
81 using media::VideoDecodeAccelerator;
82
namespace content {
namespace {

using base::MakeTuple;

// Values optionally filled in from flags; see main() below.
// The syntax of multiple test videos is:
//   test-video1;test-video2;test-video3
// where only the first video is required and other optional videos would be
// decoded by concurrent decoders.
// The syntax of each test-video is:
//  filename:width:height:numframes:numfragments:minFPSwithRender:minFPSnoRender
// where only the first field is required. Value details:
// - |filename| must be an h264 Annex B (NAL) stream or an IVF VP8/9 stream.
// - |width| and |height| are in pixels.
// - |numframes| is the number of picture frames in the file.
// - |numfragments| NALU (h264) or frame (VP8/9) count in the stream.
// - |minFPSwithRender| and |minFPSnoRender| are minimum frames/second speeds
//   expected to be achieved with and without rendering to the screen, resp.
//   (the latter tests just decode speed).
// - |profile| is the media::VideoCodecProfile set during Initialization.
// An empty value for a numeric field means "ignore".
const base::FilePath::CharType* g_test_video_data =
    // FILE_PATH_LITERAL("test-25fps.vp8:320:240:250:250:50:175:11");
    FILE_PATH_LITERAL("test-25fps.h264:320:240:250:258:50:175:1");

// The file path of the test output log. This is used to communicate the test
// results to CrOS autotests. We can enable the log and specify the filename by
// the "--output_log" switch.
const base::FilePath::CharType* g_output_log = NULL;

// The value is set by the switch "--rendering_fps".
double g_rendering_fps = 60;

// The value is set by the switch "--rendering_warm_up".
int g_rendering_warm_up = 0;

// The value is set by the switch "--num_play_throughs". The video will play
// the specified number of times. In different test cases, we have different
// values for |num_play_throughs|. This setting will override the value. A
// special value "0" means no override.
int g_num_play_throughs = 0;

// Non-zero selects the FakeVideoDecodeAccelerator instead of a real VDA.
// Set by the "--fake_decoder" style switch handled in main().
int g_fake_decoder = 0;

// Environment to store rendering thread.
class VideoDecodeAcceleratorTestEnvironment;
VideoDecodeAcceleratorTestEnvironment* g_env;

// Magic constants for differentiating the reasons for NotifyResetDone being
// called. Negative so they can never collide with a real frame number stored
// in |reset_after_frame_num|.
enum ResetPoint {
  // Reset() just after calling Decode() with a fragment containing config info.
  RESET_AFTER_FIRST_CONFIG_INFO = -4,
  // Reset() immediately after initialization, before any Decode().
  START_OF_STREAM_RESET = -3,
  // Reset() after a specific decoded-frame count mid-stream.
  MID_STREAM_RESET = -2,
  // Default: no mid-stream reset; reset only at end of stream.
  END_OF_STREAM_RESET = -1
};

// Upper bound for the frame number at which a mid-stream reset is requested.
const int kMaxResetAfterFrameNum = 100;
// Frame count after which ReusePictureBuffer() calls are delayed by
// |kReuseDelay| to exercise slow-consumer behavior.
const int kMaxFramesToDelayReuse = 64;
const base::TimeDelta kReuseDelay = base::TimeDelta::FromSeconds(1);
// Simulate WebRTC and call VDA::Decode 30 times per second.
const int kWebRtcDecodeCallsPerSecond = 30;
// Simulate an adjustment to a larger number of pictures to make sure the
// decoder supports an upwards adjustment.
const int kExtraPictureBuffers = 2;
151 struct TestVideoFile {
152 explicit TestVideoFile(base::FilePath::StringType file_name)
153 : file_name(file_name),
154 width(-1),
155 height(-1),
156 num_frames(-1),
157 num_fragments(-1),
158 min_fps_render(-1),
159 min_fps_no_render(-1),
160 profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
161 reset_after_frame_num(END_OF_STREAM_RESET) {
162 }
163
164 base::FilePath::StringType file_name;
165 int width;
166 int height;
167 int num_frames;
168 int num_fragments;
169 int min_fps_render;
170 int min_fps_no_render;
171 media::VideoCodecProfile profile;
172 int reset_after_frame_num;
173 std::string data_str;
174 };
175
// Geometry of the page onto which per-frame thumbnails are composited for the
// MD5-based golden comparison, and the size of each individual thumbnail.
const gfx::Size kThumbnailsPageSize(1600, 1200);
const gfx::Size kThumbnailSize(160, 120);
// Length of an MD5 digest rendered as lowercase hex.
const int kMD5StringLength = 32;
179
180 // Read in golden MD5s for the thumbnailed rendering of this video
181 void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
182 std::vector<std::string>* md5_strings) {
183 base::FilePath filepath(video_file->file_name);
184 filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
185 std::string all_md5s;
186 base::ReadFileToString(filepath, &all_md5s);
187 *md5_strings = base::SplitString(
188 all_md5s, "\n", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
189 // Check these are legitimate MD5s.
190 for (const std::string& md5_string : *md5_strings) {
191 // Ignore the empty string added by SplitString
192 if (!md5_string.length())
193 continue;
194 // Ignore comments
195 if (md5_string.at(0) == '#')
196 continue;
197
198 LOG_ASSERT(static_cast<int>(md5_string.length()) ==
199 kMD5StringLength) << md5_string;
200 bool hex_only = std::count_if(md5_string.begin(),
201 md5_string.end(), isxdigit) ==
202 kMD5StringLength;
203 LOG_ASSERT(hex_only) << md5_string;
204 }
205 LOG_ASSERT(md5_strings->size() >= 1U) << " MD5 checksum file ("
206 << filepath.MaybeAsASCII()
207 << ") missing or empty.";
208 }
209
210 // State of the GLRenderingVDAClient below. Order matters here as the test
211 // makes assumptions about it.
// State of the GLRenderingVDAClient below. Order matters here as the test
// makes assumptions about it: DeleteDecoder() cascades from the current state
// through CS_DESTROYED, and |delete_decoder_state| is compared numerically.
enum ClientState {
  CS_CREATED = 0,        // Client constructed; no decoder yet.
  CS_DECODER_SET = 1,    // VDA created and attached.
  CS_INITIALIZED = 2,    // VDA initialized; decoding may begin.
  CS_FLUSHING = 3,       // Flush() issued, awaiting NotifyFlushDone().
  CS_FLUSHED = 4,        // Flush completed.
  CS_RESETTING = 5,      // Reset() issued, awaiting NotifyResetDone().
  CS_RESET = 6,          // Reset completed and all pictures returned.
  CS_ERROR = 7,          // NotifyError() was received.
  CS_DESTROYED = 8,      // Client destructor ran.
  CS_MAX,  // Must be last entry.
};
224
225 // Initialize the GPU thread for rendering. We only need to setup once
226 // for all test cases.
// Initialize the GPU thread for rendering. We only need to setup once
// for all test cases.
class VideoDecodeAcceleratorTestEnvironment : public ::testing::Environment {
 public:
  VideoDecodeAcceleratorTestEnvironment()
      : rendering_thread_("GLRenderingVDAClientThread") {}

  // Starts the rendering thread and performs one-off GL/Ozone initialization
  // on it, blocking until that initialization has completed.
  void SetUp() override {
    rendering_thread_.Start();

    base::WaitableEvent done(false, false);
    rendering_thread_.task_runner()->PostTask(
        FROM_HERE, base::Bind(&RenderingHelper::InitializeOneOff, &done));
    done.Wait();

#if defined(USE_OZONE)
    gpu_helper_.reset(new ui::OzoneGpuTestHelper());
    // Need to initialize after the rendering side since the rendering side
    // initializes the "GPU" parts of Ozone.
    //
    // This also needs to be done in the test environment since this shouldn't
    // be initialized multiple times for the same Ozone platform.
    gpu_helper_->Initialize(base::ThreadTaskRunnerHandle::Get(),
                            GetRenderingTaskRunner());
#endif
  }

  // Tears down in reverse order: the Ozone helper first (it references the
  // rendering thread), then the thread itself.
  void TearDown() override {
#if defined(USE_OZONE)
    gpu_helper_.reset();
#endif
    rendering_thread_.Stop();
  }

  // Task runner for the shared rendering thread; used by clients to post GL
  // work.
  scoped_refptr<base::SingleThreadTaskRunner> GetRenderingTaskRunner() const {
    return rendering_thread_.task_runner();
  }

 private:
  base::Thread rendering_thread_;
#if defined(USE_OZONE)
  std::unique_ptr<ui::OzoneGpuTestHelper> gpu_helper_;
#endif

  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTestEnvironment);
};
271
272 // A helper class used to manage the lifetime of a Texture.
// A helper class used to manage the lifetime of a Texture. Refcounted so a
// picture buffer's texture can be shared between the active and pending maps;
// |no_longer_needed_cb| runs exactly once, when the last reference dies.
class TextureRef : public base::RefCounted<TextureRef> {
 public:
  TextureRef(uint32_t texture_id, const base::Closure& no_longer_needed_cb)
      : texture_id_(texture_id), no_longer_needed_cb_(no_longer_needed_cb) {}

  int32_t texture_id() const { return texture_id_; }

 private:
  friend class base::RefCounted<TextureRef>;
  ~TextureRef();

  uint32_t texture_id_;
  base::Closure no_longer_needed_cb_;
};
287
TextureRef::~TextureRef() {
  // ResetAndReturn clears the member before running, guaranteeing the
  // "no longer needed" callback fires exactly once.
  base::ResetAndReturn(&no_longer_needed_cb_).Run();
}
291
// Client that can accept callbacks from a VideoDecodeAccelerator and is used by
// the TESTs below.
class GLRenderingVDAClient
    : public VideoDecodeAccelerator::Client,
      public base::SupportsWeakPtr<GLRenderingVDAClient> {
 public:
  // |window_id| the window_id of the client, which is used to identify the
  // rendering area in the |rendering_helper|.
  // Doesn't take ownership of |rendering_helper| or |note|, which must outlive
  // |*this|.
  // |num_play_throughs| indicates how many times to play through the video.
  // |reset_after_frame_num| can be a frame number >=0 indicating a mid-stream
  // Reset() should be done after that frame number is delivered, or
  // END_OF_STREAM_RESET to indicate no mid-stream Reset().
  // |delete_decoder_state| indicates when the underlying decoder should be
  // Destroy()'d and deleted and can take values: N<0: delete after -N Decode()
  // calls have been made, N>=0 means interpret as ClientState.
  // Both |reset_after_frame_num| & |delete_decoder_state| apply only to the
  // last play-through (governed by |num_play_throughs|).
  // |suppress_rendering| indicates GL rendering is supressed or not.
  // After |delay_reuse_after_frame_num| frame has been delivered, the client
  // will start delaying the call to ReusePictureBuffer() for kReuseDelay.
  // |decode_calls_per_second| is the number of VDA::Decode calls per second.
  // If |decode_calls_per_second| > 0, |num_in_flight_decodes| must be 1.
  GLRenderingVDAClient(size_t window_id,
                       RenderingHelper* rendering_helper,
                       ClientStateNotification<ClientState>* note,
                       const std::string& encoded_data,
                       int num_in_flight_decodes,
                       int num_play_throughs,
                       int reset_after_frame_num,
                       int delete_decoder_state,
                       int frame_width,
                       int frame_height,
                       media::VideoCodecProfile profile,
                       int fake_decoder,
                       bool suppress_rendering,
                       int delay_reuse_after_frame_num,
                       int decode_calls_per_second,
                       bool render_as_thumbnails);
  ~GLRenderingVDAClient() override;
  // Creates the VDA (fake or real, per |fake_decoder_|), initializes it and
  // kicks off the first decodes.
  void CreateAndStartDecoder();

  // VideoDecodeAccelerator::Client implementation.
  // The heart of the Client.
  void ProvidePictureBuffers(uint32_t requested_num_of_buffers,
                             uint32_t textures_per_buffer,
                             const gfx::Size& dimensions,
                             uint32_t texture_target) override;
  void DismissPictureBuffer(int32_t picture_buffer_id) override;
  void PictureReady(const media::Picture& picture) override;
  // Simple state changes.
  void NotifyEndOfBitstreamBuffer(int32_t bitstream_buffer_id) override;
  void NotifyFlushDone() override;
  void NotifyResetDone() override;
  void NotifyError(VideoDecodeAccelerator::Error error) override;

  // Writes per-frame delivery timestamps (relative deltas, in microseconds)
  // to |output|.
  void OutputFrameDeliveryTimes(base::File* output);

  // Simple getters for inspecting the state of the Client.
  int num_done_bitstream_buffers() { return num_done_bitstream_buffers_; }
  int num_skipped_fragments() { return num_skipped_fragments_; }
  int num_queued_fragments() { return num_queued_fragments_; }
  int num_decoded_frames() { return num_decoded_frames_; }
  double frames_per_second();
  // Return the median of the decode time of all decoded frames.
  base::TimeDelta decode_time_median();
  bool decoder_deleted() { return !decoder_.get(); }

 private:
  typedef std::map<int32_t, scoped_refptr<TextureRef>> TextureRefMap;

  void SetState(ClientState new_state);
  void FinishInitialization();
  void ReturnPicture(int32_t picture_buffer_id);

  // Delete the associated decoder helper.
  void DeleteDecoder();

  // Compute & return the first encoded bytes (including a start frame) to send
  // to the decoder, starting at |start_pos| and returning one fragment. Skips
  // to the first decodable position.
  std::string GetBytesForFirstFragment(size_t start_pos, size_t* end_pos);
  // Compute & return the encoded bytes of next fragment to send to the decoder
  // (based on |start_pos|).
  std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
  // Helpers for GetBytesForNextFragment above.
  void GetBytesForNextNALU(size_t start_pos, size_t* end_pos);  // For h.264.
  std::string GetBytesForNextFrame(
      size_t start_pos, size_t* end_pos);  // For VP8/9.

  // Request decode of the next fragment in the encoded data.
  void DecodeNextFragment();

  size_t window_id_;
  RenderingHelper* rendering_helper_;
  gfx::Size frame_size_;
  std::string encoded_data_;
  const int num_in_flight_decodes_;
  // Number of Decode() calls issued but not yet acknowledged via
  // NotifyEndOfBitstreamBuffer().
  int outstanding_decodes_;
  size_t encoded_data_next_pos_to_decode_;
  int next_bitstream_buffer_id_;
  // Not owned; used to signal ClientState transitions to the test body.
  ClientStateNotification<ClientState>* note_;
  std::unique_ptr<VideoDecodeAccelerator> decoder_;
  // Weak handle to |decoder_|, used for delayed ReusePictureBuffer posts that
  // must no-op if the decoder has been deleted in the meantime.
  base::WeakPtr<VideoDecodeAccelerator> weak_vda_;
  std::unique_ptr<base::WeakPtrFactory<VideoDecodeAccelerator>>
      weak_vda_ptr_factory_;
  std::unique_ptr<GpuVideoDecodeAcceleratorFactoryImpl> vda_factory_;
  int remaining_play_throughs_;
  int reset_after_frame_num_;
  int delete_decoder_state_;
  ClientState state_;
  int num_skipped_fragments_;
  int num_queued_fragments_;
  int num_decoded_frames_;
  int num_done_bitstream_buffers_;
  base::TimeTicks initialize_done_ticks_;
  media::VideoCodecProfile profile_;
  int fake_decoder_;
  GLenum texture_target_;
  bool suppress_rendering_;
  std::vector<base::TimeTicks> frame_delivery_times_;
  int delay_reuse_after_frame_num_;
  // A map from bitstream buffer id to the decode start time of the buffer.
  std::map<int, base::TimeTicks> decode_start_time_;
  // The decode time of all decoded frames.
  std::vector<base::TimeDelta> decode_time_;
  // The number of VDA::Decode calls per second. This is to simulate webrtc.
  int decode_calls_per_second_;
  bool render_as_thumbnails_;

  // A map of the textures that are currently active for the decoder, i.e.,
  // have been created via AssignPictureBuffers() and not dismissed via
  // DismissPictureBuffer(). The keys in the map are the IDs of the
  // corresponding picture buffers, and the values are TextureRefs to the
  // textures.
  TextureRefMap active_textures_;

  // A map of the textures that are still pending in the renderer.
  // We check this to ensure all frames are rendered before entering the
  // CS_RESET_State.
  TextureRefMap pending_textures_;

  int32_t next_picture_buffer_id_;

  base::WeakPtr<GLRenderingVDAClient> weak_this_;
  base::WeakPtrFactory<GLRenderingVDAClient> weak_this_factory_;

  DISALLOW_IMPLICIT_CONSTRUCTORS(GLRenderingVDAClient);
};
442
// Trivial success callback: used wherever a "make context current" style hook
// is required by an API but no actual work is needed in this test.
static bool DoNothingReturnTrue() { return true; }
446
// No-op image-binding callback handed to the VDA factory; always reports
// success without touching the GL image.
static bool DummyBindImage(uint32_t client_texture_id,
                           uint32_t texture_target,
                           const scoped_refptr<gl::GLImage>& image,
                           bool can_bind_to_sampler) {
  return true;
}
453
// See the class declaration for the meaning of each parameter.
GLRenderingVDAClient::GLRenderingVDAClient(
    size_t window_id,
    RenderingHelper* rendering_helper,
    ClientStateNotification<ClientState>* note,
    const std::string& encoded_data,
    int num_in_flight_decodes,
    int num_play_throughs,
    int reset_after_frame_num,
    int delete_decoder_state,
    int frame_width,
    int frame_height,
    media::VideoCodecProfile profile,
    int fake_decoder,
    bool suppress_rendering,
    int delay_reuse_after_frame_num,
    int decode_calls_per_second,
    bool render_as_thumbnails)
    : window_id_(window_id),
      rendering_helper_(rendering_helper),
      frame_size_(frame_width, frame_height),
      encoded_data_(encoded_data),
      num_in_flight_decodes_(num_in_flight_decodes),
      outstanding_decodes_(0),
      encoded_data_next_pos_to_decode_(0),
      next_bitstream_buffer_id_(0),
      note_(note),
      remaining_play_throughs_(num_play_throughs),
      reset_after_frame_num_(reset_after_frame_num),
      delete_decoder_state_(delete_decoder_state),
      state_(CS_CREATED),
      num_skipped_fragments_(0),
      num_queued_fragments_(0),
      num_decoded_frames_(0),
      num_done_bitstream_buffers_(0),
      fake_decoder_(fake_decoder),
      texture_target_(0),
      suppress_rendering_(suppress_rendering),
      delay_reuse_after_frame_num_(delay_reuse_after_frame_num),
      decode_calls_per_second_(decode_calls_per_second),
      render_as_thumbnails_(render_as_thumbnails),
      next_picture_buffer_id_(1),
      weak_this_factory_(this) {
  LOG_ASSERT(num_in_flight_decodes > 0);
  LOG_ASSERT(num_play_throughs > 0);
  // |num_in_flight_decodes_| is unsupported if |decode_calls_per_second_| > 0.
  if (decode_calls_per_second_ > 0)
    LOG_ASSERT(1 == num_in_flight_decodes_);

  // Default to H264 baseline if no profile provided.
  profile_ = (profile != media::VIDEO_CODEC_PROFILE_UNKNOWN
                  ? profile
                  : media::H264PROFILE_BASELINE);

  weak_this_ = weak_this_factory_.GetWeakPtr();
}
509
GLRenderingVDAClient::~GLRenderingVDAClient() {
  DeleteDecoder();  // Clean up in case of expected error.
  LOG_ASSERT(decoder_deleted());
  // Notify the test that the client has reached its terminal state.
  SetState(CS_DESTROYED);
}
515
// Creates either a FakeVideoDecodeAccelerator or a real VDA via the factory,
// then transitions through CS_DECODER_SET and starts decoding.
void GLRenderingVDAClient::CreateAndStartDecoder() {
  LOG_ASSERT(decoder_deleted());
  LOG_ASSERT(!decoder_.get());

  if (fake_decoder_) {
    decoder_.reset(new FakeVideoDecodeAccelerator(
        frame_size_, base::Bind(&DoNothingReturnTrue)));
    LOG_ASSERT(decoder_->Initialize(profile_, this));
  } else {
    // The factory is created lazily once and reused across play-throughs.
    if (!vda_factory_) {
      vda_factory_ = GpuVideoDecodeAcceleratorFactoryImpl::Create(
          base::Bind(&RenderingHelper::GetGLContext,
                     base::Unretained(rendering_helper_)),
          base::Bind(&DoNothingReturnTrue), base::Bind(&DummyBindImage));
      LOG_ASSERT(vda_factory_);
    }

    VideoDecodeAccelerator::Config config(profile_);
    gpu::GpuPreferences gpu_preferences;
    decoder_ = vda_factory_->CreateVDA(this, config, gpu_preferences);
  }

  LOG_ASSERT(decoder_) << "Failed creating a VDA";

  decoder_->TryToSetupDecodeOnSeparateThread(
      weak_this_, base::ThreadTaskRunnerHandle::Get());

  SetState(CS_DECODER_SET);
  FinishInitialization();
}
546
// Allocates GL textures on the rendering thread (blocking on each creation)
// and hands the resulting picture buffers to the decoder.
void GLRenderingVDAClient::ProvidePictureBuffers(
    uint32_t requested_num_of_buffers,
    uint32_t textures_per_buffer,
    const gfx::Size& dimensions,
    uint32_t texture_target) {
  if (decoder_deleted())
    return;
  LOG_ASSERT(textures_per_buffer == 1u);
  std::vector<media::PictureBuffer> buffers;

  // Deliberately over-provision to verify the decoder copes with an upward
  // adjustment of the buffer count (see kExtraPictureBuffers).
  requested_num_of_buffers += kExtraPictureBuffers;

  texture_target_ = texture_target;
  for (uint32_t i = 0; i < requested_num_of_buffers; ++i) {
    uint32_t texture_id;
    base::WaitableEvent done(false, false);
    rendering_helper_->CreateTexture(
        texture_target_, &texture_id, dimensions, &done);
    done.Wait();

    int32_t picture_buffer_id = next_picture_buffer_id_++;
    // The TextureRef's destruction callback deletes the GL texture on the
    // rendering thread once neither the decoder nor renderer needs it.
    LOG_ASSERT(active_textures_
                   .insert(std::make_pair(
                       picture_buffer_id,
                       new TextureRef(texture_id,
                                      base::Bind(&RenderingHelper::DeleteTexture,
                                                 base::Unretained(rendering_helper_),
                                                 texture_id))))
                   .second);

    media::PictureBuffer::TextureIds ids;
    ids.push_back(texture_id);
    buffers.push_back(media::PictureBuffer(picture_buffer_id, dimensions, ids));
  }
  decoder_->AssignPictureBuffers(buffers);
}
583
void GLRenderingVDAClient::DismissPictureBuffer(int32_t picture_buffer_id) {
  // Dropping the map entry releases our TextureRef; the buffer must have been
  // active exactly once.
  LOG_ASSERT(1U == active_textures_.erase(picture_buffer_id));
}
587
// Records timing for the delivered picture, optionally triggers the
// mid-stream Reset(), and forwards the frame to the renderer.
void GLRenderingVDAClient::PictureReady(const media::Picture& picture) {
  // We shouldn't be getting pictures delivered after Reset has completed.
  LOG_ASSERT(state_ < CS_RESET);

  if (decoder_deleted())
    return;

  base::TimeTicks now = base::TimeTicks::Now();

  frame_delivery_times_.push_back(now);

  // Save the decode time of this picture.
  std::map<int, base::TimeTicks>::iterator it =
      decode_start_time_.find(picture.bitstream_buffer_id());
  ASSERT_NE(decode_start_time_.end(), it);
  decode_time_.push_back(now - it->second);
  decode_start_time_.erase(it);

  LOG_ASSERT(picture.bitstream_buffer_id() <= next_bitstream_buffer_id_);
  ++num_decoded_frames_;

  // Mid-stream reset applies only to the last play-through per constructor
  // comment.
  if (remaining_play_throughs_ == 1 &&
      reset_after_frame_num_ == num_decoded_frames_) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    // Re-start decoding from the beginning of the stream to avoid needing to
    // know how to find I-frames and so on in this test.
    encoded_data_next_pos_to_decode_ = 0;
  }

  TextureRefMap::iterator texture_it =
      active_textures_.find(picture.picture_buffer_id());
  ASSERT_NE(active_textures_.end(), texture_it);

  // The VideoFrameTexture's release callback returns the picture buffer to
  // the decoder (ReturnPicture) once the renderer is done with it.
  scoped_refptr<VideoFrameTexture> video_frame = new VideoFrameTexture(
      texture_target_, texture_it->second->texture_id(),
      base::Bind(&GLRenderingVDAClient::ReturnPicture, AsWeakPtr(),
                 picture.picture_buffer_id()));
  ASSERT_TRUE(pending_textures_.insert(*texture_it).second);

  if (render_as_thumbnails_) {
    rendering_helper_->RenderThumbnail(video_frame->texture_target(),
                                       video_frame->texture_id());
  } else if (!suppress_rendering_) {
    rendering_helper_->QueueVideoFrame(window_id_, video_frame);
  }
}
637
638 void GLRenderingVDAClient::ReturnPicture(int32_t picture_buffer_id) {
639 if (decoder_deleted())
640 return;
641 LOG_ASSERT(1U == pending_textures_.erase(picture_buffer_id));
642
643 if (pending_textures_.empty() && state_ == CS_RESETTING) {
644 SetState(CS_RESET);
645 DeleteDecoder();
646 return;
647 }
648
649 if (num_decoded_frames_ > delay_reuse_after_frame_num_) {
650 base::MessageLoop::current()->PostDelayedTask(
651 FROM_HERE, base::Bind(&VideoDecodeAccelerator::ReusePictureBuffer,
652 weak_vda_, picture_buffer_id),
653 kReuseDelay);
654 } else {
655 decoder_->ReusePictureBuffer(picture_buffer_id);
656 }
657 }
658
// Bookkeeping for a consumed bitstream buffer; drives the Flush at
// end-of-stream or queues the next fragment otherwise.
void GLRenderingVDAClient::NotifyEndOfBitstreamBuffer(
    int32_t bitstream_buffer_id) {
  // TODO(fischman): this test currently relies on this notification to make
  // forward progress during a Reset(). But the VDA::Reset() API doesn't
  // guarantee this, so stop relying on it (and remove the notifications from
  // VaapiVideoDecodeAccelerator::FinishReset()).
  ++num_done_bitstream_buffers_;
  --outstanding_decodes_;

  // Flush decoder after all BitstreamBuffers are processed.
  if (encoded_data_next_pos_to_decode_ == encoded_data_.size()) {
    // TODO(owenlin): We should not have to check the number of
    // |outstanding_decodes_|. |decoder_| should be able to accept Flush()
    // before it's done with outstanding decodes. (crbug.com/528183)
    if (outstanding_decodes_ == 0) {
      decoder_->Flush();
      SetState(CS_FLUSHING);
    }
  } else if (decode_calls_per_second_ == 0) {
    // When rate-limited decoding is active (> 0), the timer-driven path
    // issues the next Decode instead of this callback.
    DecodeNextFragment();
  }
}
681
void GLRenderingVDAClient::NotifyFlushDone() {
  if (decoder_deleted())
    return;

  SetState(CS_FLUSHED);
  --remaining_play_throughs_;
  DCHECK_GE(remaining_play_throughs_, 0);
  // SetState() may have deleted the decoder (when CS_FLUSHED matches
  // |delete_decoder_state_|), so re-check before touching |decoder_|.
  if (decoder_deleted())
    return;
  decoder_->Reset();
  SetState(CS_RESETTING);
}
694
// Handles the several distinct reasons a Reset() completes: mid-stream reset,
// start-of-stream reset, a new play-through, or final teardown.
void GLRenderingVDAClient::NotifyResetDone() {
  if (decoder_deleted())
    return;

  if (reset_after_frame_num_ == MID_STREAM_RESET) {
    // Resume decoding after the mid-stream reset; no further resets planned.
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    DecodeNextFragment();
    return;
  } else if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    // Reset-before-decode case: now start the normal pipeline of in-flight
    // decodes.
    reset_after_frame_num_ = END_OF_STREAM_RESET;
    for (int i = 0; i < num_in_flight_decodes_; ++i)
      DecodeNextFragment();
    return;
  }

  if (remaining_play_throughs_) {
    // Rewind the stream and run another play-through.
    encoded_data_next_pos_to_decode_ = 0;
    FinishInitialization();
    return;
  }

  rendering_helper_->Flush(window_id_);

  // If frames are still pending in the renderer, ReturnPicture() will finish
  // the CS_RESET transition once the last one comes back.
  if (pending_textures_.empty()) {
    SetState(CS_RESET);
    DeleteDecoder();
  }
}
723
void GLRenderingVDAClient::NotifyError(VideoDecodeAccelerator::Error error) {
  // The specific error code is not inspected; tests only observe CS_ERROR.
  SetState(CS_ERROR);
}
727
// Writes the frame count followed by one line per frame containing the delta
// (in microseconds) from the previous delivery (or from initialization for
// frame 0) to |output|.
void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
  std::string s = base::StringPrintf("frame count: %" PRIuS "\n",
                                     frame_delivery_times_.size());
  output->WriteAtCurrentPos(s.data(), s.length());
  base::TimeTicks t0 = initialize_done_ticks_;
  for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
    s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
                           i,
                           (frame_delivery_times_[i] - t0).InMicroseconds());
    t0 = frame_delivery_times_[i];
    output->WriteAtCurrentPos(s.data(), s.length());
  }
}
741
// Returns true iff |encoded| holds a 4-byte Annex B start code
// (00 00 00 01) beginning at |pos|. Callers guarantee pos+3 is in range.
static bool LookingAtNAL(const std::string& encoded, size_t pos) {
  static const char kAnnexBStartCode[] = {0x00, 0x00, 0x00, 0x01};
  return encoded.compare(pos, 4, kAnnexBStartCode, 4) == 0;
}
746
// Publishes |new_state| to the test via |note_| and, on the final
// play-through, deletes the decoder if this is the configured deletion point.
void GLRenderingVDAClient::SetState(ClientState new_state) {
  note_->Notify(new_state);
  state_ = new_state;
  if (!remaining_play_throughs_ && new_state == delete_decoder_state_) {
    LOG_ASSERT(!decoder_deleted());
    DeleteDecoder();
  }
}
755
// Marks the client initialized, then either performs the start-of-stream
// Reset() or primes the pipeline with |num_in_flight_decodes_| decodes.
void GLRenderingVDAClient::FinishInitialization() {
  SetState(CS_INITIALIZED);
  initialize_done_ticks_ = base::TimeTicks::Now();

  if (reset_after_frame_num_ == START_OF_STREAM_RESET) {
    // Decoding begins from NotifyResetDone() in this mode.
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    return;
  }

  for (int i = 0; i < num_in_flight_decodes_; ++i)
    DecodeNextFragment();
  DCHECK_EQ(outstanding_decodes_, num_in_flight_decodes_);
}
770
// Tears down the decoder (idempotent) and fast-forwards the client through
// every remaining ClientState so tests waiting on later states unblock.
void GLRenderingVDAClient::DeleteDecoder() {
  if (decoder_deleted())
    return;
  // Invalidate outstanding weak pointers before destroying the decoder so
  // pending delayed ReusePictureBuffer tasks become no-ops.
  weak_vda_ptr_factory_.reset();
  decoder_.reset();
  STLClearObject(&encoded_data_);
  active_textures_.clear();

  // Cascade through the rest of the states to simplify test code below.
  for (int i = state_ + 1; i < CS_MAX; ++i)
    SetState(static_cast<ClientState>(i));
}
783
// For H.264, skips NALUs until an SPS is found so decoding starts at a
// decodable position; for VP8/9 simply returns the next frame. Returns an
// empty string (and rewinds *end_pos) if no SPS exists past |start_pos|.
std::string GLRenderingVDAClient::GetBytesForFirstFragment(
    size_t start_pos, size_t* end_pos) {
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    while (*end_pos + 4 < encoded_data_.size()) {
      // Low 5 bits of the byte after the start code are the NAL unit type;
      // 7 == SPS.
      if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7)  // SPS start frame
        return GetBytesForNextFragment(*end_pos, end_pos);
      GetBytesForNextNALU(*end_pos, end_pos);
      num_skipped_fragments_++;
    }
    *end_pos = start_pos;
    return std::string();
  }
  DCHECK_LE(profile_, media::VP9PROFILE_MAX);
  return GetBytesForNextFragment(start_pos, end_pos);
}
800
// Returns the next fragment (one NALU for H.264, one IVF frame for VP8/9)
// starting at |start_pos|, advancing *end_pos past it.
std::string GLRenderingVDAClient::GetBytesForNextFragment(
    size_t start_pos, size_t* end_pos) {
  if (profile_ < media::H264PROFILE_MAX) {
    *end_pos = start_pos;
    GetBytesForNextNALU(*end_pos, end_pos);
    // A zero-length result means end of stream; don't count it as queued.
    if (start_pos != *end_pos) {
      num_queued_fragments_++;
    }
    return encoded_data_.substr(start_pos, *end_pos - start_pos);
  }
  DCHECK_LE(profile_, media::VP9PROFILE_MAX);
  return GetBytesForNextFrame(start_pos, end_pos);
}
814
// Advances *end_pos from |start_pos| (which must point at a 4-byte Annex B
// start code) to the start of the next NALU, or to end-of-data.
void GLRenderingVDAClient::GetBytesForNextNALU(
    size_t start_pos, size_t* end_pos) {
  *end_pos = start_pos;
  if (*end_pos + 4 > encoded_data_.size())
    return;
  LOG_ASSERT(LookingAtNAL(encoded_data_, start_pos));
  *end_pos += 4;
  // Scan forward until the next start code.
  while (*end_pos + 4 <= encoded_data_.size() &&
         !LookingAtNAL(encoded_data_, *end_pos)) {
    ++*end_pos;
  }
  // Too few bytes remain for another start code: consume the tail.
  if (*end_pos + 3 >= encoded_data_.size())
    *end_pos = encoded_data_.size();
}
829
830 std::string GLRenderingVDAClient::GetBytesForNextFrame(
831 size_t start_pos, size_t* end_pos) {
832 // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
833 std::string bytes;
834 if (start_pos == 0)
835 start_pos = 32; // Skip IVF header.
836 *end_pos = start_pos;
837 uint32_t frame_size = *reinterpret_cast<uint32_t*>(&encoded_data_[*end_pos]);
838 *end_pos += 12; // Skip frame header.
839 bytes.append(encoded_data_.substr(*end_pos, frame_size));
840 *end_pos += frame_size;
841 num_queued_fragments_++;
842 return bytes;
843 }
844
// Returns true if the fragment in |data| carries stream configuration info:
// an SPS NALU for H.264, or a keyframe for VP8/9 (low bit of the first
// payload byte clear per the VP8 frame tag).
static bool FragmentHasConfigInfo(const uint8_t* data,
                                  size_t size,
                                  media::VideoCodecProfile profile) {
  if (profile >= media::H264PROFILE_MIN &&
      profile <= media::H264PROFILE_MAX) {
    media::H264Parser parser;
    parser.SetStream(data, size);
    media::H264NALU nalu;
    media::H264Parser::Result result = parser.AdvanceToNextNALU(&nalu);
    if (result != media::H264Parser::kOk) {
      // Let the VDA figure out there's something wrong with the stream.
      return false;
    }

    return nalu.nal_unit_type == media::H264NALU::kSPS;
  } else if (profile >= media::VP8PROFILE_MIN &&
             profile <= media::VP9PROFILE_MAX) {
    return (size > 0 && !(data[0] & 0x01));
  }
  // Shouldn't happen at this point.
  LOG(FATAL) << "Invalid profile: " << profile;
  return false;
}
868
// Extracts the next bitstream fragment from |encoded_data_|, copies it into a
// fresh shared-memory buffer, and submits it to the decoder. Optionally
// triggers a mid-stream Reset() (see below), deletes the decoder at the
// configured point, and — in throttled mode — schedules the next Decode()
// call itself. No-op if the decoder is gone or the stream is exhausted.
void GLRenderingVDAClient::DecodeNextFragment() {
  if (decoder_deleted())
    return;
  if (encoded_data_next_pos_to_decode_ == encoded_data_.size())
    return;
  size_t end_pos;
  std::string next_fragment_bytes;
  // The first fragment may need special handling (e.g. stream headers), so it
  // goes through GetBytesForFirstFragment().
  if (encoded_data_next_pos_to_decode_ == 0) {
    next_fragment_bytes = GetBytesForFirstFragment(0, &end_pos);
  } else {
    next_fragment_bytes =
        GetBytesForNextFragment(encoded_data_next_pos_to_decode_, &end_pos);
  }
  size_t next_fragment_size = next_fragment_bytes.size();

  // Call Reset() just after Decode() if the fragment contains config info.
  // This tests how the VDA behaves when it gets a reset request before it has
  // a chance to ProvidePictureBuffers().
  bool reset_here = false;
  if (reset_after_frame_num_ == RESET_AFTER_FIRST_CONFIG_INFO) {
    reset_here = FragmentHasConfigInfo(
        reinterpret_cast<const uint8_t*>(next_fragment_bytes.data()),
        next_fragment_size, profile_);
    // Arm the end-of-stream reset so this special case only fires once.
    if (reset_here)
      reset_after_frame_num_ = END_OF_STREAM_RESET;
  }

  // Populate the shared memory buffer w/ the fragment, duplicate its handle,
  // and hand it off to the decoder.
  base::SharedMemory shm;
  LOG_ASSERT(shm.CreateAndMapAnonymous(next_fragment_size));
  memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
  base::SharedMemoryHandle dup_handle;
  bool result = shm.ShareToProcess(base::GetCurrentProcessHandle(),
                                   &dup_handle);
  LOG_ASSERT(result);
  // NOTE(review): |shm| goes out of scope at the end of this function; the
  // decoder presumably holds the region alive via |dup_handle| — confirm.
  media::BitstreamBuffer bitstream_buffer(
      next_bitstream_buffer_id_, dup_handle, next_fragment_size);
  decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
  // Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
  next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
  decoder_->Decode(bitstream_buffer);
  ++outstanding_decodes_;
  // A negative |delete_decoder_state_| encodes "delete after the N'th
  // Decode()" (hence the unary minus), active only on the last play-through.
  if (!remaining_play_throughs_ &&
      -delete_decoder_state_ == next_bitstream_buffer_id_) {
    DeleteDecoder();
  }

  if (reset_here) {
    reset_after_frame_num_ = MID_STREAM_RESET;
    decoder_->Reset();
    // Restart from the beginning to re-Decode() the SPS we just sent.
    encoded_data_next_pos_to_decode_ = 0;
  } else {
    encoded_data_next_pos_to_decode_ = end_pos;
  }

  // In throttled (decode-rate-limited) mode, this method drives itself via
  // delayed tasks instead of being called from decode-done callbacks.
  if (decode_calls_per_second_ > 0) {
    base::MessageLoop::current()->PostDelayedTask(
        FROM_HERE,
        base::Bind(&GLRenderingVDAClient::DecodeNextFragment, AsWeakPtr()),
        base::TimeDelta::FromSeconds(1) / decode_calls_per_second_);
  }
}
933
934 double GLRenderingVDAClient::frames_per_second() {
935 base::TimeDelta delta = frame_delivery_times_.back() - initialize_done_ticks_;
936 return num_decoded_frames_ / delta.InSecondsF();
937 }
938
939 base::TimeDelta GLRenderingVDAClient::decode_time_median() {
940 if (decode_time_.size() == 0)
941 return base::TimeDelta();
942 std::sort(decode_time_.begin(), decode_time_.end());
943 int index = decode_time_.size() / 2;
944 if (decode_time_.size() % 2 != 0)
945 return decode_time_[index];
946
947 return (decode_time_[index] + decode_time_[index - 1]) / 2;
948 }
949
// Test fixture providing shared plumbing for the VDA tests: parses the test
// video description string, owns the rendering helper, and wraps the
// cross-thread setup/teardown/wait choreography used by the tests below.
class VideoDecodeAcceleratorTest : public ::testing::Test {
 protected:
  VideoDecodeAcceleratorTest();
  void SetUp() override;
  void TearDown() override;

  // Parse |data| into its constituent parts, set the various output fields
  // accordingly, and read in video stream.  CHECK-fails on unexpected or
  // missing required data.  Unspecified optional fields are set to -1.
  void ParseAndReadTestVideoData(base::FilePath::StringType data,
                                 std::vector<TestVideoFile*>* test_video_files);

  // Update the parameters of |test_video_files| according to
  // |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
  // frames should be adjusted if decoder is reset in the middle of the stream.
  void UpdateTestVideoFileParams(
      size_t num_concurrent_decoders,
      int reset_point,
      std::vector<TestVideoFile*>* test_video_files);

  // Sets up |rendering_helper_| and blocks until its initialization has
  // completed on the rendering thread.
  void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
  // Posts decoder creation to the rendering thread and blocks until the
  // client reports CS_DECODER_SET.
  void CreateAndStartDecoder(GLRenderingVDAClient* client,
                             ClientStateNotification<ClientState>* note);
  // Drains state notifications from |note| until CS_DESTROYED (bounded).
  void WaitUntilDecodeFinish(ClientStateNotification<ClientState>* note);
  // Blocks until all tasks already queued on the rendering thread have run.
  void WaitUntilIdle();
  // Overwrites the file at |log_path| with |content|.
  void OutputLogFile(const base::FilePath::CharType* log_path,
                     const std::string& content);

  std::vector<TestVideoFile*> test_video_files_;
  RenderingHelper rendering_helper_;

 private:
  // Required for Thread to work.  Not used otherwise.
  base::ShadowingAtExitManager at_exit_manager_;

  DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
};
987
988 VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest() {
989 }
990
// Parses the --test_video_data command-line description and loads each video
// file from disk into |test_video_files_|.
void VideoDecodeAcceleratorTest::SetUp() {
  ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
}
994
// Tears down per-test state. Deletion of the video files and rendering-helper
// uninitialization both run on the rendering thread (which owns them); the
// WaitableEvent ensures UnInitialize has completed before the final TearDown
// on this thread.
void VideoDecodeAcceleratorTest::TearDown() {
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&STLDeleteElements<std::vector<TestVideoFile*>>,
                            &test_video_files_));

  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&RenderingHelper::UnInitialize,
                            base::Unretained(&rendering_helper_), &done));
  done.Wait();

  rendering_helper_.TearDown();
}
1008
1009 void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
1010 base::FilePath::StringType data,
1011 std::vector<TestVideoFile*>* test_video_files) {
1012 std::vector<base::FilePath::StringType> entries = base::SplitString(
1013 data, base::FilePath::StringType(1, ';'),
1014 base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
1015 LOG_ASSERT(entries.size() >= 1U) << data;
1016 for (size_t index = 0; index < entries.size(); ++index) {
1017 std::vector<base::FilePath::StringType> fields = base::SplitString(
1018 entries[index], base::FilePath::StringType(1, ':'),
1019 base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
1020 LOG_ASSERT(fields.size() >= 1U) << entries[index];
1021 LOG_ASSERT(fields.size() <= 8U) << entries[index];
1022 TestVideoFile* video_file = new TestVideoFile(fields[0]);
1023 if (!fields[1].empty())
1024 LOG_ASSERT(base::StringToInt(fields[1], &video_file->width));
1025 if (!fields[2].empty())
1026 LOG_ASSERT(base::StringToInt(fields[2], &video_file->height));
1027 if (!fields[3].empty())
1028 LOG_ASSERT(base::StringToInt(fields[3], &video_file->num_frames));
1029 if (!fields[4].empty())
1030 LOG_ASSERT(base::StringToInt(fields[4], &video_file->num_fragments));
1031 if (!fields[5].empty())
1032 LOG_ASSERT(base::StringToInt(fields[5], &video_file->min_fps_render));
1033 if (!fields[6].empty())
1034 LOG_ASSERT(base::StringToInt(fields[6], &video_file->min_fps_no_render));
1035 int profile = -1;
1036 if (!fields[7].empty())
1037 LOG_ASSERT(base::StringToInt(fields[7], &profile));
1038 video_file->profile = static_cast<media::VideoCodecProfile>(profile);
1039
1040 // Read in the video data.
1041 base::FilePath filepath(video_file->file_name);
1042 LOG_ASSERT(base::ReadFileToString(filepath, &video_file->data_str))
1043 << "test_video_file: " << filepath.MaybeAsASCII();
1044
1045 test_video_files->push_back(video_file);
1046 }
1047 }
1048
1049 void VideoDecodeAcceleratorTest::UpdateTestVideoFileParams(
1050 size_t num_concurrent_decoders,
1051 int reset_point,
1052 std::vector<TestVideoFile*>* test_video_files) {
1053 for (size_t i = 0; i < test_video_files->size(); i++) {
1054 TestVideoFile* video_file = (*test_video_files)[i];
1055 if (reset_point == MID_STREAM_RESET) {
1056 // Reset should not go beyond the last frame;
1057 // reset in the middle of the stream for short videos.
1058 video_file->reset_after_frame_num = kMaxResetAfterFrameNum;
1059 if (video_file->num_frames <= video_file->reset_after_frame_num)
1060 video_file->reset_after_frame_num = video_file->num_frames / 2;
1061
1062 video_file->num_frames += video_file->reset_after_frame_num;
1063 } else {
1064 video_file->reset_after_frame_num = reset_point;
1065 }
1066
1067 if (video_file->min_fps_render != -1)
1068 video_file->min_fps_render /= num_concurrent_decoders;
1069 if (video_file->min_fps_no_render != -1)
1070 video_file->min_fps_no_render /= num_concurrent_decoders;
1071 }
1072 }
1073
// Performs this-thread setup of |rendering_helper_|, then runs its
// Initialize() on the rendering thread and blocks until it completes.
void VideoDecodeAcceleratorTest::InitializeRenderingHelper(
    const RenderingHelperParams& helper_params) {
  rendering_helper_.Setup();

  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&RenderingHelper::Initialize,
                 base::Unretained(&rendering_helper_), helper_params, &done));
  done.Wait();
}
1085
// Posts decoder creation for |client| to the rendering thread, then blocks
// until the client reports CS_DECODER_SET through |note|.
void VideoDecodeAcceleratorTest::CreateAndStartDecoder(
    GLRenderingVDAClient* client,
    ClientStateNotification<ClientState>* note) {
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE, base::Bind(&GLRenderingVDAClient::CreateAndStartDecoder,
                            base::Unretained(client)));
  ASSERT_EQ(note->Wait(), CS_DECODER_SET);
}
1094
1095 void VideoDecodeAcceleratorTest::WaitUntilDecodeFinish(
1096 ClientStateNotification<ClientState>* note) {
1097 for (int i = 0; i < CS_MAX; i++) {
1098 if (note->Wait() == CS_DESTROYED)
1099 break;
1100 }
1101 }
1102
// Blocks until every task currently queued on the rendering thread has run,
// by queueing a sentinel Signal task behind them and waiting on it.
void VideoDecodeAcceleratorTest::WaitUntilIdle() {
  base::WaitableEvent done(false, false);
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&base::WaitableEvent::Signal, base::Unretained(&done)));
  done.Wait();
}
1110
// Overwrites (creates or truncates) the file at |log_path| with |content|.
// Best-effort: the return value of WriteAtCurrentPos is deliberately ignored,
// as a failed log write should not fail the test.
void VideoDecodeAcceleratorTest::OutputLogFile(
    const base::FilePath::CharType* log_path,
    const std::string& content) {
  base::File file(base::FilePath(log_path),
                  base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
  file.WriteAtCurrentPos(content.data(), content.length());
}
1118
// Test parameters:
// - Number of concurrent decoders. The value takes effect when there is only
//   one input stream; otherwise, one decoder per input stream will be
//   instantiated.
// - Number of concurrent in-flight Decode() calls per decoder.
// - Number of play-throughs.
// - reset_after_frame_num: see GLRenderingVDAClient ctor.
// - delete_decoder_phase: see GLRenderingVDAClient ctor.
// - whether to test slow rendering by delaying ReusePictureBuffer().
// - whether the video frames are rendered as thumbnails.
//
// The tuple element order here must match the base::get<N> indices used in
// TestSimpleDecode below.
class VideoDecodeAcceleratorParamTest
    : public VideoDecodeAcceleratorTest,
      public ::testing::WithParamInterface<
          base::Tuple<int, int, int, ResetPoint, ClientState, bool, bool> > {
};
1134
1135 // Wait for |note| to report a state and if it's not |expected_state| then
1136 // assert |client| has deleted its decoder.
1137 static void AssertWaitForStateOrDeleted(
1138 ClientStateNotification<ClientState>* note,
1139 GLRenderingVDAClient* client,
1140 ClientState expected_state) {
1141 ClientState state = note->Wait();
1142 if (state == expected_state) return;
1143 ASSERT_TRUE(client->decoder_deleted())
1144 << "Decoder not deleted but Wait() returned " << state
1145 << ", instead of " << expected_state;
1146 }
1147
// We assert a minimal number of concurrent decoders we expect to succeed.
// Different platforms can support more concurrent decoders, so we don't assert
// failure above this.
// (Declared as an anonymous enum rather than a const int so it can be used
// where an integral constant expression is required.)
enum { kMinSupportedNumConcurrentDecoders = 3 };
1152
// Test the most straightforward case possible: data is decoded from a single
// chunk and rendered to the screen.
//
// Phases: (1) unpack the parameter tuple, (2) build all clients and the
// rendering helper, (3) start all decoders, (4) walk each client through its
// expected state sequence, (5) check frame counts / fps / thumbnail MD5s,
// (6) clean up on the rendering thread.
TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
  size_t num_concurrent_decoders = base::get<0>(GetParam());
  const size_t num_in_flight_decodes = base::get<1>(GetParam());
  int num_play_throughs = base::get<2>(GetParam());
  const int reset_point = base::get<3>(GetParam());
  const int delete_decoder_state = base::get<4>(GetParam());
  bool test_reuse_delay = base::get<5>(GetParam());
  const bool render_as_thumbnails = base::get<6>(GetParam());

  // With multiple input streams, one decoder per stream overrides the
  // parameterized decoder count.
  if (test_video_files_.size() > 1)
    num_concurrent_decoders = test_video_files_.size();

  // Command-line --num_play_throughs overrides the parameterized value.
  if (g_num_play_throughs > 0)
    num_play_throughs = g_num_play_throughs;

  UpdateTestVideoFileParams(
      num_concurrent_decoders, reset_point, &test_video_files_);

  // Suppress GL rendering for all tests when the "--rendering_fps" is 0.
  const bool suppress_rendering = g_rendering_fps == 0;

  std::vector<ClientStateNotification<ClientState>*>
      notes(num_concurrent_decoders, NULL);
  std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);

  RenderingHelperParams helper_params;
  helper_params.rendering_fps = g_rendering_fps;
  helper_params.warm_up_iterations = g_rendering_warm_up;
  helper_params.render_as_thumbnails = render_as_thumbnails;
  if (render_as_thumbnails) {
    // Only one decoder is supported with thumbnail rendering
    LOG_ASSERT(num_concurrent_decoders == 1U);
    helper_params.thumbnails_page_size = kThumbnailsPageSize;
    helper_params.thumbnail_size = kThumbnailSize;
  }

  // First kick off all the decoders.
  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    // Round-robin the video files across the decoders.
    TestVideoFile* video_file =
        test_video_files_[index % test_video_files_.size()];
    ClientStateNotification<ClientState>* note =
        new ClientStateNotification<ClientState>();
    notes[index] = note;

    // When testing slow rendering, delay buffer reuse near the end of the
    // stream (but only if the video is long enough to absorb the delay).
    int delay_after_frame_num = std::numeric_limits<int>::max();
    if (test_reuse_delay &&
        kMaxFramesToDelayReuse * 2 < video_file->num_frames) {
      delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
    }

    GLRenderingVDAClient* client =
        new GLRenderingVDAClient(index,
                                 &rendering_helper_,
                                 note,
                                 video_file->data_str,
                                 num_in_flight_decodes,
                                 num_play_throughs,
                                 video_file->reset_after_frame_num,
                                 delete_decoder_state,
                                 video_file->width,
                                 video_file->height,
                                 video_file->profile,
                                 g_fake_decoder,
                                 suppress_rendering,
                                 delay_after_frame_num,
                                 0,
                                 render_as_thumbnails);

    clients[index] = client;
    helper_params.window_sizes.push_back(
        render_as_thumbnails
            ? kThumbnailsPageSize
            : gfx::Size(video_file->width, video_file->height));
  }

  InitializeRenderingHelper(helper_params);

  for (size_t index = 0; index < num_concurrent_decoders; ++index) {
    CreateAndStartDecoder(clients[index], notes[index]);
  }

  // Then wait for all the decodes to finish.
  // Only check performance & correctness later if we play through only once.
  bool skip_performance_and_correctness_checks = num_play_throughs > 1;
  for (size_t i = 0; i < num_concurrent_decoders; ++i) {
    ClientStateNotification<ClientState>* note = notes[i];
    ClientState state = note->Wait();
    if (state != CS_INITIALIZED) {
      skip_performance_and_correctness_checks = true;
      // We expect initialization to fail only when more than the supported
      // number of decoders is instantiated.  Assert here that something else
      // didn't trigger failure.
      ASSERT_GT(num_concurrent_decoders,
                static_cast<size_t>(kMinSupportedNumConcurrentDecoders));
      continue;
    }
    ASSERT_EQ(state, CS_INITIALIZED);
    for (int n = 0; n < num_play_throughs; ++n) {
      // For play-throughs other than the first, we expect initialization to
      // succeed unconditionally.
      if (n > 0) {
        ASSERT_NO_FATAL_FAILURE(
            AssertWaitForStateOrDeleted(note, clients[i], CS_INITIALIZED));
      }
      // InitializeDone kicks off decoding inside the client, so we just need to
      // wait for Flush.
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHING));
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_FLUSHED));
      // FlushDone requests Reset().
      ASSERT_NO_FATAL_FAILURE(
          AssertWaitForStateOrDeleted(note, clients[i], CS_RESETTING));
    }
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_RESET));
    // ResetDone requests Destroy().
    ASSERT_NO_FATAL_FAILURE(
        AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
  }
  // Finally assert that decoding went as expected.
  for (size_t i = 0; i < num_concurrent_decoders &&
           !skip_performance_and_correctness_checks; ++i) {
    // We can only make performance/correctness assertions if the decoder was
    // allowed to finish.
    if (delete_decoder_state < CS_FLUSHED)
      continue;
    GLRenderingVDAClient* client = clients[i];
    TestVideoFile* video_file = test_video_files_[i % test_video_files_.size()];
    if (video_file->num_frames > 0) {
      // Expect the decoded frames may be more than the video frames as frames
      // could still be returned until resetting done.
      if (video_file->reset_after_frame_num > 0)
        EXPECT_GE(client->num_decoded_frames(), video_file->num_frames);
      else
        EXPECT_EQ(client->num_decoded_frames(), video_file->num_frames);
    }
    if (reset_point == END_OF_STREAM_RESET) {
      // Every fragment in the file must be either skipped or queued, and
      // every queued fragment must come back as a done bitstream buffer.
      EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
                client->num_queued_fragments());
      EXPECT_EQ(client->num_done_bitstream_buffers(),
                client->num_queued_fragments());
    }
    LOG(INFO) << "Decoder " << i << " fps: " << client->frames_per_second();
    if (!render_as_thumbnails) {
      int min_fps = suppress_rendering ?
          video_file->min_fps_no_render : video_file->min_fps_render;
      if (min_fps > 0 && !test_reuse_delay)
        EXPECT_GT(client->frames_per_second(), min_fps);
    }
  }

  if (render_as_thumbnails) {
    // Fetch the rendered thumbnail page from the rendering thread and compare
    // its MD5 against the golden list; on mismatch, dump a PNG for debugging.
    std::vector<unsigned char> rgb;
    bool alpha_solid;
    base::WaitableEvent done(false, false);
    g_env->GetRenderingTaskRunner()->PostTask(
        FROM_HERE, base::Bind(&RenderingHelper::GetThumbnailsAsRGB,
                              base::Unretained(&rendering_helper_), &rgb,
                              &alpha_solid, &done));
    done.Wait();

    std::vector<std::string> golden_md5s;
    std::string md5_string = base::MD5String(
        base::StringPiece(reinterpret_cast<char*>(&rgb[0]), rgb.size()));
    ReadGoldenThumbnailMD5s(test_video_files_[0], &golden_md5s);
    std::vector<std::string>::iterator match =
        find(golden_md5s.begin(), golden_md5s.end(), md5_string);
    if (match == golden_md5s.end()) {
      // Convert raw RGB into PNG for export.
      std::vector<unsigned char> png;
      gfx::PNGCodec::Encode(&rgb[0],
                            gfx::PNGCodec::FORMAT_RGB,
                            kThumbnailsPageSize,
                            kThumbnailsPageSize.width() * 3,
                            true,
                            std::vector<gfx::PNGCodec::Comment>(),
                            &png);

      LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;

      base::FilePath filepath(test_video_files_[0]->file_name);
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
      filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
      int num_bytes = base::WriteFile(filepath,
                                      reinterpret_cast<char*>(&png[0]),
                                      png.size());
      ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
    }
    ASSERT_NE(match, golden_md5s.end());
    EXPECT_EQ(alpha_solid, true) << "RGBA frame had incorrect alpha";
  }

  // Output the frame delivery time to file
  // We can only make performance/correctness assertions if the decoder was
  // allowed to finish.
  if (g_output_log != NULL && delete_decoder_state >= CS_FLUSHED) {
    base::File output_file(
        base::FilePath(g_output_log),
        base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
    for (size_t i = 0; i < num_concurrent_decoders; ++i) {
      clients[i]->OutputFrameDeliveryTimes(&output_file);
    }
  }

  // Clients and notifications must be destroyed on the rendering thread,
  // which owns them; WaitUntilIdle() ensures deletion completes before the
  // test (and its stack-allocated state) goes away.
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<std::vector<GLRenderingVDAClient*>>,
                 &clients));
  g_env->GetRenderingTaskRunner()->PostTask(
      FROM_HERE,
      base::Bind(&STLDeleteElements<
                     std::vector<ClientStateNotification<ClientState>*>>,
                 &notes));
  WaitUntilIdle();
};
1371
// Instantiations of TestSimpleDecode. Tuple fields, in order:
// (num decoders, in-flight decodes, play-throughs, reset point,
//  delete-decoder state, delay-reuse, render-as-thumbnails).

// Test that replay after EOS works fine.
INSTANTIATE_TEST_CASE_P(
    ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Test that Reset() before the first Decode() works fine.
INSTANTIATE_TEST_CASE_P(
    ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));

// Test Reset() immediately after Decode() containing config info.
INSTANTIATE_TEST_CASE_P(
    ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(
            1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));

// Test that Reset() mid-stream works fine and doesn't affect decoding even when
// Decode() calls are made during the reset.
INSTANTIATE_TEST_CASE_P(
    MidStreamReset, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));

// Test decoding with delayed ReusePictureBuffer() (slow rendering).
INSTANTIATE_TEST_CASE_P(
    SlowRendering, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));

// Test that Destroy() mid-stream works fine (primarily this is testing that no
// crashes occur).
// The negative ClientState casts mean "delete after the N'th Decode()"
// (see GLRenderingVDAClient::DecodeNextFragment).
INSTANTIATE_TEST_CASE_P(
    TearDownTiming, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-1), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-10), false, false),
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
                  static_cast<ClientState>(-100), false, false)));

// Test that decoding various variation works with multiple in-flight decodes.
INSTANTIATE_TEST_CASE_P(
    DecodeVariations, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
        // Tests queuing.
        MakeTuple(1, 15, 1, END_OF_STREAM_RESET, CS_RESET, false, false)));

// Find out how many concurrent decoders can go before we exhaust system
// resources.
INSTANTIATE_TEST_CASE_P(
    ResourceExhaustion, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        // +0 hack below to promote enum to int.
        MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false),
        MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
                  END_OF_STREAM_RESET, CS_RESET, false, false)));

// Thumbnailing test
INSTANTIATE_TEST_CASE_P(
    Thumbnail, VideoDecodeAcceleratorParamTest,
    ::testing::Values(
        MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
1446
// Measure the median of the decode time when VDA::Decode is called 30 times per
// second.
// Uses a single decoder with rendering disabled and a throttled decode rate
// (kWebRtcDecodeCallsPerSecond), then reports the median per-buffer decode
// time via LOG and optionally --output_log.
TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
  RenderingHelperParams helper_params;

  // Disable rendering by setting the rendering_fps = 0.
  helper_params.rendering_fps = 0;
  helper_params.warm_up_iterations = 0;
  helper_params.render_as_thumbnails = false;

  ClientStateNotification<ClientState>* note =
      new ClientStateNotification<ClientState>();
  GLRenderingVDAClient* client =
      new GLRenderingVDAClient(0,
                               &rendering_helper_,
                               note,
                               test_video_files_[0]->data_str,
                               1,
                               1,
                               test_video_files_[0]->reset_after_frame_num,
                               CS_RESET,
                               test_video_files_[0]->width,
                               test_video_files_[0]->height,
                               test_video_files_[0]->profile,
                               g_fake_decoder,
                               true,
                               std::numeric_limits<int>::max(),
                               kWebRtcDecodeCallsPerSecond,
                               false /* render_as_thumbnail */);
  helper_params.window_sizes.push_back(
      gfx::Size(test_video_files_[0]->width, test_video_files_[0]->height));
  InitializeRenderingHelper(helper_params);
  CreateAndStartDecoder(client, note);
  WaitUntilDecodeFinish(note);

  base::TimeDelta decode_time_median = client->decode_time_median();
  std::string output_string =
      base::StringPrintf("Decode time median: %" PRId64 " us",
                         decode_time_median.InMicroseconds());
  LOG(INFO) << output_string;

  if (g_output_log != NULL)
    OutputLogFile(g_output_log, output_string);

  // |client| and |note| must be destroyed on the rendering thread, which owns
  // them; WaitUntilIdle() ensures deletion completes before the test exits.
  g_env->GetRenderingTaskRunner()->DeleteSoon(FROM_HERE, client);
  g_env->GetRenderingTaskRunner()->DeleteSoon(FROM_HERE, note);
  WaitUntilIdle();
};
1495
1496 // TODO(fischman, vrk): add more tests! In particular:
1497 // - Test life-cycle: Seek/Stop/Pause/Play for a single decoder.
1498 // - Test alternate configurations
1499 // - Test failure conditions.
1500 // - Test frame size changes mid-stream
1501
1502 } // namespace
1503 } // namespace content
1504
// Test entry point: parses test-specific command-line switches into the
// content::g_* globals, sets up logging, the message loop (UI-type where the
// platform requires it), platform/video pre-initialization, and the global
// test environment, then runs all tests.
int main(int argc, char **argv) {
  testing::InitGoogleTest(&argc, argv);  // Removes gtest-specific args.
  base::CommandLine::Init(argc, argv);

  // Needed to enable DVLOG through --vmodule.
  logging::LoggingSettings settings;
  settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
  LOG_ASSERT(logging::InitLogging(settings));

  const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess();
  DCHECK(cmd_line);

  // Walk every switch; anything unrecognized is a fatal error so typos in
  // test invocations are caught immediately.
  base::CommandLine::SwitchMap switches = cmd_line->GetSwitches();
  for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
       it != switches.end(); ++it) {
    if (it->first == "test_video_data") {
      content::g_test_video_data = it->second.c_str();
      continue;
    }
    // The output log for VDA performance test.
    if (it->first == "output_log") {
      content::g_output_log = it->second.c_str();
      continue;
    }
    if (it->first == "rendering_fps") {
      // On Windows, CommandLine::StringType is wstring. We need to convert
      // it to std::string first
      std::string input(it->second.begin(), it->second.end());
      LOG_ASSERT(base::StringToDouble(input, &content::g_rendering_fps));
      continue;
    }
    if (it->first == "rendering_warm_up") {
      std::string input(it->second.begin(), it->second.end());
      LOG_ASSERT(base::StringToInt(input, &content::g_rendering_warm_up));
      continue;
    }
    // TODO(owenlin): Remove this flag once it is not used in autotest.
    if (it->first == "disable_rendering") {
      content::g_rendering_fps = 0;
      continue;
    }

    if (it->first == "num_play_throughs") {
      std::string input(it->second.begin(), it->second.end());
      LOG_ASSERT(base::StringToInt(input, &content::g_num_play_throughs));
      continue;
    }
    if (it->first == "fake_decoder") {
      content::g_fake_decoder = 1;
      continue;
    }
    // Logging and Ozone switches are consumed elsewhere; just allow them.
    if (it->first == "v" || it->first == "vmodule")
      continue;
    if (it->first == "ozone-platform" || it->first == "ozone-use-surfaceless")
      continue;
    LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second;
  }

  base::ShadowingAtExitManager at_exit_manager;
#if defined(OS_WIN) || defined(USE_OZONE)
  // For windows the decoding thread initializes the media foundation decoder
  // which uses COM. We need the thread to be a UI thread.
  // On Ozone, the backend initializes the event system using a UI
  // thread.
  base::MessageLoopForUI main_loop;
#else
  base::MessageLoop main_loop;
#endif  // OS_WIN || USE_OZONE

#if defined(USE_OZONE)
  ui::OzonePlatform::InitializeForUI();
#endif

#if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
  // VA-API needs its pre-sandbox setup before any decoding happens.
  content::VaapiWrapper::PreSandboxInitialization();
#endif

  // gtest owns the environment; keep a typed global pointer for the tests.
  content::g_env =
      reinterpret_cast<content::VideoDecodeAcceleratorTestEnvironment*>(
          testing::AddGlobalTestEnvironment(
              new content::VideoDecodeAcceleratorTestEnvironment()));

  return RUN_ALL_TESTS();
}
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698