OLD | NEW |
| (Empty) |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | |
2 // Use of this source code is governed by a BSD-style license that can be | |
3 // found in the LICENSE file. | |
4 | |
5 #include <inttypes.h> | |
6 #include <stddef.h> | |
7 #include <stdint.h> | |
8 #include <algorithm> | |
9 #include <queue> | |
10 #include <string> | |
11 #include <utility> | |
12 | |
13 #include "base/at_exit.h" | |
14 #include "base/bind.h" | |
15 #include "base/command_line.h" | |
16 #include "base/files/file_util.h" | |
17 #include "base/files/memory_mapped_file.h" | |
18 #include "base/macros.h" | |
19 #include "base/memory/scoped_vector.h" | |
20 #include "base/message_loop/message_loop.h" | |
21 #include "base/numerics/safe_conversions.h" | |
22 #include "base/process/process_handle.h" | |
23 #include "base/strings/string_number_conversions.h" | |
24 #include "base/strings/string_split.h" | |
25 #include "base/strings/stringprintf.h" | |
26 #include "base/threading/thread.h" | |
27 #include "base/threading/thread_checker.h" | |
28 #include "base/time/time.h" | |
29 #include "base/timer/timer.h" | |
30 #include "build/build_config.h" | |
31 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h" | |
32 #include "media/base/bind_to_current_loop.h" | |
33 #include "media/base/bitstream_buffer.h" | |
34 #include "media/base/cdm_context.h" | |
35 #include "media/base/decoder_buffer.h" | |
36 #include "media/base/media_util.h" | |
37 #include "media/base/test_data_util.h" | |
38 #include "media/base/video_decoder.h" | |
39 #include "media/base/video_frame.h" | |
40 #include "media/filters/ffmpeg_glue.h" | |
41 #include "media/filters/ffmpeg_video_decoder.h" | |
42 #include "media/filters/h264_parser.h" | |
43 #include "media/filters/ivf_parser.h" | |
44 #include "media/video/fake_video_encode_accelerator.h" | |
45 #include "media/video/video_encode_accelerator.h" | |
46 #include "testing/gtest/include/gtest/gtest.h" | |
47 | |
48 #if defined(OS_CHROMEOS) | |
49 #if defined(ARCH_CPU_ARMEL) || (defined(USE_OZONE) && defined(USE_V4L2_CODEC)) | |
50 #include "content/common/gpu/media/v4l2_video_encode_accelerator.h" | |
51 #endif | |
52 #if defined(ARCH_CPU_X86_FAMILY) | |
53 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" | |
54 #include "content/common/gpu/media/vaapi_wrapper.h" | |
55 // Status has been defined as int in Xlib.h. | |
56 #undef Status | |
57 #endif // defined(ARCH_CPU_X86_FAMILY) | |
58 #elif defined(OS_MACOSX) | |
59 #include "content/common/gpu/media/vt_video_encode_accelerator_mac.h" | |
60 #else | |
61 #error The VideoEncodeAcceleratorUnittest is not supported on this platform. | |
62 #endif | |
63 | |
64 using media::VideoEncodeAccelerator; | |
65 | |
66 namespace content { | |
67 namespace { | |
68 | |
69 const media::VideoPixelFormat kInputFormat = media::PIXEL_FORMAT_I420; | |
70 | |
71 // The absolute difference between an original frame and its decoded frame | |
72 // usually ranges from 1 to 7, so we pick 10 as an extreme value to detect | |
73 // abnormal decoded frames. | |
74 const double kDecodeSimilarityThreshold = 10.0; | |
75 | |
76 // Arbitrarily chosen to add some depth to the pipeline. | |
77 const unsigned int kNumOutputBuffers = 4; | |
78 const unsigned int kNumExtraInputFrames = 4; | |
79 // Maximum delay between requesting a keyframe and receiving one, in frames. | |
80 // Arbitrarily chosen as a reasonable requirement. | |
81 const unsigned int kMaxKeyframeDelay = 4; | |
82 // Default initial bitrate. | |
83 const uint32_t kDefaultBitrate = 2000000; | |
84 // Default ratio of requested_subsequent_bitrate to initial_bitrate | |
85 // (see test parameters below) if one is not provided. | |
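// (E.g. with the default 2 Mbps initial bitrate above, a mid-stream switch | |
// without an explicit subsequent bitrate targets 4 Mbps.) | |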
86 const double kDefaultSubsequentBitrateRatio = 2.0; | |
87 // Default initial framerate. | |
88 const uint32_t kDefaultFramerate = 30; | |
89 // Default ratio of requested_subsequent_framerate to initial_framerate | |
90 // (see test parameters below) if one is not provided. | |
91 const double kDefaultSubsequentFramerateRatio = 0.1; | |
92 // Tolerance factor for how encoded bitrate can differ from requested bitrate. | |
93 const double kBitrateTolerance = 0.1; | |
94 // Minimum required FPS throughput for the basic performance test. | |
95 const uint32_t kMinPerfFPS = 30; | |
96 // Minimum (arbitrary) number of frames over which to enforce bitrate | |
97 // requirements. Streams shorter than this may be too short to realistically | |
98 // expect an encoder to converge to the requested bitrate. | |
99 // The input stream will be looped as many times as needed in bitrate tests | |
100 // to reach at least this number of frames before calculating final bitrate. | |
101 const unsigned int kMinFramesForBitrateTests = 300; | |
102 // The percentiles to measure for encode latency. | |
103 const unsigned int kLoggedLatencyPercentiles[] = {50, 75, 95}; | |
104 | |
105 // The syntax of multiple test streams is: | |
106 // test-stream1;test-stream2;test-stream3 | |
107 // The syntax of each test stream is: | |
108 // "in_filename:width:height:profile:out_filename:requested_bitrate | |
109 // :requested_framerate:requested_subsequent_bitrate | |
110 // :requested_subsequent_framerate" | |
111 // - |in_filename| must be an I420 (YUV planar) raw stream | |
112 // (see http://www.fourcc.org/yuv.php#IYUV). | |
113 // - |width| and |height| are in pixels. | |
114 // - |profile| to encode into (values of media::VideoCodecProfile). | |
115 // - |out_filename| filename to save the encoded stream to (optional). The | |
116 // format for H264 is Annex-B byte stream. The format for VP8 is IVF. Output | |
117 //   stream is saved for the simple encode test only. The H264 raw stream and | |
118 //   the IVF file can be used as input to the VDA unittest. The H264 stream can | |
119 //   be played with "mplayer -fps 25 out.h264"; mplayer plays the IVF directly. | |
120 // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF | |
121 // Further parameters are optional (need to provide preceding positional | |
122 // parameters if a specific subsequent parameter is required): | |
123 // - |requested_bitrate| requested bitrate in bits per second. | |
124 // - |requested_framerate| requested initial framerate. | |
125 // - |requested_subsequent_bitrate| bitrate to switch to in the middle of the | |
126 // stream. | |
127 // - |requested_subsequent_framerate| framerate to switch to in the middle | |
128 // of the stream. | |
129 // Bitrate is only forced for tests that test bitrate. | |
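// For example, the non-Mac defaults below combine into the single stream spec | |
// "bear_320x192_40frames.yuv:320:192:1:out.h264:200000". | |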
130 const char* g_default_in_filename = "bear_320x192_40frames.yuv"; | |
131 #if !defined(OS_MACOSX) | |
132 const char* g_default_in_parameters = ":320:192:1:out.h264:200000"; | |
133 #else | |
134 const char* g_default_in_parameters = ":320:192:0:out.h264:200000"; | |
135 #endif | |
136 | |
137 // Enabled by including a --fake_encoder flag to the command line invoking the | |
138 // test. | |
139 bool g_fake_encoder = false; | |
140 | |
141 // Environment to store test stream data for all test cases. | |
142 class VideoEncodeAcceleratorTestEnvironment; | |
143 VideoEncodeAcceleratorTestEnvironment* g_env; | |
144 | |
145 // The number of frames to be encoded. This variable is set by the switch | |
146 // "--num_frames_to_encode". Ignored if 0. | |
147 int g_num_frames_to_encode = 0; | |
148 | |
149 struct TestStream { | |
150 TestStream() | |
151 : num_frames(0), | |
152 aligned_buffer_size(0), | |
153 requested_bitrate(0), | |
154 requested_framerate(0), | |
155 requested_subsequent_bitrate(0), | |
156 requested_subsequent_framerate(0) {} | |
157 ~TestStream() {} | |
158 | |
159 gfx::Size visible_size; | |
160 gfx::Size coded_size; | |
161 unsigned int num_frames; | |
162 | |
163 // Original unaligned input file name provided as an argument to the test. | |
164 // The file must be an I420 (YUV planar) raw stream. | |
165 std::string in_filename; | |
166 | |
167 // A temporary file used to prepare aligned input buffers of |in_filename|. | |
168 // The file ensures the starting addresses of the YUV planes are 64-byte aligned. | |
169 base::FilePath aligned_in_file; | |
170 | |
171 // The memory mapping of |aligned_in_file| | |
172 base::MemoryMappedFile mapped_aligned_in_file; | |
173 | |
174 // Byte size of a frame of |aligned_in_file|. | |
175 size_t aligned_buffer_size; | |
176 | |
177 // Byte size for each aligned plane of a frame | |
178 std::vector<size_t> aligned_plane_size; | |
179 | |
180 std::string out_filename; | |
181 media::VideoCodecProfile requested_profile; | |
182 unsigned int requested_bitrate; | |
183 unsigned int requested_framerate; | |
184 unsigned int requested_subsequent_bitrate; | |
185 unsigned int requested_subsequent_framerate; | |
186 }; | |
187 | |
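// Round |value| up to the nearest multiple of 64 bytes, e.g. Align64Bytes(1) | |
// returns 64 and Align64Bytes(64) returns 64. | |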
188 inline static size_t Align64Bytes(size_t value) { | |
189 return (value + 63) & ~63; | |
190 } | |
191 | |
192 // Write |data| of |size| bytes at |offset| bytes into |file|. | |
193 static bool WriteFile(base::File* file, | |
194 const off_t offset, | |
195 const uint8_t* data, | |
196 size_t size) { | |
197 size_t written_bytes = 0; | |
198 while (written_bytes < size) { | |
199 int bytes = file->Write(offset + written_bytes, | |
200 reinterpret_cast<const char*>(data + written_bytes), | |
201 size - written_bytes); | |
202 if (bytes <= 0) | |
203 return false; | |
204 written_bytes += bytes; | |
205 } | |
206 return true; | |
207 } | |
208 | |
209 // Return the |percentile| from a sorted vector. | |
210 static base::TimeDelta Percentile( | |
211 const std::vector<base::TimeDelta>& sorted_values, | |
212 unsigned int percentile) { | |
213 size_t size = sorted_values.size(); | |
214 LOG_ASSERT(size > 0UL); | |
215 LOG_ASSERT(percentile <= 100UL); | |
216 // Use Nearest Rank method in http://en.wikipedia.org/wiki/Percentile. | |
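// E.g. the 95th percentile of 10 sorted values is | |
// sorted_values[ceil(0.95 * 10) - 1] == sorted_values[9]. | |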
217 int index = | |
218 std::max(static_cast<int>(ceil(0.01f * percentile * size)) - 1, 0); | |
219 return sorted_values[index]; | |
220 } | |
221 | |
222 static bool IsH264(media::VideoCodecProfile profile) { | |
223 return profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX; | |
224 } | |
225 | |
226 static bool IsVP8(media::VideoCodecProfile profile) { | |
227 return profile >= media::VP8PROFILE_MIN && profile <= media::VP8PROFILE_MAX; | |
228 } | |
229 | |
230 // ARM performs CPU cache management with CPU cache line granularity. We thus | |
231 // need to ensure our buffers are CPU cache line-aligned (64 byte-aligned). | |
232 // Otherwise newer kernels will refuse to accept them, and on older kernels | |
233 // we'll be treating ourselves to random corruption. | |
234 // Since we are just mapping and passing chunks of the input file directly to | |
235 // the VEA as input frames to avoid copying large chunks of raw data on each | |
236 // frame and thus affecting performance measurements, we have to prepare a | |
237 // temporary file with all planes aligned to 64-byte boundaries beforehand. | |
238 static void CreateAlignedInputStreamFile(const gfx::Size& coded_size, | |
239 TestStream* test_stream) { | |
240 // A test case may have many encoders, and the memory should be prepared only once. | |
241 if (test_stream->coded_size == coded_size && | |
242 test_stream->mapped_aligned_in_file.IsValid()) | |
243 return; | |
244 | |
245 // All encoders in a multiple-encoder test reuse the same test_stream; make | |
246 // sure they all requested the same coded_size. | |
247 ASSERT_TRUE(!test_stream->mapped_aligned_in_file.IsValid() || | |
248 coded_size == test_stream->coded_size); | |
249 test_stream->coded_size = coded_size; | |
250 | |
251 size_t num_planes = media::VideoFrame::NumPlanes(kInputFormat); | |
252 std::vector<std::vector<uint8_t>> padding(num_planes); | |
253 std::vector<size_t> coded_bpl(num_planes); | |
254 std::vector<size_t> visible_bpl(num_planes); | |
255 std::vector<size_t> visible_plane_rows(num_planes); | |
256 | |
257 // Calculate the padding in bytes to be added after each plane in order to keep | |
258 // the starting addresses of all planes at a 64 byte boundary. This padding will | |
259 // be added after each plane when copying to the temporary file. | |
260 // At the same time we also need to take into account coded_size requested by | |
261 // the VEA; each row of visible_bpl bytes in the original file needs to be | |
262 // copied into a row of coded_bpl bytes in the aligned file. | |
263 for (size_t i = 0; i < num_planes; i++) { | |
264 const size_t size = | |
265 media::VideoFrame::PlaneSize(kInputFormat, i, coded_size).GetArea(); | |
266 test_stream->aligned_plane_size.push_back(Align64Bytes(size)); | |
267 test_stream->aligned_buffer_size += test_stream->aligned_plane_size.back(); | |
268 | |
269 coded_bpl[i] = | |
270 media::VideoFrame::RowBytes(i, kInputFormat, coded_size.width()); | |
271 visible_bpl[i] = media::VideoFrame::RowBytes( | |
272 i, kInputFormat, test_stream->visible_size.width()); | |
273 visible_plane_rows[i] = media::VideoFrame::Rows( | |
274 i, kInputFormat, test_stream->visible_size.height()); | |
275 const size_t padding_rows = | |
276 media::VideoFrame::Rows(i, kInputFormat, coded_size.height()) - | |
277 visible_plane_rows[i]; | |
278 padding[i].resize(padding_rows * coded_bpl[i] + Align64Bytes(size) - size); | |
279 } | |
280 | |
281 base::MemoryMappedFile src_file; | |
282 LOG_ASSERT(src_file.Initialize(base::FilePath(test_stream->in_filename))); | |
283 LOG_ASSERT(base::CreateTemporaryFile(&test_stream->aligned_in_file)); | |
284 | |
285 size_t visible_buffer_size = media::VideoFrame::AllocationSize( | |
286 kInputFormat, test_stream->visible_size); | |
287 LOG_ASSERT(src_file.length() % visible_buffer_size == 0U) | |
288 << "Stream byte size is not a multiple of the calculated frame byte size"; | |
289 | |
290 test_stream->num_frames = src_file.length() / visible_buffer_size; | |
291 uint32_t flags = base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE | | |
292 base::File::FLAG_READ; | |
293 | |
294 // Create a temporary file with coded_size length. | |
295 base::File dest_file(test_stream->aligned_in_file, flags); | |
296 LOG_ASSERT(test_stream->aligned_buffer_size > 0UL); | |
297 dest_file.SetLength(test_stream->aligned_buffer_size * | |
298 test_stream->num_frames); | |
299 | |
300 const uint8_t* src = src_file.data(); | |
301 off_t dest_offset = 0; | |
302 for (size_t frame = 0; frame < test_stream->num_frames; frame++) { | |
303 for (size_t i = 0; i < num_planes; i++) { | |
304 // Assert that each plane of frame starts at 64 byte boundary. | |
305 ASSERT_EQ(dest_offset & 63, 0) | |
306 << "Planes of frame should be mapped at a 64 byte boundary"; | |
307 for (size_t j = 0; j < visible_plane_rows[i]; j++) { | |
308 LOG_ASSERT(WriteFile(&dest_file, dest_offset, src, visible_bpl[i])); | |
309 src += visible_bpl[i]; | |
310 dest_offset += coded_bpl[i]; | |
311 } | |
312 if (!padding[i].empty()) { | |
313 LOG_ASSERT(WriteFile(&dest_file, dest_offset, &padding[i][0], | |
314 padding[i].size())); | |
315 dest_offset += padding[i].size(); | |
316 } | |
317 } | |
318 } | |
319 LOG_ASSERT( | |
320 test_stream->mapped_aligned_in_file.Initialize(std::move(dest_file))); | |
321 // Assert that the memory-mapped file starts at a 64 byte boundary, so that | |
322 // each plane of every frame also starts at a 64 byte boundary. | |
323 | |
324 ASSERT_EQ( | |
325 reinterpret_cast<off_t>(test_stream->mapped_aligned_in_file.data()) & 63, | |
326 0) | |
327 << "File should be mapped at a 64 byte boundary"; | |
328 | |
329 LOG_ASSERT(test_stream->mapped_aligned_in_file.length() % | |
330 test_stream->aligned_buffer_size == 0U) | |
331 << "Stream byte size is not a multiple of the calculated frame byte size"; | |
332 LOG_ASSERT(test_stream->num_frames > 0UL); | |
333 } | |
334 | |
335 // Parse |data| into its constituent parts, set the various output fields | |
336 // accordingly, read in the video streams, and store them in |test_streams|. | |
337 static void ParseAndReadTestStreamData(const base::FilePath::StringType& data, | |
338 ScopedVector<TestStream>* test_streams) { | |
339 // Split the string to individual test stream data. | |
340 std::vector<base::FilePath::StringType> test_streams_data = base::SplitString( | |
341 data, base::FilePath::StringType(1, ';'), | |
342 base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL); | |
343 LOG_ASSERT(test_streams_data.size() >= 1U) << data; | |
344 | |
345 // Parse each test stream data and read the input file. | |
346 for (size_t index = 0; index < test_streams_data.size(); ++index) { | |
347 std::vector<base::FilePath::StringType> fields = base::SplitString( | |
348 test_streams_data[index], base::FilePath::StringType(1, ':'), | |
349 base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL); | |
350 LOG_ASSERT(fields.size() >= 4U) << data; | |
351 LOG_ASSERT(fields.size() <= 9U) << data; | |
352 TestStream* test_stream = new TestStream(); | |
353 | |
354 test_stream->in_filename = fields[0]; | |
355 int width, height; | |
356 bool result = base::StringToInt(fields[1], &width); | |
357 LOG_ASSERT(result); | |
358 result = base::StringToInt(fields[2], &height); | |
359 LOG_ASSERT(result); | |
360 test_stream->visible_size = gfx::Size(width, height); | |
361 LOG_ASSERT(!test_stream->visible_size.IsEmpty()); | |
362 int profile; | |
363 result = base::StringToInt(fields[3], &profile); | |
364 LOG_ASSERT(result); | |
365 LOG_ASSERT(profile > media::VIDEO_CODEC_PROFILE_UNKNOWN); | |
366 LOG_ASSERT(profile <= media::VIDEO_CODEC_PROFILE_MAX); | |
367 test_stream->requested_profile = | |
368 static_cast<media::VideoCodecProfile>(profile); | |
369 | |
370 if (fields.size() >= 5 && !fields[4].empty()) | |
371 test_stream->out_filename = fields[4]; | |
372 | |
373 if (fields.size() >= 6 && !fields[5].empty()) | |
374 LOG_ASSERT(base::StringToUint(fields[5], | |
375 &test_stream->requested_bitrate)); | |
376 | |
377 if (fields.size() >= 7 && !fields[6].empty()) | |
378 LOG_ASSERT(base::StringToUint(fields[6], | |
379 &test_stream->requested_framerate)); | |
380 | |
381 if (fields.size() >= 8 && !fields[7].empty()) { | |
382 LOG_ASSERT(base::StringToUint(fields[7], | |
383 &test_stream->requested_subsequent_bitrate)); | |
384 } | |
385 | |
386 if (fields.size() >= 9 && !fields[8].empty()) { | |
387 LOG_ASSERT(base::StringToUint(fields[8], | |
388 &test_stream->requested_subsequent_framerate)); | |
389 } | |
390 test_streams->push_back(test_stream); | |
391 } | |
392 } | |
393 | |
394 // Basic test environment shared across multiple test cases. We only need to | |
395 // set it up once for all test cases. | |
396 // It helps to: | |
397 // - maintain test stream data and other test settings. | |
398 // - clean up temporary aligned files. | |
399 // - output log to file. | |
400 class VideoEncodeAcceleratorTestEnvironment : public ::testing::Environment { | |
401 public: | |
402 VideoEncodeAcceleratorTestEnvironment( | |
403 std::unique_ptr<base::FilePath::StringType> data, | |
404 const base::FilePath& log_path, | |
405 bool run_at_fps, | |
406 bool needs_encode_latency, | |
407 bool verify_all_output) | |
408 : test_stream_data_(std::move(data)), | |
409 log_path_(log_path), | |
410 run_at_fps_(run_at_fps), | |
411 needs_encode_latency_(needs_encode_latency), | |
412 verify_all_output_(verify_all_output) {} | |
413 | |
414 virtual void SetUp() { | |
415 if (!log_path_.empty()) { | |
416 log_file_.reset(new base::File( | |
417 log_path_, base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE)); | |
418 LOG_ASSERT(log_file_->IsValid()); | |
419 } | |
420 ParseAndReadTestStreamData(*test_stream_data_, &test_streams_); | |
421 } | |
422 | |
423 virtual void TearDown() { | |
424 for (size_t i = 0; i < test_streams_.size(); i++) { | |
425 base::DeleteFile(test_streams_[i]->aligned_in_file, false); | |
426 } | |
427 log_file_.reset(); | |
428 } | |
429 | |
430 // Log one entry of machine-readable data to file and LOG(INFO). | |
431 // The log has one data entry per line in the format of "<key>: <value>". | |
432 // Note that Chrome OS video_VEAPerf autotest parses the output key and value | |
433 // pairs. Be sure to keep the autotest in sync. | |
434 void LogToFile(const std::string& key, const std::string& value) { | |
435 std::string s = base::StringPrintf("%s: %s\n", key.c_str(), value.c_str()); | |
436 LOG(INFO) << s; | |
437 if (log_file_) { | |
438 log_file_->WriteAtCurrentPos(s.data(), s.length()); | |
439 } | |
440 } | |
441 | |
442 // Whether to feed the encoder with input buffers at the requested framerate. | |
443 // If false, feed as fast as possible. This is set by the command line switch | |
444 // "--run_at_fps". | |
445 bool run_at_fps() const { return run_at_fps_; } | |
446 | |
447 // Whether to measure encode latency. This is set by the command line switch | |
448 // "--measure_latency". | |
449 bool needs_encode_latency() const { return needs_encode_latency_; } | |
450 | |
451 // Whether to verify the encoder output of all test cases. This is set by the | |
452 // command line switch "--verify_all_output". | |
453 bool verify_all_output() const { return verify_all_output_; } | |
454 | |
455 ScopedVector<TestStream> test_streams_; | |
456 | |
457 private: | |
458 std::unique_ptr<base::FilePath::StringType> test_stream_data_; | |
459 base::FilePath log_path_; | |
460 std::unique_ptr<base::File> log_file_; | |
461 bool run_at_fps_; | |
462 bool needs_encode_latency_; | |
463 bool verify_all_output_; | |
464 }; | |
465 | |
466 enum ClientState { | |
467 CS_CREATED, | |
468 CS_ENCODER_SET, | |
469 CS_INITIALIZED, | |
470 CS_ENCODING, | |
471 // Encoding has finished. | |
472 CS_FINISHED, | |
473 // Encoded frame quality has been validated. | |
474 CS_VALIDATED, | |
475 CS_ERROR, | |
476 }; | |
477 | |
478 // Performs basic, codec-specific sanity checks on the stream buffers passed | |
479 // to ProcessStreamBuffer(): whether we've seen keyframes before non-keyframes, | |
480 // correct sequences of H.264 NALUs (SPS before PPS and before slices), etc. | |
481 // Calls given FrameFoundCallback when a complete frame is found while | |
482 // processing. | |
483 class StreamValidator { | |
484 public: | |
485 // To be called when a complete frame is found while processing a stream | |
486 // buffer, passing true if the frame is a keyframe. Returns false if we | |
487 // are not interested in more frames and further processing should be aborted. | |
488 typedef base::Callback<bool(bool)> FrameFoundCallback; | |
489 | |
490 virtual ~StreamValidator() {} | |
491 | |
492 // Provide a StreamValidator instance for the given |profile|. | |
493 static std::unique_ptr<StreamValidator> Create( | |
494 media::VideoCodecProfile profile, | |
495 const FrameFoundCallback& frame_cb); | |
496 | |
497 // Process and verify contents of a bitstream buffer. | |
498 virtual void ProcessStreamBuffer(const uint8_t* stream, size_t size) = 0; | |
499 | |
500 protected: | |
501 explicit StreamValidator(const FrameFoundCallback& frame_cb) | |
502 : frame_cb_(frame_cb) {} | |
503 | |
504 FrameFoundCallback frame_cb_; | |
505 }; | |
506 | |
507 class H264Validator : public StreamValidator { | |
508 public: | |
509 explicit H264Validator(const FrameFoundCallback& frame_cb) | |
510 : StreamValidator(frame_cb), | |
511 seen_sps_(false), | |
512 seen_pps_(false), | |
513 seen_idr_(false) {} | |
514 | |
515 void ProcessStreamBuffer(const uint8_t* stream, size_t size) override; | |
516 | |
517 private: | |
518 // Set to true when encoder provides us with the corresponding NALU type. | |
519 bool seen_sps_; | |
520 bool seen_pps_; | |
521 bool seen_idr_; | |
522 | |
523 media::H264Parser h264_parser_; | |
524 }; | |
525 | |
526 void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) { | |
527 h264_parser_.SetStream(stream, size); | |
528 | |
529 while (1) { | |
530 media::H264NALU nalu; | |
531 media::H264Parser::Result result; | |
532 | |
533 result = h264_parser_.AdvanceToNextNALU(&nalu); | |
534 if (result == media::H264Parser::kEOStream) | |
535 break; | |
536 | |
537 ASSERT_EQ(media::H264Parser::kOk, result); | |
538 | |
539 bool keyframe = false; | |
540 | |
541 switch (nalu.nal_unit_type) { | |
542 case media::H264NALU::kIDRSlice: | |
543 ASSERT_TRUE(seen_sps_); | |
544 ASSERT_TRUE(seen_pps_); | |
545 seen_idr_ = true; | |
546 keyframe = true; | |
547 // fallthrough | |
548 case media::H264NALU::kNonIDRSlice: { | |
549 ASSERT_TRUE(seen_idr_); | |
550 if (!frame_cb_.Run(keyframe)) | |
551 return; | |
552 break; | |
553 } | |
554 | |
555 case media::H264NALU::kSPS: { | |
556 int sps_id; | |
557 ASSERT_EQ(media::H264Parser::kOk, h264_parser_.ParseSPS(&sps_id)); | |
558 seen_sps_ = true; | |
559 break; | |
560 } | |
561 | |
562 case media::H264NALU::kPPS: { | |
563 ASSERT_TRUE(seen_sps_); | |
564 int pps_id; | |
565 ASSERT_EQ(media::H264Parser::kOk, h264_parser_.ParsePPS(&pps_id)); | |
566 seen_pps_ = true; | |
567 break; | |
568 } | |
569 | |
570 default: | |
571 break; | |
572 } | |
573 } | |
574 } | |
575 | |
576 class VP8Validator : public StreamValidator { | |
577 public: | |
578 explicit VP8Validator(const FrameFoundCallback& frame_cb) | |
579 : StreamValidator(frame_cb), | |
580 seen_keyframe_(false) {} | |
581 | |
582 void ProcessStreamBuffer(const uint8_t* stream, size_t size) override; | |
583 | |
584 private: | |
585 // Have we already got a keyframe in the stream? | |
586 bool seen_keyframe_; | |
587 }; | |
588 | |
589 void VP8Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) { | |
590 bool keyframe = !(stream[0] & 0x01); | |
591 if (keyframe) | |
592 seen_keyframe_ = true; | |
593 | |
594 EXPECT_TRUE(seen_keyframe_); | |
595 | |
596 frame_cb_.Run(keyframe); | |
597 // TODO(posciak): We could be getting more frames in the buffer, but there is | |
598 // no simple way to detect this. We'd need to parse the frames and go through | |
599 // partition numbers/sizes. For now assume one frame per buffer. | |
600 } | |
601 | |
602 // static | |
603 std::unique_ptr<StreamValidator> StreamValidator::Create( | |
604 media::VideoCodecProfile profile, | |
605 const FrameFoundCallback& frame_cb) { | |
606 std::unique_ptr<StreamValidator> validator; | |
607 | |
608 if (IsH264(profile)) { | |
609 validator.reset(new H264Validator(frame_cb)); | |
610 } else if (IsVP8(profile)) { | |
611 validator.reset(new VP8Validator(frame_cb)); | |
612 } else { | |
613 LOG(FATAL) << "Unsupported profile: " << profile; | |
614 } | |
615 | |
616 return validator; | |
617 } | |
618 | |
619 class VideoFrameQualityValidator { | |
620 public: | |
621 VideoFrameQualityValidator(const media::VideoCodecProfile profile, | |
622 const base::Closure& flush_complete_cb, | |
623 const base::Closure& decode_error_cb); | |
624 void Initialize(const gfx::Size& coded_size, const gfx::Rect& visible_size); | |
625 // Save original YUV frame to compare it with the decoded frame later. | |
626 void AddOriginalFrame(scoped_refptr<media::VideoFrame> frame); | |
627 void AddDecodeBuffer(const scoped_refptr<media::DecoderBuffer>& buffer); | |
628 // Flush the decoder. | |
629 void Flush(); | |
630 | |
631 private: | |
632 void InitializeCB(bool success); | |
633 void DecodeDone(media::DecodeStatus status); | |
634 void FlushDone(media::DecodeStatus status); | |
635 void VerifyOutputFrame(const scoped_refptr<media::VideoFrame>& output_frame); | |
636 void Decode(); | |
637 | |
638 enum State { UNINITIALIZED, INITIALIZED, DECODING, ERROR }; | |
639 | |
640 const media::VideoCodecProfile profile_; | |
641 std::unique_ptr<media::FFmpegVideoDecoder> decoder_; | |
642 media::VideoDecoder::DecodeCB decode_cb_; | |
643 // Decode callback of an EOS buffer. | |
644 media::VideoDecoder::DecodeCB eos_decode_cb_; | |
645 // Callback of Flush(). Called after all frames are decoded. | |
646 const base::Closure flush_complete_cb_; | |
647 const base::Closure decode_error_cb_; | |
648 State decoder_state_; | |
649 std::queue<scoped_refptr<media::VideoFrame>> original_frames_; | |
650 std::queue<scoped_refptr<media::DecoderBuffer>> decode_buffers_; | |
651 }; | |
652 | |
653 VideoFrameQualityValidator::VideoFrameQualityValidator( | |
654 const media::VideoCodecProfile profile, | |
655 const base::Closure& flush_complete_cb, | |
656 const base::Closure& decode_error_cb) | |
657 : profile_(profile), | |
658 decoder_(new media::FFmpegVideoDecoder()), | |
659 decode_cb_(base::Bind(&VideoFrameQualityValidator::DecodeDone, | |
660 base::Unretained(this))), | |
661 eos_decode_cb_(base::Bind(&VideoFrameQualityValidator::FlushDone, | |
662 base::Unretained(this))), | |
663 flush_complete_cb_(flush_complete_cb), | |
664 decode_error_cb_(decode_error_cb), | |
665 decoder_state_(UNINITIALIZED) { | |
666 // Allow decoding of individual NALUs. Entire frames are required by default. | |
667 decoder_->set_decode_nalus(true); | |
668 } | |
669 | |
670 void VideoFrameQualityValidator::Initialize(const gfx::Size& coded_size, | |
671 const gfx::Rect& visible_size) { | |
672 media::FFmpegGlue::InitializeFFmpeg(); | |
673 | |
674 gfx::Size natural_size(visible_size.size()); | |
675 // The default output format of ffmpeg video decoder is YV12. | |
676 media::VideoDecoderConfig config; | |
677 if (IsVP8(profile_)) | |
678 config.Initialize(media::kCodecVP8, media::VP8PROFILE_ANY, kInputFormat, | |
679 media::COLOR_SPACE_UNSPECIFIED, coded_size, visible_size, | |
680 natural_size, media::EmptyExtraData(), | |
681 media::Unencrypted()); | |
682 else if (IsH264(profile_)) | |
683 config.Initialize(media::kCodecH264, media::H264PROFILE_MAIN, kInputFormat, | |
684 media::COLOR_SPACE_UNSPECIFIED, coded_size, visible_size, | |
685 natural_size, media::EmptyExtraData(), | |
686 media::Unencrypted()); | |
687 else | |
688 LOG_ASSERT(0) << "Invalid profile " << profile_; | |
689 | |
690 decoder_->Initialize( | |
691 config, false, nullptr, | |
692 base::Bind(&VideoFrameQualityValidator::InitializeCB, | |
693 base::Unretained(this)), | |
694 base::Bind(&VideoFrameQualityValidator::VerifyOutputFrame, | |
695 base::Unretained(this))); | |
696 } | |
697 | |
698 void VideoFrameQualityValidator::InitializeCB(bool success) { | |
699 if (success) { | |
700 decoder_state_ = INITIALIZED; | |
701 Decode(); | |
702 } else { | |
703 decoder_state_ = ERROR; | |
704 if (IsH264(profile_)) | |
705 LOG(ERROR) << "Chromium does not support H264 decode. Try Chrome."; | |
706 FAIL() << "Decoder initialization error"; | |
707 decode_error_cb_.Run(); | |
708 } | |
709 } | |
710 | |
711 void VideoFrameQualityValidator::AddOriginalFrame( | |
712 scoped_refptr<media::VideoFrame> frame) { | |
713 original_frames_.push(frame); | |
714 } | |
715 | |
716 void VideoFrameQualityValidator::DecodeDone(media::DecodeStatus status) { | |
717 if (status == media::DecodeStatus::OK) { | |
718 decoder_state_ = INITIALIZED; | |
719 Decode(); | |
720 } else { | |
721 decoder_state_ = ERROR; | |
722 FAIL() << "Unexpected decode status = " << status << ". Stop decoding."; | |
723 decode_error_cb_.Run(); | |
724 } | |
725 } | |
726 | |
727 void VideoFrameQualityValidator::FlushDone(media::DecodeStatus status) { | |
728 flush_complete_cb_.Run(); | |
729 } | |
730 | |
731 void VideoFrameQualityValidator::Flush() { | |
732 if (decoder_state_ != ERROR) { | |
733 decode_buffers_.push(media::DecoderBuffer::CreateEOSBuffer()); | |
734 Decode(); | |
735 } | |
736 } | |
737 | |
738 void VideoFrameQualityValidator::AddDecodeBuffer( | |
739 const scoped_refptr<media::DecoderBuffer>& buffer) { | |
740 if (decoder_state_ != ERROR) { | |
741 decode_buffers_.push(buffer); | |
742 Decode(); | |
743 } | |
744 } | |
745 | |
746 void VideoFrameQualityValidator::Decode() { | |
747 if (decoder_state_ == INITIALIZED && !decode_buffers_.empty()) { | |
748 scoped_refptr<media::DecoderBuffer> next_buffer = decode_buffers_.front(); | |
749 decode_buffers_.pop(); | |
750 decoder_state_ = DECODING; | |
751 if (next_buffer->end_of_stream()) | |
752 decoder_->Decode(next_buffer, eos_decode_cb_); | |
753 else | |
754 decoder_->Decode(next_buffer, decode_cb_); | |
755 } | |
756 } | |
757 | |
758 void VideoFrameQualityValidator::VerifyOutputFrame( | |
759 const scoped_refptr<media::VideoFrame>& output_frame) { | |
760 scoped_refptr<media::VideoFrame> original_frame = original_frames_.front(); | |
761 original_frames_.pop(); | |
762 gfx::Size visible_size = original_frame->visible_rect().size(); | |
763 | |
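// Sum the absolute per-byte differences over the visible Y, U and V planes, | |
// then normalize by the frame allocation size below. | |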
764 int planes[] = {media::VideoFrame::kYPlane, media::VideoFrame::kUPlane, | |
765 media::VideoFrame::kVPlane}; | |
766 double difference = 0; | |
767 for (int plane : planes) { | |
768 uint8_t* original_plane = original_frame->data(plane); | |
769 uint8_t* output_plane = output_frame->data(plane); | |
770 | |
771 size_t rows = | |
772 media::VideoFrame::Rows(plane, kInputFormat, visible_size.height()); | |
773 size_t columns = | |
774 media::VideoFrame::Columns(plane, kInputFormat, visible_size.width()); | |
775 size_t stride = original_frame->stride(plane); | |
776 | |
777 for (size_t i = 0; i < rows; i++) | |
778 for (size_t j = 0; j < columns; j++) | |
779 difference += std::abs(original_plane[stride * i + j] - | |
780 output_plane[stride * i + j]); | |
781 } | |
782 // Divide the difference by the size of the frame to get the mean per-byte difference. | |
783 difference /= media::VideoFrame::AllocationSize(kInputFormat, visible_size); | |
784 EXPECT_TRUE(difference <= kDecodeSimilarityThreshold) | |
785 << "difference = " << difference << " > decode similarity threshold"; | |
786 } | |
787 | |
788 class VEAClient : public VideoEncodeAccelerator::Client { | |
789 public: | |
790 VEAClient(TestStream* test_stream, | |
791 ClientStateNotification<ClientState>* note, | |
792 bool save_to_file, | |
793 unsigned int keyframe_period, | |
794 bool force_bitrate, | |
795 bool test_perf, | |
796 bool mid_stream_bitrate_switch, | |
797 bool mid_stream_framerate_switch, | |
798 bool verify_output); | |
799 ~VEAClient() override; | |
800 void CreateEncoder(); | |
801 void DestroyEncoder(); | |
802 | |
803 // VideoEncodeAccelerator::Client implementation. | |
804 void RequireBitstreamBuffers(unsigned int input_count, | |
805 const gfx::Size& input_coded_size, | |
806 size_t output_buffer_size) override; | |
807 void BitstreamBufferReady(int32_t bitstream_buffer_id, | |
808 size_t payload_size, | |
809 bool key_frame) override; | |
810 void NotifyError(VideoEncodeAccelerator::Error error) override; | |
811 | |
812 private: | |
813 bool has_encoder() { return encoder_.get(); } | |
814 | |
815 // Return the number of encoded frames per second. | |
816 double frames_per_second(); | |
817 | |
818 std::unique_ptr<media::VideoEncodeAccelerator> CreateFakeVEA(); | |
819 std::unique_ptr<media::VideoEncodeAccelerator> CreateV4L2VEA(); | |
820 std::unique_ptr<media::VideoEncodeAccelerator> CreateVaapiVEA(); | |
821 std::unique_ptr<media::VideoEncodeAccelerator> CreateVTVEA(); | |
822 | |
823 void SetState(ClientState new_state); | |
824 | |
825 // Set current stream parameters to given |bitrate| at |framerate|. | |
826 void SetStreamParameters(unsigned int bitrate, unsigned int framerate); | |
827 | |
828 // Called when encoder is done with a VideoFrame. | |
829 void InputNoLongerNeededCallback(int32_t input_id); | |
830 | |
831 // Feed the encoder with one input frame. | |
832 void FeedEncoderWithOneInput(); | |
833 | |
834 // Provide the encoder with a new output buffer. | |
835 void FeedEncoderWithOutput(base::SharedMemory* shm); | |
836 | |
837 // Called on finding a complete frame (with |keyframe| set to true for | |
838 // keyframes) in the stream, to perform codec-independent, per-frame checks | |
839 // and accounting. Returns false once we have collected all frames we needed. | |
840 bool HandleEncodedFrame(bool keyframe); | |
841 | |
842 // Verify the minimum FPS requirement. | |
843 void VerifyMinFPS(); | |
844 | |
845 // Verify that stream bitrate has been close to current_requested_bitrate_, | |
846 // assuming current_framerate_ since the last time VerifyStreamProperties() | |
847 // was called. Fail the test if |force_bitrate_| is true and the bitrate | |
848 // is not within kBitrateTolerance. | |
849 void VerifyStreamProperties(); | |
850 | |
851 // Log the performance data. | |
852 void LogPerf(); | |
853 | |
854 // Write IVF file header to test_stream_->out_filename. | |
855 void WriteIvfFileHeader(); | |
856 | |
857 // Write an IVF frame header to test_stream_->out_filename. | |
858 void WriteIvfFrameHeader(int frame_index, size_t frame_size); | |
859 | |
860 // Create and return a VideoFrame wrapping the data at |position| bytes in the | |
861 // input stream. | |
862 scoped_refptr<media::VideoFrame> CreateFrame(off_t position); | |
863 | |
864 // Prepare and return a frame wrapping the data at |position| bytes in the | |
865 // input stream, ready to be sent to encoder. | |
866 // The input frame id is returned in |input_id|. | |
867 scoped_refptr<media::VideoFrame> PrepareInputFrame(off_t position, | |
868 int32_t* input_id); | |
869 | |
870 // Update the parameters according to |mid_stream_bitrate_switch| and | |
871 // |mid_stream_framerate_switch|. | |
872 void UpdateTestStreamData(bool mid_stream_bitrate_switch, | |
873 bool mid_stream_framerate_switch); | |
874 | |
875 // Callback function of the |input_timer_|. | |
876 void OnInputTimer(); | |
877 | |
878 // Called when the quality validator has decoded all the frames. | |
879 void DecodeCompleted(); | |
880 | |
881 // Called when the quality validator fails to decode a frame. | |
882 void DecodeFailed(); | |
883 | |
884 ClientState state_; | |
885 std::unique_ptr<VideoEncodeAccelerator> encoder_; | |
886 | |
887 TestStream* test_stream_; | |
888 | |
889 // Used to notify another thread about the state. VEAClient does not own this. | |
890 ClientStateNotification<ClientState>* note_; | |
891 | |
892 // Ids assigned to VideoFrames. | |
893 std::set<int32_t> inputs_at_client_; | |
894 int32_t next_input_id_; | |
895 | |
896 // Encode start time of all encoded frames. The position in the vector is the | |
897 // frame input id. | |
898 std::vector<base::TimeTicks> encode_start_time_; | |
899 // The encode latencies of all encoded frames. We define encode latency as the | |
900 // time delay from input of each VideoFrame (VEA::Encode()) to output of the | |
901 // corresponding BitstreamBuffer (VEA::Client::BitstreamBufferReady()). | |
902 std::vector<base::TimeDelta> encode_latencies_; | |
903 | |
904 // Ids for output BitstreamBuffers. | |
905 typedef std::map<int32_t, base::SharedMemory*> IdToSHM; | |
906 ScopedVector<base::SharedMemory> output_shms_; | |
907 IdToSHM output_buffers_at_client_; | |
908 int32_t next_output_buffer_id_; | |
909 | |
910 // Current offset into input stream. | |
911 off_t pos_in_input_stream_; | |
912 gfx::Size input_coded_size_; | |
913 // Requested by encoder. | |
914 unsigned int num_required_input_buffers_; | |
915 size_t output_buffer_size_; | |
916 | |
917 // Number of frames to encode. This may differ from the number of frames in | |
918 // the stream if we need more frames for bitrate tests. | |
919 unsigned int num_frames_to_encode_; | |
920 | |
921 // Number of encoded frames we've got from the encoder thus far. | |
922 unsigned int num_encoded_frames_; | |
923 | |
924 // Frames since last bitrate verification. | |
925 unsigned int num_frames_since_last_check_; | |
926 | |
927 // True if received a keyframe while processing current bitstream buffer. | |
928 bool seen_keyframe_in_this_buffer_; | |
929 | |
930 // True if we are to save the encoded stream to a file. | |
931 bool save_to_file_; | |
932 | |
933 // Request a keyframe every keyframe_period_ frames. | |
934 const unsigned int keyframe_period_; | |
935 | |
936 // Number of keyframes requested by now. | |
937 unsigned int num_keyframes_requested_; | |
938 | |
939 // Next keyframe expected before next_keyframe_at_ + kMaxKeyframeDelay. | |
940 unsigned int next_keyframe_at_; | |
941 | |
942 // True if we are asking encoder for a particular bitrate. | |
943 bool force_bitrate_; | |
944 | |
945 // Current requested bitrate. | |
946 unsigned int current_requested_bitrate_; | |
947 | |
948 // Current expected framerate. | |
949 unsigned int current_framerate_; | |
950 | |
951 // Byte size of the encoded stream (for bitrate calculation) since last | |
952 // time we checked bitrate. | |
953 size_t encoded_stream_size_since_last_check_; | |
954 | |
955 // If true, verify performance at the end of the test. | |
956 bool test_perf_; | |
957 | |
958 // Check the output frame quality of the encoder. | |
959 bool verify_output_; | |
960 | |
961 // Used to perform codec-specific sanity checks on the stream. | |
962 std::unique_ptr<StreamValidator> stream_validator_; | |
963 | |
964 // Used to validate the encoded frame quality. | |
965 std::unique_ptr<VideoFrameQualityValidator> quality_validator_; | |
966 | |
967 // The time when the first frame is submitted for encode. | |
968 base::TimeTicks first_frame_start_time_; | |
969 | |
970 // The time when the last encoded frame is ready. | |
971 base::TimeTicks last_frame_ready_time_; | |
972 | |
973 // All methods of this class should be run on the same thread. | |
974 base::ThreadChecker thread_checker_; | |
975 | |
976 // Requested bitrate in bits per second. | |
977 unsigned int requested_bitrate_; | |
978 | |
979 // Requested initial framerate. | |
980 unsigned int requested_framerate_; | |
981 | |
982 // Bitrate to switch to in the middle of the stream. | |
983 unsigned int requested_subsequent_bitrate_; | |
984 | |
985 // Framerate to switch to in the middle of the stream. | |
986 unsigned int requested_subsequent_framerate_; | |
987 | |
988 // The timer used to feed the encoder with the input frames. | |
989 std::unique_ptr<base::RepeatingTimer> input_timer_; | |
990 }; | |
991 | |
992 VEAClient::VEAClient(TestStream* test_stream, | |
993 ClientStateNotification<ClientState>* note, | |
994 bool save_to_file, | |
995 unsigned int keyframe_period, | |
996 bool force_bitrate, | |
997 bool test_perf, | |
998 bool mid_stream_bitrate_switch, | |
999 bool mid_stream_framerate_switch, | |
1000 bool verify_output) | |
1001 : state_(CS_CREATED), | |
1002 test_stream_(test_stream), | |
1003 note_(note), | |
1004 next_input_id_(0), | |
1005 next_output_buffer_id_(0), | |
1006 pos_in_input_stream_(0), | |
1007 num_required_input_buffers_(0), | |
1008 output_buffer_size_(0), | |
1009 num_frames_to_encode_(0), | |
1010 num_encoded_frames_(0), | |
1011 num_frames_since_last_check_(0), | |
1012 seen_keyframe_in_this_buffer_(false), | |
1013 save_to_file_(save_to_file), | |
1014 keyframe_period_(keyframe_period), | |
1015 num_keyframes_requested_(0), | |
1016 next_keyframe_at_(0), | |
1017 force_bitrate_(force_bitrate), | |
1018 current_requested_bitrate_(0), | |
1019 current_framerate_(0), | |
1020 encoded_stream_size_since_last_check_(0), | |
1021 test_perf_(test_perf), | |
1022 verify_output_(verify_output), | |
1023 requested_bitrate_(0), | |
1024 requested_framerate_(0), | |
1025 requested_subsequent_bitrate_(0), | |
1026 requested_subsequent_framerate_(0) { | |
1027 if (keyframe_period_) | |
1028 LOG_ASSERT(kMaxKeyframeDelay < keyframe_period_); | |
1029 | |
1030 // Fake encoder produces an invalid stream, so skip validating it. | |
1031 if (!g_fake_encoder) { | |
1032 stream_validator_ = StreamValidator::Create( | |
1033 test_stream_->requested_profile, | |
1034 base::Bind(&VEAClient::HandleEncodedFrame, base::Unretained(this))); | |
1035 CHECK(stream_validator_); | |
1036 } | |
1037 | |
1038 if (save_to_file_) { | |
1039 LOG_ASSERT(!test_stream_->out_filename.empty()); | |
1040 base::FilePath out_filename(test_stream_->out_filename); | |
1041 // This creates or truncates out_filename. | |
1042 // Without it, AppendToFile() will not work. | |
1043 EXPECT_EQ(0, base::WriteFile(out_filename, NULL, 0)); | |
1044 } | |
1045 | |
1046 // Initialize the parameters of the test streams. | |
1047 UpdateTestStreamData(mid_stream_bitrate_switch, mid_stream_framerate_switch); | |
1048 | |
1049 thread_checker_.DetachFromThread(); | |
1050 } | |
1051 | |
1052 VEAClient::~VEAClient() { LOG_ASSERT(!has_encoder()); } | |
1053 | |
1054 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateFakeVEA() { | |
1055 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | |
1056 if (g_fake_encoder) { | |
1057 encoder.reset(new media::FakeVideoEncodeAccelerator( | |
1058 scoped_refptr<base::SingleThreadTaskRunner>( | |
1059 base::ThreadTaskRunnerHandle::Get()))); | |
1060 } | |
1061 return encoder; | |
1062 } | |
1063 | |
1064 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateV4L2VEA() { | |
1065 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | |
1066 #if defined(OS_CHROMEOS) && (defined(ARCH_CPU_ARMEL) || \ | |
1067 (defined(USE_OZONE) && defined(USE_V4L2_CODEC))) | |
1068 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kEncoder); | |
1069 if (device) | |
1070 encoder.reset(new V4L2VideoEncodeAccelerator(device)); | |
1071 #endif | |
1072 return encoder; | |
1073 } | |
1074 | |
1075 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateVaapiVEA() { | |
1076 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | |
1077 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY) | |
1078 encoder.reset(new VaapiVideoEncodeAccelerator()); | |
1079 #endif | |
1080 return encoder; | |
1081 } | |
1082 | |
1083 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateVTVEA() { | |
1084 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | |
1085 #if defined(OS_MACOSX) | |
1086 encoder.reset(new VTVideoEncodeAccelerator()); | |
1087 #endif | |
1088 return encoder; | |
1089 } | |
1090 | |
1091 void VEAClient::CreateEncoder() { | |
1092 DCHECK(thread_checker_.CalledOnValidThread()); | |
1093 LOG_ASSERT(!has_encoder()); | |
1094 | |
1095 std::unique_ptr<media::VideoEncodeAccelerator> encoders[] = { | |
1096 CreateFakeVEA(), CreateV4L2VEA(), CreateVaapiVEA(), CreateVTVEA()}; | |
1097 | |
1098 DVLOG(1) << "Profile: " << test_stream_->requested_profile | |
1099 << ", initial bitrate: " << requested_bitrate_; | |
1100 | |
1101 for (size_t i = 0; i < arraysize(encoders); ++i) { | |
1102 if (!encoders[i]) | |
1103 continue; | |
1104 encoder_ = std::move(encoders[i]); | |
1105 SetState(CS_ENCODER_SET); | |
1106 if (encoder_->Initialize(kInputFormat, | |
1107 test_stream_->visible_size, | |
1108 test_stream_->requested_profile, | |
1109 requested_bitrate_, | |
1110 this)) { | |
1111 SetStreamParameters(requested_bitrate_, requested_framerate_); | |
1112 SetState(CS_INITIALIZED); | |
1113 | |
1114 if (verify_output_ && !g_fake_encoder) | |
1115 quality_validator_.reset(new VideoFrameQualityValidator( | |
1116 test_stream_->requested_profile, | |
1117 base::Bind(&VEAClient::DecodeCompleted, base::Unretained(this)), | |
1118 base::Bind(&VEAClient::DecodeFailed, base::Unretained(this)))); | |
1119 return; | |
1120 } | |
1121 } | |
1122 encoder_.reset(); | |
1123 LOG(ERROR) << "VideoEncodeAccelerator::Initialize() failed"; | |
1124 SetState(CS_ERROR); | |
1125 } | |
1126 | |
1127 void VEAClient::DecodeCompleted() { | |
1128 SetState(CS_VALIDATED); | |
1129 } | |
1130 | |
1131 void VEAClient::DecodeFailed() { | |
1132 SetState(CS_ERROR); | |
1133 } | |
1134 | |
1135 void VEAClient::DestroyEncoder() { | |
1136 DCHECK(thread_checker_.CalledOnValidThread()); | |
1137 if (!has_encoder()) | |
1138 return; | |
1139 // Clear the objects that should be destroyed on the same thread as creation. | |
1140 encoder_.reset(); | |
1141 input_timer_.reset(); | |
1142 quality_validator_.reset(); | |
1143 } | |
1144 | |
1145 void VEAClient::UpdateTestStreamData(bool mid_stream_bitrate_switch, | |
1146 bool mid_stream_framerate_switch) { | |
1147 // Use defaults for bitrate/framerate if they are not provided. | |
1148 if (test_stream_->requested_bitrate == 0) | |
1149 requested_bitrate_ = kDefaultBitrate; | |
1150 else | |
1151 requested_bitrate_ = test_stream_->requested_bitrate; | |
1152 | |
1153 if (test_stream_->requested_framerate == 0) | |
1154 requested_framerate_ = kDefaultFramerate; | |
1155 else | |
1156 requested_framerate_ = test_stream_->requested_framerate; | |
1157 | |
1158 // If bitrate/framerate switch is requested, use the subsequent values if | |
1159 // provided, or, if not, calculate them from their initial values using | |
1160 // the default ratios. | |
1161 // Otherwise, if a switch is not requested, keep the initial values. | |
1162 if (mid_stream_bitrate_switch) { | |
1163 if (test_stream_->requested_subsequent_bitrate == 0) | |
1164 requested_subsequent_bitrate_ = | |
1165 requested_bitrate_ * kDefaultSubsequentBitrateRatio; | |
1166 else | |
1167 requested_subsequent_bitrate_ = | |
1168 test_stream_->requested_subsequent_bitrate; | |
1169 } else { | |
1170 requested_subsequent_bitrate_ = requested_bitrate_; | |
1171 } | |
1172 if (requested_subsequent_bitrate_ == 0) | |
1173 requested_subsequent_bitrate_ = 1; | |
1174 | |
1175 if (mid_stream_framerate_switch) { | |
1176 if (test_stream_->requested_subsequent_framerate == 0) | |
1177 requested_subsequent_framerate_ = | |
1178 requested_framerate_ * kDefaultSubsequentFramerateRatio; | |
1179 else | |
1180 requested_subsequent_framerate_ = | |
1181 test_stream_->requested_subsequent_framerate; | |
1182 } else { | |
1183 requested_subsequent_framerate_ = requested_framerate_; | |
1184 } | |
1185 if (requested_subsequent_framerate_ == 0) | |
1186 requested_subsequent_framerate_ = 1; | |
1187 } | |
1188 | |
1189 double VEAClient::frames_per_second() { | |
1190 LOG_ASSERT(num_encoded_frames_ != 0UL); | |
1191 base::TimeDelta duration = last_frame_ready_time_ - first_frame_start_time_; | |
1192 return num_encoded_frames_ / duration.InSecondsF(); | |
1193 } | |
1194 | |
1195 void VEAClient::RequireBitstreamBuffers(unsigned int input_count, | |
1196 const gfx::Size& input_coded_size, | |
1197 size_t output_size) { | |
1198 DCHECK(thread_checker_.CalledOnValidThread()); | |
1199 ASSERT_EQ(state_, CS_INITIALIZED); | |
1200 SetState(CS_ENCODING); | |
1201 | |
1202 if (quality_validator_) | |
1203 quality_validator_->Initialize(input_coded_size, | |
1204 gfx::Rect(test_stream_->visible_size)); | |
1205 | |
1206 CreateAlignedInputStreamFile(input_coded_size, test_stream_); | |
1207 | |
1208 num_frames_to_encode_ = test_stream_->num_frames; | |
1209 if (g_num_frames_to_encode > 0) | |
1210 num_frames_to_encode_ = g_num_frames_to_encode; | |
1211 | |
1212 // We may need to loop over the stream more than once if more frames than | |
1213 // provided are required for bitrate tests. | |
1214 if (force_bitrate_ && num_frames_to_encode_ < kMinFramesForBitrateTests) { | |
1215 DVLOG(1) << "Stream too short for bitrate test (" | |
1216 << test_stream_->num_frames << " frames), will loop it to reach " | |
1217 << kMinFramesForBitrateTests << " frames"; | |
1218 num_frames_to_encode_ = kMinFramesForBitrateTests; | |
1219 } | |
1220 if (save_to_file_ && IsVP8(test_stream_->requested_profile)) | |
1221 WriteIvfFileHeader(); | |
1222 | |
1223 input_coded_size_ = input_coded_size; | |
1224 num_required_input_buffers_ = input_count; | |
1225 ASSERT_GT(num_required_input_buffers_, 0UL); | |
1226 | |
1227 output_buffer_size_ = output_size; | |
1228 ASSERT_GT(output_buffer_size_, 0UL); | |
1229 | |
1230 for (unsigned int i = 0; i < kNumOutputBuffers; ++i) { | |
1231 base::SharedMemory* shm = new base::SharedMemory(); | |
1232 LOG_ASSERT(shm->CreateAndMapAnonymous(output_buffer_size_)); | |
1233 output_shms_.push_back(shm); | |
1234 FeedEncoderWithOutput(shm); | |
1235 } | |
1236 | |
1237 if (g_env->run_at_fps()) { | |
1238 input_timer_.reset(new base::RepeatingTimer()); | |
1239 input_timer_->Start( | |
1240 FROM_HERE, base::TimeDelta::FromSeconds(1) / current_framerate_, | |
1241 base::Bind(&VEAClient::OnInputTimer, base::Unretained(this))); | |
1242 } else { | |
1243 while (inputs_at_client_.size() < | |
1244 num_required_input_buffers_ + kNumExtraInputFrames) | |
1245 FeedEncoderWithOneInput(); | |
1246 } | |
1247 } | |
1248 | |
1249 void VEAClient::BitstreamBufferReady(int32_t bitstream_buffer_id, | |
1250 size_t payload_size, | |
1251 bool key_frame) { | |
1252 DCHECK(thread_checker_.CalledOnValidThread()); | |
1253 ASSERT_LE(payload_size, output_buffer_size_); | |
1254 | |
1255 IdToSHM::iterator it = output_buffers_at_client_.find(bitstream_buffer_id); | |
1256 ASSERT_NE(it, output_buffers_at_client_.end()); | |
1257 base::SharedMemory* shm = it->second; | |
1258 output_buffers_at_client_.erase(it); | |
1259 | |
1260 if (state_ == CS_FINISHED || state_ == CS_VALIDATED) | |
1261 return; | |
1262 | |
1263 encoded_stream_size_since_last_check_ += payload_size; | |
1264 | |
1265 const uint8_t* stream_ptr = static_cast<const uint8_t*>(shm->memory()); | |
1266 if (payload_size > 0) { | |
1267 if (stream_validator_) { | |
1268 stream_validator_->ProcessStreamBuffer(stream_ptr, payload_size); | |
1269 } else { | |
1270 HandleEncodedFrame(key_frame); | |
1271 } | |
1272 | |
1273 if (quality_validator_) { | |
1274 scoped_refptr<media::DecoderBuffer> buffer(media::DecoderBuffer::CopyFrom( | |
1275 reinterpret_cast<const uint8_t*>(shm->memory()), | |
1276 static_cast<int>(payload_size))); | |
1277 quality_validator_->AddDecodeBuffer(buffer); | |
1278 // Insert EOS buffer to flush the decoder. | |
1279 if (num_encoded_frames_ == num_frames_to_encode_) | |
1280 quality_validator_->Flush(); | |
1281 } | |
1282 | |
1283 if (save_to_file_) { | |
1284 if (IsVP8(test_stream_->requested_profile)) | |
1285 WriteIvfFrameHeader(num_encoded_frames_ - 1, payload_size); | |
1286 | |
1287 EXPECT_TRUE(base::AppendToFile( | |
1288 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), | |
1289 static_cast<char*>(shm->memory()), | |
1290 base::checked_cast<int>(payload_size))); | |
1291 } | |
1292 } | |
1293 | |
1294 EXPECT_EQ(key_frame, seen_keyframe_in_this_buffer_); | |
1295 seen_keyframe_in_this_buffer_ = false; | |
1296 | |
1297 FeedEncoderWithOutput(shm); | |
1298 } | |
1299 | |
1300 void VEAClient::NotifyError(VideoEncodeAccelerator::Error error) { | |
1301 DCHECK(thread_checker_.CalledOnValidThread()); | |
1302 SetState(CS_ERROR); | |
1303 } | |
1304 | |
1305 void VEAClient::SetState(ClientState new_state) { | |
1306 DVLOG(4) << "Changing state " << state_ << "->" << new_state; | |
1307 note_->Notify(new_state); | |
1308 state_ = new_state; | |
1309 } | |
1310 | |
1311 void VEAClient::SetStreamParameters(unsigned int bitrate, | |
1312 unsigned int framerate) { | |
1313 current_requested_bitrate_ = bitrate; | |
1314 current_framerate_ = framerate; | |
1315 LOG_ASSERT(current_requested_bitrate_ > 0UL); | |
1316 LOG_ASSERT(current_framerate_ > 0UL); | |
1317 encoder_->RequestEncodingParametersChange(current_requested_bitrate_, | |
1318 current_framerate_); | |
1319 DVLOG(1) << "Switched parameters to " << current_requested_bitrate_ | |
1320 << " bps @ " << current_framerate_ << " FPS"; | |
1321 } | |
1322 | |
1323 void VEAClient::InputNoLongerNeededCallback(int32_t input_id) { | |
1324 std::set<int32_t>::iterator it = inputs_at_client_.find(input_id); | |
1325 ASSERT_NE(it, inputs_at_client_.end()); | |
1326 inputs_at_client_.erase(it); | |
1327 if (!g_env->run_at_fps()) | |
1328 FeedEncoderWithOneInput(); | |
1329 } | |
1330 | |
1331 scoped_refptr<media::VideoFrame> VEAClient::CreateFrame(off_t position) { | |
1332 uint8_t* frame_data_y = const_cast<uint8_t*>( | |
1333 test_stream_->mapped_aligned_in_file.data() + position); | |
1334 uint8_t* frame_data_u = frame_data_y + test_stream_->aligned_plane_size[0]; | |
1335 uint8_t* frame_data_v = frame_data_u + test_stream_->aligned_plane_size[1]; | |
1336 CHECK_GT(current_framerate_, 0U); | |
1337 | |
1338 scoped_refptr<media::VideoFrame> video_frame = | |
1339 media::VideoFrame::WrapExternalYuvData( | |
1340 kInputFormat, input_coded_size_, | |
1341 gfx::Rect(test_stream_->visible_size), test_stream_->visible_size, | |
1342 input_coded_size_.width(), input_coded_size_.width() / 2, | |
1343 input_coded_size_.width() / 2, frame_data_y, frame_data_u, | |
1344 frame_data_v, | |
1345 base::TimeDelta().FromMilliseconds( | |
1346 next_input_id_ * base::Time::kMillisecondsPerSecond / | |
1347 current_framerate_)); | |
1348 EXPECT_NE(nullptr, video_frame.get()); | |
1349 return video_frame; | |
1350 } | |
1351 | |
1352 scoped_refptr<media::VideoFrame> VEAClient::PrepareInputFrame( | |
1353 off_t position, | |
1354 int32_t* input_id) { | |
1355 CHECK_LE(position + test_stream_->aligned_buffer_size, | |
1356 test_stream_->mapped_aligned_in_file.length()); | |
1357 | |
1358 scoped_refptr<media::VideoFrame> frame = CreateFrame(position); | |
1359 EXPECT_TRUE(frame); | |
1360 frame->AddDestructionObserver( | |
1361 media::BindToCurrentLoop( | |
1362 base::Bind(&VEAClient::InputNoLongerNeededCallback, | |
1363 base::Unretained(this), | |
1364 next_input_id_))); | |
1365 | |
1366 LOG_ASSERT(inputs_at_client_.insert(next_input_id_).second); | |
1367 | |
1368 *input_id = next_input_id_++; | |
1369 return frame; | |
1370 } | |
1371 | |
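// Timer callback used when running at a fixed framerate; feeds one input per | |
// tick, or drops the frame if the encoder already holds enough inputs. | |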
1372 void VEAClient::OnInputTimer() { | |
1373 if (!has_encoder() || state_ != CS_ENCODING) | |
1374 input_timer_.reset(); | |
1375 else if (inputs_at_client_.size() < | |
1376 num_required_input_buffers_ + kNumExtraInputFrames) | |
1377 FeedEncoderWithOneInput(); | |
1378 else | |
1379 DVLOG(1) << "Dropping input frame"; | |
1380 } | |
1381 | |
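// Prepares the next input frame from the stream (rewinding at end-of-stream | |
// if needed), optionally requests a keyframe, and submits it to the encoder. | |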
1382 void VEAClient::FeedEncoderWithOneInput() { | |
1383 if (!has_encoder() || state_ != CS_ENCODING) | |
1384 return; | |
1385 | |
1386 size_t bytes_left = | |
1387 test_stream_->mapped_aligned_in_file.length() - pos_in_input_stream_; | |
1388 if (bytes_left < test_stream_->aligned_buffer_size) { | |
1389 DCHECK_EQ(bytes_left, 0UL); | |
1390 // Rewind if at the end of stream and we are still encoding. | |
1391 // This is to flush the encoder with additional frames from the beginning | |
1392     // of the stream, or if the stream is shorter than the number of frames | |
1393 // we require for bitrate tests. | |
1394 pos_in_input_stream_ = 0; | |
1395 } | |
1396 | |
1397 if (quality_validator_) | |
1398 quality_validator_->AddOriginalFrame(CreateFrame(pos_in_input_stream_)); | |
1399 | |
1400 int32_t input_id; | |
1401 scoped_refptr<media::VideoFrame> video_frame = | |
1402 PrepareInputFrame(pos_in_input_stream_, &input_id); | |
1403 pos_in_input_stream_ += test_stream_->aligned_buffer_size; | |
1404 | |
1405 bool force_keyframe = false; | |
1406 if (keyframe_period_ && input_id % keyframe_period_ == 0) { | |
1407 force_keyframe = true; | |
1408 ++num_keyframes_requested_; | |
1409 } | |
1410 | |
1411 if (input_id == 0) { | |
1412 first_frame_start_time_ = base::TimeTicks::Now(); | |
1413 } | |
1414 | |
1415 if (g_env->needs_encode_latency()) { | |
1416 LOG_ASSERT(input_id == static_cast<int32_t>(encode_start_time_.size())); | |
1417 encode_start_time_.push_back(base::TimeTicks::Now()); | |
1418 } | |
1419 encoder_->Encode(video_frame, force_keyframe); | |
1420 } | |
1421 | |
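// Hands |shm| back to the encoder as an output bitstream buffer for the next | |
// encoded chunk. | |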
1422 void VEAClient::FeedEncoderWithOutput(base::SharedMemory* shm) { | |
1423 if (!has_encoder()) | |
1424 return; | |
1425 | |
1426 if (state_ != CS_ENCODING) | |
1427 return; | |
1428 | |
1429 base::SharedMemoryHandle dup_handle; | |
1430 LOG_ASSERT(shm->ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle)); | |
1431 | |
1432 media::BitstreamBuffer bitstream_buffer( | |
1433 next_output_buffer_id_++, dup_handle, output_buffer_size_); | |
1434 LOG_ASSERT(output_buffers_at_client_.insert( | |
1435 std::make_pair(bitstream_buffer.id(), shm)).second); | |
1436 encoder_->UseOutputBitstreamBuffer(bitstream_buffer); | |
1437 } | |
1438 | |
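// Performs bookkeeping for one encoded frame: records latency, checks | |
// keyframe expectations, triggers the mid-stream parameter switch halfway | |
// through the stream, and returns false once all requested frames have been | |
// encoded. | |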
1439 bool VEAClient::HandleEncodedFrame(bool keyframe) { | |
1440   // This would be a bug in the test, which should not ignore a false | |
1441   // return value from this method. | |
1442 LOG_ASSERT(num_encoded_frames_ <= num_frames_to_encode_); | |
1443 | |
1444 last_frame_ready_time_ = base::TimeTicks::Now(); | |
1445 | |
1446 if (g_env->needs_encode_latency()) { | |
1447 LOG_ASSERT(num_encoded_frames_ < encode_start_time_.size()); | |
1448 base::TimeTicks start_time = encode_start_time_[num_encoded_frames_]; | |
1449 LOG_ASSERT(!start_time.is_null()); | |
1450 encode_latencies_.push_back(last_frame_ready_time_ - start_time); | |
1451 } | |
1452 | |
1453 ++num_encoded_frames_; | |
1454 ++num_frames_since_last_check_; | |
1455 | |
1456   // Because the keyframe behavior requirements are loose, we give the | |
1457   // encoder more freedom here. It could deliver a keyframe immediately | |
1458   // after we requested it, which may apply to a frame earlier than the | |
1459   // one we requested it for (since the keyframe request is asynchronous, | |
1460   // i.e. not bound to any concrete frame, and the pipeline can be deeper | |
1461   // than one frame), at that frame, or after it. | |
1462   // So the only constraints we put here are that we get a keyframe no | |
1463   // earlier than we requested one (in time), and no later than | |
1464   // kMaxKeyframeDelay frames after the frame for which we requested it | |
1465   // comes back encoded. | |
1466 if (keyframe) { | |
1467 if (num_keyframes_requested_ > 0) { | |
1468 --num_keyframes_requested_; | |
1469 next_keyframe_at_ += keyframe_period_; | |
1470 } | |
1471 seen_keyframe_in_this_buffer_ = true; | |
1472 } | |
1473 | |
1474 if (num_keyframes_requested_ > 0) | |
1475 EXPECT_LE(num_encoded_frames_, next_keyframe_at_ + kMaxKeyframeDelay); | |
1476 | |
1477 if (num_encoded_frames_ == num_frames_to_encode_ / 2) { | |
1478 VerifyStreamProperties(); | |
1479 if (requested_subsequent_bitrate_ != current_requested_bitrate_ || | |
1480 requested_subsequent_framerate_ != current_framerate_) { | |
1481 SetStreamParameters(requested_subsequent_bitrate_, | |
1482 requested_subsequent_framerate_); | |
1483 if (g_env->run_at_fps() && input_timer_) | |
1484 input_timer_->Start( | |
1485 FROM_HERE, base::TimeDelta::FromSeconds(1) / current_framerate_, | |
1486 base::Bind(&VEAClient::OnInputTimer, base::Unretained(this))); | |
1487 } | |
1488 } else if (num_encoded_frames_ == num_frames_to_encode_) { | |
1489 LogPerf(); | |
1490 VerifyMinFPS(); | |
1491 VerifyStreamProperties(); | |
1492 SetState(CS_FINISHED); | |
1493 if (!quality_validator_) | |
1494 SetState(CS_VALIDATED); | |
1495 return false; | |
1496 } | |
1497 | |
1498 return true; | |
1499 } | |
1500 | |
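// Logs the measured encoder FPS and, when latency measurement is enabled, | |
// the encode latency percentiles listed in kLoggedLatencyPercentiles. | |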
1501 void VEAClient::LogPerf() { | |
1502 g_env->LogToFile("Measured encoder FPS", | |
1503 base::StringPrintf("%.3f", frames_per_second())); | |
1504 | |
1505 // Log encode latencies. | |
1506 if (g_env->needs_encode_latency()) { | |
1507 std::sort(encode_latencies_.begin(), encode_latencies_.end()); | |
1508 for (const auto& percentile : kLoggedLatencyPercentiles) { | |
1509 base::TimeDelta latency = Percentile(encode_latencies_, percentile); | |
1510 g_env->LogToFile( | |
1511 base::StringPrintf("Encode latency for the %dth percentile", | |
1512 percentile), | |
1513 base::StringPrintf("%" PRId64 " us", latency.InMicroseconds())); | |
1514 } | |
1515 } | |
1516 } | |
1517 | |
1518 void VEAClient::VerifyMinFPS() { | |
1519 if (test_perf_) | |
1520 EXPECT_GE(frames_per_second(), kMinPerfFPS); | |
1521 } | |
1522 | |
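// Computes the bitrate of the chunk encoded since the last check; when | |
// bitrate enforcement is on, expects it to be within kBitrateTolerance of the | |
// requested bitrate, and verifies that requested keyframes arrived in time. | |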
1523 void VEAClient::VerifyStreamProperties() { | |
1524 LOG_ASSERT(num_frames_since_last_check_ > 0UL); | |
1525 LOG_ASSERT(encoded_stream_size_since_last_check_ > 0UL); | |
1526 unsigned int bitrate = encoded_stream_size_since_last_check_ * 8 * | |
1527 current_framerate_ / num_frames_since_last_check_; | |
1528 DVLOG(1) << "Current chunk's bitrate: " << bitrate | |
1529 << " (expected: " << current_requested_bitrate_ | |
1530 << " @ " << current_framerate_ << " FPS," | |
1531            << " num frames in chunk: " << num_frames_since_last_check_ << ")"; | |
1532 | |
1533 num_frames_since_last_check_ = 0; | |
1534 encoded_stream_size_since_last_check_ = 0; | |
1535 | |
1536 if (force_bitrate_) { | |
1537 EXPECT_NEAR(bitrate, | |
1538 current_requested_bitrate_, | |
1539 kBitrateTolerance * current_requested_bitrate_); | |
1540 } | |
1541 | |
1542   // All requested keyframes should've been provided. Allow the last requested | |
1543   // keyframe to remain undelivered if we haven't reached the maximum frame | |
1544   // number by which it should have arrived. | |
1545 if (num_encoded_frames_ < next_keyframe_at_ + kMaxKeyframeDelay) | |
1546 EXPECT_LE(num_keyframes_requested_, 1UL); | |
1547 else | |
1548 EXPECT_EQ(num_keyframes_requested_, 0UL); | |
1549 } | |
1550 | |
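// Writes the IVF file header (VP80 fourcc, stream dimensions, timebase and | |
// frame count) to the output file. | |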
1551 void VEAClient::WriteIvfFileHeader() { | |
1552 media::IvfFileHeader header = {}; | |
1553 | |
1554 memcpy(header.signature, media::kIvfHeaderSignature, | |
1555 sizeof(header.signature)); | |
1556 header.version = 0; | |
1557 header.header_size = sizeof(header); | |
1558 header.fourcc = 0x30385056; // VP80 | |
1559 header.width = | |
1560 base::checked_cast<uint16_t>(test_stream_->visible_size.width()); | |
1561 header.height = | |
1562 base::checked_cast<uint16_t>(test_stream_->visible_size.height()); | |
1563 header.timebase_denum = requested_framerate_; | |
1564 header.timebase_num = 1; | |
1565 header.num_frames = num_frames_to_encode_; | |
1566 header.ByteSwap(); | |
1567 | |
1568 EXPECT_TRUE(base::AppendToFile( | |
1569 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), | |
1570 reinterpret_cast<char*>(&header), sizeof(header))); | |
1571 } | |
1572 | |
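// Writes the per-frame IVF header (payload size and timestamp) that precedes | |
// each encoded VP8 frame in the output file. | |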
1573 void VEAClient::WriteIvfFrameHeader(int frame_index, size_t frame_size) { | |
1574 media::IvfFrameHeader header = {}; | |
1575 | |
1576 header.frame_size = frame_size; | |
1577 header.timestamp = frame_index; | |
1578 header.ByteSwap(); | |
1579 EXPECT_TRUE(base::AppendToFile( | |
1580 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), | |
1581 reinterpret_cast<char*>(&header), sizeof(header))); | |
1582 } | |
1583 | |
1584 // Test parameters: | |
1585 // - Number of concurrent encoders. The value takes effect when there is only | |
1586 // one input stream; otherwise, one encoder per input stream will be | |
1587 // instantiated. | |
1588 // - If true, save output to file (provided an output filename was supplied). | |
1589 // - Force a keyframe every n frames. | |
1590 // - Force bitrate; the actual required value is provided as a property | |
1591 // of the input stream, because it depends on stream type/resolution/etc. | |
1592 // - If true, measure performance. | |
1593 // - If true, switch bitrate mid-stream. | |
1594 // - If true, switch framerate mid-stream. | |
1595 // - If true, verify the output frames of the encoder. | |
1596 class VideoEncodeAcceleratorTest | |
1597 : public ::testing::TestWithParam< | |
1598 base::Tuple<int, bool, int, bool, bool, bool, bool, bool>> {}; | |
1599 | |
1600 TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) { | |
1601 size_t num_concurrent_encoders = base::get<0>(GetParam()); | |
1602 const bool save_to_file = base::get<1>(GetParam()); | |
1603 const unsigned int keyframe_period = base::get<2>(GetParam()); | |
1604 const bool force_bitrate = base::get<3>(GetParam()); | |
1605 const bool test_perf = base::get<4>(GetParam()); | |
1606 const bool mid_stream_bitrate_switch = base::get<5>(GetParam()); | |
1607 const bool mid_stream_framerate_switch = base::get<6>(GetParam()); | |
1608 const bool verify_output = | |
1609 base::get<7>(GetParam()) || g_env->verify_all_output(); | |
1610 | |
1611 ScopedVector<ClientStateNotification<ClientState> > notes; | |
1612 ScopedVector<VEAClient> clients; | |
1613 base::Thread encoder_thread("EncoderThread"); | |
1614 ASSERT_TRUE(encoder_thread.Start()); | |
1615 | |
1616 if (g_env->test_streams_.size() > 1) | |
1617 num_concurrent_encoders = g_env->test_streams_.size(); | |
1618 | |
1619 // Create all encoders. | |
1620 for (size_t i = 0; i < num_concurrent_encoders; i++) { | |
1621 size_t test_stream_index = i % g_env->test_streams_.size(); | |
1622 // Disregard save_to_file if we didn't get an output filename. | |
1623 bool encoder_save_to_file = | |
1624 (save_to_file && | |
1625 !g_env->test_streams_[test_stream_index]->out_filename.empty()); | |
1626 | |
1627 notes.push_back(new ClientStateNotification<ClientState>()); | |
1628 clients.push_back(new VEAClient( | |
1629 g_env->test_streams_[test_stream_index], notes.back(), | |
1630 encoder_save_to_file, keyframe_period, force_bitrate, test_perf, | |
1631 mid_stream_bitrate_switch, mid_stream_framerate_switch, verify_output)); | |
1632 | |
1633 encoder_thread.message_loop()->PostTask( | |
1634 FROM_HERE, | |
1635 base::Bind(&VEAClient::CreateEncoder, | |
1636 base::Unretained(clients.back()))); | |
1637 } | |
1638 | |
1639 // All encoders must pass through states in this order. | |
1640 enum ClientState state_transitions[] = { | |
1641 CS_ENCODER_SET, CS_INITIALIZED, CS_ENCODING, CS_FINISHED, CS_VALIDATED}; | |
1642 | |
1643 // Wait for all encoders to go through all states and finish. | |
1644 // Do this by waiting for all encoders to advance to state n before checking | |
1645 // state n+1, to verify that they are able to operate concurrently. | |
1646   // It also better simulates real-world usage, as the main thread, on which | |
1647   // encoders are created/destroyed, is a single GPU Process ChildThread. | |
1648   // Moreover, we can't have proper multithreading on X11, so multiple | |
1649   // "ChildThreads" could cause hard-to-debug issues there. | |
1650 for (size_t state_no = 0; state_no < arraysize(state_transitions); | |
1651 ++state_no) { | |
1652 for (size_t i = 0; i < num_concurrent_encoders; i++) | |
1653 ASSERT_EQ(notes[i]->Wait(), state_transitions[state_no]); | |
1654 } | |
1655 | |
1656 for (size_t i = 0; i < num_concurrent_encoders; ++i) { | |
1657 encoder_thread.message_loop()->PostTask( | |
1658 FROM_HERE, | |
1659 base::Bind(&VEAClient::DestroyEncoder, base::Unretained(clients[i]))); | |
1660 } | |
1661 | |
1662 // This ensures all tasks have finished. | |
1663 encoder_thread.Stop(); | |
1664 } | |
1665 | |
1666 #if !defined(OS_MACOSX) | |
1667 INSTANTIATE_TEST_CASE_P( | |
1668 SimpleEncode, | |
1669 VideoEncodeAcceleratorTest, | |
1670 ::testing::Values( | |
1671 base::MakeTuple(1, true, 0, false, false, false, false, false), | |
1672 base::MakeTuple(1, true, 0, false, false, false, false, true))); | |
1673 | |
1674 INSTANTIATE_TEST_CASE_P( | |
1675 EncoderPerf, | |
1676 VideoEncodeAcceleratorTest, | |
1677 ::testing::Values( | |
1678 base::MakeTuple(1, false, 0, false, true, false, false, false))); | |
1679 | |
1680 INSTANTIATE_TEST_CASE_P( | |
1681 ForceKeyframes, | |
1682 VideoEncodeAcceleratorTest, | |
1683 ::testing::Values( | |
1684 base::MakeTuple(1, false, 10, false, false, false, false, false))); | |
1685 | |
1686 INSTANTIATE_TEST_CASE_P( | |
1687 ForceBitrate, | |
1688 VideoEncodeAcceleratorTest, | |
1689 ::testing::Values( | |
1690 base::MakeTuple(1, false, 0, true, false, false, false, false))); | |
1691 | |
1692 INSTANTIATE_TEST_CASE_P( | |
1693 MidStreamParamSwitchBitrate, | |
1694 VideoEncodeAcceleratorTest, | |
1695 ::testing::Values( | |
1696 base::MakeTuple(1, false, 0, true, false, true, false, false))); | |
1697 | |
1698 INSTANTIATE_TEST_CASE_P( | |
1699 MidStreamParamSwitchFPS, | |
1700 VideoEncodeAcceleratorTest, | |
1701 ::testing::Values( | |
1702 base::MakeTuple(1, false, 0, true, false, false, true, false))); | |
1703 | |
1704 INSTANTIATE_TEST_CASE_P( | |
1705 MultipleEncoders, | |
1706 VideoEncodeAcceleratorTest, | |
1707 ::testing::Values( | |
1708 base::MakeTuple(3, false, 0, false, false, false, false, false), | |
1709 base::MakeTuple(3, false, 0, true, false, false, true, false), | |
1710 base::MakeTuple(3, false, 0, true, false, true, false, false))); | |
1711 #else | |
1712 INSTANTIATE_TEST_CASE_P( | |
1713 SimpleEncode, | |
1714 VideoEncodeAcceleratorTest, | |
1715 ::testing::Values( | |
1716 base::MakeTuple(1, true, 0, false, false, false, false, false), | |
1717 base::MakeTuple(1, true, 0, false, false, false, false, true))); | |
1718 | |
1719 INSTANTIATE_TEST_CASE_P( | |
1720 EncoderPerf, | |
1721 VideoEncodeAcceleratorTest, | |
1722 ::testing::Values( | |
1723 base::MakeTuple(1, false, 0, false, true, false, false, false))); | |
1724 | |
1725 INSTANTIATE_TEST_CASE_P( | |
1726 MultipleEncoders, | |
1727 VideoEncodeAcceleratorTest, | |
1728 ::testing::Values( | |
1729 base::MakeTuple(3, false, 0, false, false, false, false, false))); | |
1730 #endif | |
1731 | |
1732 // TODO(posciak): more tests: | |
1733 // - async FeedEncoderWithOutput | |
1734 // - out-of-order return of outputs to encoder | |
1735 // - multiple encoders + decoders | |
1736 // - mid-stream encoder_->Destroy() | |
1737 | |
1738 } // namespace | |
1739 } // namespace content | |
1740 | |
1741 int main(int argc, char** argv) { | |
1742 testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args. | |
1743 base::CommandLine::Init(argc, argv); | |
1744 | |
1745 base::ShadowingAtExitManager at_exit_manager; | |
1746 base::MessageLoop main_loop; | |
1747 | |
1748 std::unique_ptr<base::FilePath::StringType> test_stream_data( | |
1749 new base::FilePath::StringType( | |
1750 media::GetTestDataFilePath(content::g_default_in_filename).value() + | |
1751 content::g_default_in_parameters)); | |
1752 | |
1753 // Needed to enable DVLOG through --vmodule. | |
1754 logging::LoggingSettings settings; | |
1755 settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG; | |
1756 LOG_ASSERT(logging::InitLogging(settings)); | |
1757 | |
1758 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess(); | |
1759 DCHECK(cmd_line); | |
1760 | |
1761 bool run_at_fps = false; | |
1762 bool needs_encode_latency = false; | |
1763 bool verify_all_output = false; | |
1764 base::FilePath log_path; | |
1765 | |
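  // Example invocation (the binary name and values are illustrative only; the | |
  // recognized switches are the ones handled in the loop below): | |
  //   video_encode_accelerator_unittest --run_at_fps --measure_latency \ | |
  //       --num_frames_to_encode=300 --output_log=perf.log | |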
1766 base::CommandLine::SwitchMap switches = cmd_line->GetSwitches(); | |
1767 for (base::CommandLine::SwitchMap::const_iterator it = switches.begin(); | |
1768 it != switches.end(); | |
1769 ++it) { | |
1770 if (it->first == "test_stream_data") { | |
1771 test_stream_data->assign(it->second.c_str()); | |
1772 continue; | |
1773 } | |
1774 // Output machine-readable logs with fixed formats to a file. | |
1775 if (it->first == "output_log") { | |
1776 log_path = base::FilePath( | |
1777 base::FilePath::StringType(it->second.begin(), it->second.end())); | |
1778 continue; | |
1779 } | |
1780 if (it->first == "num_frames_to_encode") { | |
1781 std::string input(it->second.begin(), it->second.end()); | |
1782 LOG_ASSERT(base::StringToInt(input, &content::g_num_frames_to_encode)); | |
1783 continue; | |
1784 } | |
1785 if (it->first == "measure_latency") { | |
1786 needs_encode_latency = true; | |
1787 continue; | |
1788 } | |
1789 if (it->first == "fake_encoder") { | |
1790 content::g_fake_encoder = true; | |
1791 continue; | |
1792 } | |
1793 if (it->first == "run_at_fps") { | |
1794 run_at_fps = true; | |
1795 continue; | |
1796 } | |
1797 if (it->first == "verify_all_output") { | |
1798 verify_all_output = true; | |
1799 continue; | |
1800 } | |
1801 if (it->first == "v" || it->first == "vmodule") | |
1802 continue; | |
1803 if (it->first == "ozone-platform" || it->first == "ozone-use-surfaceless") | |
1804 continue; | |
1805 LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second; | |
1806 } | |
1807 | |
1808 if (needs_encode_latency && !run_at_fps) { | |
1809 // Encode latency can only be measured with --run_at_fps. Otherwise, we get | |
1810 // skewed results since it may queue too many frames at once with the same | |
1811 // encode start time. | |
1812     LOG(FATAL) << "--measure_latency requires --run_at_fps to be enabled."; | |
1813 } | |
1814 | |
1815 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY) | |
1816 content::VaapiWrapper::PreSandboxInitialization(); | |
1817 #endif | |
1818 | |
1819 content::g_env = | |
1820 reinterpret_cast<content::VideoEncodeAcceleratorTestEnvironment*>( | |
1821 testing::AddGlobalTestEnvironment( | |
1822 new content::VideoEncodeAcceleratorTestEnvironment( | |
1823 std::move(test_stream_data), log_path, run_at_fps, | |
1824 needs_encode_latency, verify_all_output))); | |
1825 | |
1826 return RUN_ALL_TESTS(); | |
1827 } | |