| OLD | NEW |
| --- | --- |
| (Empty) | |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include <inttypes.h> | |
| 6 #include <stddef.h> | |
| 7 #include <stdint.h> | |
| 8 | |
| 9 #include <algorithm> | |
| 10 #include <memory> | |
| 11 #include <queue> | |
| 12 #include <string> | |
| 13 #include <utility> | |
| 14 | |
| 15 #include "base/at_exit.h" | |
| 16 #include "base/bind.h" | |
| 17 #include "base/command_line.h" | |
| 18 #include "base/files/file_util.h" | |
| 19 #include "base/files/memory_mapped_file.h" | |
| 20 #include "base/macros.h" | |
| 21 #include "base/memory/scoped_vector.h" | |
| 22 #include "base/message_loop/message_loop.h" | |
| 23 #include "base/numerics/safe_conversions.h" | |
| 24 #include "base/process/process_handle.h" | |
| 25 #include "base/strings/string_number_conversions.h" | |
| 26 #include "base/strings/string_split.h" | |
| 27 #include "base/strings/stringprintf.h" | |
| 28 #include "base/threading/thread.h" | |
| 29 #include "base/threading/thread_checker.h" | |
| 30 #include "base/time/time.h" | |
| 31 #include "base/timer/timer.h" | |
| 32 #include "build/build_config.h" | |
| 33 #include "content/common/gpu/media/video_accelerator_unittest_helpers.h" | |
| 34 #include "media/base/bind_to_current_loop.h" | |
| 35 #include "media/base/bitstream_buffer.h" | |
| 36 #include "media/base/cdm_context.h" | |
| 37 #include "media/base/decoder_buffer.h" | |
| 38 #include "media/base/media_util.h" | |
| 39 #include "media/base/test_data_util.h" | |
| 40 #include "media/base/video_decoder.h" | |
| 41 #include "media/base/video_frame.h" | |
| 42 #include "media/filters/ffmpeg_glue.h" | |
| 43 #include "media/filters/ffmpeg_video_decoder.h" | |
| 44 #include "media/filters/h264_parser.h" | |
| 45 #include "media/filters/ivf_parser.h" | |
| 46 #include "media/video/fake_video_encode_accelerator.h" | |
| 47 #include "media/video/video_encode_accelerator.h" | |
| 48 #include "testing/gtest/include/gtest/gtest.h" | |
| 49 | |
| 50 #if defined(OS_CHROMEOS) | |
| 51 #if defined(ARCH_CPU_ARMEL) || (defined(USE_OZONE) && defined(USE_V4L2_CODEC)) | |
| 52 #include "content/common/gpu/media/v4l2_video_encode_accelerator.h" | |
| 53 #endif | |
| 54 #if defined(ARCH_CPU_X86_FAMILY) | |
| 55 #include "content/common/gpu/media/vaapi_video_encode_accelerator.h" | |
| 56 #include "content/common/gpu/media/vaapi_wrapper.h" | |
| 57 // Status has been defined as int in Xlib.h. | |
| 58 #undef Status | |
| 59 #endif // defined(ARCH_CPU_X86_FAMILY) | |
| 60 #elif defined(OS_MACOSX) | |
| 61 #include "content/common/gpu/media/vt_video_encode_accelerator_mac.h" | |
| 62 #else | |
| 63 #error The VideoEncodeAcceleratorUnittest is not supported on this platform. | |
| 64 #endif | |
| 65 | |
| 66 using media::VideoEncodeAccelerator; | |
| 67 | |
| 68 namespace content { | |
| 69 namespace { | |
| 70 | |
| 71 const media::VideoPixelFormat kInputFormat = media::PIXEL_FORMAT_I420; | |
| 72 | |
| 73 // The absolute differences between an original frame and its decoded frame | |
| 74 // usually range around 1 ~ 7, so we pick 10 as an extreme value to detect | |
| 75 // abnormal decoded frames. | |
| 76 const double kDecodeSimilarityThreshold = 10.0; | |
| 77 | |
| 78 // Arbitrarily chosen to add some depth to the pipeline. | |
| 79 const unsigned int kNumOutputBuffers = 4; | |
| 80 const unsigned int kNumExtraInputFrames = 4; | |
| 81 // Maximum delay between requesting a keyframe and receiving one, in frames. | |
| 82 // Arbitrarily chosen as a reasonable requirement. | |
| 83 const unsigned int kMaxKeyframeDelay = 4; | |
| 84 // Default initial bitrate. | |
| 85 const uint32_t kDefaultBitrate = 2000000; | |
| 86 // Default ratio of requested_subsequent_bitrate to initial_bitrate | |
| 87 // (see test parameters below) if one is not provided. | |
| 88 const double kDefaultSubsequentBitrateRatio = 2.0; | |
| 89 // Default initial framerate. | |
| 90 const uint32_t kDefaultFramerate = 30; | |
| 91 // Default ratio of requested_subsequent_framerate to initial_framerate | |
| 92 // (see test parameters below) if one is not provided. | |
| 93 const double kDefaultSubsequentFramerateRatio = 0.1; | |
| 94 // Tolerance factor for how encoded bitrate can differ from requested bitrate. | |
| 95 const double kBitrateTolerance = 0.1; | |
| 96 // Minimum required FPS throughput for the basic performance test. | |
| 97 const uint32_t kMinPerfFPS = 30; | |
| 98 // Minimum (arbitrary) number of frames over which to enforce bitrate | |
| 99 // requirements. Streams shorter than this may be too short to realistically | |
| 100 // require an encoder to converge to the requested bitrate. | |
| 101 // The input stream will be looped as many times as needed in bitrate tests | |
| 102 // to reach at least this number of frames before calculating final bitrate. | |
| 103 const unsigned int kMinFramesForBitrateTests = 300; | |
| 104 // The percentiles to measure for encode latency. | |
| 105 const unsigned int kLoggedLatencyPercentiles[] = {50, 75, 95}; | |
| 106 | |
| 107 // The syntax of multiple test streams is: | |
| 108 // test-stream1;test-stream2;test-stream3 | |
| 109 // The syntax of each test stream is: | |
| 110 // "in_filename:width:height:profile:out_filename:requested_bitrate | |
| 111 // :requested_framerate:requested_subsequent_bitrate | |
| 112 // :requested_subsequent_framerate" | |
| 113 // - |in_filename| must be an I420 (YUV planar) raw stream | |
| 114 // (see http://www.fourcc.org/yuv.php#IYUV). | |
| 115 // - |width| and |height| are in pixels. | |
| 116 // - |profile| to encode into (values of media::VideoCodecProfile). | |
| 117 // - |out_filename| filename to save the encoded stream to (optional). The | |
| 118 // format for H264 is Annex-B byte stream. The format for VP8 is IVF. Output | |
| 119 // stream is saved for the simple encode test only. H264 raw stream and IVF | |
| 120 // can be used as input of VDA unittest. H264 raw stream can be played by | |
| 121 // "mplayer -fps 25 out.h264" and IVF can be played by mplayer directly. | |
| 122 // Helpful description: http://wiki.multimedia.cx/index.php?title=IVF | |
| 123 // Further parameters are optional (need to provide preceding positional | |
| 124 // parameters if a specific subsequent parameter is required): | |
| 125 // - |requested_bitrate| requested bitrate in bits per second. | |
| 126 // - |requested_framerate| requested initial framerate. | |
| 127 // - |requested_subsequent_bitrate| bitrate to switch to in the middle of the | |
| 128 // stream. | |
| 129 // - |requested_subsequent_framerate| framerate to switch to in the middle | |
| 130 // of the stream. | |
| 131 // Bitrate is only forced for tests that test bitrate. | |
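| // For example, using the non-Mac defaults below, one fully-specified test | |
| // stream would be "bear_320x192_40frames.yuv:320:192:1:out.h264:200000", | |
| // optionally followed by ":30" to also request an initial framerate; multiple | |
| // such specs are joined with ';'. | |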
| 132 const char* g_default_in_filename = "bear_320x192_40frames.yuv"; | |
| 133 #if !defined(OS_MACOSX) | |
| 134 const char* g_default_in_parameters = ":320:192:1:out.h264:200000"; | |
| 135 #else | |
| 136 const char* g_default_in_parameters = ":320:192:0:out.h264:200000"; | |
| 137 #endif | |
| 138 | |
| 139 // Enabled by including a --fake_encoder flag to the command line invoking the | |
| 140 // test. | |
| 141 bool g_fake_encoder = false; | |
| 142 | |
| 143 // Environment to store test stream data for all test cases. | |
| 144 class VideoEncodeAcceleratorTestEnvironment; | |
| 145 VideoEncodeAcceleratorTestEnvironment* g_env; | |
| 146 | |
| 147 // The number of frames to be encoded. This variable is set by the switch | |
| 148 // "--num_frames_to_encode". Ignored if 0. | |
| 149 int g_num_frames_to_encode = 0; | |
| 150 | |
| 151 struct TestStream { | |
| 152 TestStream() | |
| 153 : num_frames(0), | |
| 154 aligned_buffer_size(0), | |
| 155 requested_bitrate(0), | |
| 156 requested_framerate(0), | |
| 157 requested_subsequent_bitrate(0), | |
| 158 requested_subsequent_framerate(0) {} | |
| 159 ~TestStream() {} | |
| 160 | |
| 161 gfx::Size visible_size; | |
| 162 gfx::Size coded_size; | |
| 163 unsigned int num_frames; | |
| 164 | |
| 165 // Original unaligned input file name provided as an argument to the test. | |
| 166 // The file must be an I420 (YUV planar) raw stream. | |
| 167 std::string in_filename; | |
| 168 | |
| 169 // A temporary file used to prepare aligned input buffers of |in_filename|. | |
| 170 // The file ensures the starting addresses of YUV planes are 64 byte-aligned. | |
| 171 base::FilePath aligned_in_file; | |
| 172 | |
| 173 // The memory mapping of |aligned_in_file| | |
| 174 base::MemoryMappedFile mapped_aligned_in_file; | |
| 175 | |
| 176 // Byte size of a frame of |aligned_in_file|. | |
| 177 size_t aligned_buffer_size; | |
| 178 | |
| 179 // Byte size for each aligned plane of a frame | |
| 180 std::vector<size_t> aligned_plane_size; | |
| 181 | |
| 182 std::string out_filename; | |
| 183 media::VideoCodecProfile requested_profile; | |
| 184 unsigned int requested_bitrate; | |
| 185 unsigned int requested_framerate; | |
| 186 unsigned int requested_subsequent_bitrate; | |
| 187 unsigned int requested_subsequent_framerate; | |
| 188 }; | |
| 189 | |
| 190 inline static size_t Align64Bytes(size_t value) { | |
| 191 return (value + 63) & ~63; | |
| 192 } | |
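| // For example, Align64Bytes(1) == 64, Align64Bytes(64) == 64, and | |
| // Align64Bytes(65) == 128. | |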
| 193 | |
| 194 // Write |data| of |size| bytes at |offset| bytes into |file|. | |
| 195 static bool WriteFile(base::File* file, | |
| 196 const off_t offset, | |
| 197 const uint8_t* data, | |
| 198 size_t size) { | |
| 199 size_t written_bytes = 0; | |
| 200 while (written_bytes < size) { | |
| 201 int bytes = file->Write(offset + written_bytes, | |
| 202 reinterpret_cast<const char*>(data + written_bytes), | |
| 203 size - written_bytes); | |
| 204 if (bytes <= 0) | |
| 205 return false; | |
| 206 written_bytes += bytes; | |
| 207 } | |
| 208 return true; | |
| 209 } | |
| 210 | |
| 211 // Return the |percentile| from a sorted vector. | |
| 212 static base::TimeDelta Percentile( | |
| 213 const std::vector<base::TimeDelta>& sorted_values, | |
| 214 unsigned int percentile) { | |
| 215 size_t size = sorted_values.size(); | |
| 216 LOG_ASSERT(size > 0UL); | |
| 217 LOG_ASSERT(percentile <= 100UL); | |
| 218 // Use Nearest Rank method in http://en.wikipedia.org/wiki/Percentile. | |
| 219 int index = | |
| 220 std::max(static_cast<int>(ceil(0.01f * percentile * size)) - 1, 0); | |
| 221 return sorted_values[index]; | |
| 222 } | |
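| // For example, for 10 sorted values the 50th percentile is the element at | |
| // index ceil(0.01 * 50 * 10) - 1 == 4, and the 95th percentile is the element | |
| // at index ceil(9.5) - 1 == 9 (the largest value). | |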
| 223 | |
| 224 static bool IsH264(media::VideoCodecProfile profile) { | |
| 225 return profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX; | |
| 226 } | |
| 227 | |
| 228 static bool IsVP8(media::VideoCodecProfile profile) { | |
| 229 return profile >= media::VP8PROFILE_MIN && profile <= media::VP8PROFILE_MAX; | |
| 230 } | |
| 231 | |
| 232 // ARM performs CPU cache management with CPU cache line granularity. We thus | |
| 233 // need to ensure our buffers are CPU cache line-aligned (64 byte-aligned). | |
| 234 // Otherwise newer kernels will refuse to accept them, and on older kernels | |
| 235 // we'll be treating ourselves to random corruption. | |
| 236 // Since we are just mapping and passing chunks of the input file directly to | |
| 237 // the VEA as input frames to avoid copying large chunks of raw data on each | |
| 238 // frame and thus affecting performance measurements, we have to prepare a | |
| 239 // temporary file with all planes aligned to 64-byte boundaries beforehand. | |
| 240 static void CreateAlignedInputStreamFile(const gfx::Size& coded_size, | |
| 241 TestStream* test_stream) { | |
| 242 // A test case may have many encoders, and memory should be prepared only once. | |
| 243 if (test_stream->coded_size == coded_size && | |
| 244 test_stream->mapped_aligned_in_file.IsValid()) | |
| 245 return; | |
| 246 | |
| 247 // All encoders in a multiple-encoder test reuse the same test_stream; make | |
| 248 // sure they requested the same coded_size. | |
| 249 ASSERT_TRUE(!test_stream->mapped_aligned_in_file.IsValid() || | |
| 250 coded_size == test_stream->coded_size); | |
| 251 test_stream->coded_size = coded_size; | |
| 252 | |
| 253 size_t num_planes = media::VideoFrame::NumPlanes(kInputFormat); | |
| 254 std::vector<std::vector<uint8_t>> padding(num_planes); | |
| 255 std::vector<size_t> coded_bpl(num_planes); | |
| 256 std::vector<size_t> visible_bpl(num_planes); | |
| 257 std::vector<size_t> visible_plane_rows(num_planes); | |
| 258 | |
| 259 // Calculate the padding in bytes required after each plane to keep the | |
| 260 // starting addresses of all planes at a 64 byte boundary. This padding will | |
| 261 // be added after each plane when copying to the temporary file. | |
| 262 // At the same time we also need to take into account coded_size requested by | |
| 263 // the VEA; each row of visible_bpl bytes in the original file needs to be | |
| 264 // copied into a row of coded_bpl bytes in the aligned file. | |
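| // For example, if the visible height of a plane is 192 rows but the VEA | |
| // requests a coded height of 208 rows (hypothetical), the 16 extra rows of | |
| // coded_bpl bytes each, plus the slack needed to round the plane size up to a | |
| // 64 byte multiple, are written out as padding after that plane. | |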
| 265 for (size_t i = 0; i < num_planes; i++) { | |
| 266 const size_t size = | |
| 267 media::VideoFrame::PlaneSize(kInputFormat, i, coded_size).GetArea(); | |
| 268 test_stream->aligned_plane_size.push_back(Align64Bytes(size)); | |
| 269 test_stream->aligned_buffer_size += test_stream->aligned_plane_size.back(); | |
| 270 | |
| 271 coded_bpl[i] = | |
| 272 media::VideoFrame::RowBytes(i, kInputFormat, coded_size.width()); | |
| 273 visible_bpl[i] = media::VideoFrame::RowBytes( | |
| 274 i, kInputFormat, test_stream->visible_size.width()); | |
| 275 visible_plane_rows[i] = media::VideoFrame::Rows( | |
| 276 i, kInputFormat, test_stream->visible_size.height()); | |
| 277 const size_t padding_rows = | |
| 278 media::VideoFrame::Rows(i, kInputFormat, coded_size.height()) - | |
| 279 visible_plane_rows[i]; | |
| 280 padding[i].resize(padding_rows * coded_bpl[i] + Align64Bytes(size) - size); | |
| 281 } | |
| 282 | |
| 283 base::MemoryMappedFile src_file; | |
| 284 LOG_ASSERT(src_file.Initialize(base::FilePath(test_stream->in_filename))); | |
| 285 LOG_ASSERT(base::CreateTemporaryFile(&test_stream->aligned_in_file)); | |
| 286 | |
| 287 size_t visible_buffer_size = media::VideoFrame::AllocationSize( | |
| 288 kInputFormat, test_stream->visible_size); | |
| 289 LOG_ASSERT(src_file.length() % visible_buffer_size == 0U) | |
| 290 << "Stream byte size is not a product of calculated frame byte size"; | |
| 291 | |
| 292 test_stream->num_frames = src_file.length() / visible_buffer_size; | |
| 293 uint32_t flags = base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE | | |
| 294 base::File::FLAG_READ; | |
| 295 | |
| 296 // Create a temporary file large enough to hold all frames at coded_size. | |
| 297 base::File dest_file(test_stream->aligned_in_file, flags); | |
| 298 LOG_ASSERT(test_stream->aligned_buffer_size > 0UL); | |
| 299 dest_file.SetLength(test_stream->aligned_buffer_size * | |
| 300 test_stream->num_frames); | |
| 301 | |
| 302 const uint8_t* src = src_file.data(); | |
| 303 off_t dest_offset = 0; | |
| 304 for (size_t frame = 0; frame < test_stream->num_frames; frame++) { | |
| 305 for (size_t i = 0; i < num_planes; i++) { | |
| 306 // Assert that each plane of the frame starts at a 64 byte boundary. | |
| 307 ASSERT_EQ(dest_offset & 63, 0) | |
| 308 << "Planes of frame should be mapped at a 64 byte boundary"; | |
| 309 for (size_t j = 0; j < visible_plane_rows[i]; j++) { | |
| 310 LOG_ASSERT(WriteFile(&dest_file, dest_offset, src, visible_bpl[i])); | |
| 311 src += visible_bpl[i]; | |
| 312 dest_offset += coded_bpl[i]; | |
| 313 } | |
| 314 if (!padding[i].empty()) { | |
| 315 LOG_ASSERT(WriteFile(&dest_file, dest_offset, &padding[i][0], | |
| 316 padding[i].size())); | |
| 317 dest_offset += padding[i].size(); | |
| 318 } | |
| 319 } | |
| 320 } | |
| 321 LOG_ASSERT( | |
| 322 test_stream->mapped_aligned_in_file.Initialize(std::move(dest_file))); | |
| 323 // Assert that the memory-mapped file starts at a 64 byte boundary, so that | |
| 324 // each plane of every frame also starts at a 64 byte boundary. | |
| 325 | |
| 326 ASSERT_EQ( | |
| 327 reinterpret_cast<off_t>(test_stream->mapped_aligned_in_file.data()) & 63, | |
| 328 0) | |
| 329 << "File should be mapped at a 64 byte boundary"; | |
| 330 | |
| 331 LOG_ASSERT(test_stream->mapped_aligned_in_file.length() % | |
| 332 test_stream->aligned_buffer_size == 0U) | |
| 333 << "Stream byte size is not a product of calculated frame byte size"; | |
| 334 LOG_ASSERT(test_stream->num_frames > 0UL); | |
| 335 } | |
| 336 | |
| 337 // Parse |data| into its constituent parts, set the various output fields | |
| 338 // accordingly, read in the video streams, and store them in |test_streams|. | |
| 339 static void ParseAndReadTestStreamData(const base::FilePath::StringType& data, | |
| 340 ScopedVector<TestStream>* test_streams) { | |
| 341 // Split the string to individual test stream data. | |
| 342 std::vector<base::FilePath::StringType> test_streams_data = base::SplitString( | |
| 343 data, base::FilePath::StringType(1, ';'), | |
| 344 base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL); | |
| 345 LOG_ASSERT(test_streams_data.size() >= 1U) << data; | |
| 346 | |
| 347 // Parse each test stream data and read the input file. | |
| 348 for (size_t index = 0; index < test_streams_data.size(); ++index) { | |
| 349 std::vector<base::FilePath::StringType> fields = base::SplitString( | |
| 350 test_streams_data[index], base::FilePath::StringType(1, ':'), | |
| 351 base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL); | |
| 352 LOG_ASSERT(fields.size() >= 4U) << data; | |
| 353 LOG_ASSERT(fields.size() <= 9U) << data; | |
| 354 TestStream* test_stream = new TestStream(); | |
| 355 | |
| 356 test_stream->in_filename = fields[0]; | |
| 357 int width, height; | |
| 358 bool result = base::StringToInt(fields[1], &width); | |
| 359 LOG_ASSERT(result); | |
| 360 result = base::StringToInt(fields[2], &height); | |
| 361 LOG_ASSERT(result); | |
| 362 test_stream->visible_size = gfx::Size(width, height); | |
| 363 LOG_ASSERT(!test_stream->visible_size.IsEmpty()); | |
| 364 int profile; | |
| 365 result = base::StringToInt(fields[3], &profile); | |
| 366 LOG_ASSERT(result); | |
| 367 LOG_ASSERT(profile > media::VIDEO_CODEC_PROFILE_UNKNOWN); | |
| 368 LOG_ASSERT(profile <= media::VIDEO_CODEC_PROFILE_MAX); | |
| 369 test_stream->requested_profile = | |
| 370 static_cast<media::VideoCodecProfile>(profile); | |
| 371 | |
| 372 if (fields.size() >= 5 && !fields[4].empty()) | |
| 373 test_stream->out_filename = fields[4]; | |
| 374 | |
| 375 if (fields.size() >= 6 && !fields[5].empty()) | |
| 376 LOG_ASSERT(base::StringToUint(fields[5], | |
| 377 &test_stream->requested_bitrate)); | |
| 378 | |
| 379 if (fields.size() >= 7 && !fields[6].empty()) | |
| 380 LOG_ASSERT(base::StringToUint(fields[6], | |
| 381 &test_stream->requested_framerate)); | |
| 382 | |
| 383 if (fields.size() >= 8 && !fields[7].empty()) { | |
| 384 LOG_ASSERT(base::StringToUint(fields[7], | |
| 385 &test_stream->requested_subsequent_bitrate)); | |
| 386 } | |
| 387 | |
| 388 if (fields.size() >= 9 && !fields[8].empty()) { | |
| 389 LOG_ASSERT(base::StringToUint(fields[8], | |
| 390 &test_stream->requested_subsequent_framerate)); | |
| 391 } | |
| 392 test_streams->push_back(test_stream); | |
| 393 } | |
| 394 } | |
| 395 | |
| 396 // Basic test environment shared across multiple test cases. We only need to | |
| 397 // set it up once for all test cases. | |
| 398 // It helps | |
| 399 // - maintain test stream data and other test settings. | |
| 400 // - clean up temporary aligned files. | |
| 401 // - output log to file. | |
| 402 class VideoEncodeAcceleratorTestEnvironment : public ::testing::Environment { | |
| 403 public: | |
| 404 VideoEncodeAcceleratorTestEnvironment( | |
| 405 std::unique_ptr<base::FilePath::StringType> data, | |
| 406 const base::FilePath& log_path, | |
| 407 bool run_at_fps, | |
| 408 bool needs_encode_latency, | |
| 409 bool verify_all_output) | |
| 410 : test_stream_data_(std::move(data)), | |
| 411 log_path_(log_path), | |
| 412 run_at_fps_(run_at_fps), | |
| 413 needs_encode_latency_(needs_encode_latency), | |
| 414 verify_all_output_(verify_all_output) {} | |
| 415 | |
| 416 virtual void SetUp() { | |
| 417 if (!log_path_.empty()) { | |
| 418 log_file_.reset(new base::File( | |
| 419 log_path_, base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE)); | |
| 420 LOG_ASSERT(log_file_->IsValid()); | |
| 421 } | |
| 422 ParseAndReadTestStreamData(*test_stream_data_, &test_streams_); | |
| 423 } | |
| 424 | |
| 425 virtual void TearDown() { | |
| 426 for (size_t i = 0; i < test_streams_.size(); i++) { | |
| 427 base::DeleteFile(test_streams_[i]->aligned_in_file, false); | |
| 428 } | |
| 429 log_file_.reset(); | |
| 430 } | |
| 431 | |
| 432 // Log one entry of machine-readable data to file and LOG(INFO). | |
| 433 // The log has one data entry per line in the format of "<key>: <value>". | |
| 434 // Note that Chrome OS video_VEAPerf autotest parses the output key and value | |
| 435 // pairs. Be sure to keep the autotest in sync. | |
| 436 void LogToFile(const std::string& key, const std::string& value) { | |
| 437 std::string s = base::StringPrintf("%s: %s\n", key.c_str(), value.c_str()); | |
| 438 LOG(INFO) << s; | |
| 439 if (log_file_) { | |
| 440 log_file_->WriteAtCurrentPos(s.data(), s.length()); | |
| 441 } | |
| 442 } | |
| 443 | |
| 444 // Whether to feed the encoder with input buffers at the requested framerate. | |
| 445 // If false, feed as fast as possible. This is set by the command line switch | |
| 446 // "--run_at_fps". | |
| 447 bool run_at_fps() const { return run_at_fps_; } | |
| 448 | |
| 449 // Whether to measure encode latency. This is set by the command line switch | |
| 450 // "--measure_latency". | |
| 451 bool needs_encode_latency() const { return needs_encode_latency_; } | |
| 452 | |
| 453 // Whether to verify the encoder output of all test cases. This is set by the | |
| 454 // command line switch "--verify_all_output". | |
| 455 bool verify_all_output() const { return verify_all_output_; } | |
| 456 | |
| 457 ScopedVector<TestStream> test_streams_; | |
| 458 | |
| 459 private: | |
| 460 std::unique_ptr<base::FilePath::StringType> test_stream_data_; | |
| 461 base::FilePath log_path_; | |
| 462 std::unique_ptr<base::File> log_file_; | |
| 463 bool run_at_fps_; | |
| 464 bool needs_encode_latency_; | |
| 465 bool verify_all_output_; | |
| 466 }; | |
| 467 | |
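| // A client is expected to advance through these states in order, from | |
| // CS_CREATED to CS_VALIDATED; CS_ERROR may be entered from any state on | |
| // failure. | |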
| 468 enum ClientState { | |
| 469 CS_CREATED, | |
| 470 CS_ENCODER_SET, | |
| 471 CS_INITIALIZED, | |
| 472 CS_ENCODING, | |
| 473 // Encoding has finished. | |
| 474 CS_FINISHED, | |
| 475 // Encoded frame quality has been validated. | |
| 476 CS_VALIDATED, | |
| 477 CS_ERROR, | |
| 478 }; | |
| 479 | |
| 480 // Performs basic, codec-specific sanity checks on the stream buffers passed | |
| 481 // to ProcessStreamBuffer(): whether we've seen keyframes before non-keyframes, | |
| 482 // correct sequences of H.264 NALUs (SPS before PPS and before slices), etc. | |
| 483 // Calls given FrameFoundCallback when a complete frame is found while | |
| 484 // processing. | |
| 485 class StreamValidator { | |
| 486 public: | |
| 487 // To be called when a complete frame is found while processing a stream | |
| 488 // buffer, passing true if the frame is a keyframe. Returns false if we | |
| 489 // are not interested in more frames and further processing should be aborted. | |
| 490 typedef base::Callback<bool(bool)> FrameFoundCallback; | |
| 491 | |
| 492 virtual ~StreamValidator() {} | |
| 493 | |
| 494 // Provide a StreamValidator instance for the given |profile|. | |
| 495 static std::unique_ptr<StreamValidator> Create( | |
| 496 media::VideoCodecProfile profile, | |
| 497 const FrameFoundCallback& frame_cb); | |
| 498 | |
| 499 // Process and verify contents of a bitstream buffer. | |
| 500 virtual void ProcessStreamBuffer(const uint8_t* stream, size_t size) = 0; | |
| 501 | |
| 502 protected: | |
| 503 explicit StreamValidator(const FrameFoundCallback& frame_cb) | |
| 504 : frame_cb_(frame_cb) {} | |
| 505 | |
| 506 FrameFoundCallback frame_cb_; | |
| 507 }; | |
| 508 | |
| 509 class H264Validator : public StreamValidator { | |
| 510 public: | |
| 511 explicit H264Validator(const FrameFoundCallback& frame_cb) | |
| 512 : StreamValidator(frame_cb), | |
| 513 seen_sps_(false), | |
| 514 seen_pps_(false), | |
| 515 seen_idr_(false) {} | |
| 516 | |
| 517 void ProcessStreamBuffer(const uint8_t* stream, size_t size) override; | |
| 518 | |
| 519 private: | |
| 520 // Set to true when encoder provides us with the corresponding NALU type. | |
| 521 bool seen_sps_; | |
| 522 bool seen_pps_; | |
| 523 bool seen_idr_; | |
| 524 | |
| 525 media::H264Parser h264_parser_; | |
| 526 }; | |
| 527 | |
| 528 void H264Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) { | |
| 529 h264_parser_.SetStream(stream, size); | |
| 530 | |
| 531 while (1) { | |
| 532 media::H264NALU nalu; | |
| 533 media::H264Parser::Result result; | |
| 534 | |
| 535 result = h264_parser_.AdvanceToNextNALU(&nalu); | |
| 536 if (result == media::H264Parser::kEOStream) | |
| 537 break; | |
| 538 | |
| 539 ASSERT_EQ(media::H264Parser::kOk, result); | |
| 540 | |
| 541 bool keyframe = false; | |
| 542 | |
| 543 switch (nalu.nal_unit_type) { | |
| 544 case media::H264NALU::kIDRSlice: | |
| 545 ASSERT_TRUE(seen_sps_); | |
| 546 ASSERT_TRUE(seen_pps_); | |
| 547 seen_idr_ = true; | |
| 548 keyframe = true; | |
| 549 // fallthrough | |
| 550 case media::H264NALU::kNonIDRSlice: { | |
| 551 ASSERT_TRUE(seen_idr_); | |
| 552 if (!frame_cb_.Run(keyframe)) | |
| 553 return; | |
| 554 break; | |
| 555 } | |
| 556 | |
| 557 case media::H264NALU::kSPS: { | |
| 558 int sps_id; | |
| 559 ASSERT_EQ(media::H264Parser::kOk, h264_parser_.ParseSPS(&sps_id)); | |
| 560 seen_sps_ = true; | |
| 561 break; | |
| 562 } | |
| 563 | |
| 564 case media::H264NALU::kPPS: { | |
| 565 ASSERT_TRUE(seen_sps_); | |
| 566 int pps_id; | |
| 567 ASSERT_EQ(media::H264Parser::kOk, h264_parser_.ParsePPS(&pps_id)); | |
| 568 seen_pps_ = true; | |
| 569 break; | |
| 570 } | |
| 571 | |
| 572 default: | |
| 573 break; | |
| 574 } | |
| 575 } | |
| 576 } | |
| 577 | |
| 578 class VP8Validator : public StreamValidator { | |
| 579 public: | |
| 580 explicit VP8Validator(const FrameFoundCallback& frame_cb) | |
| 581 : StreamValidator(frame_cb), | |
| 582 seen_keyframe_(false) {} | |
| 583 | |
| 584 void ProcessStreamBuffer(const uint8_t* stream, size_t size) override; | |
| 585 | |
| 586 private: | |
| 587 // Have we already got a keyframe in the stream? | |
| 588 bool seen_keyframe_; | |
| 589 }; | |
| 590 | |
| 591 void VP8Validator::ProcessStreamBuffer(const uint8_t* stream, size_t size) { | |
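| // In the VP8 frame tag (see RFC 6386), the low bit of the first byte is the | |
| // frame type flag: 0 indicates a keyframe. | |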
| 592 bool keyframe = !(stream[0] & 0x01); | |
| 593 if (keyframe) | |
| 594 seen_keyframe_ = true; | |
| 595 | |
| 596 EXPECT_TRUE(seen_keyframe_); | |
| 597 | |
| 598 frame_cb_.Run(keyframe); | |
| 599 // TODO(posciak): We could be getting more frames in the buffer, but there is | |
| 600 // no simple way to detect this. We'd need to parse the frames and go through | |
| 601 // partition numbers/sizes. For now assume one frame per buffer. | |
| 602 } | |
| 603 | |
| 604 // static | |
| 605 std::unique_ptr<StreamValidator> StreamValidator::Create( | |
| 606 media::VideoCodecProfile profile, | |
| 607 const FrameFoundCallback& frame_cb) { | |
| 608 std::unique_ptr<StreamValidator> validator; | |
| 609 | |
| 610 if (IsH264(profile)) { | |
| 611 validator.reset(new H264Validator(frame_cb)); | |
| 612 } else if (IsVP8(profile)) { | |
| 613 validator.reset(new VP8Validator(frame_cb)); | |
| 614 } else { | |
| 615 LOG(FATAL) << "Unsupported profile: " << profile; | |
| 616 } | |
| 617 | |
| 618 return validator; | |
| 619 } | |
| 620 | |
| 621 class VideoFrameQualityValidator { | |
| 622 public: | |
| 623 VideoFrameQualityValidator(const media::VideoCodecProfile profile, | |
| 624 const base::Closure& flush_complete_cb, | |
| 625 const base::Closure& decode_error_cb); | |
| 626 void Initialize(const gfx::Size& coded_size, const gfx::Rect& visible_size); | |
| 627 // Save original YUV frame to compare it with the decoded frame later. | |
| 628 void AddOriginalFrame(scoped_refptr<media::VideoFrame> frame); | |
| 629 void AddDecodeBuffer(const scoped_refptr<media::DecoderBuffer>& buffer); | |
| 630 // Flush the decoder. | |
| 631 void Flush(); | |
| 632 | |
| 633 private: | |
| 634 void InitializeCB(bool success); | |
| 635 void DecodeDone(media::DecodeStatus status); | |
| 636 void FlushDone(media::DecodeStatus status); | |
| 637 void VerifyOutputFrame(const scoped_refptr<media::VideoFrame>& output_frame); | |
| 638 void Decode(); | |
| 639 | |
| 640 enum State { UNINITIALIZED, INITIALIZED, DECODING, ERROR }; | |
| 641 | |
| 642 const media::VideoCodecProfile profile_; | |
| 643 std::unique_ptr<media::FFmpegVideoDecoder> decoder_; | |
| 644 media::VideoDecoder::DecodeCB decode_cb_; | |
| 645 // Decode callback of an EOS buffer. | |
| 646 media::VideoDecoder::DecodeCB eos_decode_cb_; | |
| 647 // Callback of Flush(). Called after all frames are decoded. | |
| 648 const base::Closure flush_complete_cb_; | |
| 649 const base::Closure decode_error_cb_; | |
| 650 State decoder_state_; | |
| 651 std::queue<scoped_refptr<media::VideoFrame>> original_frames_; | |
| 652 std::queue<scoped_refptr<media::DecoderBuffer>> decode_buffers_; | |
| 653 }; | |
| 654 | |
| 655 VideoFrameQualityValidator::VideoFrameQualityValidator( | |
| 656 const media::VideoCodecProfile profile, | |
| 657 const base::Closure& flush_complete_cb, | |
| 658 const base::Closure& decode_error_cb) | |
| 659 : profile_(profile), | |
| 660 decoder_(new media::FFmpegVideoDecoder()), | |
| 661 decode_cb_(base::Bind(&VideoFrameQualityValidator::DecodeDone, | |
| 662 base::Unretained(this))), | |
| 663 eos_decode_cb_(base::Bind(&VideoFrameQualityValidator::FlushDone, | |
| 664 base::Unretained(this))), | |
| 665 flush_complete_cb_(flush_complete_cb), | |
| 666 decode_error_cb_(decode_error_cb), | |
| 667 decoder_state_(UNINITIALIZED) { | |
| 668 // Allow decoding of individual NALUs. By default, entire frames are required. | |
| 669 decoder_->set_decode_nalus(true); | |
| 670 } | |
| 671 | |
| 672 void VideoFrameQualityValidator::Initialize(const gfx::Size& coded_size, | |
| 673 const gfx::Rect& visible_size) { | |
| 674 media::FFmpegGlue::InitializeFFmpeg(); | |
| 675 | |
| 676 gfx::Size natural_size(visible_size.size()); | |
| 677 // The default output format of ffmpeg video decoder is YV12. | |
| 678 media::VideoDecoderConfig config; | |
| 679 if (IsVP8(profile_)) | |
| 680 config.Initialize(media::kCodecVP8, media::VP8PROFILE_ANY, kInputFormat, | |
| 681 media::COLOR_SPACE_UNSPECIFIED, coded_size, visible_size, | |
| 682 natural_size, media::EmptyExtraData(), | |
| 683 media::Unencrypted()); | |
| 684 else if (IsH264(profile_)) | |
| 685 config.Initialize(media::kCodecH264, media::H264PROFILE_MAIN, kInputFormat, | |
| 686 media::COLOR_SPACE_UNSPECIFIED, coded_size, visible_size, | |
| 687 natural_size, media::EmptyExtraData(), | |
| 688 media::Unencrypted()); | |
| 689 else | |
| 690 LOG_ASSERT(0) << "Invalid profile " << profile_; | |
| 691 | |
| 692 decoder_->Initialize( | |
| 693 config, false, nullptr, | |
| 694 base::Bind(&VideoFrameQualityValidator::InitializeCB, | |
| 695 base::Unretained(this)), | |
| 696 base::Bind(&VideoFrameQualityValidator::VerifyOutputFrame, | |
| 697 base::Unretained(this))); | |
| 698 } | |
| 699 | |
| 700 void VideoFrameQualityValidator::InitializeCB(bool success) { | |
| 701 if (success) { | |
| 702 decoder_state_ = INITIALIZED; | |
| 703 Decode(); | |
| 704 } else { | |
| 705 decoder_state_ = ERROR; | |
| 706 if (IsH264(profile_)) | |
| 707 LOG(ERROR) << "Chromium does not support H264 decode. Try Chrome."; | |
| 708 FAIL() << "Decoder initialization error"; | |
| 709 decode_error_cb_.Run(); | |
| 710 } | |
| 711 } | |
| 712 | |
| 713 void VideoFrameQualityValidator::AddOriginalFrame( | |
| 714 scoped_refptr<media::VideoFrame> frame) { | |
| 715 original_frames_.push(frame); | |
| 716 } | |
| 717 | |
| 718 void VideoFrameQualityValidator::DecodeDone(media::DecodeStatus status) { | |
| 719 if (status == media::DecodeStatus::OK) { | |
| 720 decoder_state_ = INITIALIZED; | |
| 721 Decode(); | |
| 722 } else { | |
| 723 decoder_state_ = ERROR; | |
| 724 FAIL() << "Unexpected decode status = " << status << ". Stop decoding."; | |
| 725 decode_error_cb_.Run(); | |
| 726 } | |
| 727 } | |
| 728 | |
| 729 void VideoFrameQualityValidator::FlushDone(media::DecodeStatus status) { | |
| 730 flush_complete_cb_.Run(); | |
| 731 } | |
| 732 | |
| 733 void VideoFrameQualityValidator::Flush() { | |
| 734 if (decoder_state_ != ERROR) { | |
| 735 decode_buffers_.push(media::DecoderBuffer::CreateEOSBuffer()); | |
| 736 Decode(); | |
| 737 } | |
| 738 } | |
| 739 | |
| 740 void VideoFrameQualityValidator::AddDecodeBuffer( | |
| 741 const scoped_refptr<media::DecoderBuffer>& buffer) { | |
| 742 if (decoder_state_ != ERROR) { | |
| 743 decode_buffers_.push(buffer); | |
| 744 Decode(); | |
| 745 } | |
| 746 } | |
| 747 | |
| 748 void VideoFrameQualityValidator::Decode() { | |
| 749 if (decoder_state_ == INITIALIZED && !decode_buffers_.empty()) { | |
| 750 scoped_refptr<media::DecoderBuffer> next_buffer = decode_buffers_.front(); | |
| 751 decode_buffers_.pop(); | |
| 752 decoder_state_ = DECODING; | |
| 753 if (next_buffer->end_of_stream()) | |
| 754 decoder_->Decode(next_buffer, eos_decode_cb_); | |
| 755 else | |
| 756 decoder_->Decode(next_buffer, decode_cb_); | |
| 757 } | |
| 758 } | |
| 759 | |
| 760 void VideoFrameQualityValidator::VerifyOutputFrame( | |
| 761 const scoped_refptr<media::VideoFrame>& output_frame) { | |
| 762 scoped_refptr<media::VideoFrame> original_frame = original_frames_.front(); | |
| 763 original_frames_.pop(); | |
| 764 gfx::Size visible_size = original_frame->visible_rect().size(); | |
| 765 | |
| 766 int planes[] = {media::VideoFrame::kYPlane, media::VideoFrame::kUPlane, | |
| 767 media::VideoFrame::kVPlane}; | |
| 768 double difference = 0; | |
| 769 for (int plane : planes) { | |
| 770 uint8_t* original_plane = original_frame->data(plane); | |
| 771 uint8_t* output_plane = output_frame->data(plane); | |
| 772 | |
| 773 size_t rows = | |
| 774 media::VideoFrame::Rows(plane, kInputFormat, visible_size.height()); | |
| 775 size_t columns = | |
| 776 media::VideoFrame::Columns(plane, kInputFormat, visible_size.width()); | |
| 777 size_t stride = original_frame->stride(plane); | |
| 778 | |
| 779 for (size_t i = 0; i < rows; i++) | |
| 780 for (size_t j = 0; j < columns; j++) | |
| 781 difference += std::abs(original_plane[stride * i + j] - | |
| 782 output_plane[stride * i + j]); | |
| 783 } | |
| 784 // Divide the difference by the size of the frame. | |
| 785 difference /= media::VideoFrame::AllocationSize(kInputFormat, visible_size); | |
| 786 EXPECT_TRUE(difference <= kDecodeSimilarityThreshold) | |
| 787 << "difference = " << difference << " > decode similarity threshold"; | |
| 788 } | |
| 789 | |
| 790 class VEAClient : public VideoEncodeAccelerator::Client { | |
| 791 public: | |
| 792 VEAClient(TestStream* test_stream, | |
| 793 ClientStateNotification<ClientState>* note, | |
| 794 bool save_to_file, | |
| 795 unsigned int keyframe_period, | |
| 796 bool force_bitrate, | |
| 797 bool test_perf, | |
| 798 bool mid_stream_bitrate_switch, | |
| 799 bool mid_stream_framerate_switch, | |
| 800 bool verify_output); | |
| 801 ~VEAClient() override; | |
| 802 void CreateEncoder(); | |
| 803 void DestroyEncoder(); | |
| 804 | |
| 805 // VideoEncodeAccelerator::Client implementation. | |
| 806 void RequireBitstreamBuffers(unsigned int input_count, | |
| 807 const gfx::Size& input_coded_size, | |
| 808 size_t output_buffer_size) override; | |
| 809 void BitstreamBufferReady(int32_t bitstream_buffer_id, | |
| 810 size_t payload_size, | |
| 811 bool key_frame) override; | |
| 812 void NotifyError(VideoEncodeAccelerator::Error error) override; | |
| 813 | |
| 814 private: | |
| 815 bool has_encoder() { return encoder_.get(); } | |
| 816 | |
| 817 // Return the number of encoded frames per second. | |
| 818 double frames_per_second(); | |
| 819 | |
| 820 std::unique_ptr<media::VideoEncodeAccelerator> CreateFakeVEA(); | |
| 821 std::unique_ptr<media::VideoEncodeAccelerator> CreateV4L2VEA(); | |
| 822 std::unique_ptr<media::VideoEncodeAccelerator> CreateVaapiVEA(); | |
| 823 std::unique_ptr<media::VideoEncodeAccelerator> CreateVTVEA(); | |
| 824 | |
| 825 void SetState(ClientState new_state); | |
| 826 | |
| 827 // Set current stream parameters to given |bitrate| at |framerate|. | |
| 828 void SetStreamParameters(unsigned int bitrate, unsigned int framerate); | |
| 829 | |
| 830 // Called when encoder is done with a VideoFrame. | |
| 831 void InputNoLongerNeededCallback(int32_t input_id); | |
| 832 | |
| 833 // Feed the encoder with one input frame. | |
| 834 void FeedEncoderWithOneInput(); | |
| 835 | |
| 836 // Provide the encoder with a new output buffer. | |
| 837 void FeedEncoderWithOutput(base::SharedMemory* shm); | |
| 838 | |
| 839 // Called on finding a complete frame (with |keyframe| set to true for | |
| 840 // keyframes) in the stream, to perform codec-independent, per-frame checks | |
| 841 // and accounting. Returns false once we have collected all frames we needed. | |
| 842 bool HandleEncodedFrame(bool keyframe); | |
| 843 | |
| 844 // Verify the minimum FPS requirement. | |
| 845 void VerifyMinFPS(); | |
| 846 | |
| 847 // Verify that stream bitrate has been close to current_requested_bitrate_, | |
| 848 // assuming current_framerate_ since the last time VerifyStreamProperties() | |
| 849 // was called. Fail the test if |force_bitrate_| is true and the bitrate | |
| 850 // is not within kBitrateTolerance. | |
| 851 void VerifyStreamProperties(); | |
| 852 | |
| 853 // Log the performance data. | |
| 854 void LogPerf(); | |
| 855 | |
| 856 // Write IVF file header to test_stream_->out_filename. | |
| 857 void WriteIvfFileHeader(); | |
| 858 | |
| 859 // Write an IVF frame header to test_stream_->out_filename. | |
| 860 void WriteIvfFrameHeader(int frame_index, size_t frame_size); | |
| 861 | |
| 862 // Create and return a VideoFrame wrapping the data at |position| bytes in the | |
| 863 // input stream. | |
| 864 scoped_refptr<media::VideoFrame> CreateFrame(off_t position); | |
| 865 | |
| 866 // Prepare and return a frame wrapping the data at |position| bytes in the | |
| 867 // input stream, ready to be sent to encoder. | |
| 868 // The input frame id is returned in |input_id|. | |
| 869 scoped_refptr<media::VideoFrame> PrepareInputFrame(off_t position, | |
| 870 int32_t* input_id); | |
| 871 | |
| 872 // Update the parameters according to |mid_stream_bitrate_switch| and | |
| 873 // |mid_stream_framerate_switch|. | |
| 874 void UpdateTestStreamData(bool mid_stream_bitrate_switch, | |
| 875 bool mid_stream_framerate_switch); | |
| 876 | |
| 877 // Callback function of the |input_timer_|. | |
| 878 void OnInputTimer(); | |
| 879 | |
| 880 // Called when the quality validator has decoded all the frames. | |
| 881 void DecodeCompleted(); | |
| 882 | |
| 883 // Called when the quality validator fails to decode a frame. | |
| 884 void DecodeFailed(); | |
| 885 | |
| 886 ClientState state_; | |
| 887 std::unique_ptr<VideoEncodeAccelerator> encoder_; | |
| 888 | |
| 889 TestStream* test_stream_; | |
| 890 | |
| 891 // Used to notify another thread about the state. VEAClient does not own this. | |
| 892 ClientStateNotification<ClientState>* note_; | |
| 893 | |
| 894 // Ids assigned to VideoFrames. | |
| 895 std::set<int32_t> inputs_at_client_; | |
| 896 int32_t next_input_id_; | |
| 897 | |
| 898 // Encode start time of all encoded frames. The position in the vector is the | |
| 899 // frame input id. | |
| 900 std::vector<base::TimeTicks> encode_start_time_; | |
| 901 // The encode latencies of all encoded frames. We define encode latency as the | |
| 902 // time delay from input of each VideoFrame (VEA::Encode()) to output of the | |
| 903 // corresponding BitstreamBuffer (VEA::Client::BitstreamBufferReady()). | |
| 904 std::vector<base::TimeDelta> encode_latencies_; | |
| 905 | |
| 906 // Ids for output BitstreamBuffers. | |
| 907 typedef std::map<int32_t, base::SharedMemory*> IdToSHM; | |
| 908 ScopedVector<base::SharedMemory> output_shms_; | |
| 909 IdToSHM output_buffers_at_client_; | |
| 910 int32_t next_output_buffer_id_; | |
| 911 | |
| 912 // Current offset into input stream. | |
| 913 off_t pos_in_input_stream_; | |
| 914 gfx::Size input_coded_size_; | |
| 915 // Requested by encoder. | |
| 916 unsigned int num_required_input_buffers_; | |
| 917 size_t output_buffer_size_; | |
| 918 | |
| 919 // Number of frames to encode. This may differ from the number of frames in | |
| 920 // the stream if we need more frames for bitrate tests. | |
| 921 unsigned int num_frames_to_encode_; | |
| 922 | |
| 923 // Number of encoded frames we've got from the encoder thus far. | |
| 924 unsigned int num_encoded_frames_; | |
| 925 | |
| 926 // Frames since last bitrate verification. | |
| 927 unsigned int num_frames_since_last_check_; | |
| 928 | |
| 929 // True if received a keyframe while processing current bitstream buffer. | |
| 930 bool seen_keyframe_in_this_buffer_; | |
| 931 | |
| 932 // True if we are to save the encoded stream to a file. | |
| 933 bool save_to_file_; | |
| 934 | |
| 935 // Request a keyframe every keyframe_period_ frames. | |
| 936 const unsigned int keyframe_period_; | |
| 937 | |
| 938 // Number of keyframes requested by now. | |
| 939 unsigned int num_keyframes_requested_; | |
| 940 | |
| 941 // Next keyframe expected before next_keyframe_at_ + kMaxKeyframeDelay. | |
| 942 unsigned int next_keyframe_at_; | |
| 943 | |
| 944 // True if we are asking encoder for a particular bitrate. | |
| 945 bool force_bitrate_; | |
| 946 | |
| 947 // Current requested bitrate. | |
| 948 unsigned int current_requested_bitrate_; | |
| 949 | |
| 950 // Current expected framerate. | |
| 951 unsigned int current_framerate_; | |
| 952 | |
| 953 // Byte size of the encoded stream (for bitrate calculation) since last | |
| 954 // time we checked bitrate. | |
| 955 size_t encoded_stream_size_since_last_check_; | |
| 956 | |
| 957 // If true, verify performance at the end of the test. | |
| 958 bool test_perf_; | |
| 959 | |
| 960 // Check the output frame quality of the encoder. | |
| 961 bool verify_output_; | |
| 962 | |
| 963 // Used to perform codec-specific sanity checks on the stream. | |
| 964 std::unique_ptr<StreamValidator> stream_validator_; | |
| 965 | |
| 966 // Used to validate the encoded frame quality. | |
| 967 std::unique_ptr<VideoFrameQualityValidator> quality_validator_; | |
| 968 | |
| 969 // The time when the first frame is submitted for encode. | |
| 970 base::TimeTicks first_frame_start_time_; | |
| 971 | |
| 972 // The time when the last encoded frame is ready. | |
| 973 base::TimeTicks last_frame_ready_time_; | |
| 974 | |
| 975 // All methods of this class should be run on the same thread. | |
| 976 base::ThreadChecker thread_checker_; | |
| 977 | |
| 978 // Requested bitrate in bits per second. | |
| 979 unsigned int requested_bitrate_; | |
| 980 | |
| 981 // Requested initial framerate. | |
| 982 unsigned int requested_framerate_; | |
| 983 | |
| 984 // Bitrate to switch to in the middle of the stream. | |
| 985 unsigned int requested_subsequent_bitrate_; | |
| 986 | |
| 987 // Framerate to switch to in the middle of the stream. | |
| 988 unsigned int requested_subsequent_framerate_; | |
| 989 | |
| 990 // The timer used to feed the encoder with the input frames. | |
| 991 std::unique_ptr<base::RepeatingTimer> input_timer_; | |
| 992 }; | |
| 993 | |
| 994 VEAClient::VEAClient(TestStream* test_stream, | |
| 995 ClientStateNotification<ClientState>* note, | |
| 996 bool save_to_file, | |
| 997 unsigned int keyframe_period, | |
| 998 bool force_bitrate, | |
| 999 bool test_perf, | |
| 1000 bool mid_stream_bitrate_switch, | |
| 1001 bool mid_stream_framerate_switch, | |
| 1002 bool verify_output) | |
| 1003 : state_(CS_CREATED), | |
| 1004 test_stream_(test_stream), | |
| 1005 note_(note), | |
| 1006 next_input_id_(0), | |
| 1007 next_output_buffer_id_(0), | |
| 1008 pos_in_input_stream_(0), | |
| 1009 num_required_input_buffers_(0), | |
| 1010 output_buffer_size_(0), | |
| 1011 num_frames_to_encode_(0), | |
| 1012 num_encoded_frames_(0), | |
| 1013 num_frames_since_last_check_(0), | |
| 1014 seen_keyframe_in_this_buffer_(false), | |
| 1015 save_to_file_(save_to_file), | |
| 1016 keyframe_period_(keyframe_period), | |
| 1017 num_keyframes_requested_(0), | |
| 1018 next_keyframe_at_(0), | |
| 1019 force_bitrate_(force_bitrate), | |
| 1020 current_requested_bitrate_(0), | |
| 1021 current_framerate_(0), | |
| 1022 encoded_stream_size_since_last_check_(0), | |
| 1023 test_perf_(test_perf), | |
| 1024 verify_output_(verify_output), | |
| 1025 requested_bitrate_(0), | |
| 1026 requested_framerate_(0), | |
| 1027 requested_subsequent_bitrate_(0), | |
| 1028 requested_subsequent_framerate_(0) { | |
| 1029 if (keyframe_period_) | |
| 1030 LOG_ASSERT(kMaxKeyframeDelay < keyframe_period_); | |
| 1031 | |
| 1032 // Fake encoder produces an invalid stream, so skip validating it. | |
| 1033 if (!g_fake_encoder) { | |
| 1034 stream_validator_ = StreamValidator::Create( | |
| 1035 test_stream_->requested_profile, | |
| 1036 base::Bind(&VEAClient::HandleEncodedFrame, base::Unretained(this))); | |
| 1037 CHECK(stream_validator_); | |
| 1038 } | |
| 1039 | |
| 1040 if (save_to_file_) { | |
| 1041 LOG_ASSERT(!test_stream_->out_filename.empty()); | |
| 1042 base::FilePath out_filename(test_stream_->out_filename); | |
| 1043 // This creates or truncates out_filename. | |
| 1044 // Without it, AppendToFile() will not work. | |
| 1045 EXPECT_EQ(0, base::WriteFile(out_filename, NULL, 0)); | |
| 1046 } | |
| 1047 | |
| 1048 // Initialize the parameters of the test streams. | |
| 1049 UpdateTestStreamData(mid_stream_bitrate_switch, mid_stream_framerate_switch); | |
| 1050 | |
| 1051 thread_checker_.DetachFromThread(); | |
| 1052 } | |
| 1053 | |
| 1054 VEAClient::~VEAClient() { LOG_ASSERT(!has_encoder()); } | |
| 1055 | |
| 1056 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateFakeVEA() { | |
| 1057 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | |
| 1058 if (g_fake_encoder) { | |
| 1059 encoder.reset(new media::FakeVideoEncodeAccelerator( | |
| 1060 scoped_refptr<base::SingleThreadTaskRunner>( | |
| 1061 base::ThreadTaskRunnerHandle::Get()))); | |
| 1062 } | |
| 1063 return encoder; | |
| 1064 } | |
| 1065 | |
| 1066 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateV4L2VEA() { | |
| 1067 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | |
| 1068 #if defined(OS_CHROMEOS) && (defined(ARCH_CPU_ARMEL) || \ | |
| 1069 (defined(USE_OZONE) && defined(USE_V4L2_CODEC))) | |
| 1070 scoped_refptr<V4L2Device> device = V4L2Device::Create(V4L2Device::kEncoder); | |
| 1071 if (device) | |
| 1072 encoder.reset(new V4L2VideoEncodeAccelerator(device)); | |
| 1073 #endif | |
| 1074 return encoder; | |
| 1075 } | |
| 1076 | |
| 1077 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateVaapiVEA() { | |
| 1078 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | |
| 1079 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY) | |
| 1080 encoder.reset(new VaapiVideoEncodeAccelerator()); | |
| 1081 #endif | |
| 1082 return encoder; | |
| 1083 } | |
| 1084 | |
| 1085 std::unique_ptr<media::VideoEncodeAccelerator> VEAClient::CreateVTVEA() { | |
| 1086 std::unique_ptr<media::VideoEncodeAccelerator> encoder; | |
| 1087 #if defined(OS_MACOSX) | |
| 1088 encoder.reset(new VTVideoEncodeAccelerator()); | |
| 1089 #endif | |
| 1090 return encoder; | |
| 1091 } | |
| 1092 | |
| 1093 void VEAClient::CreateEncoder() { | |
| 1094 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 1095 LOG_ASSERT(!has_encoder()); | |
| 1096 | |
| 1097 std::unique_ptr<media::VideoEncodeAccelerator> encoders[] = { | |
| 1098 CreateFakeVEA(), CreateV4L2VEA(), CreateVaapiVEA(), CreateVTVEA()}; | |
| 1099 | |
| 1100 DVLOG(1) << "Profile: " << test_stream_->requested_profile | |
| 1101 << ", initial bitrate: " << requested_bitrate_; | |
| 1102 | |
| 1103 for (size_t i = 0; i < arraysize(encoders); ++i) { | |
| 1104 if (!encoders[i]) | |
| 1105 continue; | |
| 1106 encoder_ = std::move(encoders[i]); | |
| 1107 SetState(CS_ENCODER_SET); | |
| 1108 if (encoder_->Initialize(kInputFormat, | |
| 1109 test_stream_->visible_size, | |
| 1110 test_stream_->requested_profile, | |
| 1111 requested_bitrate_, | |
| 1112 this)) { | |
| 1113 SetStreamParameters(requested_bitrate_, requested_framerate_); | |
| 1114 SetState(CS_INITIALIZED); | |
| 1115 | |
| 1116 if (verify_output_ && !g_fake_encoder) | |
| 1117 quality_validator_.reset(new VideoFrameQualityValidator( | |
| 1118 test_stream_->requested_profile, | |
| 1119 base::Bind(&VEAClient::DecodeCompleted, base::Unretained(this)), | |
| 1120 base::Bind(&VEAClient::DecodeFailed, base::Unretained(this)))); | |
| 1121 return; | |
| 1122 } | |
| 1123 } | |
| 1124 encoder_.reset(); | |
| 1125 LOG(ERROR) << "VideoEncodeAccelerator::Initialize() failed"; | |
| 1126 SetState(CS_ERROR); | |
| 1127 } | |
| 1128 | |
| 1129 void VEAClient::DecodeCompleted() { | |
| 1130 SetState(CS_VALIDATED); | |
| 1131 } | |
| 1132 | |
| 1133 void VEAClient::DecodeFailed() { | |
| 1134 SetState(CS_ERROR); | |
| 1135 } | |
| 1136 | |
| 1137 void VEAClient::DestroyEncoder() { | |
| 1138 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 1139 if (!has_encoder()) | |
| 1140 return; | |
| 1141 // Clear the objects that should be destroyed on the same thread as creation. | |
| 1142 encoder_.reset(); | |
| 1143 input_timer_.reset(); | |
| 1144 quality_validator_.reset(); | |
| 1145 } | |
| 1146 | |
| 1147 void VEAClient::UpdateTestStreamData(bool mid_stream_bitrate_switch, | |
| 1148 bool mid_stream_framerate_switch) { | |
| 1149 // Use defaults for bitrate/framerate if they are not provided. | |
| 1150 if (test_stream_->requested_bitrate == 0) | |
| 1151 requested_bitrate_ = kDefaultBitrate; | |
| 1152 else | |
| 1153 requested_bitrate_ = test_stream_->requested_bitrate; | |
| 1154 | |
| 1155 if (test_stream_->requested_framerate == 0) | |
| 1156 requested_framerate_ = kDefaultFramerate; | |
| 1157 else | |
| 1158 requested_framerate_ = test_stream_->requested_framerate; | |
| 1159 | |
| 1160 // If bitrate/framerate switch is requested, use the subsequent values if | |
| 1161 // provided, or, if not, calculate them from their initial values using | |
| 1162 // the default ratios. | |
| 1163 // Otherwise, if a switch is not requested, keep the initial values. | |
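| // For example, with the default 2000000 bps initial bitrate and no explicit | |
| // subsequent value, a mid-stream bitrate switch requests 2000000 * 2.0 = | |
| // 4000000 bps, and with the default 30 fps a framerate switch requests | |
| // 30 * 0.1 = 3 fps. | |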
| 1164 if (mid_stream_bitrate_switch) { | |
| 1165 if (test_stream_->requested_subsequent_bitrate == 0) | |
| 1166 requested_subsequent_bitrate_ = | |
| 1167 requested_bitrate_ * kDefaultSubsequentBitrateRatio; | |
| 1168 else | |
| 1169 requested_subsequent_bitrate_ = | |
| 1170 test_stream_->requested_subsequent_bitrate; | |
| 1171 } else { | |
| 1172 requested_subsequent_bitrate_ = requested_bitrate_; | |
| 1173 } | |
| 1174 if (requested_subsequent_bitrate_ == 0) | |
| 1175 requested_subsequent_bitrate_ = 1; | |
| 1176 | |
| 1177 if (mid_stream_framerate_switch) { | |
| 1178 if (test_stream_->requested_subsequent_framerate == 0) | |
| 1179 requested_subsequent_framerate_ = | |
| 1180 requested_framerate_ * kDefaultSubsequentFramerateRatio; | |
| 1181 else | |
| 1182 requested_subsequent_framerate_ = | |
| 1183 test_stream_->requested_subsequent_framerate; | |
| 1184 } else { | |
| 1185 requested_subsequent_framerate_ = requested_framerate_; | |
| 1186 } | |
| 1187 if (requested_subsequent_framerate_ == 0) | |
| 1188 requested_subsequent_framerate_ = 1; | |
| 1189 } | |
| 1190 | |
| 1191 double VEAClient::frames_per_second() { | |
| 1192 LOG_ASSERT(num_encoded_frames_ != 0UL); | |
| 1193 base::TimeDelta duration = last_frame_ready_time_ - first_frame_start_time_; | |
| 1194 return num_encoded_frames_ / duration.InSecondsF(); | |
| 1195 } | |
| 1196 | |
| 1197 void VEAClient::RequireBitstreamBuffers(unsigned int input_count, | |
| 1198 const gfx::Size& input_coded_size, | |
| 1199 size_t output_size) { | |
| 1200 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 1201 ASSERT_EQ(state_, CS_INITIALIZED); | |
| 1202 SetState(CS_ENCODING); | |
| 1203 | |
| 1204 if (quality_validator_) | |
| 1205 quality_validator_->Initialize(input_coded_size, | |
| 1206 gfx::Rect(test_stream_->visible_size)); | |
| 1207 | |
| 1208 CreateAlignedInputStreamFile(input_coded_size, test_stream_); | |
| 1209 | |
| 1210 num_frames_to_encode_ = test_stream_->num_frames; | |
| 1211 if (g_num_frames_to_encode > 0) | |
| 1212 num_frames_to_encode_ = g_num_frames_to_encode; | |
| 1213 | |
| 1214 // We may need to loop over the stream more than once if more frames than | |
| 1215 // provided are required for bitrate tests. | |
| 1216 if (force_bitrate_ && num_frames_to_encode_ < kMinFramesForBitrateTests) { | |
| 1217 DVLOG(1) << "Stream too short for bitrate test (" | |
| 1218 << test_stream_->num_frames << " frames), will loop it to reach " | |
| 1219 << kMinFramesForBitrateTests << " frames"; | |
| 1220 num_frames_to_encode_ = kMinFramesForBitrateTests; | |
| 1221 } | |
| 1222 if (save_to_file_ && IsVP8(test_stream_->requested_profile)) | |
| 1223 WriteIvfFileHeader(); | |
| 1224 | |
| 1225 input_coded_size_ = input_coded_size; | |
| 1226 num_required_input_buffers_ = input_count; | |
| 1227 ASSERT_GT(num_required_input_buffers_, 0UL); | |
| 1228 | |
| 1229 output_buffer_size_ = output_size; | |
| 1230 ASSERT_GT(output_buffer_size_, 0UL); | |
| 1231 | |
| 1232 for (unsigned int i = 0; i < kNumOutputBuffers; ++i) { | |
| 1233 base::SharedMemory* shm = new base::SharedMemory(); | |
| 1234 LOG_ASSERT(shm->CreateAndMapAnonymous(output_buffer_size_)); | |
| 1235 output_shms_.push_back(shm); | |
| 1236 FeedEncoderWithOutput(shm); | |
| 1237 } | |
| 1238 | |
| 1239 if (g_env->run_at_fps()) { | |
| 1240 input_timer_.reset(new base::RepeatingTimer()); | |
| 1241 input_timer_->Start( | |
| 1242 FROM_HERE, base::TimeDelta::FromSeconds(1) / current_framerate_, | |
| 1243 base::Bind(&VEAClient::OnInputTimer, base::Unretained(this))); | |
| 1244 } else { | |
| 1245 while (inputs_at_client_.size() < | |
| 1246 num_required_input_buffers_ + kNumExtraInputFrames) | |
| 1247 FeedEncoderWithOneInput(); | |
| 1248 } | |
| 1249 } | |
| 1250 | |
| 1251 void VEAClient::BitstreamBufferReady(int32_t bitstream_buffer_id, | |
| 1252 size_t payload_size, | |
| 1253 bool key_frame) { | |
| 1254 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 1255 ASSERT_LE(payload_size, output_buffer_size_); | |
| 1256 | |
| 1257 IdToSHM::iterator it = output_buffers_at_client_.find(bitstream_buffer_id); | |
| 1258 ASSERT_NE(it, output_buffers_at_client_.end()); | |
| 1259 base::SharedMemory* shm = it->second; | |
| 1260 output_buffers_at_client_.erase(it); | |
| 1261 | |
| 1262 if (state_ == CS_FINISHED || state_ == CS_VALIDATED) | |
| 1263 return; | |
| 1264 | |
| 1265 encoded_stream_size_since_last_check_ += payload_size; | |
| 1266 | |
| 1267 const uint8_t* stream_ptr = static_cast<const uint8_t*>(shm->memory()); | |
| 1268 if (payload_size > 0) { | |
| 1269 if (stream_validator_) { | |
| 1270 stream_validator_->ProcessStreamBuffer(stream_ptr, payload_size); | |
| 1271 } else { | |
| 1272 HandleEncodedFrame(key_frame); | |
| 1273 } | |
| 1274 | |
| 1275 if (quality_validator_) { | |
| 1276 scoped_refptr<media::DecoderBuffer> buffer(media::DecoderBuffer::CopyFrom( | |
| 1277 reinterpret_cast<const uint8_t*>(shm->memory()), | |
| 1278 static_cast<int>(payload_size))); | |
| 1279 quality_validator_->AddDecodeBuffer(buffer); | |
| 1280 // Insert EOS buffer to flush the decoder. | |
| 1281 if (num_encoded_frames_ == num_frames_to_encode_) | |
| 1282 quality_validator_->Flush(); | |
| 1283 } | |
| 1284 | |
| 1285 if (save_to_file_) { | |
| 1286 if (IsVP8(test_stream_->requested_profile)) | |
| 1287 WriteIvfFrameHeader(num_encoded_frames_ - 1, payload_size); | |
| 1288 | |
| 1289 EXPECT_TRUE(base::AppendToFile( | |
| 1290 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), | |
| 1291 static_cast<char*>(shm->memory()), | |
| 1292 base::checked_cast<int>(payload_size))); | |
| 1293 } | |
| 1294 } | |
| 1295 | |
| 1296 EXPECT_EQ(key_frame, seen_keyframe_in_this_buffer_); | |
| 1297 seen_keyframe_in_this_buffer_ = false; | |
| 1298 | |
| 1299 FeedEncoderWithOutput(shm); | |
| 1300 } | |
| 1301 | |
| 1302 void VEAClient::NotifyError(VideoEncodeAccelerator::Error error) { | |
| 1303 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 1304 SetState(CS_ERROR); | |
| 1305 } | |
| 1306 | |
| 1307 void VEAClient::SetState(ClientState new_state) { | |
| 1308 DVLOG(4) << "Changing state " << state_ << "->" << new_state; | |
| 1309 note_->Notify(new_state); | |
| 1310 state_ = new_state; | |
| 1311 } | |
| 1312 | |
| 1313 void VEAClient::SetStreamParameters(unsigned int bitrate, | |
| 1314 unsigned int framerate) { | |
| 1315 current_requested_bitrate_ = bitrate; | |
| 1316 current_framerate_ = framerate; | |
| 1317 LOG_ASSERT(current_requested_bitrate_ > 0UL); | |
| 1318 LOG_ASSERT(current_framerate_ > 0UL); | |
| 1319 encoder_->RequestEncodingParametersChange(current_requested_bitrate_, | |
| 1320 current_framerate_); | |
| 1321 DVLOG(1) << "Switched parameters to " << current_requested_bitrate_ | |
| 1322 << " bps @ " << current_framerate_ << " FPS"; | |
| 1323 } | |
| 1324 | |
| 1325 void VEAClient::InputNoLongerNeededCallback(int32_t input_id) { | |
| 1326 std::set<int32_t>::iterator it = inputs_at_client_.find(input_id); | |
| 1327 ASSERT_NE(it, inputs_at_client_.end()); | |
| 1328 inputs_at_client_.erase(it); | |
| 1329 if (!g_env->run_at_fps()) | |
| 1330 FeedEncoderWithOneInput(); | |
| 1331 } | |
| 1332 | |
| 1333 scoped_refptr<media::VideoFrame> VEAClient::CreateFrame(off_t position) { | |
| 1334 uint8_t* frame_data_y = const_cast<uint8_t*>( | |
| 1335 test_stream_->mapped_aligned_in_file.data() + position); | |
| 1336 uint8_t* frame_data_u = frame_data_y + test_stream_->aligned_plane_size[0]; | |
| 1337 uint8_t* frame_data_v = frame_data_u + test_stream_->aligned_plane_size[1]; | |
| 1338 CHECK_GT(current_framerate_, 0U); | |
| 1339 | |
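| // I420 layout: the Y plane stride equals the coded width and the U/V plane | |
| // strides are half of it. The frame timestamp follows from the input id, | |
| // e.g. (hypothetical numbers) frame 30 at 30 FPS gets a 1000 ms timestamp. | |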
| 1340 scoped_refptr<media::VideoFrame> video_frame = | |
| 1341 media::VideoFrame::WrapExternalYuvData( | |
| 1342 kInputFormat, input_coded_size_, | |
| 1343 gfx::Rect(test_stream_->visible_size), test_stream_->visible_size, | |
| 1344 input_coded_size_.width(), input_coded_size_.width() / 2, | |
| 1345 input_coded_size_.width() / 2, frame_data_y, frame_data_u, | |
| 1346 frame_data_v, | |
| 1347 base::TimeDelta::FromMilliseconds( | |
| 1348 next_input_id_ * base::Time::kMillisecondsPerSecond / | |
| 1349 current_framerate_)); | |
| 1350 EXPECT_NE(nullptr, video_frame.get()); | |
| 1351 return video_frame; | |
| 1352 } | |
| 1353 | |
| 1354 scoped_refptr<media::VideoFrame> VEAClient::PrepareInputFrame( | |
| 1355 off_t position, | |
| 1356 int32_t* input_id) { | |
| 1357 CHECK_LE(position + test_stream_->aligned_buffer_size, | |
| 1358 test_stream_->mapped_aligned_in_file.length()); | |
| 1359 | |
| 1360 scoped_refptr<media::VideoFrame> frame = CreateFrame(position); | |
| 1361 EXPECT_TRUE(frame); | |
| 1362 frame->AddDestructionObserver( | |
| 1363 media::BindToCurrentLoop( | |
| 1364 base::Bind(&VEAClient::InputNoLongerNeededCallback, | |
| 1365 base::Unretained(this), | |
| 1366 next_input_id_))); | |
| 1367 | |
| 1368 LOG_ASSERT(inputs_at_client_.insert(next_input_id_).second); | |
| 1369 | |
| 1370 *input_id = next_input_id_++; | |
| 1371 return frame; | |
| 1372 } | |
| 1373 | |
| 1374 void VEAClient::OnInputTimer() { | |
| 1375 if (!has_encoder() || state_ != CS_ENCODING) | |
| 1376 input_timer_.reset(); | |
| 1377 else if (inputs_at_client_.size() < | |
| 1378 num_required_input_buffers_ + kNumExtraInputFrames) | |
| 1379 FeedEncoderWithOneInput(); | |
| 1380 else | |
| 1381 DVLOG(1) << "Dropping input frame"; | |
| 1382 } | |
| 1383 | |
| 1384 void VEAClient::FeedEncoderWithOneInput() { | |
| 1385 if (!has_encoder() || state_ != CS_ENCODING) | |
| 1386 return; | |
| 1387 | |
| 1388 size_t bytes_left = | |
| 1389 test_stream_->mapped_aligned_in_file.length() - pos_in_input_stream_; | |
| 1390 if (bytes_left < test_stream_->aligned_buffer_size) { | |
| 1391 DCHECK_EQ(bytes_left, 0UL); | |
| 1392 // Rewind if at the end of stream and we are still encoding. | |
| 1393 // This is done to flush the encoder with additional frames from the | |
| 1394 // beginning of the stream, or to loop the stream when it is shorter than | |
| 1395 // the number of frames we require for bitrate tests. | |
| 1396 pos_in_input_stream_ = 0; | |
| 1397 } | |
| 1398 | |
| 1399 if (quality_validator_) | |
| 1400 quality_validator_->AddOriginalFrame(CreateFrame(pos_in_input_stream_)); | |
| 1401 | |
| 1402 int32_t input_id; | |
| 1403 scoped_refptr<media::VideoFrame> video_frame = | |
| 1404 PrepareInputFrame(pos_in_input_stream_, &input_id); | |
| 1405 pos_in_input_stream_ += test_stream_->aligned_buffer_size; | |
| 1406 | |
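| // Force a keyframe every keyframe_period_ frames, counting from input id 0; | |
| // e.g. with a (hypothetical) period of 10, ids 0, 10, 20, ... are forced. | |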
| 1407 bool force_keyframe = false; | |
| 1408 if (keyframe_period_ && input_id % keyframe_period_ == 0) { | |
| 1409 force_keyframe = true; | |
| 1410 ++num_keyframes_requested_; | |
| 1411 } | |
| 1412 | |
| 1413 if (input_id == 0) { | |
| 1414 first_frame_start_time_ = base::TimeTicks::Now(); | |
| 1415 } | |
| 1416 | |
| 1417 if (g_env->needs_encode_latency()) { | |
| 1418 LOG_ASSERT(input_id == static_cast<int32_t>(encode_start_time_.size())); | |
| 1419 encode_start_time_.push_back(base::TimeTicks::Now()); | |
| 1420 } | |
| 1421 encoder_->Encode(video_frame, force_keyframe); | |
| 1422 } | |
| 1423 | |
| 1424 void VEAClient::FeedEncoderWithOutput(base::SharedMemory* shm) { | |
| 1425 if (!has_encoder()) | |
| 1426 return; | |
| 1427 | |
| 1428 if (state_ != CS_ENCODING) | |
| 1429 return; | |
| 1430 | |
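| // Hand the shared memory back to the encoder as a fresh bitstream buffer | |
| // and track it until it is returned via BitstreamBufferReady(). | |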
| 1431 base::SharedMemoryHandle dup_handle; | |
| 1432 LOG_ASSERT(shm->ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle)); | |
| 1433 | |
| 1434 media::BitstreamBuffer bitstream_buffer( | |
| 1435 next_output_buffer_id_++, dup_handle, output_buffer_size_); | |
| 1436 LOG_ASSERT(output_buffers_at_client_.insert( | |
| 1437 std::make_pair(bitstream_buffer.id(), shm)).second); | |
| 1438 encoder_->UseOutputBitstreamBuffer(bitstream_buffer); | |
| 1439 } | |
| 1440 | |
| 1441 bool VEAClient::HandleEncodedFrame(bool keyframe) { | |
| 1442 // If this fires, it is a bug in the test, which should not ignore a false | |
| 1443 // return value from this method. | |
| 1444 LOG_ASSERT(num_encoded_frames_ <= num_frames_to_encode_); | |
| 1445 | |
| 1446 last_frame_ready_time_ = base::TimeTicks::Now(); | |
| 1447 | |
| 1448 if (g_env->needs_encode_latency()) { | |
| 1449 LOG_ASSERT(num_encoded_frames_ < encode_start_time_.size()); | |
| 1450 base::TimeTicks start_time = encode_start_time_[num_encoded_frames_]; | |
| 1451 LOG_ASSERT(!start_time.is_null()); | |
| 1452 encode_latencies_.push_back(last_frame_ready_time_ - start_time); | |
| 1453 } | |
| 1454 | |
| 1455 ++num_encoded_frames_; | |
| 1456 ++num_frames_since_last_check_; | |
| 1457 | |
| 1458 // Because the keyframe behavior requirements are loose, we give | |
| 1459 // the encoder some freedom here. It may deliver the keyframe | |
| 1460 // immediately after we requested it, which could be for a frame number | |
| 1461 // earlier than the one we requested it for (if the keyframe request | |
| 1462 // is asynchronous, i.e. not bound to any concrete frame, and because | |
| 1463 // the pipeline can be deeper than one frame), at that frame, or later. | |
| 1464 // So the only constraints we put here are that we get a keyframe no | |
| 1465 // earlier than we requested one (in time), and no later than | |
| 1466 // kMaxKeyframeDelay frames after the frame for which we requested it | |
| 1467 // comes back encoded. | |
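| // For example, if a keyframe was requested for frame n, it must show up in | |
| // the output no later than frame n + kMaxKeyframeDelay. | |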
| 1468 if (keyframe) { | |
| 1469 if (num_keyframes_requested_ > 0) { | |
| 1470 --num_keyframes_requested_; | |
| 1471 next_keyframe_at_ += keyframe_period_; | |
| 1472 } | |
| 1473 seen_keyframe_in_this_buffer_ = true; | |
| 1474 } | |
| 1475 | |
| 1476 if (num_keyframes_requested_ > 0) | |
| 1477 EXPECT_LE(num_encoded_frames_, next_keyframe_at_ + kMaxKeyframeDelay); | |
| 1478 | |
| 1479 if (num_encoded_frames_ == num_frames_to_encode_ / 2) { | |
| 1480 VerifyStreamProperties(); | |
| 1481 if (requested_subsequent_bitrate_ != current_requested_bitrate_ || | |
| 1482 requested_subsequent_framerate_ != current_framerate_) { | |
| 1483 SetStreamParameters(requested_subsequent_bitrate_, | |
| 1484 requested_subsequent_framerate_); | |
| 1485 if (g_env->run_at_fps() && input_timer_) | |
| 1486 input_timer_->Start( | |
| 1487 FROM_HERE, base::TimeDelta::FromSeconds(1) / current_framerate_, | |
| 1488 base::Bind(&VEAClient::OnInputTimer, base::Unretained(this))); | |
| 1489 } | |
| 1490 } else if (num_encoded_frames_ == num_frames_to_encode_) { | |
| 1491 LogPerf(); | |
| 1492 VerifyMinFPS(); | |
| 1493 VerifyStreamProperties(); | |
| 1494 SetState(CS_FINISHED); | |
| 1495 if (!quality_validator_) | |
| 1496 SetState(CS_VALIDATED); | |
| 1497 return false; | |
| 1498 } | |
| 1499 | |
| 1500 return true; | |
| 1501 } | |
| 1502 | |
| 1503 void VEAClient::LogPerf() { | |
| 1504 g_env->LogToFile("Measured encoder FPS", | |
| 1505 base::StringPrintf("%.3f", frames_per_second())); | |
| 1506 | |
| 1507 // Log encode latencies. | |
| 1508 if (g_env->needs_encode_latency()) { | |
| 1509 std::sort(encode_latencies_.begin(), encode_latencies_.end()); | |
| 1510 for (const auto& percentile : kLoggedLatencyPercentiles) { | |
| 1511 base::TimeDelta latency = Percentile(encode_latencies_, percentile); | |
| 1512 g_env->LogToFile( | |
| 1513 base::StringPrintf("Encode latency for the %dth percentile", | |
| 1514 percentile), | |
| 1515 base::StringPrintf("%" PRId64 " us", latency.InMicroseconds())); | |
| 1516 } | |
| 1517 } | |
| 1518 } | |
| 1519 | |
| 1520 void VEAClient::VerifyMinFPS() { | |
| 1521 if (test_perf_) | |
| 1522 EXPECT_GE(frames_per_second(), kMinPerfFPS); | |
| 1523 } | |
| 1524 | |
| 1525 void VEAClient::VerifyStreamProperties() { | |
| 1526 LOG_ASSERT(num_frames_since_last_check_ > 0UL); | |
| 1527 LOG_ASSERT(encoded_stream_size_since_last_check_ > 0UL); | |
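| // Bitrate of the chunk = bytes * 8 * framerate / frames; e.g. (hypothetical | |
| // numbers) 375000 bytes over 30 frames at 30 FPS gives 3,000,000 bps. | |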
| 1528 unsigned int bitrate = encoded_stream_size_since_last_check_ * 8 * | |
| 1529 current_framerate_ / num_frames_since_last_check_; | |
| 1530 DVLOG(1) << "Current chunk's bitrate: " << bitrate | |
| 1531 << " (expected: " << current_requested_bitrate_ | |
| 1532 << " @ " << current_framerate_ << " FPS," | |
| 1533 << " num frames in chunk: " << num_frames_since_last_check_; | |
| 1534 | |
| 1535 num_frames_since_last_check_ = 0; | |
| 1536 encoded_stream_size_since_last_check_ = 0; | |
| 1537 | |
| 1538 if (force_bitrate_) { | |
| 1539 EXPECT_NEAR(bitrate, | |
| 1540 current_requested_bitrate_, | |
| 1541 kBitrateTolerance * current_requested_bitrate_); | |
| 1542 } | |
| 1543 | |
| 1544 // All requested keyframes should have been provided. Allow the last | |
| 1545 // requested keyframe to remain undelivered if we haven't reached the | |
| 1546 // maximum frame number by which it should have arrived. | |
| 1547 if (num_encoded_frames_ < next_keyframe_at_ + kMaxKeyframeDelay) | |
| 1548 EXPECT_LE(num_keyframes_requested_, 1UL); | |
| 1549 else | |
| 1550 EXPECT_EQ(num_keyframes_requested_, 0UL); | |
| 1551 } | |
| 1552 | |
| 1553 void VEAClient::WriteIvfFileHeader() { | |
| 1554 media::IvfFileHeader header = {}; | |
| 1555 | |
| 1556 memcpy(header.signature, media::kIvfHeaderSignature, | |
| 1557 sizeof(header.signature)); | |
| 1558 header.version = 0; | |
| 1559 header.header_size = sizeof(header); | |
| 1560 header.fourcc = 0x30385056; // VP80 | |
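| // (0x30385056 is the fourcc "VP80" packed little-endian: 0x56='V', 0x50='P', | |
| // 0x38='8', 0x30='0'.) | |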
| 1561 header.width = | |
| 1562 base::checked_cast<uint16_t>(test_stream_->visible_size.width()); | |
| 1563 header.height = | |
| 1564 base::checked_cast<uint16_t>(test_stream_->visible_size.height()); | |
| 1565 header.timebase_denum = requested_framerate_; | |
| 1566 header.timebase_num = 1; | |
| 1567 header.num_frames = num_frames_to_encode_; | |
| 1568 header.ByteSwap(); | |
| 1569 | |
| 1570 EXPECT_TRUE(base::AppendToFile( | |
| 1571 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), | |
| 1572 reinterpret_cast<char*>(&header), sizeof(header))); | |
| 1573 } | |
| 1574 | |
| 1575 void VEAClient::WriteIvfFrameHeader(int frame_index, size_t frame_size) { | |
| 1576 media::IvfFrameHeader header = {}; | |
| 1577 | |
| 1578 header.frame_size = frame_size; | |
| 1579 header.timestamp = frame_index; | |
| 1580 header.ByteSwap(); | |
| 1581 EXPECT_TRUE(base::AppendToFile( | |
| 1582 base::FilePath::FromUTF8Unsafe(test_stream_->out_filename), | |
| 1583 reinterpret_cast<char*>(&header), sizeof(header))); | |
| 1584 } | |
| 1585 | |
| 1586 // Test parameters: | |
| 1587 // - Number of concurrent encoders. The value takes effect when there is only | |
| 1588 // one input stream; otherwise, one encoder per input stream will be | |
| 1589 // instantiated. | |
| 1590 // - If true, save output to file (provided an output filename was supplied). | |
| 1591 // - Force a keyframe every n frames. | |
| 1592 // - Force bitrate; the actual required value is provided as a property | |
| 1593 // of the input stream, because it depends on stream type/resolution/etc. | |
| 1594 // - If true, measure performance. | |
| 1595 // - If true, switch bitrate mid-stream. | |
| 1596 // - If true, switch framerate mid-stream. | |
| 1597 // - If true, verify the output frames of the encoder. | |
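| // For example, the tuple (1, true, 0, false, false, false, false, false) used | |
| // by the SimpleEncode instantiation below runs a single encoder, saves its | |
| // output to file, never forces keyframes, and leaves all other behaviors at | |
| // their defaults. | |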
| 1598 class VideoEncodeAcceleratorTest | |
| 1599 : public ::testing::TestWithParam< | |
| 1600 base::Tuple<int, bool, int, bool, bool, bool, bool, bool>> {}; | |
| 1601 | |
| 1602 TEST_P(VideoEncodeAcceleratorTest, TestSimpleEncode) { | |
| 1603 size_t num_concurrent_encoders = base::get<0>(GetParam()); | |
| 1604 const bool save_to_file = base::get<1>(GetParam()); | |
| 1605 const unsigned int keyframe_period = base::get<2>(GetParam()); | |
| 1606 const bool force_bitrate = base::get<3>(GetParam()); | |
| 1607 const bool test_perf = base::get<4>(GetParam()); | |
| 1608 const bool mid_stream_bitrate_switch = base::get<5>(GetParam()); | |
| 1609 const bool mid_stream_framerate_switch = base::get<6>(GetParam()); | |
| 1610 const bool verify_output = | |
| 1611 base::get<7>(GetParam()) || g_env->verify_all_output(); | |
| 1612 | |
| 1613 ScopedVector<ClientStateNotification<ClientState> > notes; | |
| 1614 ScopedVector<VEAClient> clients; | |
| 1615 base::Thread encoder_thread("EncoderThread"); | |
| 1616 ASSERT_TRUE(encoder_thread.Start()); | |
| 1617 | |
| 1618 if (g_env->test_streams_.size() > 1) | |
| 1619 num_concurrent_encoders = g_env->test_streams_.size(); | |
| 1620 | |
| 1621 // Create all encoders. | |
| 1622 for (size_t i = 0; i < num_concurrent_encoders; i++) { | |
| 1623 size_t test_stream_index = i % g_env->test_streams_.size(); | |
| 1624 // Disregard save_to_file if we didn't get an output filename. | |
| 1625 bool encoder_save_to_file = | |
| 1626 (save_to_file && | |
| 1627 !g_env->test_streams_[test_stream_index]->out_filename.empty()); | |
| 1628 | |
| 1629 notes.push_back(new ClientStateNotification<ClientState>()); | |
| 1630 clients.push_back(new VEAClient( | |
| 1631 g_env->test_streams_[test_stream_index], notes.back(), | |
| 1632 encoder_save_to_file, keyframe_period, force_bitrate, test_perf, | |
| 1633 mid_stream_bitrate_switch, mid_stream_framerate_switch, verify_output)); | |
| 1634 | |
| 1635 encoder_thread.message_loop()->PostTask( | |
| 1636 FROM_HERE, | |
| 1637 base::Bind(&VEAClient::CreateEncoder, | |
| 1638 base::Unretained(clients.back()))); | |
| 1639 } | |
| 1640 | |
| 1641 // All encoders must pass through states in this order. | |
| 1642 enum ClientState state_transitions[] = { | |
| 1643 CS_ENCODER_SET, CS_INITIALIZED, CS_ENCODING, CS_FINISHED, CS_VALIDATED}; | |
| 1644 | |
| 1645 // Wait for all encoders to go through all states and finish. | |
| 1646 // Do this by waiting for all encoders to advance to state n before checking | |
| 1647 // state n+1, to verify that they are able to operate concurrently. | |
| 1648 // It also better simulates real-world usage, as the main thread, on which | |
| 1649 // encoders are created/destroyed, is a single GPU Process ChildThread. | |
| 1650 // Moreover, we can't have proper multithreading on X11, so multiple | |
| 1651 // "ChildThreads" could cause hard-to-debug issues there. | |
| 1652 for (size_t state_no = 0; state_no < arraysize(state_transitions); | |
| 1653 ++state_no) { | |
| 1654 for (size_t i = 0; i < num_concurrent_encoders; i++) | |
| 1655 ASSERT_EQ(notes[i]->Wait(), state_transitions[state_no]); | |
| 1656 } | |
| 1657 | |
| 1658 for (size_t i = 0; i < num_concurrent_encoders; ++i) { | |
| 1659 encoder_thread.message_loop()->PostTask( | |
| 1660 FROM_HERE, | |
| 1661 base::Bind(&VEAClient::DestroyEncoder, base::Unretained(clients[i]))); | |
| 1662 } | |
| 1663 | |
| 1664 // This ensures all tasks have finished. | |
| 1665 encoder_thread.Stop(); | |
| 1666 } | |
| 1667 | |
| 1668 #if !defined(OS_MACOSX) | |
| 1669 INSTANTIATE_TEST_CASE_P( | |
| 1670 SimpleEncode, | |
| 1671 VideoEncodeAcceleratorTest, | |
| 1672 ::testing::Values( | |
| 1673 base::MakeTuple(1, true, 0, false, false, false, false, false), | |
| 1674 base::MakeTuple(1, true, 0, false, false, false, false, true))); | |
| 1675 | |
| 1676 INSTANTIATE_TEST_CASE_P( | |
| 1677 EncoderPerf, | |
| 1678 VideoEncodeAcceleratorTest, | |
| 1679 ::testing::Values( | |
| 1680 base::MakeTuple(1, false, 0, false, true, false, false, false))); | |
| 1681 | |
| 1682 INSTANTIATE_TEST_CASE_P( | |
| 1683 ForceKeyframes, | |
| 1684 VideoEncodeAcceleratorTest, | |
| 1685 ::testing::Values( | |
| 1686 base::MakeTuple(1, false, 10, false, false, false, false, false))); | |
| 1687 | |
| 1688 INSTANTIATE_TEST_CASE_P( | |
| 1689 ForceBitrate, | |
| 1690 VideoEncodeAcceleratorTest, | |
| 1691 ::testing::Values( | |
| 1692 base::MakeTuple(1, false, 0, true, false, false, false, false))); | |
| 1693 | |
| 1694 INSTANTIATE_TEST_CASE_P( | |
| 1695 MidStreamParamSwitchBitrate, | |
| 1696 VideoEncodeAcceleratorTest, | |
| 1697 ::testing::Values( | |
| 1698 base::MakeTuple(1, false, 0, true, false, true, false, false))); | |
| 1699 | |
| 1700 INSTANTIATE_TEST_CASE_P( | |
| 1701 MidStreamParamSwitchFPS, | |
| 1702 VideoEncodeAcceleratorTest, | |
| 1703 ::testing::Values( | |
| 1704 base::MakeTuple(1, false, 0, true, false, false, true, false))); | |
| 1705 | |
| 1706 INSTANTIATE_TEST_CASE_P( | |
| 1707 MultipleEncoders, | |
| 1708 VideoEncodeAcceleratorTest, | |
| 1709 ::testing::Values( | |
| 1710 base::MakeTuple(3, false, 0, false, false, false, false, false), | |
| 1711 base::MakeTuple(3, false, 0, true, false, false, true, false), | |
| 1712 base::MakeTuple(3, false, 0, true, false, true, false, false))); | |
| 1713 #else | |
| 1714 INSTANTIATE_TEST_CASE_P( | |
| 1715 SimpleEncode, | |
| 1716 VideoEncodeAcceleratorTest, | |
| 1717 ::testing::Values( | |
| 1718 base::MakeTuple(1, true, 0, false, false, false, false, false), | |
| 1719 base::MakeTuple(1, true, 0, false, false, false, false, true))); | |
| 1720 | |
| 1721 INSTANTIATE_TEST_CASE_P( | |
| 1722 EncoderPerf, | |
| 1723 VideoEncodeAcceleratorTest, | |
| 1724 ::testing::Values( | |
| 1725 base::MakeTuple(1, false, 0, false, true, false, false, false))); | |
| 1726 | |
| 1727 INSTANTIATE_TEST_CASE_P( | |
| 1728 MultipleEncoders, | |
| 1729 VideoEncodeAcceleratorTest, | |
| 1730 ::testing::Values( | |
| 1731 base::MakeTuple(3, false, 0, false, false, false, false, false))); | |
| 1732 #endif | |
| 1733 | |
| 1734 // TODO(posciak): more tests: | |
| 1735 // - async FeedEncoderWithOutput | |
| 1736 // - out-of-order return of outputs to encoder | |
| 1737 // - multiple encoders + decoders | |
| 1738 // - mid-stream encoder_->Destroy() | |
| 1739 | |
| 1740 } // namespace | |
| 1741 } // namespace content | |
| 1742 | |
| 1743 int main(int argc, char** argv) { | |
| 1744 testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args. | |
| 1745 base::CommandLine::Init(argc, argv); | |
| 1746 | |
| 1747 base::ShadowingAtExitManager at_exit_manager; | |
| 1748 base::MessageLoop main_loop; | |
| 1749 | |
| 1750 std::unique_ptr<base::FilePath::StringType> test_stream_data( | |
| 1751 new base::FilePath::StringType( | |
| 1752 media::GetTestDataFilePath(content::g_default_in_filename).value() + | |
| 1753 content::g_default_in_parameters)); | |
| 1754 | |
| 1755 // Needed to enable DVLOG through --vmodule. | |
| 1756 logging::LoggingSettings settings; | |
| 1757 settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG; | |
| 1758 LOG_ASSERT(logging::InitLogging(settings)); | |
| 1759 | |
| 1760 const base::CommandLine* cmd_line = base::CommandLine::ForCurrentProcess(); | |
| 1761 DCHECK(cmd_line); | |
| 1762 | |
| 1763 bool run_at_fps = false; | |
| 1764 bool needs_encode_latency = false; | |
| 1765 bool verify_all_output = false; | |
| 1766 base::FilePath log_path; | |
| 1767 | |
| 1768 base::CommandLine::SwitchMap switches = cmd_line->GetSwitches(); | |
| 1769 for (base::CommandLine::SwitchMap::const_iterator it = switches.begin(); | |
| 1770 it != switches.end(); | |
| 1771 ++it) { | |
| 1772 if (it->first == "test_stream_data") { | |
| 1773 test_stream_data->assign(it->second.c_str()); | |
| 1774 continue; | |
| 1775 } | |
| 1776 // Output machine-readable logs with fixed formats to a file. | |
| 1777 if (it->first == "output_log") { | |
| 1778 log_path = base::FilePath( | |
| 1779 base::FilePath::StringType(it->second.begin(), it->second.end())); | |
| 1780 continue; | |
| 1781 } | |
| 1782 if (it->first == "num_frames_to_encode") { | |
| 1783 std::string input(it->second.begin(), it->second.end()); | |
| 1784 LOG_ASSERT(base::StringToInt(input, &content::g_num_frames_to_encode)); | |
| 1785 continue; | |
| 1786 } | |
| 1787 if (it->first == "measure_latency") { | |
| 1788 needs_encode_latency = true; | |
| 1789 continue; | |
| 1790 } | |
| 1791 if (it->first == "fake_encoder") { | |
| 1792 content::g_fake_encoder = true; | |
| 1793 continue; | |
| 1794 } | |
| 1795 if (it->first == "run_at_fps") { | |
| 1796 run_at_fps = true; | |
| 1797 continue; | |
| 1798 } | |
| 1799 if (it->first == "verify_all_output") { | |
| 1800 verify_all_output = true; | |
| 1801 continue; | |
| 1802 } | |
| 1803 if (it->first == "v" || it->first == "vmodule") | |
| 1804 continue; | |
| 1805 if (it->first == "ozone-platform" || it->first == "ozone-use-surfaceless") | |
| 1806 continue; | |
| 1807 LOG(FATAL) << "Unexpected switch: " << it->first << ":" << it->second; | |
| 1808 } | |
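| // Example invocation (hypothetical paths/values): | |
| //   video_encode_accelerator_unittest --test_stream_data=<stream spec> | |
| //       --num_frames_to_encode=300 --run_at_fps --measure_latency | |
| //       --output_log=/tmp/vea_perf.log | |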
| 1809 | |
| 1810 if (needs_encode_latency && !run_at_fps) { | |
| 1811 // Encode latency can only be measured with --run_at_fps. Otherwise, we get | |
| 1812 // skewed results since the test may queue too many frames at once with the | |
| 1813 // same encode start time. | |
| 1814 LOG(FATAL) << "--measure_latency requires --run_at_fps enabled to work."; | |
| 1815 } | |
| 1816 | |
| 1817 #if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY) | |
| 1818 content::VaapiWrapper::PreSandboxInitialization(); | |
| 1819 #endif | |
| 1820 | |
| 1821 content::g_env = | |
| 1822 reinterpret_cast<content::VideoEncodeAcceleratorTestEnvironment*>( | |
| 1823 testing::AddGlobalTestEnvironment( | |
| 1824 new content::VideoEncodeAcceleratorTestEnvironment( | |
| 1825 std::move(test_stream_data), log_path, run_at_fps, | |
| 1826 needs_encode_latency, verify_all_output))); | |
| 1827 | |
| 1828 return RUN_ALL_TESTS(); | |
| 1829 } | |
| OLD | NEW |