Chromium Code Reviews

Unified Diff: media/gpu/video_decode_accelerator_unittest.cc

Issue 1882373004: Migrate content/common/gpu/media code to media/gpu (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Fix several more bot-identified build issues Created 4 years, 8 months ago
Index: media/gpu/video_decode_accelerator_unittest.cc
diff --git a/content/common/gpu/media/video_decode_accelerator_unittest.cc b/media/gpu/video_decode_accelerator_unittest.cc
similarity index 86%
rename from content/common/gpu/media/video_decode_accelerator_unittest.cc
rename to media/gpu/video_decode_accelerator_unittest.cc
index b62a9089b06ce7a047eac5a3db5054989319c261..4aa9b9ac713593f4b139c79bb912cf062f108bd9 100644
--- a/content/common/gpu/media/video_decode_accelerator_unittest.cc
+++ b/media/gpu/video_decode_accelerator_unittest.cc
@@ -46,28 +46,28 @@
#include "base/thread_task_runner_handle.h"
#include "base/threading/thread.h"
#include "build/build_config.h"
-#include "content/common/gpu/media/fake_video_decode_accelerator.h"
-#include "content/common/gpu/media/gpu_video_decode_accelerator_factory_impl.h"
-#include "content/common/gpu/media/rendering_helper.h"
-#include "content/common/gpu/media/video_accelerator_unittest_helpers.h"
#include "gpu/command_buffer/service/gpu_preferences.h"
#include "media/filters/h264_parser.h"
+#include "media/gpu/fake_video_decode_accelerator.h"
+#include "media/gpu/ipc/service/gpu_video_decode_accelerator_factory_impl.h"
+#include "media/gpu/rendering_helper.h"
+#include "media/gpu/video_accelerator_unittest_helpers.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/codec/png_codec.h"
#include "ui/gl/gl_image.h"
#if defined(OS_WIN)
#include "base/win/windows_version.h"
-#include "content/common/gpu/media/dxva_video_decode_accelerator_win.h"
+#include "media/gpu/dxva_video_decode_accelerator_win.h"
#elif defined(OS_CHROMEOS)
#if defined(USE_V4L2_CODEC)
-#include "content/common/gpu/media/v4l2_device.h"
-#include "content/common/gpu/media/v4l2_slice_video_decode_accelerator.h"
-#include "content/common/gpu/media/v4l2_video_decode_accelerator.h"
+#include "media/gpu/v4l2_device.h"
+#include "media/gpu/v4l2_slice_video_decode_accelerator.h"
+#include "media/gpu/v4l2_video_decode_accelerator.h"
#endif
#if defined(ARCH_CPU_X86_FAMILY)
-#include "content/common/gpu/media/vaapi_video_decode_accelerator.h"
-#include "content/common/gpu/media/vaapi_wrapper.h"
+#include "media/gpu/vaapi_video_decode_accelerator.h"
+#include "media/gpu/vaapi_wrapper.h"
#endif // defined(ARCH_CPU_X86_FAMILY)
#else
#error The VideoAccelerator tests are not supported on this platform.
@@ -80,7 +80,7 @@
using media::VideoDecodeAccelerator;
-namespace content {
+namespace media {
namespace {
using base::MakeTuple;
@@ -158,8 +158,7 @@ struct TestVideoFile {
min_fps_render(-1),
min_fps_no_render(-1),
profile(media::VIDEO_CODEC_PROFILE_UNKNOWN),
- reset_after_frame_num(END_OF_STREAM_RESET) {
- }
+ reset_after_frame_num(END_OF_STREAM_RESET) {}
base::FilePath::StringType file_name;
int width;
@@ -184,27 +183,26 @@ void ReadGoldenThumbnailMD5s(const TestVideoFile* video_file,
filepath = filepath.AddExtension(FILE_PATH_LITERAL(".md5"));
std::string all_md5s;
base::ReadFileToString(filepath, &all_md5s);
- *md5_strings = base::SplitString(
- all_md5s, "\n", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
+ *md5_strings = base::SplitString(all_md5s, "\n", base::TRIM_WHITESPACE,
+ base::SPLIT_WANT_ALL);
// Check these are legitimate MD5s.
for (const std::string& md5_string : *md5_strings) {
- // Ignore the empty string added by SplitString
- if (!md5_string.length())
- continue;
- // Ignore comments
- if (md5_string.at(0) == '#')
- continue;
-
- LOG_ASSERT(static_cast<int>(md5_string.length()) ==
- kMD5StringLength) << md5_string;
- bool hex_only = std::count_if(md5_string.begin(),
- md5_string.end(), isxdigit) ==
- kMD5StringLength;
- LOG_ASSERT(hex_only) << md5_string;
+ // Ignore the empty string added by SplitString
+ if (!md5_string.length())
+ continue;
+ // Ignore comments
+ if (md5_string.at(0) == '#')
+ continue;
+
+ LOG_ASSERT(static_cast<int>(md5_string.length()) == kMD5StringLength)
+ << md5_string;
+ bool hex_only = std::count_if(md5_string.begin(), md5_string.end(),
+ isxdigit) == kMD5StringLength;
+ LOG_ASSERT(hex_only) << md5_string;
}
LOG_ASSERT(md5_strings->size() >= 1U) << " MD5 checksum file ("
- << filepath.MaybeAsASCII()
- << ") missing or empty.";
+ << filepath.MaybeAsASCII()
+ << ") missing or empty.";
}
// State of the GLRenderingVDAClient below. Order matters here as the test
@@ -377,8 +375,8 @@ class GLRenderingVDAClient
std::string GetBytesForNextFragment(size_t start_pos, size_t* end_pos);
// Helpers for GetBytesForNextFragment above.
void GetBytesForNextNALU(size_t start_pos, size_t* end_pos); // For h.264.
- std::string GetBytesForNextFrame(
- size_t start_pos, size_t* end_pos); // For VP8/9.
+ std::string GetBytesForNextFrame(size_t start_pos,
+ size_t* end_pos); // For VP8/9.
// Request decode of the next fragment in the encoded data.
void DecodeNextFragment();
@@ -560,19 +558,20 @@ void GLRenderingVDAClient::ProvidePictureBuffers(
for (uint32_t i = 0; i < requested_num_of_buffers; ++i) {
uint32_t texture_id;
base::WaitableEvent done(false, false);
- rendering_helper_->CreateTexture(
- texture_target_, &texture_id, dimensions, &done);
+ rendering_helper_->CreateTexture(texture_target_, &texture_id, dimensions,
+ &done);
done.Wait();
int32_t picture_buffer_id = next_picture_buffer_id_++;
- LOG_ASSERT(active_textures_
- .insert(std::make_pair(
- picture_buffer_id,
- new TextureRef(texture_id,
- base::Bind(&RenderingHelper::DeleteTexture,
- base::Unretained(rendering_helper_),
- texture_id))))
- .second);
+ LOG_ASSERT(
+ active_textures_
+ .insert(std::make_pair(
+ picture_buffer_id,
+ new TextureRef(texture_id,
+ base::Bind(&RenderingHelper::DeleteTexture,
+ base::Unretained(rendering_helper_),
+ texture_id))))
+ .second);
media::PictureBuffer::TextureIds ids;
ids.push_back(texture_id);
@@ -731,8 +730,7 @@ void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
output->WriteAtCurrentPos(s.data(), s.length());
base::TimeTicks t0 = initialize_done_ticks_;
for (size_t i = 0; i < frame_delivery_times_.size(); ++i) {
- s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n",
- i,
+ s = base::StringPrintf("frame %04" PRIuS ": %" PRId64 " us\n", i,
(frame_delivery_times_[i] - t0).InMicroseconds());
t0 = frame_delivery_times_[i];
output->WriteAtCurrentPos(s.data(), s.length());
@@ -740,8 +738,8 @@ void GLRenderingVDAClient::OutputFrameDeliveryTimes(base::File* output) {
}
static bool LookingAtNAL(const std::string& encoded, size_t pos) {
- return encoded[pos] == 0 && encoded[pos + 1] == 0 &&
- encoded[pos + 2] == 0 && encoded[pos + 3] == 1;
+ return encoded[pos] == 0 && encoded[pos + 1] == 0 && encoded[pos + 2] == 0 &&
+ encoded[pos + 3] == 1;
}
void GLRenderingVDAClient::SetState(ClientState new_state) {
@@ -781,12 +779,12 @@ void GLRenderingVDAClient::DeleteDecoder() {
SetState(static_cast<ClientState>(i));
}
-std::string GLRenderingVDAClient::GetBytesForFirstFragment(
- size_t start_pos, size_t* end_pos) {
+std::string GLRenderingVDAClient::GetBytesForFirstFragment(size_t start_pos,
+ size_t* end_pos) {
if (profile_ < media::H264PROFILE_MAX) {
*end_pos = start_pos;
while (*end_pos + 4 < encoded_data_.size()) {
- if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7) // SPS start frame
+ if ((encoded_data_[*end_pos + 4] & 0x1f) == 0x7) // SPS start frame
return GetBytesForNextFragment(*end_pos, end_pos);
GetBytesForNextNALU(*end_pos, end_pos);
num_skipped_fragments_++;
@@ -798,8 +796,8 @@ std::string GLRenderingVDAClient::GetBytesForFirstFragment(
return GetBytesForNextFragment(start_pos, end_pos);
}
-std::string GLRenderingVDAClient::GetBytesForNextFragment(
- size_t start_pos, size_t* end_pos) {
+std::string GLRenderingVDAClient::GetBytesForNextFragment(size_t start_pos,
+ size_t* end_pos) {
if (profile_ < media::H264PROFILE_MAX) {
*end_pos = start_pos;
GetBytesForNextNALU(*end_pos, end_pos);
@@ -812,8 +810,8 @@ std::string GLRenderingVDAClient::GetBytesForNextFragment(
return GetBytesForNextFrame(start_pos, end_pos);
}
-void GLRenderingVDAClient::GetBytesForNextNALU(
- size_t start_pos, size_t* end_pos) {
+void GLRenderingVDAClient::GetBytesForNextNALU(size_t start_pos,
+ size_t* end_pos) {
*end_pos = start_pos;
if (*end_pos + 4 > encoded_data_.size())
return;
@@ -827,8 +825,8 @@ void GLRenderingVDAClient::GetBytesForNextNALU(
*end_pos = encoded_data_.size();
}
-std::string GLRenderingVDAClient::GetBytesForNextFrame(
- size_t start_pos, size_t* end_pos) {
+std::string GLRenderingVDAClient::GetBytesForNextFrame(size_t start_pos,
+ size_t* end_pos) {
// Helpful description: http://wiki.multimedia.cx/index.php?title=IVF
std::string bytes;
if (start_pos == 0)
@@ -845,8 +843,7 @@ std::string GLRenderingVDAClient::GetBytesForNextFrame(
static bool FragmentHasConfigInfo(const uint8_t* data,
size_t size,
media::VideoCodecProfile profile) {
- if (profile >= media::H264PROFILE_MIN &&
- profile <= media::H264PROFILE_MAX) {
+ if (profile >= media::H264PROFILE_MIN && profile <= media::H264PROFILE_MAX) {
media::H264Parser parser;
parser.SetStream(data, size);
media::H264NALU nalu;
@@ -899,11 +896,11 @@ void GLRenderingVDAClient::DecodeNextFragment() {
LOG_ASSERT(shm.CreateAndMapAnonymous(next_fragment_size));
memcpy(shm.memory(), next_fragment_bytes.data(), next_fragment_size);
base::SharedMemoryHandle dup_handle;
- bool result = shm.ShareToProcess(base::GetCurrentProcessHandle(),
- &dup_handle);
+ bool result =
+ shm.ShareToProcess(base::GetCurrentProcessHandle(), &dup_handle);
LOG_ASSERT(result);
- media::BitstreamBuffer bitstream_buffer(
- next_bitstream_buffer_id_, dup_handle, next_fragment_size);
+ media::BitstreamBuffer bitstream_buffer(next_bitstream_buffer_id_, dup_handle,
+ next_fragment_size);
decode_start_time_[next_bitstream_buffer_id_] = base::TimeTicks::Now();
// Mask against 30 bits, to avoid (undefined) wraparound on signed integer.
next_bitstream_buffer_id_ = (next_bitstream_buffer_id_ + 1) & 0x3FFFFFFF;
@@ -962,10 +959,9 @@ class VideoDecodeAcceleratorTest : public ::testing::Test {
// Update the parameters of |test_video_files| according to
// |num_concurrent_decoders| and |reset_point|. Ex: the expected number of
// frames should be adjusted if decoder is reset in the middle of the stream.
- void UpdateTestVideoFileParams(
- size_t num_concurrent_decoders,
- int reset_point,
- std::vector<TestVideoFile*>* test_video_files);
+ void UpdateTestVideoFileParams(size_t num_concurrent_decoders,
+ int reset_point,
+ std::vector<TestVideoFile*>* test_video_files);
void InitializeRenderingHelper(const RenderingHelperParams& helper_params);
void CreateAndStartDecoder(GLRenderingVDAClient* client,
@@ -985,8 +981,7 @@ class VideoDecodeAcceleratorTest : public ::testing::Test {
DISALLOW_COPY_AND_ASSIGN(VideoDecodeAcceleratorTest);
};
-VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest() {
-}
+VideoDecodeAcceleratorTest::VideoDecodeAcceleratorTest() {}
void VideoDecodeAcceleratorTest::SetUp() {
ParseAndReadTestVideoData(g_test_video_data, &test_video_files_);
@@ -1009,14 +1004,14 @@ void VideoDecodeAcceleratorTest::TearDown() {
void VideoDecodeAcceleratorTest::ParseAndReadTestVideoData(
base::FilePath::StringType data,
std::vector<TestVideoFile*>* test_video_files) {
- std::vector<base::FilePath::StringType> entries = base::SplitString(
- data, base::FilePath::StringType(1, ';'),
- base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
+ std::vector<base::FilePath::StringType> entries =
+ base::SplitString(data, base::FilePath::StringType(1, ';'),
+ base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
LOG_ASSERT(entries.size() >= 1U) << data;
for (size_t index = 0; index < entries.size(); ++index) {
- std::vector<base::FilePath::StringType> fields = base::SplitString(
- entries[index], base::FilePath::StringType(1, ':'),
- base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
+ std::vector<base::FilePath::StringType> fields =
+ base::SplitString(entries[index], base::FilePath::StringType(1, ':'),
+ base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
LOG_ASSERT(fields.size() >= 1U) << entries[index];
LOG_ASSERT(fields.size() <= 8U) << entries[index];
TestVideoFile* video_file = new TestVideoFile(fields[0]);
@@ -1129,8 +1124,7 @@ void VideoDecodeAcceleratorTest::OutputLogFile(
class VideoDecodeAcceleratorParamTest
: public VideoDecodeAcceleratorTest,
public ::testing::WithParamInterface<
- base::Tuple<int, int, int, ResetPoint, ClientState, bool, bool> > {
-};
+ base::Tuple<int, int, int, ResetPoint, ClientState, bool, bool>> {};
// Wait for |note| to report a state and if it's not |expected_state| then
// assert |client| has deleted its decoder.
@@ -1139,10 +1133,11 @@ static void AssertWaitForStateOrDeleted(
GLRenderingVDAClient* client,
ClientState expected_state) {
ClientState state = note->Wait();
- if (state == expected_state) return;
+ if (state == expected_state)
+ return;
ASSERT_TRUE(client->decoder_deleted())
- << "Decoder not deleted but Wait() returned " << state
- << ", instead of " << expected_state;
+ << "Decoder not deleted but Wait() returned " << state << ", instead of "
+ << expected_state;
}
// We assert a minimal number of concurrent decoders we expect to succeed.
@@ -1167,14 +1162,14 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
if (g_num_play_throughs > 0)
num_play_throughs = g_num_play_throughs;
- UpdateTestVideoFileParams(
- num_concurrent_decoders, reset_point, &test_video_files_);
+ UpdateTestVideoFileParams(num_concurrent_decoders, reset_point,
+ &test_video_files_);
// Suppress GL rendering for all tests when the "--rendering_fps" is 0.
const bool suppress_rendering = g_rendering_fps == 0;
- std::vector<ClientStateNotification<ClientState>*>
- notes(num_concurrent_decoders, NULL);
+ std::vector<ClientStateNotification<ClientState>*> notes(
+ num_concurrent_decoders, NULL);
std::vector<GLRenderingVDAClient*> clients(num_concurrent_decoders, NULL);
RenderingHelperParams helper_params;
@@ -1202,23 +1197,13 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
delay_after_frame_num = video_file->num_frames - kMaxFramesToDelayReuse;
}
- GLRenderingVDAClient* client =
- new GLRenderingVDAClient(index,
- &rendering_helper_,
- note,
- video_file->data_str,
- num_in_flight_decodes,
- num_play_throughs,
- video_file->reset_after_frame_num,
- delete_decoder_state,
- video_file->width,
- video_file->height,
- video_file->profile,
- g_fake_decoder,
- suppress_rendering,
- delay_after_frame_num,
- 0,
- render_as_thumbnails);
+ GLRenderingVDAClient* client = new GLRenderingVDAClient(
+ index, &rendering_helper_, note, video_file->data_str,
+ num_in_flight_decodes, num_play_throughs,
+ video_file->reset_after_frame_num, delete_decoder_state,
+ video_file->width, video_file->height, video_file->profile,
+ g_fake_decoder, suppress_rendering, delay_after_frame_num, 0,
+ render_as_thumbnails);
clients[index] = client;
helper_params.window_sizes.push_back(
@@ -1273,8 +1258,9 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
AssertWaitForStateOrDeleted(note, clients[i], CS_DESTROYED));
}
// Finally assert that decoding went as expected.
- for (size_t i = 0; i < num_concurrent_decoders &&
- !skip_performance_and_correctness_checks; ++i) {
+ for (size_t i = 0;
+ i < num_concurrent_decoders && !skip_performance_and_correctness_checks;
+ ++i) {
// We can only make performance/correctness assertions if the decoder was
// allowed to finish.
if (delete_decoder_state < CS_FLUSHED)
@@ -1291,14 +1277,14 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
}
if (reset_point == END_OF_STREAM_RESET) {
EXPECT_EQ(video_file->num_fragments, client->num_skipped_fragments() +
- client->num_queued_fragments());
+ client->num_queued_fragments());
EXPECT_EQ(client->num_done_bitstream_buffers(),
client->num_queued_fragments());
}
LOG(INFO) << "Decoder " << i << " fps: " << client->frames_per_second();
if (!render_as_thumbnails) {
- int min_fps = suppress_rendering ?
- video_file->min_fps_no_render : video_file->min_fps_render;
+ int min_fps = suppress_rendering ? video_file->min_fps_no_render
+ : video_file->min_fps_render;
if (min_fps > 0 && !test_reuse_delay)
EXPECT_GT(client->frames_per_second(), min_fps);
}
@@ -1323,22 +1309,18 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
if (match == golden_md5s.end()) {
// Convert raw RGB into PNG for export.
std::vector<unsigned char> png;
- gfx::PNGCodec::Encode(&rgb[0],
- gfx::PNGCodec::FORMAT_RGB,
+ gfx::PNGCodec::Encode(&rgb[0], gfx::PNGCodec::FORMAT_RGB,
kThumbnailsPageSize,
- kThumbnailsPageSize.width() * 3,
- true,
- std::vector<gfx::PNGCodec::Comment>(),
- &png);
+ kThumbnailsPageSize.width() * 3, true,
+ std::vector<gfx::PNGCodec::Comment>(), &png);
LOG(ERROR) << "Unknown thumbnails MD5: " << md5_string;
base::FilePath filepath(test_video_files_[0]->file_name);
filepath = filepath.AddExtension(FILE_PATH_LITERAL(".bad_thumbnails"));
filepath = filepath.AddExtension(FILE_PATH_LITERAL(".png"));
- int num_bytes = base::WriteFile(filepath,
- reinterpret_cast<char*>(&png[0]),
- png.size());
+ int num_bytes = base::WriteFile(
+ filepath, reinterpret_cast<char*>(&png[0]), png.size());
ASSERT_EQ(num_bytes, static_cast<int>(png.size()));
}
ASSERT_NE(match, golden_md5s.end());
@@ -1371,39 +1353,49 @@ TEST_P(VideoDecodeAcceleratorParamTest, TestSimpleDecode) {
// Test that replay after EOS works fine.
INSTANTIATE_TEST_CASE_P(
- ReplayAfterEOS, VideoDecodeAcceleratorParamTest,
+ ReplayAfterEOS,
+ VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 4, END_OF_STREAM_RESET, CS_RESET, false, false)));
// Test that Reset() before the first Decode() works fine.
INSTANTIATE_TEST_CASE_P(
- ResetBeforeDecode, VideoDecodeAcceleratorParamTest,
+ ResetBeforeDecode,
+ VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, START_OF_STREAM_RESET, CS_RESET, false, false)));
// Test Reset() immediately after Decode() containing config info.
INSTANTIATE_TEST_CASE_P(
- ResetAfterFirstConfigInfo, VideoDecodeAcceleratorParamTest,
- ::testing::Values(
- MakeTuple(
- 1, 1, 1, RESET_AFTER_FIRST_CONFIG_INFO, CS_RESET, false, false)));
+ ResetAfterFirstConfigInfo,
+ VideoDecodeAcceleratorParamTest,
+ ::testing::Values(MakeTuple(1,
+ 1,
+ 1,
+ RESET_AFTER_FIRST_CONFIG_INFO,
+ CS_RESET,
+ false,
+ false)));
// Test that Reset() mid-stream works fine and doesn't affect decoding even when
// Decode() calls are made during the reset.
INSTANTIATE_TEST_CASE_P(
- MidStreamReset, VideoDecodeAcceleratorParamTest,
+ MidStreamReset,
+ VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, MID_STREAM_RESET, CS_RESET, false, false)));
INSTANTIATE_TEST_CASE_P(
- SlowRendering, VideoDecodeAcceleratorParamTest,
+ SlowRendering,
+ VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, true, false)));
// Test that Destroy() mid-stream works fine (primarily this is testing that no
// crashes occur).
INSTANTIATE_TEST_CASE_P(
- TearDownTiming, VideoDecodeAcceleratorParamTest,
+ TearDownTiming,
+ VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_DECODER_SET, false, false),
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_INITIALIZED, false, false),
@@ -1411,16 +1403,32 @@ INSTANTIATE_TEST_CASE_P(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_FLUSHED, false, false),
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESETTING, false, false),
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
- MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
- static_cast<ClientState>(-1), false, false),
- MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
- static_cast<ClientState>(-10), false, false),
- MakeTuple(1, 1, 1, END_OF_STREAM_RESET,
- static_cast<ClientState>(-100), false, false)));
+ MakeTuple(1,
+ 1,
+ 1,
+ END_OF_STREAM_RESET,
+ static_cast<ClientState>(-1),
+ false,
+ false),
+ MakeTuple(1,
+ 1,
+ 1,
+ END_OF_STREAM_RESET,
+ static_cast<ClientState>(-10),
+ false,
+ false),
+ MakeTuple(1,
+ 1,
+ 1,
+ END_OF_STREAM_RESET,
+ static_cast<ClientState>(-100),
+ false,
+ false)));
// Test that decoding various variation works with multiple in-flight decodes.
INSTANTIATE_TEST_CASE_P(
- DecodeVariations, VideoDecodeAcceleratorParamTest,
+ DecodeVariations,
+ VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
MakeTuple(1, 10, 1, END_OF_STREAM_RESET, CS_RESET, false, false),
@@ -1429,18 +1437,29 @@ INSTANTIATE_TEST_CASE_P(
// Find out how many concurrent decoders can go before we exhaust system
// resources.
-INSTANTIATE_TEST_CASE_P(
- ResourceExhaustion, VideoDecodeAcceleratorParamTest,
- ::testing::Values(
- // +0 hack below to promote enum to int.
- MakeTuple(kMinSupportedNumConcurrentDecoders + 0, 1, 1,
- END_OF_STREAM_RESET, CS_RESET, false, false),
- MakeTuple(kMinSupportedNumConcurrentDecoders + 1, 1, 1,
- END_OF_STREAM_RESET, CS_RESET, false, false)));
+INSTANTIATE_TEST_CASE_P(ResourceExhaustion,
+ VideoDecodeAcceleratorParamTest,
+ ::testing::Values(
+ // +0 hack below to promote enum to int.
+ MakeTuple(kMinSupportedNumConcurrentDecoders + 0,
+ 1,
+ 1,
+ END_OF_STREAM_RESET,
+ CS_RESET,
+ false,
+ false),
+ MakeTuple(kMinSupportedNumConcurrentDecoders + 1,
+ 1,
+ 1,
+ END_OF_STREAM_RESET,
+ CS_RESET,
+ false,
+ false)));
// Thumbnailing test
INSTANTIATE_TEST_CASE_P(
- Thumbnail, VideoDecodeAcceleratorParamTest,
+ Thumbnail,
+ VideoDecodeAcceleratorParamTest,
::testing::Values(
MakeTuple(1, 1, 1, END_OF_STREAM_RESET, CS_RESET, false, true)));
@@ -1456,23 +1475,13 @@ TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
ClientStateNotification<ClientState>* note =
new ClientStateNotification<ClientState>();
- GLRenderingVDAClient* client =
- new GLRenderingVDAClient(0,
- &rendering_helper_,
- note,
- test_video_files_[0]->data_str,
- 1,
- 1,
- test_video_files_[0]->reset_after_frame_num,
- CS_RESET,
- test_video_files_[0]->width,
- test_video_files_[0]->height,
- test_video_files_[0]->profile,
- g_fake_decoder,
- true,
- std::numeric_limits<int>::max(),
- kWebRtcDecodeCallsPerSecond,
- false /* render_as_thumbnail */);
+ GLRenderingVDAClient* client = new GLRenderingVDAClient(
+ 0, &rendering_helper_, note, test_video_files_[0]->data_str, 1, 1,
+ test_video_files_[0]->reset_after_frame_num, CS_RESET,
+ test_video_files_[0]->width, test_video_files_[0]->height,
+ test_video_files_[0]->profile, g_fake_decoder, true,
+ std::numeric_limits<int>::max(), kWebRtcDecodeCallsPerSecond,
+ false /* render_as_thumbnail */);
helper_params.window_sizes.push_back(
gfx::Size(test_video_files_[0]->width, test_video_files_[0]->height));
InitializeRenderingHelper(helper_params);
@@ -1500,9 +1509,9 @@ TEST_F(VideoDecodeAcceleratorTest, TestDecodeTimeMedian) {
// - Test frame size changes mid-stream
} // namespace
-} // namespace content
+} // namespace media
-int main(int argc, char **argv) {
+int main(int argc, char** argv) {
testing::InitGoogleTest(&argc, argv); // Removes gtest-specific args.
base::CommandLine::Init(argc, argv);
@@ -1518,39 +1527,39 @@ int main(int argc, char **argv) {
for (base::CommandLine::SwitchMap::const_iterator it = switches.begin();
it != switches.end(); ++it) {
if (it->first == "test_video_data") {
- content::g_test_video_data = it->second.c_str();
+ media::g_test_video_data = it->second.c_str();
continue;
}
// The output log for VDA performance test.
if (it->first == "output_log") {
- content::g_output_log = it->second.c_str();
+ media::g_output_log = it->second.c_str();
continue;
}
if (it->first == "rendering_fps") {
// On Windows, CommandLine::StringType is wstring. We need to convert
// it to std::string first
std::string input(it->second.begin(), it->second.end());
- LOG_ASSERT(base::StringToDouble(input, &content::g_rendering_fps));
+ LOG_ASSERT(base::StringToDouble(input, &media::g_rendering_fps));
continue;
}
if (it->first == "rendering_warm_up") {
std::string input(it->second.begin(), it->second.end());
- LOG_ASSERT(base::StringToInt(input, &content::g_rendering_warm_up));
+ LOG_ASSERT(base::StringToInt(input, &media::g_rendering_warm_up));
continue;
}
// TODO(owenlin): Remove this flag once it is not used in autotest.
if (it->first == "disable_rendering") {
- content::g_rendering_fps = 0;
+ media::g_rendering_fps = 0;
continue;
}
if (it->first == "num_play_throughs") {
std::string input(it->second.begin(), it->second.end());
- LOG_ASSERT(base::StringToInt(input, &content::g_num_play_throughs));
+ LOG_ASSERT(base::StringToInt(input, &media::g_num_play_throughs));
continue;
}
if (it->first == "fake_decoder") {
- content::g_fake_decoder = 1;
+ media::g_fake_decoder = 1;
continue;
}
if (it->first == "v" || it->first == "vmodule")
@@ -1576,13 +1585,13 @@ int main(int argc, char **argv) {
#endif
#if defined(OS_CHROMEOS) && defined(ARCH_CPU_X86_FAMILY)
- content::VaapiWrapper::PreSandboxInitialization();
+ media::VaapiWrapper::PreSandboxInitialization();
#endif
- content::g_env =
- reinterpret_cast<content::VideoDecodeAcceleratorTestEnvironment*>(
+ media::g_env =
+ reinterpret_cast<media::VideoDecodeAcceleratorTestEnvironment*>(
testing::AddGlobalTestEnvironment(
- new content::VideoDecodeAcceleratorTestEnvironment()));
+ new media::VideoDecodeAcceleratorTestEnvironment()));
return RUN_ALL_TESTS();
}
