Chromium Code Reviews

Unified Diff: media/test/pipeline_integration_test.cc

Issue 2692593002: Media Remoting: End to end integration tests. (Closed)
Patch Set: File rename only. Created 3 years, 8 months ago
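This patch set converts most of the TEST_F pipeline integration tests below into TEST_P tests, so that each test body runs once against the regular media pipeline and, when ENABLE_MEDIA_REMOTING is set, once against the media remoting pipeline. A minimal sketch of the pattern, for orientation only (PipelineType and SetUpRemotingPipeline() are assumed to be provided by the base fixture in pipeline_integration_test_base.h; IntegrationTestData and kIntegrationTests are defined in the diff below):

// Each parameter selects which pipeline the fixture drives.
class CommonPipelineIntegrationTest
    : public PipelineIntegrationTest,
      public testing::WithParamInterface<IntegrationTestData> {
 public:
  CommonPipelineIntegrationTest() {
#if BUILDFLAG(ENABLE_MEDIA_REMOTING)
    // Route playback through the remoting renderer for the MediaRemoting run.
    if (GetParam().type == PipelineType::MediaRemoting)
      SetUpRemotingPipeline();
#endif
  }
};

// Former TEST_F bodies become TEST_P and run once per PipelineType.
TEST_P(CommonPipelineIntegrationTest, BasicPlayback) {
  ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
  Play();
  ASSERT_TRUE(WaitUntilOnEnded());
}

INSTANTIATE_TEST_CASE_P(,
                        CommonPipelineIntegrationTest,
                        testing::ValuesIn(kIntegrationTests));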
Index: media/test/pipeline_integration_test.cc
diff --git a/media/test/pipeline_integration_test.cc b/media/test/pipeline_integration_test.cc
index 051a758b68201eaf1dd5afe89e7262dda17f4bba..39724c8fa82e8aa1e0b0a43e8d2d19cf22da4f13 100644
--- a/media/test/pipeline_integration_test.cc
+++ b/media/test/pipeline_integration_test.cc
@@ -80,12 +80,6 @@
#define MAYBE_TEXT(test) test
#endif
-#if defined(DISABLE_CLOCKLESS_TESTS)
-#define MAYBE_CLOCKLESS(test) DISABLED_##test
-#else
-#define MAYBE_CLOCKLESS(test) test
-#endif
-
using testing::_;
using testing::AnyNumber;
using testing::AtLeast;
@@ -94,6 +88,8 @@ using testing::SaveArg;
namespace media {
+namespace {
+
const char kSourceId[] = "SourceId";
const char kWebM[] = "video/webm; codecs=\"vp8,vorbis\"";
@@ -665,6 +661,8 @@ class FailingVideoDecoder : public VideoDecoder {
bool NeedsBitstreamConversion() const override { return true; }
};
+} // namespace
+
// TODO(xhwang): These tests have been disabled for some time as apptests and no
// longer pass. They need to be reconstituted as shell tests.
// Currently there are compile issues which must be resolved,
@@ -756,7 +754,7 @@ class PipelineIntegrationTest : public PipelineIntegrationTestHost {
EXPECT_CALL(*this, OnWaitingForDecryptionKey()).Times(0);
}
- pipeline_->Start(demuxer_.get(), CreateRenderer(), this,
+ pipeline_->Start(demuxer_.get(), renderer_factory_->CreateRenderer(), this,
base::Bind(&PipelineIntegrationTest::OnStatusCallback,
base::Unretained(this)));
@@ -801,12 +799,14 @@ class PipelineIntegrationTest : public PipelineIntegrationTestHost {
};
struct PlaybackTestData {
+ const PipelineType type;
const std::string filename;
const uint32_t start_time_ms;
const uint32_t duration_ms;
};
struct MSEPlaybackTestData {
+ const PipelineType type;
const std::string filename;
const std::string mimetype;
const size_t append_bytes;
@@ -824,15 +824,29 @@ std::ostream& operator<<(std::ostream& os, const MSEPlaybackTestData& data) {
class BasicPlaybackTest : public PipelineIntegrationTest,
public testing::WithParamInterface<PlaybackTestData> {
+ public:
+ BasicPlaybackTest() {
+#if BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ if (GetParam().type == PipelineType::MediaRemoting)
+ SetUpRemotingPipeline();
+#endif // BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ }
};
class BasicMSEPlaybackTest
: public ::testing::WithParamInterface<MSEPlaybackTestData>,
- public PipelineIntegrationTest {};
+ public PipelineIntegrationTest {
+ public:
+ BasicMSEPlaybackTest() {
+#if BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ if (GetParam().type == PipelineType::MediaRemoting)
+ SetUpRemotingPipeline();
+#endif // BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ }
+};
TEST_P(BasicPlaybackTest, PlayToEnd) {
PlaybackTestData data = GetParam();
-
ASSERT_EQ(PIPELINE_OK,
Start(data.filename, kClockless | kUnreliableDuration));
EXPECT_EQ(data.start_time_ms, demuxer_->GetStartTime().InMilliseconds());
@@ -844,7 +858,6 @@ TEST_P(BasicPlaybackTest, PlayToEnd) {
TEST_P(BasicMSEPlaybackTest, PlayToEnd) {
MSEPlaybackTestData data = GetParam();
-
MockMediaSource source(data.filename, data.mimetype, data.append_bytes);
// TODO -- ADD uint8_t test_type to StartWithMSE and pass clockless flags
ASSERT_EQ(PIPELINE_OK,
@@ -867,11 +880,20 @@ TEST_P(BasicMSEPlaybackTest, PlayToEnd) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+// Any new/changed entries should be made for both the ::Media and
+// ::MediaRemoting types. If you don't think an entry applies, please contact
+// one of the media/remoting/OWNERS.
const PlaybackTestData kADTSTests[] = {
- {"bear-audio-main-aac.aac", 0, 2724},
- {"bear-audio-lc-aac.aac", 0, 2858},
- {"bear-audio-implicit-he-aac-v1.aac", 0, 2812},
- {"bear-audio-implicit-he-aac-v2.aac", 0, 3047},
+ {PipelineType::Media, "bear-audio-main-aac.aac", 0, 2724},
+ {PipelineType::Media, "bear-audio-lc-aac.aac", 0, 2858},
+ {PipelineType::Media, "bear-audio-implicit-he-aac-v1.aac", 0, 2812},
+ {PipelineType::Media, "bear-audio-implicit-he-aac-v2.aac", 0, 3047},
+#if BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ {PipelineType::MediaRemoting, "bear-audio-main-aac.aac", 0, 2724},
+ {PipelineType::MediaRemoting, "bear-audio-lc-aac.aac", 0, 2858},
+ {PipelineType::MediaRemoting, "bear-audio-implicit-he-aac-v1.aac", 0, 2812},
+ {PipelineType::MediaRemoting, "bear-audio-implicit-he-aac-v2.aac", 0, 3047},
+#endif // BUILDFLAG(ENABLE_MEDIA_REMOTING)
};
// TODO(chcunningham): Migrate other basic playback tests to TEST_P.
@@ -879,11 +901,28 @@ INSTANTIATE_TEST_CASE_P(ProprietaryCodecs,
BasicPlaybackTest,
testing::ValuesIn(kADTSTests));
+// Any new/changed entries should be made for both the ::Media and
+// ::MediaRemoting types. If you don't think an entry applies, please contact
+// one of the media/remoting/OWNERS.
const MSEPlaybackTestData kMediaSourceADTSTests[] = {
- {"bear-audio-main-aac.aac", kADTS, kAppendWholeFile, 2773},
- {"bear-audio-lc-aac.aac", kADTS, kAppendWholeFile, 2794},
- {"bear-audio-implicit-he-aac-v1.aac", kADTS, kAppendWholeFile, 2858},
- {"bear-audio-implicit-he-aac-v2.aac", kADTS, kAppendWholeFile, 2901},
+ {PipelineType::Media, "bear-audio-main-aac.aac", kADTS, kAppendWholeFile,
+ 2773},
+ {PipelineType::Media, "bear-audio-lc-aac.aac", kADTS, kAppendWholeFile,
+ 2794},
+ {PipelineType::Media, "bear-audio-implicit-he-aac-v1.aac", kADTS,
+ kAppendWholeFile, 2858},
+ {PipelineType::Media, "bear-audio-implicit-he-aac-v2.aac", kADTS,
+ kAppendWholeFile, 2901},
+#if BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ {PipelineType::MediaRemoting, "bear-audio-main-aac.aac", kADTS,
+ kAppendWholeFile, 2773},
+ {PipelineType::MediaRemoting, "bear-audio-lc-aac.aac", kADTS,
+ kAppendWholeFile, 2794},
+ {PipelineType::MediaRemoting, "bear-audio-implicit-he-aac-v1.aac", kADTS,
+ kAppendWholeFile, 2858},
+ {PipelineType::MediaRemoting, "bear-audio-implicit-he-aac-v2.aac", kADTS,
+ kAppendWholeFile, 2901},
+#endif // BUILDFLAG(ENABLE_MEDIA_REMOTING)
};
// TODO(chcunningham): Migrate other basic MSE playback tests to TEST_P.
@@ -893,7 +932,29 @@ INSTANTIATE_TEST_CASE_P(ProprietaryCodecs,
#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
-TEST_F(PipelineIntegrationTest, BasicPlayback) {
+struct IntegrationTestData {
+ const PipelineType type;
+};
+
+// Tells gtest how to print our IntegrationTestData structure.
+std::ostream& operator<<(std::ostream& os, const IntegrationTestData& data) {
+ return os << (data.type == PipelineType::Media ? "Media" : "MediaRemoting");
+}
+
+// These tests cover both the media pipeline and the media remoting pipeline.
+class CommonPipelineIntegrationTest
+ : public PipelineIntegrationTest,
+ public testing::WithParamInterface<IntegrationTestData> {
+ public:
+ CommonPipelineIntegrationTest() {
+#if BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ if (GetParam().type == PipelineType::MediaRemoting)
+ SetUpRemotingPipeline();
+#endif // BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ }
+};
+
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
Play();
@@ -901,7 +962,7 @@ TEST_F(PipelineIntegrationTest, BasicPlayback) {
ASSERT_TRUE(WaitUntilOnEnded());
}
-TEST_F(PipelineIntegrationTest, BasicPlaybackOpusOgg) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackOpusOgg) {
ASSERT_EQ(PIPELINE_OK, Start("bear-opus.ogg"));
Play();
@@ -909,7 +970,7 @@ TEST_F(PipelineIntegrationTest, BasicPlaybackOpusOgg) {
ASSERT_TRUE(WaitUntilOnEnded());
}
-TEST_F(PipelineIntegrationTest, BasicPlaybackHashed) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHashed) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed));
Play();
@@ -925,7 +986,8 @@ base::TimeDelta TimestampMs(int milliseconds) {
return base::TimeDelta::FromMilliseconds(milliseconds);
}
-TEST_F(PipelineIntegrationTest, PlaybackWithAudioTrackDisabledThenEnabled) {
+TEST_P(CommonPipelineIntegrationTest,
+ PlaybackWithAudioTrackDisabledThenEnabled) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed));
// Disable audio.
@@ -959,7 +1021,8 @@ TEST_F(PipelineIntegrationTest, PlaybackWithAudioTrackDisabledThenEnabled) {
EXPECT_HASH_EQ("-1.53,0.21,1.23,1.56,-0.34,-0.94,", GetAudioHash());
}
-TEST_F(PipelineIntegrationTest, PlaybackWithVideoTrackDisabledThenEnabled) {
+TEST_P(CommonPipelineIntegrationTest,
+ PlaybackWithVideoTrackDisabledThenEnabled) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed));
// Disable video.
@@ -999,13 +1062,13 @@ TEST_F(PipelineIntegrationTest, PlaybackWithVideoTrackDisabledThenEnabled) {
EXPECT_HASH_EQ("fd59357dfd9c144ab4fb8181b2de32c3", GetVideoHash());
}
-TEST_F(PipelineIntegrationTest, TrackStatusChangesBeforePipelineStarted) {
+TEST_P(CommonPipelineIntegrationTest, TrackStatusChangesBeforePipelineStarted) {
std::vector<MediaTrack::Id> empty_track_ids;
pipeline_->OnEnabledAudioTracksChanged(empty_track_ids);
pipeline_->OnSelectedVideoTrackChanged(base::nullopt);
}
-TEST_F(PipelineIntegrationTest, TrackStatusChangesAfterPipelineEnded) {
+TEST_P(CommonPipelineIntegrationTest, TrackStatusChangesAfterPipelineEnded) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed));
Play();
ASSERT_TRUE(WaitUntilOnEnded());
@@ -1021,7 +1084,7 @@ TEST_F(PipelineIntegrationTest, TrackStatusChangesAfterPipelineEnded) {
pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1"));
}
-TEST_F(PipelineIntegrationTest, TrackStatusChangesWhileSuspended) {
+TEST_P(CommonPipelineIntegrationTest, TrackStatusChangesWhileSuspended) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed));
Play();
@@ -1059,54 +1122,7 @@ TEST_F(PipelineIntegrationTest, TrackStatusChangesWhileSuspended) {
ASSERT_TRUE(WaitUntilOnEnded());
}
-TEST_F(PipelineIntegrationTest, ReinitRenderersWhileAudioTrackIsDisabled) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
- Play();
-
- // These get triggered every time playback is resumed.
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240)))
- .Times(AnyNumber());
- EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber());
-
- // Disable the audio track.
- std::vector<MediaTrack::Id> track_ids;
- pipeline_->OnEnabledAudioTracksChanged(track_ids);
- // pipeline.Suspend() releases renderers and pipeline.Resume() recreates and
- // reinitializes renderers while the audio track is disabled.
- ASSERT_TRUE(Suspend());
- ASSERT_TRUE(Resume(TimestampMs(100)));
- // Now re-enable the audio track, playback should continue successfully.
- EXPECT_CALL(*this, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH)).Times(1);
- track_ids.push_back("2");
- pipeline_->OnEnabledAudioTracksChanged(track_ids);
- ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200)));
-
- Stop();
-}
-
-TEST_F(PipelineIntegrationTest, ReinitRenderersWhileVideoTrackIsDisabled) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed));
- Play();
-
- // These get triggered every time playback is resumed.
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240)))
- .Times(AnyNumber());
- EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber());
-
- // Disable the video track.
- pipeline_->OnSelectedVideoTrackChanged(base::nullopt);
- // pipeline.Suspend() releases renderers and pipeline.Resume() recreates and
- // reinitializes renderers while the video track is disabled.
- ASSERT_TRUE(Suspend());
- ASSERT_TRUE(Resume(TimestampMs(100)));
- // Now re-enable the video track, playback should continue successfully.
- pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1"));
- ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200)));
-
- Stop();
-}
-
-TEST_F(PipelineIntegrationTest, PipelineStoppedWhileAudioRestartPending) {
+TEST_P(CommonPipelineIntegrationTest, PipelineStoppedWhileAudioRestartPending) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
Play();
@@ -1121,7 +1137,7 @@ TEST_F(PipelineIntegrationTest, PipelineStoppedWhileAudioRestartPending) {
Stop();
}
-TEST_F(PipelineIntegrationTest, PipelineStoppedWhileVideoRestartPending) {
+TEST_P(CommonPipelineIntegrationTest, PipelineStoppedWhileVideoRestartPending) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
Play();
@@ -1134,8 +1150,11 @@ TEST_F(PipelineIntegrationTest, PipelineStoppedWhileVideoRestartPending) {
Stop();
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_CLOCKLESS(BasicPlaybackOpusOggTrimmingHashed)) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackOpusOggTrimmingHashed) {
+#if defined(DISABLE_CLOCKLESS_TESTS)
+ return;
+#endif // defined(DISABLE_CLOCKLESS_TESTS)
+
ASSERT_EQ(PIPELINE_OK,
Start("opus-trimming-test.webm", kHashed | kClockless));
@@ -1158,8 +1177,11 @@ TEST_F(PipelineIntegrationTest,
EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash());
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_CLOCKLESS(BasicPlaybackOpusWebmTrimmingHashed)) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackOpusWebmTrimmingHashed) {
+#if defined(DISABLE_CLOCKLESS_TESTS)
+ return;
+#endif // defined(DISABLE_CLOCKLESS_TESTS)
+
ASSERT_EQ(PIPELINE_OK,
Start("opus-trimming-test.webm", kHashed | kClockless));
@@ -1182,8 +1204,12 @@ TEST_F(PipelineIntegrationTest,
EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash());
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_CLOCKLESS(BasicPlaybackOpusWebmTrimmingHashed_MediaSource)) {
+TEST_P(CommonPipelineIntegrationTest,
+ BasicPlaybackOpusWebmTrimmingHashed_MediaSource) {
+#if defined(DISABLE_CLOCKLESS_TESTS)
+ return;
+#endif // defined(DISABLE_CLOCKLESS_TESTS)
+
MockMediaSource source("opus-trimming-test.webm", kOpusAudioOnlyWebM,
kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(
@@ -1213,8 +1239,12 @@ TEST_F(PipelineIntegrationTest,
EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash());
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_CLOCKLESS(BasicPlaybackOpusPrerollExceedsCodecDelay)) {
+TEST_P(CommonPipelineIntegrationTest,
+ BasicPlaybackOpusPrerollExceedsCodecDelay) {
+#if defined(DISABLE_CLOCKLESS_TESTS)
+ return;
+#endif // defined(DISABLE_CLOCKLESS_TESTS)
+
ASSERT_EQ(PIPELINE_OK, Start("bear-opus.webm", kHashed | kClockless));
AudioDecoderConfig config =
@@ -1237,8 +1267,12 @@ TEST_F(PipelineIntegrationTest,
EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_2, GetAudioHash());
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_CLOCKLESS(BasicPlaybackOpusPrerollExceedsCodecDelay_MediaSource)) {
+TEST_P(CommonPipelineIntegrationTest,
+ BasicPlaybackOpusPrerollExceedsCodecDelay_MediaSource) {
+#if defined(DISABLE_CLOCKLESS_TESTS)
+ return;
+#endif // defined(DISABLE_CLOCKLESS_TESTS)
+
MockMediaSource source("bear-opus.webm", kOpusAudioOnlyWebM,
kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(
@@ -1267,7 +1301,7 @@ TEST_F(PipelineIntegrationTest,
EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_2, GetAudioHash());
}
-TEST_F(PipelineIntegrationTest, BasicPlaybackLive) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackLive) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-live.webm", kHashed));
// Live stream does not have duration in the initialization segment.
@@ -1283,7 +1317,7 @@ TEST_F(PipelineIntegrationTest, BasicPlaybackLive) {
EXPECT_EQ(kLiveTimelineOffset(), demuxer_->GetTimelineOffset());
}
-TEST_F(PipelineIntegrationTest, S32PlaybackHashed) {
+TEST_P(CommonPipelineIntegrationTest, S32PlaybackHashed) {
ASSERT_EQ(PIPELINE_OK, Start("sfx_s32le.wav", kHashed));
Play();
ASSERT_TRUE(WaitUntilOnEnded());
@@ -1291,7 +1325,7 @@ TEST_F(PipelineIntegrationTest, S32PlaybackHashed) {
EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash());
}
-TEST_F(PipelineIntegrationTest, F32PlaybackHashed) {
+TEST_P(CommonPipelineIntegrationTest, F32PlaybackHashed) {
ASSERT_EQ(PIPELINE_OK, Start("sfx_f32le.wav", kHashed));
Play();
ASSERT_TRUE(WaitUntilOnEnded());
@@ -1299,22 +1333,7 @@ TEST_F(PipelineIntegrationTest, F32PlaybackHashed) {
EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash());
}
-TEST_F(PipelineIntegrationTest, MAYBE_EME(BasicPlaybackEncrypted)) {
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- set_encrypted_media_init_data_cb(
- base::Bind(&FakeEncryptedMedia::OnEncryptedMediaInitData,
- base::Unretained(&encrypted_media)));
-
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-av_enc-av.webm",
- encrypted_media.GetCdmContext()));
-
- Play();
-
- ASSERT_TRUE(WaitUntilOnEnded());
- Stop();
-}
-
-TEST_F(PipelineIntegrationTest, FlacPlaybackHashed) {
+TEST_P(CommonPipelineIntegrationTest, FlacPlaybackHashed) {
ASSERT_EQ(PIPELINE_OK, Start("sfx.flac", kHashed));
Play();
ASSERT_TRUE(WaitUntilOnEnded());
@@ -1322,7 +1341,7 @@ TEST_F(PipelineIntegrationTest, FlacPlaybackHashed) {
EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash());
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource) {
MockMediaSource source("bear-320x240.webm", kWebM, 219229);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
@@ -1341,7 +1360,7 @@ TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource) {
Stop();
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_Live) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_Live) {
MockMediaSource source("bear-320x240-live.webm", kWebM, 219221);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
@@ -1360,7 +1379,7 @@ TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_Live) {
Stop();
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP9_WebM) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_VP9_WebM) {
MockMediaSource source("bear-vp9.webm", kWebMVP9, 67504);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
@@ -1377,7 +1396,8 @@ TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP9_WebM) {
Stop();
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP9_BlockGroup_WebM) {
+TEST_P(CommonPipelineIntegrationTest,
+ BasicPlayback_MediaSource_VP9_BlockGroup_WebM) {
MockMediaSource source("bear-vp9-blockgroup.webm", kWebMVP9, 67871);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
@@ -1394,7 +1414,7 @@ TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP9_BlockGroup_WebM) {
Stop();
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP8A_WebM) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_VP8A_WebM) {
MockMediaSource source("bear-vp8a.webm", kVideoOnlyWebM, kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
@@ -1411,7 +1431,7 @@ TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP8A_WebM) {
Stop();
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_Opus_WebM) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_Opus_WebM) {
MockMediaSource source("bear-opus-end-trimming.webm", kOpusAudioOnlyWebM,
kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
@@ -1429,7 +1449,7 @@ TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_Opus_WebM) {
}
// Flaky. http://crbug.com/304776
-TEST_F(PipelineIntegrationTest, DISABLED_MediaSource_Opus_Seeking_WebM) {
+TEST_P(CommonPipelineIntegrationTest, DISABLED_MediaSource_Opus_Seeking_WebM) {
MockMediaSource source("bear-opus-end-trimming.webm", kOpusAudioOnlyWebM,
kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK,
@@ -1457,7 +1477,7 @@ TEST_F(PipelineIntegrationTest, DISABLED_MediaSource_Opus_Seeking_WebM) {
Stop();
}
-TEST_F(PipelineIntegrationTest, MediaSource_ConfigChange_WebM) {
+TEST_P(CommonPipelineIntegrationTest, MediaSource_ConfigChange_WebM) {
MockMediaSource source("bear-320x240-16x9-aspect.webm", kWebM,
kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
@@ -1482,7 +1502,8 @@ TEST_F(PipelineIntegrationTest, MediaSource_ConfigChange_WebM) {
Stop();
}
-TEST_F(PipelineIntegrationTest, MediaSource_Remove_Updates_BufferedRanges) {
+TEST_P(CommonPipelineIntegrationTest,
+ MediaSource_Remove_Updates_BufferedRanges) {
const char* input_filename = "bear-320x240.webm";
MockMediaSource source(input_filename, kWebM, kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
@@ -1511,7 +1532,7 @@ TEST_F(PipelineIntegrationTest, MediaSource_Remove_Updates_BufferedRanges) {
// evicted data should be reflected in the change of media::Pipeline buffered
// ranges (returned by GetBufferedTimeRanges). At that point the buffered ranges
// will no longer start at 0.
-TEST_F(PipelineIntegrationTest, MediaSource_FillUp_Buffer) {
+TEST_P(CommonPipelineIntegrationTest, MediaSource_FillUp_Buffer) {
const char* input_filename = "bear-320x240.webm";
MockMediaSource source(input_filename, kWebM, kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
@@ -1541,97 +1562,8 @@ TEST_F(PipelineIntegrationTest, MediaSource_FillUp_Buffer) {
Stop();
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(MediaSource_ConfigChange_Encrypted_WebM)) {
- MockMediaSource source("bear-320x240-16x9-aspect-av_enc-av.webm", kWebM,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
-
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1);
- scoped_refptr<DecoderBuffer> second_file =
- ReadTestDataFile("bear-640x360-av_enc-av.webm");
-
- ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
- second_file->data(),
- second_file->data_size()));
- source.EndOfStream();
-
- Play();
- EXPECT_TRUE(WaitUntilOnEnded());
-
- EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
- EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs,
- pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
-
- source.Shutdown();
- Stop();
-}
-
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(MediaSource_ConfigChange_ClearThenEncrypted_WebM)) {
- MockMediaSource source("bear-320x240-16x9-aspect.webm", kWebM,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
-
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1);
- scoped_refptr<DecoderBuffer> second_file =
- ReadTestDataFile("bear-640x360-av_enc-av.webm");
-
- EXPECT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
- second_file->data(),
- second_file->data_size()));
- source.EndOfStream();
-
- Play();
- EXPECT_TRUE(WaitUntilOnEnded());
-
- EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
- EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs,
- pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
-
- source.Shutdown();
- Stop();
-}
-
-// Config change from encrypted to clear is allowed by the demuxer, and is
-// supported by the Renderer.
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(MediaSource_ConfigChange_EncryptedThenClear_WebM)) {
- MockMediaSource source("bear-320x240-16x9-aspect-av_enc-av.webm", kWebM,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
-
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1);
- scoped_refptr<DecoderBuffer> second_file =
- ReadTestDataFile("bear-640x360.webm");
-
- ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
- second_file->data(),
- second_file->data_size()));
- source.EndOfStream();
-
- Play();
- ASSERT_TRUE(WaitUntilOnEnded());
-
- EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
- EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs,
- pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
-
- source.Shutdown();
- Stop();
-}
-
#if defined(ARCH_CPU_X86_FAMILY) && !defined(OS_ANDROID)
-TEST_F(PipelineIntegrationTest, BasicPlaybackHi10PVP9) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHi10PVP9) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi10p-vp9.webm", kClockless));
Play();
@@ -1639,7 +1571,7 @@ TEST_F(PipelineIntegrationTest, BasicPlaybackHi10PVP9) {
ASSERT_TRUE(WaitUntilOnEnded());
}
-TEST_F(PipelineIntegrationTest, BasicPlaybackHi12PVP9) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHi12PVP9) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi12p-vp9.webm", kClockless));
Play();
@@ -1650,7 +1582,7 @@ TEST_F(PipelineIntegrationTest, BasicPlaybackHi12PVP9) {
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
-TEST_F(PipelineIntegrationTest, BasicPlaybackHi10P) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHi10P) {
ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi10p.mp4", kClockless));
Play();
@@ -1658,7 +1590,7 @@ TEST_F(PipelineIntegrationTest, BasicPlaybackHi10P) {
ASSERT_TRUE(WaitUntilOnEnded());
}
-TEST_F(PipelineIntegrationTest, BasicFallback) {
+TEST_P(CommonPipelineIntegrationTest, BasicFallback) {
ScopedVector<VideoDecoder> failing_video_decoder;
failing_video_decoder.push_back(new FailingVideoDecoder());
@@ -1670,7 +1602,7 @@ TEST_F(PipelineIntegrationTest, BasicFallback) {
ASSERT_TRUE(WaitUntilOnEnded());
};
-TEST_F(PipelineIntegrationTest, MediaSource_ADTS) {
+TEST_P(CommonPipelineIntegrationTest, MediaSource_ADTS) {
MockMediaSource source("sfx.adts", kADTS, kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
@@ -1684,7 +1616,7 @@ TEST_F(PipelineIntegrationTest, MediaSource_ADTS) {
EXPECT_TRUE(WaitUntilOnEnded());
}
-TEST_F(PipelineIntegrationTest, MediaSource_ADTS_TimestampOffset) {
+TEST_P(CommonPipelineIntegrationTest, MediaSource_ADTS_TimestampOffset) {
MockMediaSource source("sfx.adts", kADTS, kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK,
StartPipelineWithMediaSource(&source, kHashed, nullptr));
@@ -1715,7 +1647,7 @@ TEST_F(PipelineIntegrationTest, MediaSource_ADTS_TimestampOffset) {
EXPECT_HASH_EQ("-0.25,0.67,0.04,0.14,-0.49,-0.41,", GetAudioHash());
}
-TEST_F(PipelineIntegrationTest, BasicPlaybackHashed_MP3) {
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHashed_MP3) {
ASSERT_EQ(PIPELINE_OK, Start("sfx.mp3", kHashed));
Play();
@@ -1726,104 +1658,35 @@ TEST_F(PipelineIntegrationTest, BasicPlaybackHashed_MP3) {
EXPECT_HASH_EQ("1.30,2.72,4.56,5.08,3.74,2.03,", GetAudioHash());
}
-#if !defined(DISABLE_CLOCKLESS_TESTS)
-class Mp3FastSeekParams {
- public:
- Mp3FastSeekParams(const char* filename, const char* hash)
- : filename(filename), hash(hash) {}
- const char* filename;
- const char* hash;
-};
-
-class Mp3FastSeekIntegrationTest
- : public PipelineIntegrationTest,
- public testing::WithParamInterface<Mp3FastSeekParams> {};
-
-TEST_P(Mp3FastSeekIntegrationTest, FastSeekAccuracy_MP3) {
- Mp3FastSeekParams config = GetParam();
- ASSERT_EQ(PIPELINE_OK, Start(config.filename, kHashed));
+TEST_P(CommonPipelineIntegrationTest, MediaSource_MP3) {
+ MockMediaSource source("sfx.mp3", kMP3, kAppendWholeFile);
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithMediaSource(&source, kHashed, nullptr));
+ source.EndOfStream();
- // The XING TOC is inaccurate. We don't use it for CBR, we tolerate it for VBR
- // (best option for fast seeking; see Mp3SeekFFmpegDemuxerTest). The chosen
- // seek time exposes inaccuracy in TOC such that the hash will change if seek
- // logic is regressed. See https://crbug.com/545914.
- //
- // Quick TOC design (not pretty!):
- // - All MP3 TOCs are 100 bytes
- // - Each byte is read as a uint8_t; value between 0 - 255.
- // - The index into this array is the numerator in the ratio: index / 100.
- // This fraction represents a playback time as a percentage of duration.
- // - The value at the given index is the numerator in the ratio: value / 256.
- // This fraction represents a byte offset as a percentage of the file size.
- //
- // For CBR files, each frame is the same size, so the offset for time of
- // (0.98 * duration) should be around (0.98 * file size). This is 250.88 / 256
- // but the numerator will be truncated in the TOC as 250, losing precision.
- base::TimeDelta seek_time(0.98 * pipeline_->GetMediaDuration());
+ EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
+ EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
+ EXPECT_EQ(313, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
- ASSERT_TRUE(Seek(seek_time));
Play();
- ASSERT_TRUE(WaitUntilOnEnded());
- EXPECT_HASH_EQ(config.hash, GetAudioHash());
+ EXPECT_TRUE(WaitUntilOnEnded());
+
+ // Verify that codec delay was stripped.
+ EXPECT_HASH_EQ("1.01,2.71,4.18,4.32,3.04,1.12,", GetAudioHash());
}
-// CBR seeks should always be fast and accurate.
-INSTANTIATE_TEST_CASE_P(
- CBRSeek_HasTOC,
- Mp3FastSeekIntegrationTest,
- ::testing::Values(Mp3FastSeekParams("bear-audio-10s-CBR-has-TOC.mp3",
- "-0.71,0.36,2.96,2.68,2.11,-1.08,")));
+TEST_P(CommonPipelineIntegrationTest, MediaSource_MP3_TimestampOffset) {
+ MockMediaSource source("sfx.mp3", kMP3, kAppendWholeFile);
+ EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
+ EXPECT_EQ(313, source.last_timestamp_offset().InMilliseconds());
-INSTANTIATE_TEST_CASE_P(
- CBRSeeks_NoTOC,
- Mp3FastSeekIntegrationTest,
- ::testing::Values(Mp3FastSeekParams("bear-audio-10s-CBR-no-TOC.mp3",
- "0.95,0.56,1.34,0.47,1.77,0.84,")));
-
-// VBR seeks can be fast *OR* accurate, but not both. We chose fast.
-INSTANTIATE_TEST_CASE_P(
- VBRSeeks_HasTOC,
- Mp3FastSeekIntegrationTest,
- ::testing::Values(Mp3FastSeekParams("bear-audio-10s-VBR-has-TOC.mp3",
- "-0.15,-0.83,0.54,1.00,1.94,0.93,")));
-
-INSTANTIATE_TEST_CASE_P(
- VBRSeeks_NoTOC,
- Mp3FastSeekIntegrationTest,
- ::testing::Values(Mp3FastSeekParams("bear-audio-10s-VBR-no-TOC.mp3",
- "-0.22,0.80,1.19,0.73,-0.31,-1.12,")));
-#endif // !defined(DISABLE_CLOCKLESS_TESTS)
-
-TEST_F(PipelineIntegrationTest, MediaSource_MP3) {
- MockMediaSource source("sfx.mp3", kMP3, kAppendWholeFile);
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithMediaSource(&source, kHashed, nullptr));
- source.EndOfStream();
-
- EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
- EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- EXPECT_EQ(313, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
-
- Play();
-
- EXPECT_TRUE(WaitUntilOnEnded());
-
- // Verify that codec delay was stripped.
- EXPECT_HASH_EQ("1.01,2.71,4.18,4.32,3.04,1.12,", GetAudioHash());
-}
-
-TEST_F(PipelineIntegrationTest, MediaSource_MP3_TimestampOffset) {
- MockMediaSource source("sfx.mp3", kMP3, kAppendWholeFile);
- EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
- EXPECT_EQ(313, source.last_timestamp_offset().InMilliseconds());
-
- // There are 576 silent frames at the start of this mp3. The second append
- // should trim them off.
- const base::TimeDelta mp3_preroll_duration =
- base::TimeDelta::FromSecondsD(576.0 / 44100);
- const base::TimeDelta append_time =
- source.last_timestamp_offset() - mp3_preroll_duration;
+ // There are 576 silent frames at the start of this mp3. The second append
+ // should trim them off.
+ const base::TimeDelta mp3_preroll_duration =
+ base::TimeDelta::FromSecondsD(576.0 / 44100);
+ const base::TimeDelta append_time =
+ source.last_timestamp_offset() - mp3_preroll_duration;
scoped_refptr<DecoderBuffer> second_file = ReadTestDataFile("sfx.mp3");
source.AppendAtTimeWithWindow(append_time, append_time + mp3_preroll_duration,
@@ -1840,7 +1703,7 @@ TEST_F(PipelineIntegrationTest, MediaSource_MP3_TimestampOffset) {
EXPECT_EQ(613, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
}
-TEST_F(PipelineIntegrationTest, MediaSource_MP3_Icecast) {
+TEST_P(CommonPipelineIntegrationTest, MediaSource_MP3_Icecast) {
MockMediaSource source("icy_sfx.mp3", kMP3, kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
@@ -1850,7 +1713,7 @@ TEST_F(PipelineIntegrationTest, MediaSource_MP3_Icecast) {
EXPECT_TRUE(WaitUntilOnEnded());
}
-TEST_F(PipelineIntegrationTest, MediaSource_ConfigChange_MP4) {
+TEST_P(CommonPipelineIntegrationTest, MediaSource_ConfigChange_MP4) {
MockMediaSource source("bear-640x360-av_frag.mp4", kMP4, kAppendWholeFile);
EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
@@ -1873,744 +1736,1008 @@ TEST_F(PipelineIntegrationTest, MediaSource_ConfigChange_MP4) {
source.Shutdown();
Stop();
}
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(MediaSource_ConfigChange_Encrypted_MP4_CENC_VideoOnly)) {
- MockMediaSource source("bear-640x360-v_frag-cenc.mp4", kMP4Video,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_16x9AspectRatio) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-16x9-aspect.webm"));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1);
- scoped_refptr<DecoderBuffer> second_file =
- ReadTestDataFile("bear-1280x720-v_frag-cenc.mp4");
- ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
- second_file->data(),
- second_file->data_size()));
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+TEST_P(CommonPipelineIntegrationTest, Mp2ts_AAC_HE_SBR_Audio) {
+ MockMediaSource source("bear-1280x720-aac_he.ts", kMP2AudioSBR,
+ kAppendWholeFile);
+#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
+ EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
- Play();
- EXPECT_TRUE(WaitUntilOnEnded());
-
+ // Check that SBR is taken into account correctly by mpeg2ts parser. When an
+ // SBR stream is parsed as non-SBR stream, then audio frame durations are
+ // calculated incorrectly and that leads to gaps in buffered ranges (so this
+ // check will fail) and eventually leads to stalled playback.
EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
- EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs,
- pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
-
- source.Shutdown();
- Stop();
+#else
+ EXPECT_EQ(
+ DEMUXER_ERROR_COULD_NOT_OPEN,
+ StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
+#endif
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(
- MediaSource_ConfigChange_Encrypted_MP4_CENC_KeyRotation_VideoOnly)) {
- MockMediaSource source("bear-640x360-v_frag-cenc-key_rotation.mp4", kMP4Video,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+TEST_P(CommonPipelineIntegrationTest, Mpeg2ts_MP3Audio_Mp4a_6B) {
+ MockMediaSource source("bear-audio-mp4a.6B.ts",
+ "video/mp2t; codecs=\"mp4a.6B\"", kAppendWholeFile);
+#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
+ EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
+ source.EndOfStream();
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+#else
+ EXPECT_EQ(
+ DEMUXER_ERROR_COULD_NOT_OPEN,
+ StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
+#endif
+}
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1);
- scoped_refptr<DecoderBuffer> second_file =
- ReadTestDataFile("bear-1280x720-v_frag-cenc-key_rotation.mp4");
- ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
- second_file->data(),
- second_file->data_size()));
+TEST_P(CommonPipelineIntegrationTest, Mpeg2ts_MP3Audio_Mp4a_69) {
+ MockMediaSource source("bear-audio-mp4a.69.ts",
+ "video/mp2t; codecs=\"mp4a.69\"", kAppendWholeFile);
+#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
+ EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+#else
+ EXPECT_EQ(
+ DEMUXER_ERROR_COULD_NOT_OPEN,
+ StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
+#endif
+}
- Play();
- EXPECT_TRUE(WaitUntilOnEnded());
+TEST_P(CommonPipelineIntegrationTest,
+ BasicPlayback_MediaSource_VideoOnly_MP4_AVC3) {
+ MockMediaSource source("bear-1280x720-v_frag-avc3.mp4", kMP4VideoAVC3,
+ kAppendWholeFile);
+ EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
+ source.EndOfStream();
EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs,
+ EXPECT_EQ(k1280IsoAVC3FileDurationMs,
pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
+ Play();
+
+ ASSERT_TRUE(WaitUntilOnEnded());
source.Shutdown();
Stop();
}
-// Config changes from clear to encrypted are not currently supported.
-// TODO(ddorwin): Figure out why this CHECKs in AppendAtTime().
-TEST_F(PipelineIntegrationTest,
- DISABLED_MediaSource_ConfigChange_ClearThenEncrypted_MP4_CENC) {
- MockMediaSource source("bear-640x360-av_frag.mp4", kMP4Video,
+TEST_P(CommonPipelineIntegrationTest,
+ BasicPlayback_MediaSource_VideoOnly_MP4_VP9) {
+ MockMediaSource source("bear-320x240-v_frag-vp9.mp4", kMP4VideoVP9,
kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
-
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1);
- scoped_refptr<DecoderBuffer> second_file =
- ReadTestDataFile("bear-1280x720-v_frag-cenc.mp4");
- ASSERT_FALSE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
- second_file->data(),
- second_file->data_size()));
+ if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kEnableVp9InMp4)) {
+ ASSERT_EQ(ChunkDemuxer::kNotSupported, source.AddId());
+ return;
+ }
+ EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
-
- base::RunLoop().Run();
- EXPECT_EQ(CHUNK_DEMUXER_ERROR_APPEND_FAILED, pipeline_status_);
-
- EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
- EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- // The second video was not added, so its time has not been added.
- EXPECT_EQ(k640IsoFileDurationMs,
- pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
Play();
- EXPECT_EQ(CHUNK_DEMUXER_ERROR_APPEND_FAILED, WaitUntilEndedOrError());
+ ASSERT_TRUE(WaitUntilOnEnded());
source.Shutdown();
+ Stop();
}
-// Config changes from encrypted to clear are not currently supported.
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(MediaSource_ConfigChange_EncryptedThenClear_MP4_CENC)) {
- MockMediaSource source("bear-640x360-v_frag-cenc.mp4", kMP4Video,
+TEST_P(CommonPipelineIntegrationTest,
+ BasicPlayback_MediaSource_VideoOnly_MP4_HEVC1) {
+ // HEVC demuxing might be enabled even on platforms that don't support HEVC
+ // decoding. For those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which
+ // indicates that we did pass media mime type checks and attempted
+ // to actually demux and decode the stream. On platforms that support both
+ // demuxing and decoding we'll get PIPELINE_OK.
+ MockMediaSource source("bear-320x240-v_frag-hevc.mp4", kMP4VideoHEVC1,
kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
+ PipelineStatus status = StartPipelineWithMediaSource(&source);
+ EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED);
+#else
+ EXPECT_EQ(
+ DEMUXER_ERROR_COULD_NOT_OPEN,
+ StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
+#endif
+}
- scoped_refptr<DecoderBuffer> second_file =
- ReadTestDataFile("bear-1280x720-av_frag.mp4");
+TEST_P(CommonPipelineIntegrationTest,
+ BasicPlayback_MediaSource_VideoOnly_MP4_HEVC2) {
+ // HEVC demuxing might be enabled even on platforms that don't support HEVC
+ // decoding. For those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which
+ // indicates that we did pass media mime type checks and attempted
+ // to actually demux and decode the stream. On platforms that support both
+ // demuxing and decoding we'll get PIPELINE_OK.
+ MockMediaSource source("bear-320x240-v_frag-hevc.mp4", kMP4VideoHEVC2,
+ kAppendWholeFile);
+#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
+ PipelineStatus status = StartPipelineWithMediaSource(&source);
+ EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED);
+#else
+ EXPECT_EQ(
+ DEMUXER_ERROR_COULD_NOT_OPEN,
+ StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
+#endif
+}
- ASSERT_FALSE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
- second_file->data(),
- second_file->data_size()));
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
- source.EndOfStream();
+TEST_P(CommonPipelineIntegrationTest, SeekWhilePaused) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
- EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
- EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- // The second video was not added, so its time has not been added.
- EXPECT_EQ(k640IsoCencFileDurationMs,
- pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
+ base::TimeDelta duration(pipeline_->GetMediaDuration());
+ base::TimeDelta start_seek_time(duration / 4);
+ base::TimeDelta seek_time(duration * 3 / 4);
Play();
-
- EXPECT_EQ(CHUNK_DEMUXER_ERROR_APPEND_FAILED, WaitUntilEndedOrError());
- source.Shutdown();
-}
-
-// Verify files which change configuration midstream fail gracefully.
-TEST_F(PipelineIntegrationTest, MidStreamConfigChangesFail) {
- ASSERT_EQ(PIPELINE_OK, Start("midstream_config_change.mp3"));
+ ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time));
+ Pause();
+ ASSERT_TRUE(Seek(seek_time));
+ EXPECT_EQ(seek_time, pipeline_->GetMediaTime());
Play();
- ASSERT_EQ(WaitUntilEndedOrError(), PIPELINE_ERROR_DECODE);
-}
-#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+ ASSERT_TRUE(WaitUntilOnEnded());
-TEST_F(PipelineIntegrationTest, BasicPlayback_16x9AspectRatio) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-16x9-aspect.webm"));
+ // Make sure seeking after reaching the end works as expected.
+ Pause();
+ ASSERT_TRUE(Seek(seek_time));
+ EXPECT_EQ(seek_time, pipeline_->GetMediaTime());
Play();
ASSERT_TRUE(WaitUntilOnEnded());
}
-TEST_F(PipelineIntegrationTest, MAYBE_EME(EncryptedPlayback_WebM)) {
- MockMediaSource source("bear-320x240-av_enc-av.webm", kWebM, 219816);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+TEST_P(CommonPipelineIntegrationTest, SeekWhilePlaying) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
- source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+ base::TimeDelta duration(pipeline_->GetMediaDuration());
+ base::TimeDelta start_seek_time(duration / 4);
+ base::TimeDelta seek_time(duration * 3 / 4);
Play();
-
+ ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time));
+ ASSERT_TRUE(Seek(seek_time));
+ EXPECT_GE(pipeline_->GetMediaTime(), seek_time);
+ ASSERT_TRUE(WaitUntilOnEnded());
+
+ // Make sure seeking after reaching the end works as expected.
+ ASSERT_TRUE(Seek(seek_time));
+ EXPECT_GE(pipeline_->GetMediaTime(), seek_time);
ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
- Stop();
}
-TEST_F(PipelineIntegrationTest, MAYBE_EME(EncryptedPlayback_ClearStart_WebM)) {
- MockMediaSource source("bear-320x240-av_enc-av_clear-1s.webm", kWebM,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+TEST_P(CommonPipelineIntegrationTest, SuspendWhilePaused) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
- source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+ base::TimeDelta duration(pipeline_->GetMediaDuration());
+ base::TimeDelta start_seek_time(duration / 4);
+ base::TimeDelta seek_time(duration * 3 / 4);
Play();
+ ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time));
+ Pause();
+
+ // Suspend while paused.
+ ASSERT_TRUE(Suspend());
+ // Resuming the pipeline will create a new Renderer,
+ // which in turn will trigger video size and opacity notifications.
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))).Times(1);
+ EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(1);
+
+ ASSERT_TRUE(Resume(seek_time));
+ EXPECT_GE(pipeline_->GetMediaTime(), seek_time);
+ Play();
ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
- Stop();
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_NoEncryptedFrames_WebM)) {
- MockMediaSource source("bear-320x240-av_enc-av_clear-all.webm", kWebM,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new NoResponseApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+TEST_P(CommonPipelineIntegrationTest, SuspendWhilePlaying) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
- source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+ base::TimeDelta duration(pipeline_->GetMediaDuration());
+ base::TimeDelta start_seek_time(duration / 4);
+ base::TimeDelta seek_time(duration * 3 / 4);
Play();
+ ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time));
+ ASSERT_TRUE(Suspend());
+
+ // Resuming the pipeline will create a new Renderer,
+ // which in turn will trigger video size and opacity notifications.
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))).Times(1);
+ EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(1);
+ ASSERT_TRUE(Resume(seek_time));
+ EXPECT_GE(pipeline_->GetMediaTime(), seek_time);
ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
- Stop();
}
#if BUILDFLAG(USE_PROPRIETARY_CODECS)
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_MP4_CENC_VideoOnly)) {
- MockMediaSource source("bear-1280x720-v_frag-cenc.mp4", kMP4Video,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+TEST_P(CommonPipelineIntegrationTest, Rotated_Metadata_0) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_0.mp4"));
+ ASSERT_EQ(VIDEO_ROTATION_0, metadata_.video_rotation);
+}
- source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+TEST_P(CommonPipelineIntegrationTest, Rotated_Metadata_90) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_90.mp4"));
+ ASSERT_EQ(VIDEO_ROTATION_90, metadata_.video_rotation);
+}
- Play();
+TEST_P(CommonPipelineIntegrationTest, Rotated_Metadata_180) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_180.mp4"));
+ ASSERT_EQ(VIDEO_ROTATION_180, metadata_.video_rotation);
+}
- ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
- Stop();
+TEST_P(CommonPipelineIntegrationTest, Rotated_Metadata_270) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_270.mp4"));
+ ASSERT_EQ(VIDEO_ROTATION_270, metadata_.video_rotation);
}
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_MP4_CENC_AudioOnly)) {
- MockMediaSource source("bear-1280x720-a_frag-cenc.mp4", kMP4Audio,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+// Verify audio decoder & renderer can handle aborted demuxer reads.
+TEST_P(CommonPipelineIntegrationTest, ChunkDemuxerAbortRead_AudioOnly) {
+ ASSERT_TRUE(TestSeekDuringRead("bear-320x240-audio-only.webm", kAudioOnlyWebM,
+ 16384, base::TimeDelta::FromMilliseconds(464),
+ base::TimeDelta::FromMilliseconds(617), 0x10CA,
+ 19730));
+}
- source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+// Verify video decoder & renderer can handle aborted demuxer reads.
+TEST_P(CommonPipelineIntegrationTest, ChunkDemuxerAbortRead_VideoOnly) {
+ ASSERT_TRUE(TestSeekDuringRead("bear-320x240-video-only.webm", kVideoOnlyWebM,
+ 32768, base::TimeDelta::FromMilliseconds(167),
+ base::TimeDelta::FromMilliseconds(1668),
+ 0x1C896, 65536));
+}
+// Verify that Opus audio in WebM containers can be played back.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_AudioOnly_Opus_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-opus-end-trimming.webm"));
Play();
-
ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
- Stop();
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_NoEncryptedFrames_MP4_CENC_VideoOnly)) {
- MockMediaSource source("bear-1280x720-v_frag-cenc_clear-all.mp4", kMP4Video,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new NoResponseApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+// Verify that VP9 video in WebM containers can be played back.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VideoOnly_VP9_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp9.webm"));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
- source.EndOfStream();
+// Verify that VP9 video and Opus audio in the same WebM container can be played
+// back.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP9_Opus_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-opus.webm"));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
+// Verify that VP8 video with alpha channel can be played back.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP8A_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp8a.webm"));
Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+ EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A);
+}
+// Verify that VP8A video with odd width/height can be played back.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP8A_Odd_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp8a-odd-dimensions.webm"));
+ Play();
ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
- Stop();
+ EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A);
}
-TEST_F(PipelineIntegrationTest, Mp2ts_AAC_HE_SBR_Audio) {
- MockMediaSource source("bear-1280x720-aac_he.ts", kMP2AudioSBR,
- kAppendWholeFile);
-#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
- EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
- source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+// Verify that VP9 video with odd width/height can be played back.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP9_Odd_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-odd-dimensions.webm"));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
- // Check that SBR is taken into account correctly by mpeg2ts parser. When an
- // SBR stream is parsed as non-SBR stream, then audio frame durations are
- // calculated incorrectly and that leads to gaps in buffered ranges (so this
- // check will fail) and eventually leads to stalled playback.
- EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
-#else
- EXPECT_EQ(
- DEMUXER_ERROR_COULD_NOT_OPEN,
- StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
-#endif
+// Verify that VP9 video with alpha channel can be played back.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP9A_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp9a.webm"));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+ EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A);
}
-TEST_F(PipelineIntegrationTest, Mpeg2ts_MP3Audio_Mp4a_6B) {
- MockMediaSource source("bear-audio-mp4a.6B.ts",
- "video/mp2t; codecs=\"mp4a.6B\"", kAppendWholeFile);
-#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
- EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
- source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
-#else
- EXPECT_EQ(
- DEMUXER_ERROR_COULD_NOT_OPEN,
- StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
-#endif
+// Verify that VP9A video with odd width/height can be played back.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP9A_Odd_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp9a-odd-dimensions.webm"));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+ EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A);
}
-TEST_F(PipelineIntegrationTest, Mpeg2ts_MP3Audio_Mp4a_69) {
- MockMediaSource source("bear-audio-mp4a.69.ts",
- "video/mp2t; codecs=\"mp4a.69\"", kAppendWholeFile);
-#if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER)
- EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
- source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
-#else
- EXPECT_EQ(
- DEMUXER_ERROR_COULD_NOT_OPEN,
- StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
-#endif
+// Verify that VP9 video with 4:4:4 subsampling can be played back.
+TEST_P(CommonPipelineIntegrationTest, P444_VP9_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-P444.webm"));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+ EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV24);
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_NoEncryptedFrames_MP4_CENC_AudioOnly)) {
- MockMediaSource source("bear-1280x720-a_frag-cenc_clear-all.mp4", kMP4Audio,
- kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new NoResponseApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+// Verify that frames of VP9 video in the BT.709 color space have the YV12HD
+// format.
+TEST_P(CommonPipelineIntegrationTest, BT709_VP9_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-bt709.webm"));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+ EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12);
+ EXPECT_COLOR_SPACE_EQ(last_video_frame_color_space_, COLOR_SPACE_HD_REC709);
+}
- source.EndOfStream();
+TEST_P(CommonPipelineIntegrationTest, HD_VP9_WebM) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-1280x720.webm", kClockless));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
+// Verify that videos with an odd frame size playback successfully.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_OddVideoSize) {
+ ASSERT_EQ(PIPELINE_OK, Start("butterfly-853x480.webm"));
Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
+// Verify that OPUS audio in a webm which reports a 44.1kHz sample rate plays
+// correctly at 48kHz
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_Opus441kHz) {
+ ASSERT_EQ(PIPELINE_OK, Start("sfx-opus-441.webm"));
+ Play();
ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
- Stop();
+ EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO)
+ ->audio_decoder_config()
+ .samples_per_second());
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_MP4_CENC_SENC_Video)) {
- MockMediaSource source("bear-640x360-v_frag-cenc-senc.mp4", kMP4Video,
+// Same as above but using MediaSource.
+TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_Opus441kHz) {
+ MockMediaSource source("sfx-opus-441.webm", kOpusAudioOnlyWebM,
kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
-
+ EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
source.EndOfStream();
-
Play();
-
ASSERT_TRUE(WaitUntilOnEnded());
source.Shutdown();
Stop();
+ EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO)
+ ->audio_decoder_config()
+ .samples_per_second());
}
-// 'SAIZ' and 'SAIO' boxes contain redundant information which is already
-// available in 'SENC' box. Although 'SAIZ' and 'SAIO' boxes are required per
-// CENC spec for backward compatibility reasons, but we do not use the two
-// boxes if 'SENC' box is present, so the code should work even if the two
-// boxes are not present.
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_MP4_CENC_SENC_NO_SAIZ_SAIO_Video)) {
- MockMediaSource source("bear-640x360-v_frag-cenc-senc-no-saiz-saio.mp4",
- kMP4Video, kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+// Ensures audio-only playback with missing or negative timestamps works. Tests
+// the common live-streaming case for chained ogg. See http://crbug.com/396864.
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackChainedOgg) {
+ ASSERT_EQ(PIPELINE_OK, Start("double-sfx.ogg", kUnreliableDuration));
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+ ASSERT_EQ(base::TimeDelta(), demuxer_->GetStartTime());
+}
- source.EndOfStream();
+// Tests that we signal ended even when the audio track runs longer than the
+// video track.
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackAudioLongerThanVideo) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear_audio_longer_than_video.ogv"));
+ // Audio track is 2000ms. Video track is 1001ms. Duration should be the
+ // higher of the two.
+ EXPECT_EQ(2000, pipeline_->GetMediaDuration().InMilliseconds());
+ Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
+// Tests that we signal ended even when the audio track runs shorter than the
+// video track.
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackAudioShorterThanVideo) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear_audio_shorter_than_video.ogv"));
+ // Audio track is 500ms. Video track is 1001ms. Duration should be the
+ // higher of the two.
+ EXPECT_EQ(1001, pipeline_->GetMediaDuration().InMilliseconds());
Play();
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
+TEST_P(CommonPipelineIntegrationTest, BasicPlaybackPositiveStartTime) {
+ ASSERT_EQ(PIPELINE_OK, Start("nonzero-start-time.webm"));
+ Play();
ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
- Stop();
+ ASSERT_EQ(base::TimeDelta::FromMicroseconds(396000),
+ demuxer_->GetStartTime());
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_MP4_CENC_KeyRotation_Video)) {
- MockMediaSource source("bear-1280x720-v_frag-cenc-key_rotation.mp4",
- kMP4Video, kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
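+// Each entry below instantiates the full CommonPipelineIntegrationTest suite
+// for one pipeline type; the Media Remoting variant is compiled in only when
+// remoting support is enabled.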
+const IntegrationTestData kIntegrationTests[] = {
+ {PipelineType::Media},
+#if BUILDFLAG(ENABLE_MEDIA_REMOTING)
+ {PipelineType::MediaRemoting},
+#endif // BUILDFLAG(ENABLE_MEDIA_REMOTING)
+};
- source.EndOfStream();
+INSTANTIATE_TEST_CASE_P(,
+ CommonPipelineIntegrationTest,
+ testing::ValuesIn(kIntegrationTests));
+// Media Remoting currently doesn't support stream status changes without
+// restarting the pipeline, so the track-switching tests below run only on the
+// default media pipeline.
+TEST_F(PipelineIntegrationTest, ReinitRenderersWhileAudioTrackIsDisabled) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
Play();
- ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
+ // These get triggered every time playback is resumed.
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240)))
+ .Times(AnyNumber());
+ EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber());
+
+ // Disable the audio track.
+ std::vector<MediaTrack::Id> track_ids;
+ pipeline_->OnEnabledAudioTracksChanged(track_ids);
+ // pipeline.Suspend() releases renderers and pipeline.Resume() recreates and
+ // reinitializes renderers while the audio track is disabled.
+ ASSERT_TRUE(Suspend());
+ ASSERT_TRUE(Resume(TimestampMs(100)));
+ // Now re-enable the audio track; playback should continue successfully.
+ EXPECT_CALL(*this, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH)).Times(1);
+ track_ids.push_back("2");
+ pipeline_->OnEnabledAudioTracksChanged(track_ids);
+ ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200)));
+
Stop();
}
-TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_MP4_CENC_KeyRotation_Audio)) {
- MockMediaSource source("bear-1280x720-a_frag-cenc-key_rotation.mp4",
- kMP4Audio, kAppendWholeFile);
- FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
- EXPECT_EQ(PIPELINE_OK,
- StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+TEST_F(PipelineIntegrationTest, ReinitRenderersWhileVideoTrackIsDisabled) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed));
+ Play();
- source.EndOfStream();
+ // These get triggered every time playback is resumed.
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240)))
+ .Times(AnyNumber());
+ EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber());
+
+ // Disable the video track.
+ pipeline_->OnSelectedVideoTrackChanged(base::nullopt);
+ // pipeline.Suspend() releases renderers and pipeline.Resume() recreates and
+ // reinitializes renderers while the video track is disabled.
+ ASSERT_TRUE(Suspend());
+ ASSERT_TRUE(Resume(TimestampMs(100)));
+ // Now re-enable the video track; playback should continue successfully.
+ pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1"));
+ ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200)));
+
+ Stop();
+}
+
+TEST_F(PipelineIntegrationTest, MAYBE_EME(BasicPlaybackEncrypted)) {
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
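+ // Route encrypted-media init data from the demuxer to the fake
+ // key-providing app so decryption keys become available during playback.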
+ set_encrypted_media_init_data_cb(
+ base::Bind(&FakeEncryptedMedia::OnEncryptedMediaInitData,
+ base::Unretained(&encrypted_media)));
+
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-av_enc-av.webm",
+ encrypted_media.GetCdmContext()));
Play();
ASSERT_TRUE(WaitUntilOnEnded());
- source.Shutdown();
Stop();
}
TEST_F(PipelineIntegrationTest,
- MAYBE_EME(EncryptedPlayback_MP4_VP9_CENC_VideoOnly)) {
- MockMediaSource source("bear-320x240-v_frag-vp9-cenc.mp4", kMP4VideoVP9,
+ MAYBE_EME(MediaSource_ConfigChange_Encrypted_WebM)) {
+ MockMediaSource source("bear-320x240-16x9-aspect-av_enc-av.webm", kWebM,
kAppendWholeFile);
- if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
- switches::kEnableVp9InMp4)) {
- ASSERT_EQ(ChunkDemuxer::kNotSupported, source.AddId());
- return;
- }
-
FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
EXPECT_EQ(PIPELINE_OK,
StartPipelineWithEncryptedMedia(&source, &encrypted_media));
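+ // Appending the 640x360 stream below is a config change and should report
+ // exactly one natural size change.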
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1);
+ scoped_refptr<DecoderBuffer> second_file =
+ ReadTestDataFile("bear-640x360-av_enc-av.webm");
+ ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
+ second_file->data(),
+ second_file->data_size()));
source.EndOfStream();
Play();
+ EXPECT_TRUE(WaitUntilOnEnded());
+
+ EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
+ EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
+ EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs,
+ pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
- ASSERT_TRUE(WaitUntilOnEnded());
source.Shutdown();
Stop();
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_AVC3) {
- MockMediaSource source("bear-1280x720-v_frag-avc3.mp4", kMP4VideoAVC3,
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(MediaSource_ConfigChange_ClearThenEncrypted_WebM)) {
+ MockMediaSource source("bear-320x240-16x9-aspect.webm", kWebM,
kAppendWholeFile);
- EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1);
+ scoped_refptr<DecoderBuffer> second_file =
+ ReadTestDataFile("bear-640x360-av_enc-av.webm");
+
+ EXPECT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
+ second_file->data(),
+ second_file->data_size()));
source.EndOfStream();
+ Play();
+ EXPECT_TRUE(WaitUntilOnEnded());
+
EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
- EXPECT_EQ(k1280IsoAVC3FileDurationMs,
+ EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs,
pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
- Play();
-
- ASSERT_TRUE(WaitUntilOnEnded());
source.Shutdown();
Stop();
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_VP9) {
- MockMediaSource source("bear-320x240-v_frag-vp9.mp4", kMP4VideoVP9,
+// Config change from encrypted to clear is allowed by the demuxer, and is
+// supported by the Renderer.
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(MediaSource_ConfigChange_EncryptedThenClear_WebM)) {
+ MockMediaSource source("bear-320x240-16x9-aspect-av_enc-av.webm", kWebM,
kAppendWholeFile);
- if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
- switches::kEnableVp9InMp4)) {
- ASSERT_EQ(ChunkDemuxer::kNotSupported, source.AddId());
- return;
- }
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
- EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1);
+ scoped_refptr<DecoderBuffer> second_file =
+ ReadTestDataFile("bear-640x360.webm");
+
+ ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
+ second_file->data(),
+ second_file->data_size()));
source.EndOfStream();
- ASSERT_EQ(PIPELINE_OK, pipeline_status_);
Play();
-
ASSERT_TRUE(WaitUntilOnEnded());
+
+ EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
+ EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
+ EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs,
+ pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
+
source.Shutdown();
Stop();
}
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_HEVC1) {
- // HEVC demuxing might be enabled even on platforms that don't support HEVC
- // decoding. For those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which
- // indicates indicates that we did pass media mime type checks and attempted
- // to actually demux and decode the stream. On platforms that support both
- // demuxing and decoding we'll get PIPELINE_OK.
- MockMediaSource source("bear-320x240-v_frag-hevc.mp4", kMP4VideoHEVC1,
- kAppendWholeFile);
-#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
- PipelineStatus status = StartPipelineWithMediaSource(&source);
- EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED);
-#else
- EXPECT_EQ(
- DEMUXER_ERROR_COULD_NOT_OPEN,
- StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
-#endif
-}
-
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_HEVC2) {
- // HEVC demuxing might be enabled even on platforms that don't support HEVC
- // decoding. For those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which
- // indicates indicates that we did pass media mime type checks and attempted
- // to actually demux and decode the stream. On platforms that support both
- // demuxing and decoding we'll get PIPELINE_OK.
- MockMediaSource source("bear-320x240-v_frag-hevc.mp4", kMP4VideoHEVC2,
- kAppendWholeFile);
-#if BUILDFLAG(ENABLE_HEVC_DEMUXING)
- PipelineStatus status = StartPipelineWithMediaSource(&source);
- EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED);
-#else
- EXPECT_EQ(
- DEMUXER_ERROR_COULD_NOT_OPEN,
- StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr));
-#endif
-}
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+#if !defined(DISABLE_CLOCKLESS_TESTS)
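+// Pairs an MP3 test file with the audio hash expected after the fast seek and
+// play-to-end sequence in FastSeekAccuracy_MP3 below.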
+class Mp3FastSeekParams {
+ public:
+ Mp3FastSeekParams(const char* filename, const char* hash)
+ : filename(filename), hash(hash) {}
+ const char* filename;
+ const char* hash;
+};
-#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+class Mp3FastSeekIntegrationTest
+ : public PipelineIntegrationTest,
+ public testing::WithParamInterface<Mp3FastSeekParams> {};
-TEST_F(PipelineIntegrationTest, SeekWhilePaused) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
+TEST_P(Mp3FastSeekIntegrationTest, FastSeekAccuracy_MP3) {
+ Mp3FastSeekParams config = GetParam();
+ ASSERT_EQ(PIPELINE_OK, Start(config.filename, kHashed));
- base::TimeDelta duration(pipeline_->GetMediaDuration());
- base::TimeDelta start_seek_time(duration / 4);
- base::TimeDelta seek_time(duration * 3 / 4);
+ // The XING TOC is inaccurate. We don't use it for CBR; we tolerate it for VBR
+ // (the best option for fast seeking; see Mp3SeekFFmpegDemuxerTest). The chosen
+ // seek time exposes the TOC's inaccuracy, so the hash will change if the seek
+ // logic regresses. See https://crbug.com/545914.
+ //
+ // Quick TOC design (not pretty!):
+ // - All MP3 TOCs are 100 bytes
+ // - Each byte is read as a uint8_t with a value between 0 and 255.
+ // - The index into this array is the numerator in the ratio: index / 100.
+ // This fraction represents a playback time as a percentage of duration.
+ // - The value at the given index is the numerator in the ratio: value / 256.
+ // This fraction represents a byte offset as a percentage of the file size.
+ //
+ // For CBR files, each frame is the same size, so the offset for time of
+ // (0.98 * duration) should be around (0.98 * file size). This is 250.88 / 256
+ // but the numerator will be truncated in the TOC as 250, losing precision.
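+ // For example, if TOC[98] is 250, the byte offset used is 250/256 (~97.7% of
+ // the file) rather than 98%, so the fast seek lands slightly early; the audio
+ // hash checked below is sensitive to exactly that rounding.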
+ base::TimeDelta seek_time(0.98 * pipeline_->GetMediaDuration());
- Play();
- ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time));
- Pause();
ASSERT_TRUE(Seek(seek_time));
- EXPECT_EQ(seek_time, pipeline_->GetMediaTime());
Play();
ASSERT_TRUE(WaitUntilOnEnded());
- // Make sure seeking after reaching the end works as expected.
- Pause();
- ASSERT_TRUE(Seek(seek_time));
- EXPECT_EQ(seek_time, pipeline_->GetMediaTime());
- Play();
- ASSERT_TRUE(WaitUntilOnEnded());
+ EXPECT_HASH_EQ(config.hash, GetAudioHash());
}
-TEST_F(PipelineIntegrationTest, SeekWhilePlaying) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
+INSTANTIATE_TEST_CASE_P(
+ CBRSeek_HasTOC,
+ Mp3FastSeekIntegrationTest,
+ ::testing::Values(Mp3FastSeekParams("bear-audio-10s-CBR-has-TOC.mp3",
+ "-0.71,0.36,2.96,2.68,2.11,-1.08,")));
+INSTANTIATE_TEST_CASE_P(
+ CBRSeeks_NoTOC,
+ Mp3FastSeekIntegrationTest,
+ ::testing::Values(Mp3FastSeekParams("bear-audio-10s-CBR-no-TOC.mp3",
+ "0.95,0.56,1.34,0.47,1.77,0.84,")));
+// VBR seeks can be fast *OR* accurate, but not both. We chose fast.
+INSTANTIATE_TEST_CASE_P(
+ VBRSeeks_HasTOC,
+ Mp3FastSeekIntegrationTest,
+ ::testing::Values(Mp3FastSeekParams("bear-audio-10s-VBR-has-TOC.mp3",
+ "-0.15,-0.83,0.54,1.00,1.94,0.93,")));
+INSTANTIATE_TEST_CASE_P(
+ VBRSeeks_NoTOC,
+ Mp3FastSeekIntegrationTest,
+ ::testing::Values(Mp3FastSeekParams("bear-audio-10s-VBR-no-TOC.mp3",
+ "-0.22,0.80,1.19,0.73,-0.31,-1.12,")));
+#endif // !defined(DISABLE_CLOCKLESS_TESTS)
- base::TimeDelta duration(pipeline_->GetMediaDuration());
- base::TimeDelta start_seek_time(duration / 4);
- base::TimeDelta seek_time(duration * 3 / 4);
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(MediaSource_ConfigChange_Encrypted_MP4_CENC_VideoOnly)) {
+ MockMediaSource source("bear-640x360-v_frag-cenc.mp4", kMP4Video,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1);
+ scoped_refptr<DecoderBuffer> second_file =
+ ReadTestDataFile("bear-1280x720-v_frag-cenc.mp4");
+ ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
+ second_file->data(),
+ second_file->data_size()));
+ source.EndOfStream();
Play();
- ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time));
- ASSERT_TRUE(Seek(seek_time));
- EXPECT_GE(pipeline_->GetMediaTime(), seek_time);
- ASSERT_TRUE(WaitUntilOnEnded());
+ EXPECT_TRUE(WaitUntilOnEnded());
- // Make sure seeking after reaching the end works as expected.
- ASSERT_TRUE(Seek(seek_time));
- EXPECT_GE(pipeline_->GetMediaTime(), seek_time);
- ASSERT_TRUE(WaitUntilOnEnded());
+ EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
+ EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
+ EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs,
+ pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
+
+ source.Shutdown();
+ Stop();
}
-TEST_F(PipelineIntegrationTest, SuspendWhilePaused) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(
+ MediaSource_ConfigChange_Encrypted_MP4_CENC_KeyRotation_VideoOnly)) {
+ MockMediaSource source("bear-640x360-v_frag-cenc-key_rotation.mp4", kMP4Video,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
- base::TimeDelta duration(pipeline_->GetMediaDuration());
- base::TimeDelta start_seek_time(duration / 4);
- base::TimeDelta seek_time(duration * 3 / 4);
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1);
+ scoped_refptr<DecoderBuffer> second_file =
+ ReadTestDataFile("bear-1280x720-v_frag-cenc-key_rotation.mp4");
+ ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
+ second_file->data(),
+ second_file->data_size()));
+ source.EndOfStream();
Play();
- ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time));
- Pause();
-
- // Suspend while paused.
- ASSERT_TRUE(Suspend());
+ EXPECT_TRUE(WaitUntilOnEnded());
- // Resuming the pipeline will create a new Renderer,
- // which in turn will trigger video size and opacity notifications.
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))).Times(1);
- EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(1);
+ EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
+ EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
+ EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs,
+ pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
- ASSERT_TRUE(Resume(seek_time));
- EXPECT_GE(pipeline_->GetMediaTime(), seek_time);
- Play();
- ASSERT_TRUE(WaitUntilOnEnded());
+ source.Shutdown();
+ Stop();
}
-TEST_F(PipelineIntegrationTest, SuspendWhilePlaying) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm"));
+// Config changes from clear to encrypted are not currently supported.
+// TODO(ddorwin): Figure out why this CHECKs in AppendAtTime().
+TEST_F(PipelineIntegrationTest,
+ DISABLED_MediaSource_ConfigChange_ClearThenEncrypted_MP4_CENC) {
+ MockMediaSource source("bear-640x360-av_frag.mp4", kMP4Video,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
- base::TimeDelta duration(pipeline_->GetMediaDuration());
- base::TimeDelta start_seek_time(duration / 4);
- base::TimeDelta seek_time(duration * 3 / 4);
+ EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1);
+ scoped_refptr<DecoderBuffer> second_file =
+ ReadTestDataFile("bear-1280x720-v_frag-cenc.mp4");
+ ASSERT_FALSE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
+ second_file->data(),
+ second_file->data_size()));
- Play();
- ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time));
- ASSERT_TRUE(Suspend());
+ source.EndOfStream();
- // Resuming the pipeline will create a new Renderer,
- // which in turn will trigger video size and opacity notifications.
- EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))).Times(1);
- EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(1);
+ base::RunLoop().Run();
+ EXPECT_EQ(CHUNK_DEMUXER_ERROR_APPEND_FAILED, pipeline_status_);
- ASSERT_TRUE(Resume(seek_time));
- EXPECT_GE(pipeline_->GetMediaTime(), seek_time);
- ASSERT_TRUE(WaitUntilOnEnded());
-}
+ EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
+ EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
+ // The second file was not appended, so its duration is not included in the
+ // buffered range.
+ EXPECT_EQ(k640IsoFileDurationMs,
+ pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
-#if BUILDFLAG(USE_PROPRIETARY_CODECS)
-TEST_F(PipelineIntegrationTest, Rotated_Metadata_0) {
- ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_0.mp4"));
- ASSERT_EQ(VIDEO_ROTATION_0, metadata_.video_rotation);
-}
+ Play();
-TEST_F(PipelineIntegrationTest, Rotated_Metadata_90) {
- ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_90.mp4"));
- ASSERT_EQ(VIDEO_ROTATION_90, metadata_.video_rotation);
+ EXPECT_EQ(CHUNK_DEMUXER_ERROR_APPEND_FAILED, WaitUntilEndedOrError());
+ source.Shutdown();
}
-TEST_F(PipelineIntegrationTest, Rotated_Metadata_180) {
- ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_180.mp4"));
- ASSERT_EQ(VIDEO_ROTATION_180, metadata_.video_rotation);
-}
+// Config changes from encrypted to clear are not currently supported.
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(MediaSource_ConfigChange_EncryptedThenClear_MP4_CENC)) {
+ MockMediaSource source("bear-640x360-v_frag-cenc.mp4", kMP4Video,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
-TEST_F(PipelineIntegrationTest, Rotated_Metadata_270) {
- ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_270.mp4"));
- ASSERT_EQ(VIDEO_ROTATION_270, metadata_.video_rotation);
-}
-#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+ scoped_refptr<DecoderBuffer> second_file =
+ ReadTestDataFile("bear-1280x720-av_frag.mp4");
-// Verify audio decoder & renderer can handle aborted demuxer reads.
-TEST_F(PipelineIntegrationTest, ChunkDemuxerAbortRead_AudioOnly) {
- ASSERT_TRUE(TestSeekDuringRead("bear-320x240-audio-only.webm", kAudioOnlyWebM,
- 16384, base::TimeDelta::FromMilliseconds(464),
- base::TimeDelta::FromMilliseconds(617), 0x10CA,
- 19730));
-}
+ ASSERT_FALSE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec),
+ second_file->data(),
+ second_file->data_size()));
-// Verify video decoder & renderer can handle aborted demuxer reads.
-TEST_F(PipelineIntegrationTest, ChunkDemuxerAbortRead_VideoOnly) {
- ASSERT_TRUE(TestSeekDuringRead("bear-320x240-video-only.webm", kVideoOnlyWebM,
- 32768, base::TimeDelta::FromMilliseconds(167),
- base::TimeDelta::FromMilliseconds(1668),
- 0x1C896, 65536));
-}
+ source.EndOfStream();
-// Verify that Opus audio in WebM containers can be played back.
-TEST_F(PipelineIntegrationTest, BasicPlayback_AudioOnly_Opus_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-opus-end-trimming.webm"));
- Play();
- ASSERT_TRUE(WaitUntilOnEnded());
-}
+ EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size());
+ EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds());
+ // The second file was not appended, so its duration is not included in the
+ // buffered range.
+ EXPECT_EQ(k640IsoCencFileDurationMs,
+ pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds());
-// Verify that VP9 video in WebM containers can be played back.
-TEST_F(PipelineIntegrationTest, BasicPlayback_VideoOnly_VP9_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp9.webm"));
Play();
- ASSERT_TRUE(WaitUntilOnEnded());
+
+ EXPECT_EQ(CHUNK_DEMUXER_ERROR_APPEND_FAILED, WaitUntilEndedOrError());
+ source.Shutdown();
}
-// Verify that VP9 video and Opus audio in the same WebM container can be played
-// back.
-TEST_F(PipelineIntegrationTest, BasicPlayback_VP9_Opus_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-opus.webm"));
+// Verify that files which change configuration midstream fail gracefully.
+TEST_F(PipelineIntegrationTest, MidStreamConfigChangesFail) {
+ ASSERT_EQ(PIPELINE_OK, Start("midstream_config_change.mp3"));
Play();
- ASSERT_TRUE(WaitUntilOnEnded());
+ ASSERT_EQ(WaitUntilEndedOrError(), PIPELINE_ERROR_DECODE);
}
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+
+TEST_F(PipelineIntegrationTest, MAYBE_EME(EncryptedPlayback_WebM)) {
+ MockMediaSource source("bear-320x240-av_enc-av.webm", kWebM, 219816);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
-// Verify that VP8 video with alpha channel can be played back.
-TEST_F(PipelineIntegrationTest, BasicPlayback_VP8A_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp8a.webm"));
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
- EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A);
+ source.Shutdown();
+ Stop();
}
-// Verify that VP8A video with odd width/height can be played back.
-TEST_F(PipelineIntegrationTest, BasicPlayback_VP8A_Odd_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp8a-odd-dimensions.webm"));
+TEST_F(PipelineIntegrationTest, MAYBE_EME(EncryptedPlayback_ClearStart_WebM)) {
+ MockMediaSource source("bear-320x240-av_enc-av_clear-1s.webm", kWebM,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
- EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A);
+ source.Shutdown();
+ Stop();
}
-// Verify that VP9 video with odd width/height can be played back.
-TEST_F(PipelineIntegrationTest, BasicPlayback_VP9_Odd_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-odd-dimensions.webm"));
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_NoEncryptedFrames_WebM)) {
+ MockMediaSource source("bear-320x240-av_enc-av_clear-all.webm", kWebM,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new NoResponseApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
+ source.Shutdown();
+ Stop();
}
-// Verify that VP9 video with alpha channel can be played back.
-TEST_F(PipelineIntegrationTest, BasicPlayback_VP9A_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp9a.webm"));
+#if BUILDFLAG(USE_PROPRIETARY_CODECS)
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_MP4_CENC_VideoOnly)) {
+ MockMediaSource source("bear-1280x720-v_frag-cenc.mp4", kMP4Video,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
- EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A);
+ source.Shutdown();
+ Stop();
}
-// Verify that VP9A video with odd width/height can be played back.
-TEST_F(PipelineIntegrationTest, BasicPlayback_VP9A_Odd_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp9a-odd-dimensions.webm"));
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_MP4_CENC_AudioOnly)) {
+ MockMediaSource source("bear-1280x720-a_frag-cenc.mp4", kMP4Audio,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+ ASSERT_EQ(PIPELINE_OK, pipeline_status_);
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
- EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A);
+ source.Shutdown();
+ Stop();
}
-// Verify that VP8 video with inband text track can be played back.
-TEST_F(PipelineIntegrationTest, MAYBE_TEXT(BasicPlayback_VP8_WebVTT_WebM)) {
- EXPECT_CALL(*this, OnAddTextTrack(_, _));
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp8-webvtt.webm"));
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_NoEncryptedFrames_MP4_CENC_VideoOnly)) {
+ MockMediaSource source("bear-1280x720-v_frag-cenc_clear-all.mp4", kMP4Video,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new NoResponseApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
+ source.Shutdown();
+ Stop();
}
-// Verify that VP9 video with 4:4:4 subsampling can be played back.
-TEST_F(PipelineIntegrationTest, P444_VP9_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-P444.webm"));
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_NoEncryptedFrames_MP4_CENC_AudioOnly)) {
+ MockMediaSource source("bear-1280x720-a_frag-cenc_clear-all.mp4", kMP4Audio,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new NoResponseApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
- EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV24);
+ source.Shutdown();
+ Stop();
}
-// Verify that frames of VP9 video in the BT.709 color space have the YV12HD
-// format.
-TEST_F(PipelineIntegrationTest, BT709_VP9_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-bt709.webm"));
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_MP4_CENC_SENC_Video)) {
+ MockMediaSource source("bear-640x360-v_frag-cenc-senc.mp4", kMP4Video,
+ kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
- EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12);
- EXPECT_COLOR_SPACE_EQ(last_video_frame_color_space_, COLOR_SPACE_HD_REC709);
+ source.Shutdown();
+ Stop();
}
-TEST_F(PipelineIntegrationTest, HD_VP9_WebM) {
- ASSERT_EQ(PIPELINE_OK, Start("bear-1280x720.webm", kClockless));
+// 'SAIZ' and 'SAIO' boxes contain redundant information that is already
+// available in the 'SENC' box. Although 'SAIZ' and 'SAIO' boxes are required
+// per the CENC spec for backward compatibility, we do not use them when a
+// 'SENC' box is present, so the code should work even if the two boxes are
+// absent.
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_MP4_CENC_SENC_NO_SAIZ_SAIO_Video)) {
+ MockMediaSource source("bear-640x360-v_frag-cenc-senc-no-saiz-saio.mp4",
+ kMP4Video, kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
+ source.Shutdown();
+ Stop();
}
-// Verify that videos with an odd frame size playback successfully.
-TEST_F(PipelineIntegrationTest, BasicPlayback_OddVideoSize) {
- ASSERT_EQ(PIPELINE_OK, Start("butterfly-853x480.webm"));
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_MP4_CENC_KeyRotation_Video)) {
+ MockMediaSource source("bear-1280x720-v_frag-cenc-key_rotation.mp4",
+ kMP4Video, kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
+ source.Shutdown();
+ Stop();
}
-// Verify that OPUS audio in a webm which reports a 44.1kHz sample rate plays
-// correctly at 48kHz
-TEST_F(PipelineIntegrationTest, BasicPlayback_Opus441kHz) {
- ASSERT_EQ(PIPELINE_OK, Start("sfx-opus-441.webm"));
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_MP4_CENC_KeyRotation_Audio)) {
+ MockMediaSource source("bear-1280x720-a_frag-cenc-key_rotation.mp4",
+ kMP4Audio, kAppendWholeFile);
+ FakeEncryptedMedia encrypted_media(new RotatingKeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
+ source.EndOfStream();
+
Play();
- ASSERT_TRUE(WaitUntilOnEnded());
- EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO)
- ->audio_decoder_config()
- .samples_per_second());
+ ASSERT_TRUE(WaitUntilOnEnded());
+ source.Shutdown();
+ Stop();
}
-// Same as above but using MediaSource.
-TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_Opus441kHz) {
- MockMediaSource source("sfx-opus-441.webm", kOpusAudioOnlyWebM,
+TEST_F(PipelineIntegrationTest,
+ MAYBE_EME(EncryptedPlayback_MP4_VP9_CENC_VideoOnly)) {
+ MockMediaSource source("bear-320x240-v_frag-vp9-cenc.mp4", kMP4VideoVP9,
kAppendWholeFile);
- EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source));
+ if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kEnableVp9InMp4)) {
+ ASSERT_EQ(ChunkDemuxer::kNotSupported, source.AddId());
+ return;
+ }
+
+ FakeEncryptedMedia encrypted_media(new KeyProvidingApp());
+ EXPECT_EQ(PIPELINE_OK,
+ StartPipelineWithEncryptedMedia(&source, &encrypted_media));
+
source.EndOfStream();
+
Play();
+
ASSERT_TRUE(WaitUntilOnEnded());
source.Shutdown();
Stop();
- EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO)
- ->audio_decoder_config()
- .samples_per_second());
}
-// Ensures audio-only playback with missing or negative timestamps works. Tests
-// the common live-streaming case for chained ogg. See http://crbug.com/396864.
-TEST_F(PipelineIntegrationTest, BasicPlaybackChainedOgg) {
- ASSERT_EQ(PIPELINE_OK, Start("double-sfx.ogg", kUnreliableDuration));
+#endif // BUILDFLAG(USE_PROPRIETARY_CODECS)
+
+// Verify that VP8 video with an inband text track can be played back.
+TEST_F(PipelineIntegrationTest, MAYBE_TEXT(BasicPlayback_VP8_WebVTT_WebM)) {
+ EXPECT_CALL(*this, OnAddTextTrack(_, _));
+ ASSERT_EQ(PIPELINE_OK, Start("bear-vp8-webvtt.webm"));
Play();
ASSERT_TRUE(WaitUntilOnEnded());
- ASSERT_EQ(base::TimeDelta(), demuxer_->GetStartTime());
}
// Ensures audio-video playback with missing or negative timestamps fails softly
@@ -2622,32 +2749,4 @@ TEST_F(PipelineIntegrationTest, BasicPlaybackChainedOggVideo) {
ASSERT_EQ(base::TimeDelta(), demuxer_->GetStartTime());
}
-// Tests that we signal ended even when audio runs longer than video track.
-TEST_F(PipelineIntegrationTest, BasicPlaybackAudioLongerThanVideo) {
- ASSERT_EQ(PIPELINE_OK, Start("bear_audio_longer_than_video.ogv"));
- // Audio track is 2000ms. Video track is 1001ms. Duration should be higher
- // of the two.
- EXPECT_EQ(2000, pipeline_->GetMediaDuration().InMilliseconds());
- Play();
- ASSERT_TRUE(WaitUntilOnEnded());
-}
-
-// Tests that we signal ended even when audio runs shorter than video track.
-TEST_F(PipelineIntegrationTest, BasicPlaybackAudioShorterThanVideo) {
- ASSERT_EQ(PIPELINE_OK, Start("bear_audio_shorter_than_video.ogv"));
- // Audio track is 500ms. Video track is 1001ms. Duration should be higher of
- // the two.
- EXPECT_EQ(1001, pipeline_->GetMediaDuration().InMilliseconds());
- Play();
- ASSERT_TRUE(WaitUntilOnEnded());
-}
-
-TEST_F(PipelineIntegrationTest, BasicPlaybackPositiveStartTime) {
- ASSERT_EQ(PIPELINE_OK, Start("nonzero-start-time.webm"));
- Play();
- ASSERT_TRUE(WaitUntilOnEnded());
- ASSERT_EQ(base::TimeDelta::FromMicroseconds(396000),
- demuxer_->GetStartTime());
-}
-
} // namespace media