OLD | NEW |
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include <stddef.h> | 5 #include <stddef.h> |
6 #include <stdint.h> | 6 #include <stdint.h> |
7 | 7 |
8 #include <memory> | 8 #include <memory> |
9 #include <utility> | 9 #include <utility> |
10 | 10 |
(...skipping 62 matching lines...) |
73 #else | 73 #else |
74 #define MAYBE_EME(test) test | 74 #define MAYBE_EME(test) test |
75 #endif | 75 #endif |
76 | 76 |
77 #if defined(DISABLE_TEXT_TRACK_TESTS) | 77 #if defined(DISABLE_TEXT_TRACK_TESTS) |
78 #define MAYBE_TEXT(test) DISABLED_##test | 78 #define MAYBE_TEXT(test) DISABLED_##test |
79 #else | 79 #else |
80 #define MAYBE_TEXT(test) test | 80 #define MAYBE_TEXT(test) test |
81 #endif | 81 #endif |
82 | 82 |
| 83 #if defined(DISABLE_CLOCKLESS_TESTS) |
| 84 #define MAYBE_CLOCKLESS(test) DISABLED_##test |
| 85 #else |
| 86 #define MAYBE_CLOCKLESS(test) test |
| 87 #endif |
| 88 |
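Note on the new macro: MAYBE_CLOCKLESS mirrors the MAYBE_EME / MAYBE_TEXT pattern above. When DISABLE_CLOCKLESS_TESTS is defined, the test name gets gtest's DISABLED_ prefix, so the test still compiles and registers but is skipped unless --gtest_also_run_disabled_tests is passed. A minimal sketch of the expansion, reusing a test name that appears later in this file:

    // With DISABLE_CLOCKLESS_TESTS defined:
    //   MAYBE_CLOCKLESS(BasicPlaybackOpusOggTrimmingHashed)
    //   expands to DISABLED_BasicPlaybackOpusOggTrimmingHashed.
    TEST_F(PipelineIntegrationTest,
           MAYBE_CLOCKLESS(BasicPlaybackOpusOggTrimmingHashed)) {
      // Body elided; see the full test further down in this file.
    }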
83 using testing::_; | 89 using testing::_; |
84 using testing::AnyNumber; | 90 using testing::AnyNumber; |
85 using testing::AtLeast; | 91 using testing::AtLeast; |
86 using testing::AtMost; | 92 using testing::AtMost; |
87 using testing::SaveArg; | 93 using testing::SaveArg; |
88 | 94 |
89 namespace media { | 95 namespace media { |
90 | 96 |
91 namespace { | |
92 | |
93 const char kSourceId[] = "SourceId"; | 97 const char kSourceId[] = "SourceId"; |
94 | 98 |
95 const char kWebM[] = "video/webm; codecs=\"vp8,vorbis\""; | 99 const char kWebM[] = "video/webm; codecs=\"vp8,vorbis\""; |
96 const char kWebMVP9[] = "video/webm; codecs=\"vp9\""; | 100 const char kWebMVP9[] = "video/webm; codecs=\"vp9\""; |
97 const char kAudioOnlyWebM[] = "video/webm; codecs=\"vorbis\""; | 101 const char kAudioOnlyWebM[] = "video/webm; codecs=\"vorbis\""; |
98 const char kOpusAudioOnlyWebM[] = "video/webm; codecs=\"opus\""; | 102 const char kOpusAudioOnlyWebM[] = "video/webm; codecs=\"opus\""; |
99 const char kVideoOnlyWebM[] = "video/webm; codecs=\"vp8\""; | 103 const char kVideoOnlyWebM[] = "video/webm; codecs=\"vp8\""; |
100 #if BUILDFLAG(USE_PROPRIETARY_CODECS) | 104 #if BUILDFLAG(USE_PROPRIETARY_CODECS) |
101 const char kADTS[] = "audio/aac"; | 105 const char kADTS[] = "audio/aac"; |
102 const char kMP4[] = "video/mp4; codecs=\"avc1.4D4041,mp4a.40.2\""; | 106 const char kMP4[] = "video/mp4; codecs=\"avc1.4D4041,mp4a.40.2\""; |
(...skipping 551 matching lines...) |
654 } | 658 } |
655 void Decode(const scoped_refptr<DecoderBuffer>& buffer, | 659 void Decode(const scoped_refptr<DecoderBuffer>& buffer, |
656 const DecodeCB& decode_cb) override { | 660 const DecodeCB& decode_cb) override { |
657 base::ThreadTaskRunnerHandle::Get()->PostTask( | 661 base::ThreadTaskRunnerHandle::Get()->PostTask( |
658 FROM_HERE, base::Bind(decode_cb, DecodeStatus::DECODE_ERROR)); | 662 FROM_HERE, base::Bind(decode_cb, DecodeStatus::DECODE_ERROR)); |
659 } | 663 } |
660 void Reset(const base::Closure& closure) override { closure.Run(); } | 664 void Reset(const base::Closure& closure) override { closure.Run(); } |
661 bool NeedsBitstreamConversion() const override { return true; } | 665 bool NeedsBitstreamConversion() const override { return true; } |
662 }; | 666 }; |
663 | 667 |
664 } // namespace | |
665 | |
666 // TODO(xhwang): These tests have been disabled for some time as apptests and no | 668 // TODO(xhwang): These tests have been disabled for some time as apptests and no |
667 // longer pass. They need to be reconstituted as shell tests. | 669 // longer pass. They need to be reconstituted as shell tests. |
668 // Currently there are compile issues which must be resolved, | 670 // Currently there are compile issues which must be resolved, |
669 // preferably by eliminating multiple inheritance here which is | 671 // preferably by eliminating multiple inheritance here which is |
670 // banned by Google C++ style. | 672 // banned by Google C++ style. |
671 #if defined(MOJO_RENDERER) && defined(ENABLE_MOJO_PIPELINE_INTEGRATION_TEST) | 673 #if defined(MOJO_RENDERER) && defined(ENABLE_MOJO_PIPELINE_INTEGRATION_TEST) |
672 class PipelineIntegrationTestHost : public service_manager::test::ServiceTest, | 674 class PipelineIntegrationTestHost : public service_manager::test::ServiceTest, |
673 public PipelineIntegrationTestBase { | 675 public PipelineIntegrationTestBase { |
674 public: | 676 public: |
675 PipelineIntegrationTestHost() | 677 PipelineIntegrationTestHost() |
(...skipping 71 matching lines...) |
747 // never called. | 749 // never called. |
748 EXPECT_CALL(*this, OnWaitingForDecryptionKey()).Times(0); | 750 EXPECT_CALL(*this, OnWaitingForDecryptionKey()).Times(0); |
749 pipeline_->SetCdm(encrypted_media->GetCdmContext(), | 751 pipeline_->SetCdm(encrypted_media->GetCdmContext(), |
750 base::Bind(&PipelineIntegrationTest::DecryptorAttached, | 752 base::Bind(&PipelineIntegrationTest::DecryptorAttached, |
751 base::Unretained(this))); | 753 base::Unretained(this))); |
752 } else { | 754 } else { |
753 // Encrypted content not used, so this is never called. | 755 // Encrypted content not used, so this is never called. |
754 EXPECT_CALL(*this, OnWaitingForDecryptionKey()).Times(0); | 756 EXPECT_CALL(*this, OnWaitingForDecryptionKey()).Times(0); |
755 } | 757 } |
756 | 758 |
757 pipeline_->Start(demuxer_.get(), renderer_factory_->CreateRenderer(), this, | 759 pipeline_->Start(demuxer_.get(), CreateRenderer(), this, |
758 base::Bind(&PipelineIntegrationTest::OnStatusCallback, | 760 base::Bind(&PipelineIntegrationTest::OnStatusCallback, |
759 base::Unretained(this))); | 761 base::Unretained(this))); |
760 | 762 |
761 if (encrypted_media) { | 763 if (encrypted_media) { |
762 source->set_encrypted_media_init_data_cb( | 764 source->set_encrypted_media_init_data_cb( |
763 base::Bind(&FakeEncryptedMedia::OnEncryptedMediaInitData, | 765 base::Bind(&FakeEncryptedMedia::OnEncryptedMediaInitData, |
764 base::Unretained(encrypted_media))); | 766 base::Unretained(encrypted_media))); |
765 } | 767 } |
766 base::RunLoop().Run(); | 768 base::RunLoop().Run(); |
767 return pipeline_status_; | 769 return pipeline_status_; |
(...skipping 24 matching lines...) |
792 | 794 |
793 source.EndOfStream(); | 795 source.EndOfStream(); |
794 | 796 |
795 source.Shutdown(); | 797 source.Shutdown(); |
796 Stop(); | 798 Stop(); |
797 return true; | 799 return true; |
798 } | 800 } |
799 }; | 801 }; |
800 | 802 |
801 struct PlaybackTestData { | 803 struct PlaybackTestData { |
802 const PipelineType type; | |
803 const std::string filename; | 804 const std::string filename; |
804 const uint32_t start_time_ms; | 805 const uint32_t start_time_ms; |
805 const uint32_t duration_ms; | 806 const uint32_t duration_ms; |
806 }; | 807 }; |
807 | 808 |
808 struct MSEPlaybackTestData { | 809 struct MSEPlaybackTestData { |
809 const PipelineType type; | |
810 const std::string filename; | 810 const std::string filename; |
811 const std::string mimetype; | 811 const std::string mimetype; |
812 const size_t append_bytes; | 812 const size_t append_bytes; |
813 const uint32_t duration_ms; | 813 const uint32_t duration_ms; |
814 }; | 814 }; |
815 | 815 |
816 // Tells gtest how to print our PlaybackTestData structure. | 816 // Tells gtest how to print our PlaybackTestData structure. |
817 std::ostream& operator<<(std::ostream& os, const PlaybackTestData& data) { | 817 std::ostream& operator<<(std::ostream& os, const PlaybackTestData& data) { |
818 return os << data.filename; | 818 return os << data.filename; |
819 } | 819 } |
820 | 820 |
821 std::ostream& operator<<(std::ostream& os, const MSEPlaybackTestData& data) { | 821 std::ostream& operator<<(std::ostream& os, const MSEPlaybackTestData& data) { |
822 return os << data.filename; | 822 return os << data.filename; |
823 } | 823 } |
824 | 824 |
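For context on these operator<< overloads: gtest's value printer prefers a user-supplied stream operator, so a failing parameterized test reports the filename instead of raw bytes. A standalone illustrative sketch (not part of this CL; the struct and one value are copied from this file, the test name is made up):

    #include <cstdint>
    #include <ostream>
    #include <string>
    #include "testing/gtest/include/gtest/gtest.h"

    struct PlaybackTestData {
      const std::string filename;
      const uint32_t start_time_ms;
      const uint32_t duration_ms;
    };

    // Tells gtest how to print PlaybackTestData.
    std::ostream& operator<<(std::ostream& os, const PlaybackTestData& data) {
      return os << data.filename;
    }

    // PrintToString() routes through the operator<< above.
    TEST(PlaybackTestDataPrinting, UsesStreamOperator) {
      PlaybackTestData data = {"bear-audio-lc-aac.aac", 0, 2858};
      EXPECT_EQ("bear-audio-lc-aac.aac", testing::PrintToString(data));
    }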
825 class BasicPlaybackTest : public PipelineIntegrationTest, | 825 class BasicPlaybackTest : public PipelineIntegrationTest, |
826 public testing::WithParamInterface<PlaybackTestData> { | 826 public testing::WithParamInterface<PlaybackTestData> { |
827 public: | |
828 BasicPlaybackTest() { | |
829 #if BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
830 if (GetParam().type == PipelineType::MediaRemoting) | |
831 SetUpRemotingPipeline(); | |
832 #endif // BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
833 } | |
834 }; | 827 }; |
835 | 828 |
836 class BasicMSEPlaybackTest | 829 class BasicMSEPlaybackTest |
837 : public ::testing::WithParamInterface<MSEPlaybackTestData>, | 830 : public ::testing::WithParamInterface<MSEPlaybackTestData>, |
838 public PipelineIntegrationTest { | 831 public PipelineIntegrationTest {}; |
839 public: | |
840 BasicMSEPlaybackTest() { | |
841 #if BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
842 if (GetParam().type == PipelineType::MediaRemoting) | |
843 SetUpRemotingPipeline(); | |
844 #endif // BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
845 } | |
846 }; | |
847 | 832 |
848 TEST_P(BasicPlaybackTest, PlayToEnd) { | 833 TEST_P(BasicPlaybackTest, PlayToEnd) { |
849 PlaybackTestData data = GetParam(); | 834 PlaybackTestData data = GetParam(); |
| 835 |
850 ASSERT_EQ(PIPELINE_OK, | 836 ASSERT_EQ(PIPELINE_OK, |
851 Start(data.filename, kClockless | kUnreliableDuration)); | 837 Start(data.filename, kClockless | kUnreliableDuration)); |
852 EXPECT_EQ(data.start_time_ms, demuxer_->GetStartTime().InMilliseconds()); | 838 EXPECT_EQ(data.start_time_ms, demuxer_->GetStartTime().InMilliseconds()); |
853 EXPECT_EQ(data.duration_ms, pipeline_->GetMediaDuration().InMilliseconds()); | 839 EXPECT_EQ(data.duration_ms, pipeline_->GetMediaDuration().InMilliseconds()); |
854 | 840 |
855 Play(); | 841 Play(); |
856 ASSERT_TRUE(WaitUntilOnEnded()); | 842 ASSERT_TRUE(WaitUntilOnEnded()); |
857 } | 843 } |
858 | 844 |
859 TEST_P(BasicMSEPlaybackTest, PlayToEnd) { | 845 TEST_P(BasicMSEPlaybackTest, PlayToEnd) { |
860 MSEPlaybackTestData data = GetParam(); | 846 MSEPlaybackTestData data = GetParam(); |
| 847 |
861 MockMediaSource source(data.filename, data.mimetype, data.append_bytes); | 848 MockMediaSource source(data.filename, data.mimetype, data.append_bytes); |
862 // TODO -- ADD uint8_t test_type to StartWithMSE and pass clockless flags | 849 // TODO -- ADD uint8_t test_type to StartWithMSE and pass clockless flags |
863 ASSERT_EQ(PIPELINE_OK, | 850 ASSERT_EQ(PIPELINE_OK, |
864 StartPipelineWithMediaSource(&source, kClockless, nullptr)); | 851 StartPipelineWithMediaSource(&source, kClockless, nullptr)); |
865 source.EndOfStream(); | 852 source.EndOfStream(); |
866 | 853 |
867 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 854 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
868 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 855 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
869 EXPECT_EQ(data.duration_ms, | 856 EXPECT_EQ(data.duration_ms, |
870 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 857 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
871 | 858 |
872 Play(); | 859 Play(); |
873 | 860 |
874 ASSERT_TRUE(WaitUntilOnEnded()); | 861 ASSERT_TRUE(WaitUntilOnEnded()); |
875 | 862 |
876 EXPECT_TRUE(demuxer_->GetTimelineOffset().is_null()); | 863 EXPECT_TRUE(demuxer_->GetTimelineOffset().is_null()); |
877 source.Shutdown(); | 864 source.Shutdown(); |
878 Stop(); | 865 Stop(); |
879 } | 866 } |
880 | 867 |
881 #if BUILDFLAG(USE_PROPRIETARY_CODECS) | 868 #if BUILDFLAG(USE_PROPRIETARY_CODECS) |
882 | 869 |
883 // Any new/changed entries should be made for both the ::Media and | |
884 // ::MediaRemoting types. If you don't think something applies, please contact | |
885 // one of the media/remoting/OWNERS. | |
886 const PlaybackTestData kADTSTests[] = { | 870 const PlaybackTestData kADTSTests[] = { |
887 {PipelineType::Media, "bear-audio-main-aac.aac", 0, 2724}, | 871 {"bear-audio-main-aac.aac", 0, 2724}, |
888 {PipelineType::Media, "bear-audio-lc-aac.aac", 0, 2858}, | 872 {"bear-audio-lc-aac.aac", 0, 2858}, |
889 {PipelineType::Media, "bear-audio-implicit-he-aac-v1.aac", 0, 2812}, | 873 {"bear-audio-implicit-he-aac-v1.aac", 0, 2812}, |
890 {PipelineType::Media, "bear-audio-implicit-he-aac-v2.aac", 0, 3047}, | 874 {"bear-audio-implicit-he-aac-v2.aac", 0, 3047}, |
891 #if BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
892 {PipelineType::MediaRemoting, "bear-audio-main-aac.aac", 0, 2724}, | |
893 {PipelineType::MediaRemoting, "bear-audio-lc-aac.aac", 0, 2858}, | |
894 {PipelineType::MediaRemoting, "bear-audio-implicit-he-aac-v1.aac", 0, 2812}, | |
895 {PipelineType::MediaRemoting, "bear-audio-implicit-he-aac-v2.aac", 0, 3047}, | |
896 #endif // BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
897 }; | 875 }; |
898 | 876 |
899 // TODO(chcunningham): Migrate other basic playback tests to TEST_P. | 877 // TODO(chcunningham): Migrate other basic playback tests to TEST_P. |
900 INSTANTIATE_TEST_CASE_P(ProprietaryCodecs, | 878 INSTANTIATE_TEST_CASE_P(ProprietaryCodecs, |
901 BasicPlaybackTest, | 879 BasicPlaybackTest, |
902 testing::ValuesIn(kADTSTests)); | 880 testing::ValuesIn(kADTSTests)); |
903 | 881 |
904 // Any new/changed entries should be made for both the ::Media and | |
905 // ::MediaRemoting types. If you don't think something applies, please contact | |
906 // one of the media/remoting/OWNERS. | |
907 const MSEPlaybackTestData kMediaSourceADTSTests[] = { | 882 const MSEPlaybackTestData kMediaSourceADTSTests[] = { |
908 {PipelineType::Media, "bear-audio-main-aac.aac", kADTS, kAppendWholeFile, | 883 {"bear-audio-main-aac.aac", kADTS, kAppendWholeFile, 2773}, |
909 2773}, | 884 {"bear-audio-lc-aac.aac", kADTS, kAppendWholeFile, 2794}, |
910 {PipelineType::Media, "bear-audio-lc-aac.aac", kADTS, kAppendWholeFile, | 885 {"bear-audio-implicit-he-aac-v1.aac", kADTS, kAppendWholeFile, 2858}, |
911 2794}, | 886 {"bear-audio-implicit-he-aac-v2.aac", kADTS, kAppendWholeFile, 2901}, |
912 {PipelineType::Media, "bear-audio-implicit-he-aac-v1.aac", kADTS, | |
913 kAppendWholeFile, 2858}, | |
914 {PipelineType::Media, "bear-audio-implicit-he-aac-v2.aac", kADTS, | |
915 kAppendWholeFile, 2901}, | |
916 #if BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
917 {PipelineType::MediaRemoting, "bear-audio-main-aac.aac", kADTS, | |
918 kAppendWholeFile, 2773}, | |
919 {PipelineType::MediaRemoting, "bear-audio-lc-aac.aac", kADTS, | |
920 kAppendWholeFile, 2794}, | |
921 {PipelineType::MediaRemoting, "bear-audio-implicit-he-aac-v1.aac", kADTS, | |
922 kAppendWholeFile, 2858}, | |
923 {PipelineType::MediaRemoting, "bear-audio-implicit-he-aac-v2.aac", kADTS, | |
924 kAppendWholeFile, 2901}, | |
925 #endif // BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
926 }; | 887 }; |
927 | 888 |
928 // TODO(chcunningham): Migrate other basic MSE playback tests to TEST_P. | 889 // TODO(chcunningham): Migrate other basic MSE playback tests to TEST_P. |
929 INSTANTIATE_TEST_CASE_P(ProprietaryCodecs, | 890 INSTANTIATE_TEST_CASE_P(ProprietaryCodecs, |
930 BasicMSEPlaybackTest, | 891 BasicMSEPlaybackTest, |
931 testing::ValuesIn(kMediaSourceADTSTests)); | 892 testing::ValuesIn(kMediaSourceADTSTests)); |
932 | 893 |
933 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) | 894 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) |
934 | 895 |
935 struct IntegrationTestData { | 896 TEST_F(PipelineIntegrationTest, BasicPlayback) { |
936 const PipelineType type; | |
937 }; | |
938 | |
939 // Tells gtest how to print our PlaybackTestData structure. | |
940 std::ostream& operator<<(std::ostream& os, const IntegrationTestData& data) { | |
941 return os << (data.type == PipelineType::Media ? "Media" : "MediaRemoting"); | |
942 } | |
943 | |
944 // These tests are used to test both media pipeline and media remoting pipeline. | |
945 class CommonPipelineIntegrationTest | |
946 : public PipelineIntegrationTest, | |
947 public testing::WithParamInterface<IntegrationTestData> { | |
948 public: | |
949 CommonPipelineIntegrationTest() { | |
950 #if BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
951 if (GetParam().type == PipelineType::MediaRemoting) | |
952 SetUpRemotingPipeline(); | |
953 #endif // BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
954 } | |
955 }; | |
956 | |
957 TEST_P(CommonPipelineIntegrationTest, BasicPlayback) { | |
958 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); | 897 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); |
959 | 898 |
960 Play(); | 899 Play(); |
961 | 900 |
962 ASSERT_TRUE(WaitUntilOnEnded()); | 901 ASSERT_TRUE(WaitUntilOnEnded()); |
963 } | 902 } |
964 | 903 |
965 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackOpusOgg) { | 904 TEST_F(PipelineIntegrationTest, BasicPlaybackOpusOgg) { |
966 ASSERT_EQ(PIPELINE_OK, Start("bear-opus.ogg")); | 905 ASSERT_EQ(PIPELINE_OK, Start("bear-opus.ogg")); |
967 | 906 |
968 Play(); | 907 Play(); |
969 | 908 |
970 ASSERT_TRUE(WaitUntilOnEnded()); | 909 ASSERT_TRUE(WaitUntilOnEnded()); |
971 } | 910 } |
972 | 911 |
973 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHashed) { | 912 TEST_F(PipelineIntegrationTest, BasicPlaybackHashed) { |
974 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); | 913 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); |
975 | 914 |
976 Play(); | 915 Play(); |
977 | 916 |
978 ASSERT_TRUE(WaitUntilOnEnded()); | 917 ASSERT_TRUE(WaitUntilOnEnded()); |
979 | 918 |
980 EXPECT_HASH_EQ("f0be120a90a811506777c99a2cdf7cc1", GetVideoHash()); | 919 EXPECT_HASH_EQ("f0be120a90a811506777c99a2cdf7cc1", GetVideoHash()); |
981 EXPECT_HASH_EQ("-3.59,-2.06,-0.43,2.15,0.77,-0.95,", GetAudioHash()); | 920 EXPECT_HASH_EQ("-3.59,-2.06,-0.43,2.15,0.77,-0.95,", GetAudioHash()); |
982 EXPECT_TRUE(demuxer_->GetTimelineOffset().is_null()); | 921 EXPECT_TRUE(demuxer_->GetTimelineOffset().is_null()); |
983 } | 922 } |
984 | 923 |
985 base::TimeDelta TimestampMs(int milliseconds) { | 924 base::TimeDelta TimestampMs(int milliseconds) { |
986 return base::TimeDelta::FromMilliseconds(milliseconds); | 925 return base::TimeDelta::FromMilliseconds(milliseconds); |
987 } | 926 } |
988 | 927 |
989 TEST_P(CommonPipelineIntegrationTest, | 928 TEST_F(PipelineIntegrationTest, PlaybackWithAudioTrackDisabledThenEnabled) { |
990 PlaybackWithAudioTrackDisabledThenEnabled) { | |
991 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); | 929 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); |
992 | 930 |
993 // Disable audio. | 931 // Disable audio. |
994 std::vector<MediaTrack::Id> empty; | 932 std::vector<MediaTrack::Id> empty; |
995 pipeline_->OnEnabledAudioTracksChanged(empty); | 933 pipeline_->OnEnabledAudioTracksChanged(empty); |
996 base::RunLoop().RunUntilIdle(); | 934 base::RunLoop().RunUntilIdle(); |
997 | 935 |
998 // Seek to flush the pipeline and ensure there's no prerolled audio data. | 936 // Seek to flush the pipeline and ensure there's no prerolled audio data. |
999 ASSERT_TRUE(Seek(base::TimeDelta())); | 937 ASSERT_TRUE(Seek(base::TimeDelta())); |
1000 | 938 |
(...skipping 13 matching lines...) |
1014 | 952 |
1015 // Restart playback from 500ms position. | 953 // Restart playback from 500ms position. |
1016 ASSERT_TRUE(Seek(k500ms)); | 954 ASSERT_TRUE(Seek(k500ms)); |
1017 Play(); | 955 Play(); |
1018 ASSERT_TRUE(WaitUntilOnEnded()); | 956 ASSERT_TRUE(WaitUntilOnEnded()); |
1019 | 957 |
1020 // Verify that audio has been playing after being enabled. | 958 // Verify that audio has been playing after being enabled. |
1021 EXPECT_HASH_EQ("-1.53,0.21,1.23,1.56,-0.34,-0.94,", GetAudioHash()); | 959 EXPECT_HASH_EQ("-1.53,0.21,1.23,1.56,-0.34,-0.94,", GetAudioHash()); |
1022 } | 960 } |
1023 | 961 |
1024 TEST_P(CommonPipelineIntegrationTest, | 962 TEST_F(PipelineIntegrationTest, PlaybackWithVideoTrackDisabledThenEnabled) { |
1025 PlaybackWithVideoTrackDisabledThenEnabled) { | |
1026 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); | 963 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); |
1027 | 964 |
1028 // Disable video. | 965 // Disable video. |
1029 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); | 966 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); |
1030 base::RunLoop().RunUntilIdle(); | 967 base::RunLoop().RunUntilIdle(); |
1031 | 968 |
1032 // Seek to flush the pipeline and ensure there's no prerolled video data. | 969 // Seek to flush the pipeline and ensure there's no prerolled video data. |
1033 ASSERT_TRUE(Seek(base::TimeDelta())); | 970 ASSERT_TRUE(Seek(base::TimeDelta())); |
1034 | 971 |
1035 // Reset the video hash in case some of the prerolled video frames have been | 972 // Reset the video hash in case some of the prerolled video frames have been |
(...skipping 19 matching lines...) |
1055 | 992 |
1056 // Restart playback from 500ms position. | 993 // Restart playback from 500ms position. |
1057 ASSERT_TRUE(Seek(k500ms)); | 994 ASSERT_TRUE(Seek(k500ms)); |
1058 Play(); | 995 Play(); |
1059 ASSERT_TRUE(WaitUntilOnEnded()); | 996 ASSERT_TRUE(WaitUntilOnEnded()); |
1060 | 997 |
1061 // Verify that video has been rendered after being enabled. | 998 // Verify that video has been rendered after being enabled. |
1062 EXPECT_HASH_EQ("fd59357dfd9c144ab4fb8181b2de32c3", GetVideoHash()); | 999 EXPECT_HASH_EQ("fd59357dfd9c144ab4fb8181b2de32c3", GetVideoHash()); |
1063 } | 1000 } |
1064 | 1001 |
1065 TEST_P(CommonPipelineIntegrationTest, TrackStatusChangesBeforePipelineStarted) { | 1002 TEST_F(PipelineIntegrationTest, TrackStatusChangesBeforePipelineStarted) { |
1066 std::vector<MediaTrack::Id> empty_track_ids; | 1003 std::vector<MediaTrack::Id> empty_track_ids; |
1067 pipeline_->OnEnabledAudioTracksChanged(empty_track_ids); | 1004 pipeline_->OnEnabledAudioTracksChanged(empty_track_ids); |
1068 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); | 1005 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); |
1069 } | 1006 } |
1070 | 1007 |
1071 TEST_P(CommonPipelineIntegrationTest, TrackStatusChangesAfterPipelineEnded) { | 1008 TEST_F(PipelineIntegrationTest, TrackStatusChangesAfterPipelineEnded) { |
1072 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); | 1009 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); |
1073 Play(); | 1010 Play(); |
1074 ASSERT_TRUE(WaitUntilOnEnded()); | 1011 ASSERT_TRUE(WaitUntilOnEnded()); |
1075 std::vector<MediaTrack::Id> track_ids; | 1012 std::vector<MediaTrack::Id> track_ids; |
1076 // Disable audio track. | 1013 // Disable audio track. |
1077 pipeline_->OnEnabledAudioTracksChanged(track_ids); | 1014 pipeline_->OnEnabledAudioTracksChanged(track_ids); |
1078 // Re-enable audio track. | 1015 // Re-enable audio track. |
1079 track_ids.push_back("2"); | 1016 track_ids.push_back("2"); |
1080 pipeline_->OnEnabledAudioTracksChanged(track_ids); | 1017 pipeline_->OnEnabledAudioTracksChanged(track_ids); |
1081 // Disable video track. | 1018 // Disable video track. |
1082 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); | 1019 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); |
1083 // Re-enable video track. | 1020 // Re-enable video track. |
1084 pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1")); | 1021 pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1")); |
1085 } | 1022 } |
1086 | 1023 |
1087 TEST_P(CommonPipelineIntegrationTest, TrackStatusChangesWhileSuspended) { | 1024 TEST_F(PipelineIntegrationTest, TrackStatusChangesWhileSuspended) { |
1088 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); | 1025 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); |
1089 Play(); | 1026 Play(); |
1090 | 1027 |
1091 ASSERT_TRUE(Suspend()); | 1028 ASSERT_TRUE(Suspend()); |
1092 | 1029 |
1093 // These get triggered every time playback is resumed. | 1030 // These get triggered every time playback is resumed. |
1094 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))) | 1031 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))) |
1095 .Times(AnyNumber()); | 1032 .Times(AnyNumber()); |
1096 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber()); | 1033 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber()); |
1097 | 1034 |
(...skipping 17 matching lines...) |
1115 ASSERT_TRUE(Resume(TimestampMs(300))); | 1052 ASSERT_TRUE(Resume(TimestampMs(300))); |
1116 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(400))); | 1053 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(400))); |
1117 ASSERT_TRUE(Suspend()); | 1054 ASSERT_TRUE(Suspend()); |
1118 | 1055 |
1119 // Re-enable video track. | 1056 // Re-enable video track. |
1120 pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1")); | 1057 pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1")); |
1121 ASSERT_TRUE(Resume(TimestampMs(400))); | 1058 ASSERT_TRUE(Resume(TimestampMs(400))); |
1122 ASSERT_TRUE(WaitUntilOnEnded()); | 1059 ASSERT_TRUE(WaitUntilOnEnded()); |
1123 } | 1060 } |
1124 | 1061 |
1125 TEST_P(CommonPipelineIntegrationTest, PipelineStoppedWhileAudioRestartPending) { | 1062 TEST_F(PipelineIntegrationTest, ReinitRenderersWhileAudioTrackIsDisabled) { |
1126 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); | 1063 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); |
1127 Play(); | 1064 Play(); |
1128 | 1065 |
| 1066 // These get triggered every time playback is resumed. |
| 1067 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))) |
| 1068 .Times(AnyNumber()); |
| 1069 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber()); |
| 1070 |
| 1071 // Disable the audio track. |
| 1072 std::vector<MediaTrack::Id> track_ids; |
| 1073 pipeline_->OnEnabledAudioTracksChanged(track_ids); |
| 1074 // pipeline.Suspend() releases renderers and pipeline.Resume() recreates and |
| 1075 // reinitializes renderers while the audio track is disabled. |
| 1076 ASSERT_TRUE(Suspend()); |
| 1077 ASSERT_TRUE(Resume(TimestampMs(100))); |
| 1078 // Now re-enable the audio track, playback should continue successfully. |
| 1079 EXPECT_CALL(*this, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH)).Times(1); |
| 1080 track_ids.push_back("2"); |
| 1081 pipeline_->OnEnabledAudioTracksChanged(track_ids); |
| 1082 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200))); |
| 1083 |
| 1084 Stop(); |
| 1085 } |
| 1086 |
| 1087 TEST_F(PipelineIntegrationTest, ReinitRenderersWhileVideoTrackIsDisabled) { |
| 1088 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); |
| 1089 Play(); |
| 1090 |
| 1091 // These get triggered every time playback is resumed. |
| 1092 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))) |
| 1093 .Times(AnyNumber()); |
| 1094 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber()); |
| 1095 |
| 1096 // Disable the video track. |
| 1097 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); |
| 1098 // pipeline.Suspend() releases renderers and pipeline.Resume() recreates and |
| 1099 // reinitializes renderers while the video track is disabled. |
| 1100 ASSERT_TRUE(Suspend()); |
| 1101 ASSERT_TRUE(Resume(TimestampMs(100))); |
| 1102 // Now re-enable the video track, playback should continue successfully. |
| 1103 pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1")); |
| 1104 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200))); |
| 1105 |
| 1106 Stop(); |
| 1107 } |
| 1108 |
| 1109 TEST_F(PipelineIntegrationTest, PipelineStoppedWhileAudioRestartPending) { |
| 1110 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); |
| 1111 Play(); |
| 1112 |
1129 // Disable audio track first, to re-enable it later and stop the pipeline | 1113 // Disable audio track first, to re-enable it later and stop the pipeline |
1130 // (which destroys the media renderer) while audio restart is pending. | 1114 // (which destroys the media renderer) while audio restart is pending. |
1131 std::vector<MediaTrack::Id> track_ids; | 1115 std::vector<MediaTrack::Id> track_ids; |
1132 pipeline_->OnEnabledAudioTracksChanged(track_ids); | 1116 pipeline_->OnEnabledAudioTracksChanged(track_ids); |
1133 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200))); | 1117 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200))); |
1134 | 1118 |
1135 track_ids.push_back("2"); | 1119 track_ids.push_back("2"); |
1136 pipeline_->OnEnabledAudioTracksChanged(track_ids); | 1120 pipeline_->OnEnabledAudioTracksChanged(track_ids); |
1137 Stop(); | 1121 Stop(); |
1138 } | 1122 } |
1139 | 1123 |
1140 TEST_P(CommonPipelineIntegrationTest, PipelineStoppedWhileVideoRestartPending) { | 1124 TEST_F(PipelineIntegrationTest, PipelineStoppedWhileVideoRestartPending) { |
1141 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); | 1125 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); |
1142 Play(); | 1126 Play(); |
1143 | 1127 |
1144 // Disable video track first, to re-enable it later and stop the pipeline | 1128 // Disable video track first, to re-enable it later and stop the pipeline |
1145 // (which destroys the media renderer) while video restart is pending. | 1129 // (which destroys the media renderer) while video restart is pending. |
1146 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); | 1130 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); |
1147 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200))); | 1131 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200))); |
1148 | 1132 |
1149 pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1")); | 1133 pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1")); |
1150 Stop(); | 1134 Stop(); |
1151 } | 1135 } |
1152 | 1136 |
1153 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackOpusOggTrimmingHashed) { | 1137 TEST_F(PipelineIntegrationTest, |
1154 #if defined(DISABLE_CLOCKLESS_TESTS) | 1138 MAYBE_CLOCKLESS(BasicPlaybackOpusOggTrimmingHashed)) { |
1155 return; | |
1156 #endif // defined(DISABLE_CLOCKLESS_TESTS) | |
1157 | |
1158 ASSERT_EQ(PIPELINE_OK, | 1139 ASSERT_EQ(PIPELINE_OK, |
1159 Start("opus-trimming-test.webm", kHashed | kClockless)); | 1140 Start("opus-trimming-test.webm", kHashed | kClockless)); |
1160 | 1141 |
1161 Play(); | 1142 Play(); |
1162 | 1143 |
1163 ASSERT_TRUE(WaitUntilOnEnded()); | 1144 ASSERT_TRUE(WaitUntilOnEnded()); |
1164 EXPECT_HASH_EQ(kOpusEndTrimmingHash_1, GetAudioHash()); | 1145 EXPECT_HASH_EQ(kOpusEndTrimmingHash_1, GetAudioHash()); |
1165 | 1146 |
1166 // Seek within the pre-skip section, this should not cause a beep. | 1147 // Seek within the pre-skip section, this should not cause a beep. |
1167 ASSERT_TRUE(Seek(base::TimeDelta::FromSeconds(1))); | 1148 ASSERT_TRUE(Seek(base::TimeDelta::FromSeconds(1))); |
1168 Play(); | 1149 Play(); |
1169 ASSERT_TRUE(WaitUntilOnEnded()); | 1150 ASSERT_TRUE(WaitUntilOnEnded()); |
1170 EXPECT_HASH_EQ(kOpusEndTrimmingHash_2, GetAudioHash()); | 1151 EXPECT_HASH_EQ(kOpusEndTrimmingHash_2, GetAudioHash()); |
1171 | 1152 |
1172 // Seek somewhere outside of the pre-skip / end-trim section, demuxer should | 1153 // Seek somewhere outside of the pre-skip / end-trim section, demuxer should |
1173 // correctly preroll enough to accurately decode this segment. | 1154 // correctly preroll enough to accurately decode this segment. |
1174 ASSERT_TRUE(Seek(base::TimeDelta::FromMilliseconds(6360))); | 1155 ASSERT_TRUE(Seek(base::TimeDelta::FromMilliseconds(6360))); |
1175 Play(); | 1156 Play(); |
1176 ASSERT_TRUE(WaitUntilOnEnded()); | 1157 ASSERT_TRUE(WaitUntilOnEnded()); |
1177 EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash()); | 1158 EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash()); |
1178 } | 1159 } |
1179 | 1160 |
1180 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackOpusWebmTrimmingHashed) { | 1161 TEST_F(PipelineIntegrationTest, |
1181 #if defined(DISABLE_CLOCKLESS_TESTS) | 1162 MAYBE_CLOCKLESS(BasicPlaybackOpusWebmTrimmingHashed)) { |
1182 return; | |
1183 #endif // defined(DISABLE_CLOCKLESS_TESTS) | |
1184 | |
1185 ASSERT_EQ(PIPELINE_OK, | 1163 ASSERT_EQ(PIPELINE_OK, |
1186 Start("opus-trimming-test.webm", kHashed | kClockless)); | 1164 Start("opus-trimming-test.webm", kHashed | kClockless)); |
1187 | 1165 |
1188 Play(); | 1166 Play(); |
1189 | 1167 |
1190 ASSERT_TRUE(WaitUntilOnEnded()); | 1168 ASSERT_TRUE(WaitUntilOnEnded()); |
1191 EXPECT_HASH_EQ(kOpusEndTrimmingHash_1, GetAudioHash()); | 1169 EXPECT_HASH_EQ(kOpusEndTrimmingHash_1, GetAudioHash()); |
1192 | 1170 |
1193 // Seek within the pre-skip section, this should not cause a beep. | 1171 // Seek within the pre-skip section, this should not cause a beep. |
1194 ASSERT_TRUE(Seek(base::TimeDelta::FromSeconds(1))); | 1172 ASSERT_TRUE(Seek(base::TimeDelta::FromSeconds(1))); |
1195 Play(); | 1173 Play(); |
1196 ASSERT_TRUE(WaitUntilOnEnded()); | 1174 ASSERT_TRUE(WaitUntilOnEnded()); |
1197 EXPECT_HASH_EQ(kOpusEndTrimmingHash_2, GetAudioHash()); | 1175 EXPECT_HASH_EQ(kOpusEndTrimmingHash_2, GetAudioHash()); |
1198 | 1176 |
1199 // Seek somewhere outside of the pre-skip / end-trim section, demuxer should | 1177 // Seek somewhere outside of the pre-skip / end-trim section, demuxer should |
1200 // correctly preroll enough to accurately decode this segment. | 1178 // correctly preroll enough to accurately decode this segment. |
1201 ASSERT_TRUE(Seek(base::TimeDelta::FromMilliseconds(6360))); | 1179 ASSERT_TRUE(Seek(base::TimeDelta::FromMilliseconds(6360))); |
1202 Play(); | 1180 Play(); |
1203 ASSERT_TRUE(WaitUntilOnEnded()); | 1181 ASSERT_TRUE(WaitUntilOnEnded()); |
1204 EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash()); | 1182 EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash()); |
1205 } | 1183 } |
1206 | 1184 |
1207 TEST_P(CommonPipelineIntegrationTest, | 1185 TEST_F(PipelineIntegrationTest, |
1208 BasicPlaybackOpusWebmTrimmingHashed_MediaSource) { | 1186 MAYBE_CLOCKLESS(BasicPlaybackOpusWebmTrimmingHashed_MediaSource)) { |
1209 #if defined(DISABLE_CLOCKLESS_TESTS) | |
1210 return; | |
1211 #endif // defined(DISABLE_CLOCKLESS_TESTS) | |
1212 | |
1213 MockMediaSource source("opus-trimming-test.webm", kOpusAudioOnlyWebM, | 1187 MockMediaSource source("opus-trimming-test.webm", kOpusAudioOnlyWebM, |
1214 kAppendWholeFile); | 1188 kAppendWholeFile); |
1215 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource( | 1189 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource( |
1216 &source, kClockless | kHashed, nullptr)); | 1190 &source, kClockless | kHashed, nullptr)); |
1217 source.EndOfStream(); | 1191 source.EndOfStream(); |
1218 | 1192 |
1219 Play(); | 1193 Play(); |
1220 | 1194 |
1221 ASSERT_TRUE(WaitUntilOnEnded()); | 1195 ASSERT_TRUE(WaitUntilOnEnded()); |
1222 EXPECT_HASH_EQ(kOpusEndTrimmingHash_1, GetAudioHash()); | 1196 EXPECT_HASH_EQ(kOpusEndTrimmingHash_1, GetAudioHash()); |
1223 | 1197 |
1224 // Seek within the pre-skip section, this should not cause a beep. | 1198 // Seek within the pre-skip section, this should not cause a beep. |
1225 base::TimeDelta seek_time = base::TimeDelta::FromSeconds(1); | 1199 base::TimeDelta seek_time = base::TimeDelta::FromSeconds(1); |
1226 source.Seek(seek_time); | 1200 source.Seek(seek_time); |
1227 ASSERT_TRUE(Seek(seek_time)); | 1201 ASSERT_TRUE(Seek(seek_time)); |
1228 Play(); | 1202 Play(); |
1229 ASSERT_TRUE(WaitUntilOnEnded()); | 1203 ASSERT_TRUE(WaitUntilOnEnded()); |
1230 EXPECT_HASH_EQ(kOpusEndTrimmingHash_2, GetAudioHash()); | 1204 EXPECT_HASH_EQ(kOpusEndTrimmingHash_2, GetAudioHash()); |
1231 | 1205 |
1232 // Seek somewhere outside of the pre-skip / end-trim section, demuxer should | 1206 // Seek somewhere outside of the pre-skip / end-trim section, demuxer should |
1233 // correctly preroll enough to accurately decode this segment. | 1207 // correctly preroll enough to accurately decode this segment. |
1234 seek_time = base::TimeDelta::FromMilliseconds(6360); | 1208 seek_time = base::TimeDelta::FromMilliseconds(6360); |
1235 source.Seek(seek_time); | 1209 source.Seek(seek_time); |
1236 ASSERT_TRUE(Seek(seek_time)); | 1210 ASSERT_TRUE(Seek(seek_time)); |
1237 Play(); | 1211 Play(); |
1238 ASSERT_TRUE(WaitUntilOnEnded()); | 1212 ASSERT_TRUE(WaitUntilOnEnded()); |
1239 EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash()); | 1213 EXPECT_HASH_EQ(kOpusEndTrimmingHash_3, GetAudioHash()); |
1240 } | 1214 } |
1241 | 1215 |
1242 TEST_P(CommonPipelineIntegrationTest, | 1216 TEST_F(PipelineIntegrationTest, |
1243 BasicPlaybackOpusPrerollExceedsCodecDelay) { | 1217 MAYBE_CLOCKLESS(BasicPlaybackOpusPrerollExceedsCodecDelay)) { |
1244 #if defined(DISABLE_CLOCKLESS_TESTS) | |
1245 return; | |
1246 #endif // defined(DISABLE_CLOCKLESS_TESTS) | |
1247 | |
1248 ASSERT_EQ(PIPELINE_OK, Start("bear-opus.webm", kHashed | kClockless)); | 1218 ASSERT_EQ(PIPELINE_OK, Start("bear-opus.webm", kHashed | kClockless)); |
1249 | 1219 |
1250 AudioDecoderConfig config = | 1220 AudioDecoderConfig config = |
1251 demuxer_->GetFirstStream(DemuxerStream::AUDIO)->audio_decoder_config(); | 1221 demuxer_->GetFirstStream(DemuxerStream::AUDIO)->audio_decoder_config(); |
1252 | 1222 |
1253 // Verify that this file's preroll is not eclipsed by the codec delay so we | 1223 // Verify that this file's preroll is not eclipsed by the codec delay so we |
1254 // can detect when preroll is not properly performed. | 1224 // can detect when preroll is not properly performed. |
1255 base::TimeDelta codec_delay = base::TimeDelta::FromSecondsD( | 1225 base::TimeDelta codec_delay = base::TimeDelta::FromSecondsD( |
1256 static_cast<double>(config.codec_delay()) / config.samples_per_second()); | 1226 static_cast<double>(config.codec_delay()) / config.samples_per_second()); |
1257 ASSERT_GT(config.seek_preroll(), codec_delay); | 1227 ASSERT_GT(config.seek_preroll(), codec_delay); |
1258 | 1228 |
1259 Play(); | 1229 Play(); |
1260 ASSERT_TRUE(WaitUntilOnEnded()); | 1230 ASSERT_TRUE(WaitUntilOnEnded()); |
1261 EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_1, GetAudioHash()); | 1231 EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_1, GetAudioHash()); |
1262 | 1232 |
1263 // Seek halfway through the file to invoke seek preroll. | 1233 // Seek halfway through the file to invoke seek preroll. |
1264 ASSERT_TRUE(Seek(base::TimeDelta::FromSecondsD(1.414))); | 1234 ASSERT_TRUE(Seek(base::TimeDelta::FromSecondsD(1.414))); |
1265 Play(); | 1235 Play(); |
1266 ASSERT_TRUE(WaitUntilOnEnded()); | 1236 ASSERT_TRUE(WaitUntilOnEnded()); |
1267 EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_2, GetAudioHash()); | 1237 EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_2, GetAudioHash()); |
1268 } | 1238 } |
1269 | 1239 |
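The codec_delay() conversion in the test above (and the MediaSource variant below) is just frames divided by the sample rate. A hedged sketch with illustrative numbers, not values read from bear-opus.webm, assuming base/time/time.h as used elsewhere in this file:

    #include "base/time/time.h"

    // Opus signals its pre-skip/codec delay in 48 kHz frames.
    const int kCodecDelayFrames = 312;    // hypothetical value
    const int kSamplesPerSecond = 48000;
    base::TimeDelta codec_delay = base::TimeDelta::FromSecondsD(
        static_cast<double>(kCodecDelayFrames) / kSamplesPerSecond);
    // 312 / 48000 = 0.0065 s (6.5 ms); ASSERT_GT(config.seek_preroll(),
    // codec_delay) then requires the file's seek preroll to exceed that.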
1270 TEST_P(CommonPipelineIntegrationTest, | 1240 TEST_F(PipelineIntegrationTest, |
1271 BasicPlaybackOpusPrerollExceedsCodecDelay_MediaSource) { | 1241 MAYBE_CLOCKLESS(BasicPlaybackOpusPrerollExceedsCodecDelay_MediaSource)) { |
1272 #if defined(DISABLE_CLOCKLESS_TESTS) | |
1273 return; | |
1274 #endif // defined(DISABLE_CLOCKLESS_TESTS) | |
1275 | |
1276 MockMediaSource source("bear-opus.webm", kOpusAudioOnlyWebM, | 1242 MockMediaSource source("bear-opus.webm", kOpusAudioOnlyWebM, |
1277 kAppendWholeFile); | 1243 kAppendWholeFile); |
1278 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource( | 1244 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource( |
1279 &source, kClockless | kHashed, nullptr)); | 1245 &source, kClockless | kHashed, nullptr)); |
1280 source.EndOfStream(); | 1246 source.EndOfStream(); |
1281 | 1247 |
1282 AudioDecoderConfig config = | 1248 AudioDecoderConfig config = |
1283 demuxer_->GetFirstStream(DemuxerStream::AUDIO)->audio_decoder_config(); | 1249 demuxer_->GetFirstStream(DemuxerStream::AUDIO)->audio_decoder_config(); |
1284 | 1250 |
1285 // Verify that this file's preroll is not eclipsed by the codec delay so we | 1251 // Verify that this file's preroll is not eclipsed by the codec delay so we |
1286 // can detect when preroll is not properly performed. | 1252 // can detect when preroll is not properly performed. |
1287 base::TimeDelta codec_delay = base::TimeDelta::FromSecondsD( | 1253 base::TimeDelta codec_delay = base::TimeDelta::FromSecondsD( |
1288 static_cast<double>(config.codec_delay()) / config.samples_per_second()); | 1254 static_cast<double>(config.codec_delay()) / config.samples_per_second()); |
1289 ASSERT_GT(config.seek_preroll(), codec_delay); | 1255 ASSERT_GT(config.seek_preroll(), codec_delay); |
1290 | 1256 |
1291 Play(); | 1257 Play(); |
1292 ASSERT_TRUE(WaitUntilOnEnded()); | 1258 ASSERT_TRUE(WaitUntilOnEnded()); |
1293 EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_1, GetAudioHash()); | 1259 EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_1, GetAudioHash()); |
1294 | 1260 |
1295 // Seek halfway through the file to invoke seek preroll. | 1261 // Seek halfway through the file to invoke seek preroll. |
1296 base::TimeDelta seek_time = base::TimeDelta::FromSecondsD(1.414); | 1262 base::TimeDelta seek_time = base::TimeDelta::FromSecondsD(1.414); |
1297 source.Seek(seek_time); | 1263 source.Seek(seek_time); |
1298 ASSERT_TRUE(Seek(seek_time)); | 1264 ASSERT_TRUE(Seek(seek_time)); |
1299 Play(); | 1265 Play(); |
1300 ASSERT_TRUE(WaitUntilOnEnded()); | 1266 ASSERT_TRUE(WaitUntilOnEnded()); |
1301 EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_2, GetAudioHash()); | 1267 EXPECT_HASH_EQ(kOpusSmallCodecDelayHash_2, GetAudioHash()); |
1302 } | 1268 } |
1303 | 1269 |
1304 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackLive) { | 1270 TEST_F(PipelineIntegrationTest, BasicPlaybackLive) { |
1305 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-live.webm", kHashed)); | 1271 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-live.webm", kHashed)); |
1306 | 1272 |
1307 // Live stream does not have duration in the initialization segment. | 1273 // Live stream does not have duration in the initialization segment. |
1308 // It will be set after the entire file is available. | 1274 // It will be set after the entire file is available. |
1309 EXPECT_CALL(*this, OnDurationChange()).Times(1); | 1275 EXPECT_CALL(*this, OnDurationChange()).Times(1); |
1310 | 1276 |
1311 Play(); | 1277 Play(); |
1312 | 1278 |
1313 ASSERT_TRUE(WaitUntilOnEnded()); | 1279 ASSERT_TRUE(WaitUntilOnEnded()); |
1314 | 1280 |
1315 EXPECT_HASH_EQ("f0be120a90a811506777c99a2cdf7cc1", GetVideoHash()); | 1281 EXPECT_HASH_EQ("f0be120a90a811506777c99a2cdf7cc1", GetVideoHash()); |
1316 EXPECT_HASH_EQ("-3.59,-2.06,-0.43,2.15,0.77,-0.95,", GetAudioHash()); | 1282 EXPECT_HASH_EQ("-3.59,-2.06,-0.43,2.15,0.77,-0.95,", GetAudioHash()); |
1317 EXPECT_EQ(kLiveTimelineOffset(), demuxer_->GetTimelineOffset()); | 1283 EXPECT_EQ(kLiveTimelineOffset(), demuxer_->GetTimelineOffset()); |
1318 } | 1284 } |
1319 | 1285 |
1320 TEST_P(CommonPipelineIntegrationTest, S32PlaybackHashed) { | 1286 TEST_F(PipelineIntegrationTest, S32PlaybackHashed) { |
1321 ASSERT_EQ(PIPELINE_OK, Start("sfx_s32le.wav", kHashed)); | 1287 ASSERT_EQ(PIPELINE_OK, Start("sfx_s32le.wav", kHashed)); |
1322 Play(); | 1288 Play(); |
1323 ASSERT_TRUE(WaitUntilOnEnded()); | 1289 ASSERT_TRUE(WaitUntilOnEnded()); |
1324 EXPECT_HASH_EQ(std::string(kNullVideoHash), GetVideoHash()); | 1290 EXPECT_HASH_EQ(std::string(kNullVideoHash), GetVideoHash()); |
1325 EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash()); | 1291 EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash()); |
1326 } | 1292 } |
1327 | 1293 |
1328 TEST_P(CommonPipelineIntegrationTest, F32PlaybackHashed) { | 1294 TEST_F(PipelineIntegrationTest, F32PlaybackHashed) { |
1329 ASSERT_EQ(PIPELINE_OK, Start("sfx_f32le.wav", kHashed)); | 1295 ASSERT_EQ(PIPELINE_OK, Start("sfx_f32le.wav", kHashed)); |
1330 Play(); | 1296 Play(); |
1331 ASSERT_TRUE(WaitUntilOnEnded()); | 1297 ASSERT_TRUE(WaitUntilOnEnded()); |
1332 EXPECT_HASH_EQ(std::string(kNullVideoHash), GetVideoHash()); | 1298 EXPECT_HASH_EQ(std::string(kNullVideoHash), GetVideoHash()); |
1333 EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash()); | 1299 EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash()); |
1334 } | 1300 } |
1335 | 1301 |
1336 TEST_P(CommonPipelineIntegrationTest, FlacPlaybackHashed) { | 1302 TEST_F(PipelineIntegrationTest, MAYBE_EME(BasicPlaybackEncrypted)) { |
| 1303 FakeEncryptedMedia encrypted_media(new KeyProvidingApp()); |
| 1304 set_encrypted_media_init_data_cb( |
| 1305 base::Bind(&FakeEncryptedMedia::OnEncryptedMediaInitData, |
| 1306 base::Unretained(&encrypted_media))); |
| 1307 |
| 1308 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-av_enc-av.webm", |
| 1309 encrypted_media.GetCdmContext())); |
| 1310 |
| 1311 Play(); |
| 1312 |
| 1313 ASSERT_TRUE(WaitUntilOnEnded()); |
| 1314 Stop(); |
| 1315 } |
| 1316 |
| 1317 TEST_F(PipelineIntegrationTest, FlacPlaybackHashed) { |
1337 ASSERT_EQ(PIPELINE_OK, Start("sfx.flac", kHashed)); | 1318 ASSERT_EQ(PIPELINE_OK, Start("sfx.flac", kHashed)); |
1338 Play(); | 1319 Play(); |
1339 ASSERT_TRUE(WaitUntilOnEnded()); | 1320 ASSERT_TRUE(WaitUntilOnEnded()); |
1340 EXPECT_HASH_EQ(std::string(kNullVideoHash), GetVideoHash()); | 1321 EXPECT_HASH_EQ(std::string(kNullVideoHash), GetVideoHash()); |
1341 EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash()); | 1322 EXPECT_HASH_EQ("3.03,2.86,2.99,3.31,3.57,4.06,", GetAudioHash()); |
1342 } | 1323 } |
1343 | 1324 |
1344 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource) { | 1325 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource) { |
1345 MockMediaSource source("bear-320x240.webm", kWebM, 219229); | 1326 MockMediaSource source("bear-320x240.webm", kWebM, 219229); |
1346 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1327 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
1347 source.EndOfStream(); | 1328 source.EndOfStream(); |
1348 | 1329 |
1349 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1330 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
1350 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1331 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
1351 EXPECT_EQ(k320WebMFileDurationMs, | 1332 EXPECT_EQ(k320WebMFileDurationMs, |
1352 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1333 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
1353 | 1334 |
1354 Play(); | 1335 Play(); |
1355 | 1336 |
1356 ASSERT_TRUE(WaitUntilOnEnded()); | 1337 ASSERT_TRUE(WaitUntilOnEnded()); |
1357 | 1338 |
1358 EXPECT_TRUE(demuxer_->GetTimelineOffset().is_null()); | 1339 EXPECT_TRUE(demuxer_->GetTimelineOffset().is_null()); |
1359 source.Shutdown(); | 1340 source.Shutdown(); |
1360 Stop(); | 1341 Stop(); |
1361 } | 1342 } |
1362 | 1343 |
1363 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_Live) { | 1344 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_Live) { |
1364 MockMediaSource source("bear-320x240-live.webm", kWebM, 219221); | 1345 MockMediaSource source("bear-320x240-live.webm", kWebM, 219221); |
1365 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1346 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
1366 source.EndOfStream(); | 1347 source.EndOfStream(); |
1367 | 1348 |
1368 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1349 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
1369 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1350 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
1370 EXPECT_EQ(k320WebMFileDurationMs, | 1351 EXPECT_EQ(k320WebMFileDurationMs, |
1371 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1352 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
1372 | 1353 |
1373 Play(); | 1354 Play(); |
1374 | 1355 |
1375 ASSERT_TRUE(WaitUntilOnEnded()); | 1356 ASSERT_TRUE(WaitUntilOnEnded()); |
1376 | 1357 |
1377 EXPECT_EQ(kLiveTimelineOffset(), demuxer_->GetTimelineOffset()); | 1358 EXPECT_EQ(kLiveTimelineOffset(), demuxer_->GetTimelineOffset()); |
1378 source.Shutdown(); | 1359 source.Shutdown(); |
1379 Stop(); | 1360 Stop(); |
1380 } | 1361 } |
1381 | 1362 |
1382 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_VP9_WebM) { | 1363 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP9_WebM) { |
1383 MockMediaSource source("bear-vp9.webm", kWebMVP9, 67504); | 1364 MockMediaSource source("bear-vp9.webm", kWebMVP9, 67504); |
1384 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1365 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
1385 source.EndOfStream(); | 1366 source.EndOfStream(); |
1386 | 1367 |
1387 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1368 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
1388 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1369 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
1389 EXPECT_EQ(kVP9WebMFileDurationMs, | 1370 EXPECT_EQ(kVP9WebMFileDurationMs, |
1390 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1371 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
1391 | 1372 |
1392 Play(); | 1373 Play(); |
1393 | 1374 |
1394 ASSERT_TRUE(WaitUntilOnEnded()); | 1375 ASSERT_TRUE(WaitUntilOnEnded()); |
1395 source.Shutdown(); | 1376 source.Shutdown(); |
1396 Stop(); | 1377 Stop(); |
1397 } | 1378 } |
1398 | 1379 |
1399 TEST_P(CommonPipelineIntegrationTest, | 1380 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP9_BlockGroup_WebM) { |
1400 BasicPlayback_MediaSource_VP9_BlockGroup_WebM) { | |
1401 MockMediaSource source("bear-vp9-blockgroup.webm", kWebMVP9, 67871); | 1381 MockMediaSource source("bear-vp9-blockgroup.webm", kWebMVP9, 67871); |
1402 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1382 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
1403 source.EndOfStream(); | 1383 source.EndOfStream(); |
1404 | 1384 |
1405 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1385 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
1406 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1386 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
1407 EXPECT_EQ(kVP9WebMFileDurationMs, | 1387 EXPECT_EQ(kVP9WebMFileDurationMs, |
1408 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1388 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
1409 | 1389 |
1410 Play(); | 1390 Play(); |
1411 | 1391 |
1412 ASSERT_TRUE(WaitUntilOnEnded()); | 1392 ASSERT_TRUE(WaitUntilOnEnded()); |
1413 source.Shutdown(); | 1393 source.Shutdown(); |
1414 Stop(); | 1394 Stop(); |
1415 } | 1395 } |
1416 | 1396 |
1417 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_VP8A_WebM) { | 1397 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VP8A_WebM) { |
1418 MockMediaSource source("bear-vp8a.webm", kVideoOnlyWebM, kAppendWholeFile); | 1398 MockMediaSource source("bear-vp8a.webm", kVideoOnlyWebM, kAppendWholeFile); |
1419 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1399 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
1420 source.EndOfStream(); | 1400 source.EndOfStream(); |
1421 | 1401 |
1422 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1402 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
1423 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1403 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
1424 EXPECT_EQ(kVP8AWebMFileDurationMs, | 1404 EXPECT_EQ(kVP8AWebMFileDurationMs, |
1425 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1405 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
1426 | 1406 |
1427 Play(); | 1407 Play(); |
1428 | 1408 |
1429 ASSERT_TRUE(WaitUntilOnEnded()); | 1409 ASSERT_TRUE(WaitUntilOnEnded()); |
1430 source.Shutdown(); | 1410 source.Shutdown(); |
1431 Stop(); | 1411 Stop(); |
1432 } | 1412 } |
1433 | 1413 |
1434 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_Opus_WebM) { | 1414 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_Opus_WebM) { |
1435 MockMediaSource source("bear-opus-end-trimming.webm", kOpusAudioOnlyWebM, | 1415 MockMediaSource source("bear-opus-end-trimming.webm", kOpusAudioOnlyWebM, |
1436 kAppendWholeFile); | 1416 kAppendWholeFile); |
1437 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1417 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
1438 source.EndOfStream(); | 1418 source.EndOfStream(); |
1439 | 1419 |
1440 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1420 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
1441 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1421 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
1442 EXPECT_EQ(kOpusEndTrimmingWebMFileDurationMs, | 1422 EXPECT_EQ(kOpusEndTrimmingWebMFileDurationMs, |
1443 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1423 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
1444 Play(); | 1424 Play(); |
1445 | 1425 |
1446 ASSERT_TRUE(WaitUntilOnEnded()); | 1426 ASSERT_TRUE(WaitUntilOnEnded()); |
1447 source.Shutdown(); | 1427 source.Shutdown(); |
1448 Stop(); | 1428 Stop(); |
1449 } | 1429 } |
1450 | 1430 |
1451 // Flaky. http://crbug.com/304776 | 1431 // Flaky. http://crbug.com/304776 |
1452 TEST_P(CommonPipelineIntegrationTest, DISABLED_MediaSource_Opus_Seeking_WebM) { | 1432 TEST_F(PipelineIntegrationTest, DISABLED_MediaSource_Opus_Seeking_WebM) { |
1453 MockMediaSource source("bear-opus-end-trimming.webm", kOpusAudioOnlyWebM, | 1433 MockMediaSource source("bear-opus-end-trimming.webm", kOpusAudioOnlyWebM, |
1454 kAppendWholeFile); | 1434 kAppendWholeFile); |
1455 EXPECT_EQ(PIPELINE_OK, | 1435 EXPECT_EQ(PIPELINE_OK, |
1456 StartPipelineWithMediaSource(&source, kHashed, nullptr)); | 1436 StartPipelineWithMediaSource(&source, kHashed, nullptr)); |
1457 | 1437 |
1458 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1438 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
1459 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1439 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
1460 EXPECT_EQ(kOpusEndTrimmingWebMFileDurationMs, | 1440 EXPECT_EQ(kOpusEndTrimmingWebMFileDurationMs, |
1461 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1441 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
1462 | 1442 |
1463 base::TimeDelta start_seek_time = base::TimeDelta::FromMilliseconds(1000); | 1443 base::TimeDelta start_seek_time = base::TimeDelta::FromMilliseconds(1000); |
1464 base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(2000); | 1444 base::TimeDelta seek_time = base::TimeDelta::FromMilliseconds(2000); |
1465 | 1445 |
1466 Play(); | 1446 Play(); |
1467 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); | 1447 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); |
1468 source.Seek(seek_time, 0x1D5, 34017); | 1448 source.Seek(seek_time, 0x1D5, 34017); |
1469 source.EndOfStream(); | 1449 source.EndOfStream(); |
1470 ASSERT_TRUE(Seek(seek_time)); | 1450 ASSERT_TRUE(Seek(seek_time)); |
1471 | 1451 |
1472 ASSERT_TRUE(WaitUntilOnEnded()); | 1452 ASSERT_TRUE(WaitUntilOnEnded()); |
1473 | 1453 |
1474 EXPECT_HASH_EQ("0.76,0.20,-0.82,-0.58,-1.29,-0.29,", GetAudioHash()); | 1454 EXPECT_HASH_EQ("0.76,0.20,-0.82,-0.58,-1.29,-0.29,", GetAudioHash()); |
1475 | 1455 |
1476 source.Shutdown(); | 1456 source.Shutdown(); |
1477 Stop(); | 1457 Stop(); |
1478 } | 1458 } |
1479 | 1459 |
1480 TEST_P(CommonPipelineIntegrationTest, MediaSource_ConfigChange_WebM) { | 1460 TEST_F(PipelineIntegrationTest, MediaSource_ConfigChange_WebM) { |
1481 MockMediaSource source("bear-320x240-16x9-aspect.webm", kWebM, | 1461 MockMediaSource source("bear-320x240-16x9-aspect.webm", kWebM, |
1482 kAppendWholeFile); | 1462 kAppendWholeFile); |
1483 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1463 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
1484 | 1464 |
1485 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1); | 1465 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1); |
1486 scoped_refptr<DecoderBuffer> second_file = | 1466 scoped_refptr<DecoderBuffer> second_file = |
1487 ReadTestDataFile("bear-640x360.webm"); | 1467 ReadTestDataFile("bear-640x360.webm"); |
1488 ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec), | 1468 ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec), |
1489 second_file->data(), | 1469 second_file->data(), |
1490 second_file->data_size())); | 1470 second_file->data_size())); |
1491 source.EndOfStream(); | 1471 source.EndOfStream(); |
1492 | 1472 |
1493 Play(); | 1473 Play(); |
1494 EXPECT_TRUE(WaitUntilOnEnded()); | 1474 EXPECT_TRUE(WaitUntilOnEnded()); |
1495 | 1475 |
1496 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1476 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
1497 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1477 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
1498 EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs, | 1478 EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs, |
1499 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1479 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
1500 | 1480 |
1501 source.Shutdown(); | 1481 source.Shutdown(); |
1502 Stop(); | 1482 Stop(); |
1503 } | 1483 } |
1504 | 1484 |
1505 TEST_P(CommonPipelineIntegrationTest, | 1485 TEST_F(PipelineIntegrationTest, MediaSource_Remove_Updates_BufferedRanges) { |
1506 MediaSource_Remove_Updates_BufferedRanges) { | |
1507 const char* input_filename = "bear-320x240.webm"; | 1486 const char* input_filename = "bear-320x240.webm"; |
1508 MockMediaSource source(input_filename, kWebM, kAppendWholeFile); | 1487 MockMediaSource source(input_filename, kWebM, kAppendWholeFile); |
1509 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1488 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
1510 | 1489 |
1511 auto buffered_ranges = pipeline_->GetBufferedTimeRanges(); | 1490 auto buffered_ranges = pipeline_->GetBufferedTimeRanges(); |
1512 EXPECT_EQ(1u, buffered_ranges.size()); | 1491 EXPECT_EQ(1u, buffered_ranges.size()); |
1513 EXPECT_EQ(0, buffered_ranges.start(0).InMilliseconds()); | 1492 EXPECT_EQ(0, buffered_ranges.start(0).InMilliseconds()); |
1514 EXPECT_EQ(k320WebMFileDurationMs, buffered_ranges.end(0).InMilliseconds()); | 1493 EXPECT_EQ(k320WebMFileDurationMs, buffered_ranges.end(0).InMilliseconds()); |
1515 | 1494 |
1516 source.RemoveRange(base::TimeDelta::FromMilliseconds(1000), | 1495 source.RemoveRange(base::TimeDelta::FromMilliseconds(1000), |
1517 base::TimeDelta::FromMilliseconds(k320WebMFileDurationMs)); | 1496 base::TimeDelta::FromMilliseconds(k320WebMFileDurationMs)); |
1518 base::RunLoop().RunUntilIdle(); | 1497 base::RunLoop().RunUntilIdle(); |
1519 | 1498 |
1520 buffered_ranges = pipeline_->GetBufferedTimeRanges(); | 1499 buffered_ranges = pipeline_->GetBufferedTimeRanges(); |
1521 EXPECT_EQ(1u, buffered_ranges.size()); | 1500 EXPECT_EQ(1u, buffered_ranges.size()); |
1522 EXPECT_EQ(0, buffered_ranges.start(0).InMilliseconds()); | 1501 EXPECT_EQ(0, buffered_ranges.start(0).InMilliseconds()); |
1523 EXPECT_EQ(1001, buffered_ranges.end(0).InMilliseconds()); | 1502 EXPECT_EQ(1001, buffered_ranges.end(0).InMilliseconds()); |
1524 | 1503 |
1525 source.Shutdown(); | 1504 source.Shutdown(); |
1526 Stop(); | 1505 Stop(); |
1527 } | 1506 } |
1528 | 1507 |
1529 // This test case imitates media playback with advancing media_time and | 1508 // This test case imitates media playback with advancing media_time and |
1530 // continuously adding new data. At some point we should reach the buffering | 1509 // continuously adding new data. At some point we should reach the buffering |
1531 // limit; after that, MediaSource should evict some buffered data, and the | 1510 // limit; after that, MediaSource should evict some buffered data, and the |
1532 // evicted data should be reflected in the media::Pipeline buffered | 1511 // evicted data should be reflected in the media::Pipeline buffered |
1533 // ranges (returned by GetBufferedTimeRanges). At that point the buffered ranges | 1512 // ranges (returned by GetBufferedTimeRanges). At that point the buffered ranges |
1534 // will no longer start at 0. | 1513 // will no longer start at 0. |
1535 TEST_P(CommonPipelineIntegrationTest, MediaSource_FillUp_Buffer) { | 1514 TEST_F(PipelineIntegrationTest, MediaSource_FillUp_Buffer) { |
1536 const char* input_filename = "bear-320x240.webm"; | 1515 const char* input_filename = "bear-320x240.webm"; |
1537 MockMediaSource source(input_filename, kWebM, kAppendWholeFile); | 1516 MockMediaSource source(input_filename, kWebM, kAppendWholeFile); |
1538 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | 1517 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
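// A small 1 MiB (1048576-byte) limit makes the buffering cap easy to hit in | // A small 1 MiB (1048576-byte) limit makes the buffering cap easy to hit in |
// the append loop below. | // the append loop below. |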
1539 source.SetMemoryLimits(1048576); | 1518 source.SetMemoryLimits(1048576); |
1540 | 1519 |
1541 scoped_refptr<DecoderBuffer> file = ReadTestDataFile(input_filename); | 1520 scoped_refptr<DecoderBuffer> file = ReadTestDataFile(input_filename); |
1542 | 1521 |
1543 auto buffered_ranges = pipeline_->GetBufferedTimeRanges(); | 1522 auto buffered_ranges = pipeline_->GetBufferedTimeRanges(); |
1544 EXPECT_EQ(1u, buffered_ranges.size()); | 1523 EXPECT_EQ(1u, buffered_ranges.size()); |
1545 do { | 1524 do { |
1546 // Advance media_time to the end of the currently buffered data. | 1525 // Advance media_time to the end of the currently buffered data. |
1547 base::TimeDelta media_time = buffered_ranges.end(0); | 1526 base::TimeDelta media_time = buffered_ranges.end(0); |
1548 source.Seek(media_time); | 1527 source.Seek(media_time); |
1549 // Ask MediaSource to evict buffered data if the buffering limit has been | 1528 // Ask MediaSource to evict buffered data if the buffering limit has been |
1550 // reached (the data will be evicted from the front of the buffered range). | 1529 // reached (the data will be evicted from the front of the buffered range). |
1551 source.EvictCodedFrames(media_time, file->data_size()); | 1530 source.EvictCodedFrames(media_time, file->data_size()); |
1552 ASSERT_TRUE( | 1531 ASSERT_TRUE( |
1553 source.AppendAtTime(media_time, file->data(), file->data_size())); | 1532 source.AppendAtTime(media_time, file->data(), file->data_size())); |
1554 base::RunLoop().RunUntilIdle(); | 1533 base::RunLoop().RunUntilIdle(); |
1555 | 1534 |
1556 buffered_ranges = pipeline_->GetBufferedTimeRanges(); | 1535 buffered_ranges = pipeline_->GetBufferedTimeRanges(); |
1557 } while (buffered_ranges.size() == 1 && | 1536 } while (buffered_ranges.size() == 1 && |
1558 buffered_ranges.start(0) == base::TimeDelta::FromSeconds(0)); | 1537 buffered_ranges.start(0) == base::TimeDelta::FromSeconds(0)); |
1559 | 1538 |
1560 EXPECT_EQ(1u, buffered_ranges.size()); | 1539 EXPECT_EQ(1u, buffered_ranges.size()); |
1561 source.Shutdown(); | 1540 source.Shutdown(); |
1562 Stop(); | 1541 Stop(); |
1563 } | 1542 } |
1564 | 1543 |
1565 #if defined(ARCH_CPU_X86_FAMILY) && !defined(OS_ANDROID) | |
1566 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHi10PVP9) { | |
1567 ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi10p-vp9.webm", kClockless)); | |
1568 | |
1569 Play(); | |
1570 | |
1571 ASSERT_TRUE(WaitUntilOnEnded()); | |
1572 } | |
1573 | |
1574 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHi12PVP9) { | |
1575 ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi12p-vp9.webm", kClockless)); | |
1576 | |
1577 Play(); | |
1578 | |
1579 ASSERT_TRUE(WaitUntilOnEnded()); | |
1580 } | |
1581 #endif | |
1582 | |
1583 #if BUILDFLAG(USE_PROPRIETARY_CODECS) | |
1584 | |
1585 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHi10P) { | |
1586 ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi10p.mp4", kClockless)); | |
1587 | |
1588 Play(); | |
1589 | |
1590 ASSERT_TRUE(WaitUntilOnEnded()); | |
1591 } | |
1592 | |
1593 TEST_P(CommonPipelineIntegrationTest, BasicFallback) { | |
1594 ScopedVector<VideoDecoder> failing_video_decoder; | |
1595 failing_video_decoder.push_back(new FailingVideoDecoder()); | |
1596 | |
1597 ASSERT_EQ(PIPELINE_OK, | |
1598 Start("bear.mp4", kClockless, std::move(failing_video_decoder))); | |
1599 | |
1600 Play(); | |
1601 | |
1602 ASSERT_TRUE(WaitUntilOnEnded()); | |
1603 } | 
1604 | |
1605 TEST_P(CommonPipelineIntegrationTest, MediaSource_ADTS) { | |
1606 MockMediaSource source("sfx.adts", kADTS, kAppendWholeFile); | |
1607 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1608 source.EndOfStream(); | |
1609 | |
1610 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | |
1611 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | |
1612 EXPECT_EQ(325, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | |
1613 | |
1614 Play(); | |
1615 | |
1616 EXPECT_TRUE(WaitUntilOnEnded()); | |
1617 } | |
1618 | |
1619 TEST_P(CommonPipelineIntegrationTest, MediaSource_ADTS_TimestampOffset) { | |
1620 MockMediaSource source("sfx.adts", kADTS, kAppendWholeFile); | |
1621 EXPECT_EQ(PIPELINE_OK, | |
1622 StartPipelineWithMediaSource(&source, kHashed, nullptr)); | |
1623 EXPECT_EQ(325, source.last_timestamp_offset().InMilliseconds()); | |
1624 | |
1625 // Trim multiple frames off the beginning of the segment, which will cause | 
1626 // the first decoded frame to be incorrect if preroll isn't implemented. | |
1627 const base::TimeDelta adts_preroll_duration = | |
1628 base::TimeDelta::FromSecondsD(2.5 * 1024 / 44100); | |
1629 const base::TimeDelta append_time = | |
1630 source.last_timestamp_offset() - adts_preroll_duration; | |
1631 | |
1632 scoped_refptr<DecoderBuffer> second_file = ReadTestDataFile("sfx.adts"); | |
1633 source.AppendAtTimeWithWindow( | |
1634 append_time, append_time + adts_preroll_duration, kInfiniteDuration, | |
1635 second_file->data(), second_file->data_size()); | |
1636 source.EndOfStream(); | |
1637 | |
1638 Play(); | |
1639 EXPECT_TRUE(WaitUntilOnEnded()); | |
1640 | |
1641 EXPECT_EQ(592, source.last_timestamp_offset().InMilliseconds()); | |
1642 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | |
1643 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | |
1644 EXPECT_EQ(592, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | |
1645 | |
1646 // Verify preroll is stripped. | |
1647 EXPECT_HASH_EQ("-0.25,0.67,0.04,0.14,-0.49,-0.41,", GetAudioHash()); | |
1648 } | |
1649 | |
1650 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackHashed_MP3) { | |
1651 ASSERT_EQ(PIPELINE_OK, Start("sfx.mp3", kHashed)); | |
1652 | |
1653 Play(); | |
1654 | |
1655 ASSERT_TRUE(WaitUntilOnEnded()); | |
1656 | |
1657 // Verify codec delay and preroll are stripped. | |
1658 EXPECT_HASH_EQ("1.30,2.72,4.56,5.08,3.74,2.03,", GetAudioHash()); | |
1659 } | |
1660 | |
1661 TEST_P(CommonPipelineIntegrationTest, MediaSource_MP3) { | |
1662 MockMediaSource source("sfx.mp3", kMP3, kAppendWholeFile); | |
1663 EXPECT_EQ(PIPELINE_OK, | |
1664 StartPipelineWithMediaSource(&source, kHashed, nullptr)); | |
1665 source.EndOfStream(); | |
1666 | |
1667 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | |
1668 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | |
1669 EXPECT_EQ(313, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | |
1670 | |
1671 Play(); | |
1672 | |
1673 EXPECT_TRUE(WaitUntilOnEnded()); | |
1674 | |
1675 // Verify that codec delay was stripped. | |
1676 EXPECT_HASH_EQ("1.01,2.71,4.18,4.32,3.04,1.12,", GetAudioHash()); | |
1677 } | |
1678 | |
1679 TEST_P(CommonPipelineIntegrationTest, MediaSource_MP3_TimestampOffset) { | |
1680 MockMediaSource source("sfx.mp3", kMP3, kAppendWholeFile); | |
1681 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1682 EXPECT_EQ(313, source.last_timestamp_offset().InMilliseconds()); | |
1683 | |
1684 // There are 576 silent samples at the start of this mp3. The second append | 
1685 // should trim them off. | |
1686 const base::TimeDelta mp3_preroll_duration = | |
1687 base::TimeDelta::FromSecondsD(576.0 / 44100); | |
1688 const base::TimeDelta append_time = | |
1689 source.last_timestamp_offset() - mp3_preroll_duration; | |
1690 | |
1691 scoped_refptr<DecoderBuffer> second_file = ReadTestDataFile("sfx.mp3"); | |
1692 source.AppendAtTimeWithWindow(append_time, append_time + mp3_preroll_duration, | |
1693 kInfiniteDuration, second_file->data(), | |
1694 second_file->data_size()); | |
1695 source.EndOfStream(); | |
1696 | |
1697 Play(); | |
1698 EXPECT_TRUE(WaitUntilOnEnded()); | |
1699 | |
1700 EXPECT_EQ(613, source.last_timestamp_offset().InMilliseconds()); | |
1701 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | |
1702 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | |
1703 EXPECT_EQ(613, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | |
1704 } | |
1705 | |
1706 TEST_P(CommonPipelineIntegrationTest, MediaSource_MP3_Icecast) { | |
1707 MockMediaSource source("icy_sfx.mp3", kMP3, kAppendWholeFile); | |
1708 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1709 source.EndOfStream(); | |
1710 | |
1711 Play(); | |
1712 | |
1713 EXPECT_TRUE(WaitUntilOnEnded()); | |
1714 } | |
1715 | |
1716 TEST_P(CommonPipelineIntegrationTest, MediaSource_ConfigChange_MP4) { | |
1717 MockMediaSource source("bear-640x360-av_frag.mp4", kMP4, kAppendWholeFile); | |
1718 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1719 | |
1720 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1); | |
1721 scoped_refptr<DecoderBuffer> second_file = | |
1722 ReadTestDataFile("bear-1280x720-av_frag.mp4"); | |
1723 ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec), | |
1724 second_file->data(), | |
1725 second_file->data_size())); | |
1726 source.EndOfStream(); | |
1727 | |
1728 Play(); | |
1729 EXPECT_TRUE(WaitUntilOnEnded()); | |
1730 | |
1731 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | |
1732 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | |
1733 EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs, | |
1734 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | |
1735 | |
1736 source.Shutdown(); | |
1737 Stop(); | |
1738 } | |
1739 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) | |
1740 | |
1741 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_16x9AspectRatio) { | |
1742 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-16x9-aspect.webm")); | |
1743 Play(); | |
1744 ASSERT_TRUE(WaitUntilOnEnded()); | |
1745 } | |
1746 | |
1747 #if BUILDFLAG(USE_PROPRIETARY_CODECS) | |
1748 TEST_P(CommonPipelineIntegrationTest, Mp2ts_AAC_HE_SBR_Audio) { | |
1749 MockMediaSource source("bear-1280x720-aac_he.ts", kMP2AudioSBR, | |
1750 kAppendWholeFile); | |
1751 #if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER) | |
1752 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1753 source.EndOfStream(); | |
1754 ASSERT_EQ(PIPELINE_OK, pipeline_status_); | |
1755 | |
1756 // Check that SBR is taken into account correctly by the mpeg2ts parser. If an | 
1757 // SBR stream is parsed as a non-SBR stream, audio frame durations are | 
1758 // calculated incorrectly, which leads to gaps in the buffered ranges (so this | 
1759 // check will fail) and eventually to stalled playback. | 
1760 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | |
1761 #else | |
1762 EXPECT_EQ( | |
1763 DEMUXER_ERROR_COULD_NOT_OPEN, | |
1764 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); | |
1765 #endif | |
1766 } | |
1767 | |
1768 TEST_P(CommonPipelineIntegrationTest, Mpeg2ts_MP3Audio_Mp4a_6B) { | |
1769 MockMediaSource source("bear-audio-mp4a.6B.ts", | |
1770 "video/mp2t; codecs=\"mp4a.6B\"", kAppendWholeFile); | |
1771 #if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER) | |
1772 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1773 source.EndOfStream(); | |
1774 ASSERT_EQ(PIPELINE_OK, pipeline_status_); | |
1775 #else | |
1776 EXPECT_EQ( | |
1777 DEMUXER_ERROR_COULD_NOT_OPEN, | |
1778 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); | |
1779 #endif | |
1780 } | |
1781 | |
1782 TEST_P(CommonPipelineIntegrationTest, Mpeg2ts_MP3Audio_Mp4a_69) { | |
1783 MockMediaSource source("bear-audio-mp4a.69.ts", | |
1784 "video/mp2t; codecs=\"mp4a.69\"", kAppendWholeFile); | |
1785 #if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER) | |
1786 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1787 source.EndOfStream(); | |
1788 ASSERT_EQ(PIPELINE_OK, pipeline_status_); | |
1789 #else | |
1790 EXPECT_EQ( | |
1791 DEMUXER_ERROR_COULD_NOT_OPEN, | |
1792 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); | |
1793 #endif | |
1794 } | |
1795 | |
1796 TEST_P(CommonPipelineIntegrationTest, | |
1797 BasicPlayback_MediaSource_VideoOnly_MP4_AVC3) { | |
1798 MockMediaSource source("bear-1280x720-v_frag-avc3.mp4", kMP4VideoAVC3, | |
1799 kAppendWholeFile); | |
1800 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1801 source.EndOfStream(); | |
1802 | |
1803 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | |
1804 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | |
1805 EXPECT_EQ(k1280IsoAVC3FileDurationMs, | |
1806 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | |
1807 | |
1808 Play(); | |
1809 | |
1810 ASSERT_TRUE(WaitUntilOnEnded()); | |
1811 source.Shutdown(); | |
1812 Stop(); | |
1813 } | |
1814 | |
1815 TEST_P(CommonPipelineIntegrationTest, | |
1816 BasicPlayback_MediaSource_VideoOnly_MP4_VP9) { | |
1817 MockMediaSource source("bear-320x240-v_frag-vp9.mp4", kMP4VideoVP9, | |
1818 kAppendWholeFile); | |
1819 if (!base::CommandLine::ForCurrentProcess()->HasSwitch( | |
1820 switches::kEnableVp9InMp4)) { | |
1821 ASSERT_EQ(ChunkDemuxer::kNotSupported, source.AddId()); | |
1822 return; | |
1823 } | |
1824 | |
1825 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
1826 source.EndOfStream(); | |
1827 ASSERT_EQ(PIPELINE_OK, pipeline_status_); | |
1828 | |
1829 Play(); | |
1830 | |
1831 ASSERT_TRUE(WaitUntilOnEnded()); | |
1832 source.Shutdown(); | |
1833 Stop(); | |
1834 } | |
1835 | |
1836 TEST_P(CommonPipelineIntegrationTest, | |
1837 BasicPlayback_MediaSource_VideoOnly_MP4_HEVC1) { | |
1838 // HEVC demuxing might be enabled even on platforms that don't support HEVC | |
1839 // decoding. For those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which | |
1840 // indicates that we did pass media mime type checks and attempted | 
1841 // to actually demux and decode the stream. On platforms that support both | |
1842 // demuxing and decoding we'll get PIPELINE_OK. | |
1843 MockMediaSource source("bear-320x240-v_frag-hevc.mp4", kMP4VideoHEVC1, | |
1844 kAppendWholeFile); | |
1845 #if BUILDFLAG(ENABLE_HEVC_DEMUXING) | |
1846 PipelineStatus status = StartPipelineWithMediaSource(&source); | |
1847 EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED); | |
1848 #else | |
1849 EXPECT_EQ( | |
1850 DEMUXER_ERROR_COULD_NOT_OPEN, | |
1851 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); | |
1852 #endif | |
1853 } | |
1854 | |
1855 TEST_P(CommonPipelineIntegrationTest, | |
1856 BasicPlayback_MediaSource_VideoOnly_MP4_HEVC2) { | |
1857 // HEVC demuxing might be enabled even on platforms that don't support HEVC | |
1858 // decoding. For those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which | |
1859 // indicates that we did pass media mime type checks and attempted | 
1860 // to actually demux and decode the stream. On platforms that support both | |
1861 // demuxing and decoding we'll get PIPELINE_OK. | |
1862 MockMediaSource source("bear-320x240-v_frag-hevc.mp4", kMP4VideoHEVC2, | |
1863 kAppendWholeFile); | |
1864 #if BUILDFLAG(ENABLE_HEVC_DEMUXING) | |
1865 PipelineStatus status = StartPipelineWithMediaSource(&source); | |
1866 EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED); | |
1867 #else | |
1868 EXPECT_EQ( | |
1869 DEMUXER_ERROR_COULD_NOT_OPEN, | |
1870 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); | |
1871 #endif | |
1872 } | |
1873 | |
1874 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) | |
1875 | |
1876 TEST_P(CommonPipelineIntegrationTest, SeekWhilePaused) { | |
1877 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); | |
1878 | |
1879 base::TimeDelta duration(pipeline_->GetMediaDuration()); | |
1880 base::TimeDelta start_seek_time(duration / 4); | |
1881 base::TimeDelta seek_time(duration * 3 / 4); | |
1882 | |
1883 Play(); | |
1884 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); | |
1885 Pause(); | |
1886 ASSERT_TRUE(Seek(seek_time)); | |
1887 EXPECT_EQ(seek_time, pipeline_->GetMediaTime()); | |
1888 Play(); | |
1889 ASSERT_TRUE(WaitUntilOnEnded()); | |
1890 | |
1891 // Make sure seeking after reaching the end works as expected. | |
1892 Pause(); | |
1893 ASSERT_TRUE(Seek(seek_time)); | |
1894 EXPECT_EQ(seek_time, pipeline_->GetMediaTime()); | |
1895 Play(); | |
1896 ASSERT_TRUE(WaitUntilOnEnded()); | |
1897 } | |
1898 | |
1899 TEST_P(CommonPipelineIntegrationTest, SeekWhilePlaying) { | |
1900 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); | |
1901 | |
1902 base::TimeDelta duration(pipeline_->GetMediaDuration()); | |
1903 base::TimeDelta start_seek_time(duration / 4); | |
1904 base::TimeDelta seek_time(duration * 3 / 4); | |
1905 | |
1906 Play(); | |
1907 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); | |
1908 ASSERT_TRUE(Seek(seek_time)); | |
1909 EXPECT_GE(pipeline_->GetMediaTime(), seek_time); | |
1910 ASSERT_TRUE(WaitUntilOnEnded()); | |
1911 | |
1912 // Make sure seeking after reaching the end works as expected. | |
1913 ASSERT_TRUE(Seek(seek_time)); | |
1914 EXPECT_GE(pipeline_->GetMediaTime(), seek_time); | |
1915 ASSERT_TRUE(WaitUntilOnEnded()); | |
1916 } | |
1917 | |
1918 TEST_P(CommonPipelineIntegrationTest, SuspendWhilePaused) { | |
1919 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); | |
1920 | |
1921 base::TimeDelta duration(pipeline_->GetMediaDuration()); | |
1922 base::TimeDelta start_seek_time(duration / 4); | |
1923 base::TimeDelta seek_time(duration * 3 / 4); | |
1924 | |
1925 Play(); | |
1926 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); | |
1927 Pause(); | |
1928 | |
1929 // Suspend while paused. | |
1930 ASSERT_TRUE(Suspend()); | |
1931 | |
1932 // Resuming the pipeline will create a new Renderer, | |
1933 // which in turn will trigger video size and opacity notifications. | |
1934 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))).Times(1); | |
1935 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(1); | |
1936 | |
1937 ASSERT_TRUE(Resume(seek_time)); | |
1938 EXPECT_GE(pipeline_->GetMediaTime(), seek_time); | |
1939 Play(); | |
1940 ASSERT_TRUE(WaitUntilOnEnded()); | |
1941 } | |
1942 | |
1943 TEST_P(CommonPipelineIntegrationTest, SuspendWhilePlaying) { | |
1944 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); | |
1945 | |
1946 base::TimeDelta duration(pipeline_->GetMediaDuration()); | |
1947 base::TimeDelta start_seek_time(duration / 4); | |
1948 base::TimeDelta seek_time(duration * 3 / 4); | |
1949 | |
1950 Play(); | |
1951 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); | |
1952 ASSERT_TRUE(Suspend()); | |
1953 | |
1954 // Resuming the pipeline will create a new Renderer, | |
1955 // which in turn will trigger video size and opacity notifications. | |
1956 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))).Times(1); | |
1957 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(1); | |
1958 | |
1959 ASSERT_TRUE(Resume(seek_time)); | |
1960 EXPECT_GE(pipeline_->GetMediaTime(), seek_time); | |
1961 ASSERT_TRUE(WaitUntilOnEnded()); | |
1962 } | |
1963 | |
1964 #if BUILDFLAG(USE_PROPRIETARY_CODECS) | |
1965 TEST_P(CommonPipelineIntegrationTest, Rotated_Metadata_0) { | |
1966 ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_0.mp4")); | |
1967 ASSERT_EQ(VIDEO_ROTATION_0, metadata_.video_rotation); | |
1968 } | |
1969 | |
1970 TEST_P(CommonPipelineIntegrationTest, Rotated_Metadata_90) { | |
1971 ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_90.mp4")); | |
1972 ASSERT_EQ(VIDEO_ROTATION_90, metadata_.video_rotation); | |
1973 } | |
1974 | |
1975 TEST_P(CommonPipelineIntegrationTest, Rotated_Metadata_180) { | |
1976 ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_180.mp4")); | |
1977 ASSERT_EQ(VIDEO_ROTATION_180, metadata_.video_rotation); | |
1978 } | |
1979 | |
1980 TEST_P(CommonPipelineIntegrationTest, Rotated_Metadata_270) { | |
1981 ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_270.mp4")); | |
1982 ASSERT_EQ(VIDEO_ROTATION_270, metadata_.video_rotation); | |
1983 } | |
1984 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) | |
1985 | |
1986 // Verify audio decoder & renderer can handle aborted demuxer reads. | |
1987 TEST_P(CommonPipelineIntegrationTest, ChunkDemuxerAbortRead_AudioOnly) { | |
1988 ASSERT_TRUE(TestSeekDuringRead("bear-320x240-audio-only.webm", kAudioOnlyWebM, | |
1989 16384, base::TimeDelta::FromMilliseconds(464), | |
1990 base::TimeDelta::FromMilliseconds(617), 0x10CA, | |
1991 19730)); | |
1992 } | |
1993 | |
1994 // Verify video decoder & renderer can handle aborted demuxer reads. | |
1995 TEST_P(CommonPipelineIntegrationTest, ChunkDemuxerAbortRead_VideoOnly) { | |
1996 ASSERT_TRUE(TestSeekDuringRead("bear-320x240-video-only.webm", kVideoOnlyWebM, | |
1997 32768, base::TimeDelta::FromMilliseconds(167), | |
1998 base::TimeDelta::FromMilliseconds(1668), | |
1999 0x1C896, 65536)); | |
2000 } | |
2001 | |
2002 // Verify that Opus audio in WebM containers can be played back. | |
2003 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_AudioOnly_Opus_WebM) { | |
2004 ASSERT_EQ(PIPELINE_OK, Start("bear-opus-end-trimming.webm")); | |
2005 Play(); | |
2006 ASSERT_TRUE(WaitUntilOnEnded()); | |
2007 } | |
2008 | |
2009 // Verify that VP9 video in WebM containers can be played back. | |
2010 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VideoOnly_VP9_WebM) { | |
2011 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9.webm")); | |
2012 Play(); | |
2013 ASSERT_TRUE(WaitUntilOnEnded()); | |
2014 } | |
2015 | |
2016 // Verify that VP9 video and Opus audio in the same WebM container can be played | |
2017 // back. | |
2018 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP9_Opus_WebM) { | |
2019 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-opus.webm")); | |
2020 Play(); | |
2021 ASSERT_TRUE(WaitUntilOnEnded()); | |
2022 } | |
2023 | |
2024 // Verify that VP8 video with alpha channel can be played back. | |
2025 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP8A_WebM) { | |
2026 ASSERT_EQ(PIPELINE_OK, Start("bear-vp8a.webm")); | |
2027 Play(); | |
2028 ASSERT_TRUE(WaitUntilOnEnded()); | |
2029 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A); | |
2030 } | |
2031 | |
2032 // Verify that VP8A video with odd width/height can be played back. | |
2033 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP8A_Odd_WebM) { | |
2034 ASSERT_EQ(PIPELINE_OK, Start("bear-vp8a-odd-dimensions.webm")); | |
2035 Play(); | |
2036 ASSERT_TRUE(WaitUntilOnEnded()); | |
2037 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A); | |
2038 } | |
2039 | |
2040 // Verify that VP9 video with odd width/height can be played back. | |
2041 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP9_Odd_WebM) { | |
2042 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-odd-dimensions.webm")); | |
2043 Play(); | |
2044 ASSERT_TRUE(WaitUntilOnEnded()); | |
2045 } | |
2046 | |
2047 // Verify that VP9 video with alpha channel can be played back. | |
2048 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP9A_WebM) { | |
2049 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9a.webm")); | |
2050 Play(); | |
2051 ASSERT_TRUE(WaitUntilOnEnded()); | |
2052 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A); | |
2053 } | |
2054 | |
2055 // Verify that VP9A video with odd width/height can be played back. | |
2056 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_VP9A_Odd_WebM) { | |
2057 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9a-odd-dimensions.webm")); | |
2058 Play(); | |
2059 ASSERT_TRUE(WaitUntilOnEnded()); | |
2060 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A); | |
2061 } | |
2062 | |
2063 // Verify that VP9 video with 4:4:4 subsampling can be played back. | |
2064 TEST_P(CommonPipelineIntegrationTest, P444_VP9_WebM) { | |
2065 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-P444.webm")); | |
2066 Play(); | |
2067 ASSERT_TRUE(WaitUntilOnEnded()); | |
2068 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV24); | |
2069 } | |
2070 | |
2071 // Verify that frames of VP9 video in the BT.709 color space are reported in | 
2072 // the YV12 format with the HD REC709 color space. | 
2073 TEST_P(CommonPipelineIntegrationTest, BT709_VP9_WebM) { | |
2074 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-bt709.webm")); | |
2075 Play(); | |
2076 ASSERT_TRUE(WaitUntilOnEnded()); | |
2077 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12); | |
2078 EXPECT_COLOR_SPACE_EQ(last_video_frame_color_space_, COLOR_SPACE_HD_REC709); | |
2079 } | |
2080 | |
2081 TEST_P(CommonPipelineIntegrationTest, HD_VP9_WebM) { | |
2082 ASSERT_EQ(PIPELINE_OK, Start("bear-1280x720.webm", kClockless)); | |
2083 Play(); | |
2084 ASSERT_TRUE(WaitUntilOnEnded()); | |
2085 } | |
2086 | |
2087 // Verify that videos with an odd frame size play back successfully. | 
2088 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_OddVideoSize) { | |
2089 ASSERT_EQ(PIPELINE_OK, Start("butterfly-853x480.webm")); | |
2090 Play(); | |
2091 ASSERT_TRUE(WaitUntilOnEnded()); | |
2092 } | |
2093 | |
2094 // Verify that Opus audio in a WebM which reports a 44.1kHz sample rate plays | 
2095 // correctly at 48kHz. | 
2096 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_Opus441kHz) { | |
2097 ASSERT_EQ(PIPELINE_OK, Start("sfx-opus-441.webm")); | |
2098 Play(); | |
2099 ASSERT_TRUE(WaitUntilOnEnded()); | |
2100 EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO) | |
2101 ->audio_decoder_config() | |
2102 .samples_per_second()); | |
2103 } | |
2104 | |
2105 // Same as above but using MediaSource. | |
2106 TEST_P(CommonPipelineIntegrationTest, BasicPlayback_MediaSource_Opus441kHz) { | |
2107 MockMediaSource source("sfx-opus-441.webm", kOpusAudioOnlyWebM, | |
2108 kAppendWholeFile); | |
2109 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); | |
2110 source.EndOfStream(); | |
2111 Play(); | |
2112 ASSERT_TRUE(WaitUntilOnEnded()); | |
2113 source.Shutdown(); | |
2114 Stop(); | |
2115 EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO) | |
2116 ->audio_decoder_config() | |
2117 .samples_per_second()); | |
2118 } | |
2119 | |
2120 // Ensures audio-only playback with missing or negative timestamps works. Tests | |
2121 // the common live-streaming case for chained ogg. See http://crbug.com/396864. | |
2122 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackChainedOgg) { | |
2123 ASSERT_EQ(PIPELINE_OK, Start("double-sfx.ogg", kUnreliableDuration)); | |
2124 Play(); | |
2125 ASSERT_TRUE(WaitUntilOnEnded()); | |
2126 ASSERT_EQ(base::TimeDelta(), demuxer_->GetStartTime()); | |
2127 } | |
2128 | |
2129 // Tests that we signal ended even when audio runs longer than video track. | |
2130 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackAudioLongerThanVideo) { | |
2131 ASSERT_EQ(PIPELINE_OK, Start("bear_audio_longer_than_video.ogv")); | |
2132 // Audio track is 2000ms. Video track is 1001ms. Duration should be the higher | 
2133 // of the two. | |
2134 EXPECT_EQ(2000, pipeline_->GetMediaDuration().InMilliseconds()); | |
2135 Play(); | |
2136 ASSERT_TRUE(WaitUntilOnEnded()); | |
2137 } | |
2138 | |
2139 // Tests that we signal ended even when audio runs shorter than video track. | |
2140 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackAudioShorterThanVideo) { | |
2141 ASSERT_EQ(PIPELINE_OK, Start("bear_audio_shorter_than_video.ogv")); | |
2142 // Audio track is 500ms. Video track is 1001ms. Duration should be the higher of | 
2143 // the two. | |
2144 EXPECT_EQ(1001, pipeline_->GetMediaDuration().InMilliseconds()); | |
2145 Play(); | |
2146 ASSERT_TRUE(WaitUntilOnEnded()); | |
2147 } | |
2148 | |
2149 TEST_P(CommonPipelineIntegrationTest, BasicPlaybackPositiveStartTime) { | |
2150 ASSERT_EQ(PIPELINE_OK, Start("nonzero-start-time.webm")); | |
2151 Play(); | |
2152 ASSERT_TRUE(WaitUntilOnEnded()); | |
2153 ASSERT_EQ(base::TimeDelta::FromMicroseconds(396000), | |
2154 demuxer_->GetStartTime()); | |
2155 } | |
2156 | |
2157 const IntegrationTestData kIntegrationTests[] = { | |
2158 {PipelineType::Media}, | |
2159 #if BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
2160 {PipelineType::MediaRemoting}, | |
2161 #endif // BUILDFLAG(ENABLE_MEDIA_REMOTING) | |
2162 }; | |
2163 | |
2164 INSTANTIATE_TEST_CASE_P(, | |
2165 CommonPipelineIntegrationTest, | |
2166 testing::ValuesIn(kIntegrationTests)); | |
2167 | |
2168 // Media Remoting currently doesn't support stream status changes without | 
2169 // restarting the pipeline. | 
2170 TEST_F(PipelineIntegrationTest, ReinitRenderersWhileAudioTrackIsDisabled) { | |
2171 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); | |
2172 Play(); | |
2173 | |
2174 // These get triggered every time playback is resumed. | |
2175 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))) | |
2176 .Times(AnyNumber()); | |
2177 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber()); | |
2178 | |
2179 // Disable the audio track. | |
2180 std::vector<MediaTrack::Id> track_ids; | |
2181 pipeline_->OnEnabledAudioTracksChanged(track_ids); | |
2182 // pipeline.Suspend() releases renderers and pipeline.Resume() recreates and | |
2183 // reinitializes renderers while the audio track is disabled. | |
2184 ASSERT_TRUE(Suspend()); | |
2185 ASSERT_TRUE(Resume(TimestampMs(100))); | |
2186 // Now re-enable the audio track; playback should continue successfully. | 
2187 EXPECT_CALL(*this, OnBufferingStateChange(BUFFERING_HAVE_ENOUGH)).Times(1); | |
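// Track id "2" refers to the audio track of bear-320x240.webm; the video | 
// track (used in the next test) has id "1". | 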
2188 track_ids.push_back("2"); | |
2189 pipeline_->OnEnabledAudioTracksChanged(track_ids); | |
2190 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200))); | |
2191 | |
2192 Stop(); | |
2193 } | |
2194 | |
2195 TEST_F(PipelineIntegrationTest, ReinitRenderersWhileVideoTrackIsDisabled) { | |
2196 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm", kHashed)); | |
2197 Play(); | |
2198 | |
2199 // These get triggered every time playback is resumed. | |
2200 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))) | |
2201 .Times(AnyNumber()); | |
2202 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(AnyNumber()); | |
2203 | |
2204 // Disable the video track. | |
2205 pipeline_->OnSelectedVideoTrackChanged(base::nullopt); | |
2206 // pipeline.Suspend() releases renderers and pipeline.Resume() recreates and | |
2207 // reinitializes renderers while the video track is disabled. | |
2208 ASSERT_TRUE(Suspend()); | |
2209 ASSERT_TRUE(Resume(TimestampMs(100))); | |
2210 // Now re-enable the video track; playback should continue successfully. | 
2211 pipeline_->OnSelectedVideoTrackChanged(MediaTrack::Id("1")); | |
2212 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(TimestampMs(200))); | |
2213 | |
2214 Stop(); | |
2215 } | |
2216 | |
2217 TEST_F(PipelineIntegrationTest, MAYBE_EME(BasicPlaybackEncrypted)) { | |
2218 FakeEncryptedMedia encrypted_media(new KeyProvidingApp()); | |
2219 set_encrypted_media_init_data_cb( | |
2220 base::Bind(&FakeEncryptedMedia::OnEncryptedMediaInitData, | |
2221 base::Unretained(&encrypted_media))); | |
2222 | |
2223 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-av_enc-av.webm", | |
2224 encrypted_media.GetCdmContext())); | |
2225 | |
2226 Play(); | |
2227 | |
2228 ASSERT_TRUE(WaitUntilOnEnded()); | |
2229 Stop(); | |
2230 } | |
2231 | |
2232 TEST_F(PipelineIntegrationTest, | 1544 TEST_F(PipelineIntegrationTest, |
2233 MAYBE_EME(MediaSource_ConfigChange_Encrypted_WebM)) { | 1545 MAYBE_EME(MediaSource_ConfigChange_Encrypted_WebM)) { |
2234 MockMediaSource source("bear-320x240-16x9-aspect-av_enc-av.webm", kWebM, | 1546 MockMediaSource source("bear-320x240-16x9-aspect-av_enc-av.webm", kWebM, |
2235 kAppendWholeFile); | 1547 kAppendWholeFile); |
2236 FakeEncryptedMedia encrypted_media(new KeyProvidingApp()); | 1548 FakeEncryptedMedia encrypted_media(new KeyProvidingApp()); |
2237 EXPECT_EQ(PIPELINE_OK, | 1549 EXPECT_EQ(PIPELINE_OK, |
2238 StartPipelineWithEncryptedMedia(&source, &encrypted_media)); | 1550 StartPipelineWithEncryptedMedia(&source, &encrypted_media)); |
2239 | 1551 |
2240 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1); | 1552 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(640, 360))).Times(1); |
2241 scoped_refptr<DecoderBuffer> second_file = | 1553 scoped_refptr<DecoderBuffer> second_file = |
2242 ReadTestDataFile("bear-640x360-av_enc-av.webm"); | 1554 ReadTestDataFile("bear-640x360-av_enc-av.webm"); |
| 1555 |
2243 ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec), | 1556 ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec), |
2244 second_file->data(), | 1557 second_file->data(), |
2245 second_file->data_size())); | 1558 second_file->data_size())); |
2246 source.EndOfStream(); | 1559 source.EndOfStream(); |
2247 | 1560 |
2248 Play(); | 1561 Play(); |
2249 EXPECT_TRUE(WaitUntilOnEnded()); | 1562 EXPECT_TRUE(WaitUntilOnEnded()); |
2250 | 1563 |
2251 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1564 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
2252 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1565 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
(...skipping 57 matching lines...)
2310 | 1623 |
2311 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); | 1624 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
2312 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); | 1625 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
2313 EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs, | 1626 EXPECT_EQ(kAppendTimeMs + k640WebMFileDurationMs, |
2314 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); | 1627 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
2315 | 1628 |
2316 source.Shutdown(); | 1629 source.Shutdown(); |
2317 Stop(); | 1630 Stop(); |
2318 } | 1631 } |
2319 | 1632 |
| 1633 #if defined(ARCH_CPU_X86_FAMILY) && !defined(OS_ANDROID) |
| 1634 TEST_F(PipelineIntegrationTest, BasicPlaybackHi10PVP9) { |
| 1635 ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi10p-vp9.webm", kClockless)); |
| 1636 |
| 1637 Play(); |
| 1638 |
| 1639 ASSERT_TRUE(WaitUntilOnEnded()); |
| 1640 } |
| 1641 |
| 1642 TEST_F(PipelineIntegrationTest, BasicPlaybackHi12PVP9) { |
| 1643 ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi12p-vp9.webm", kClockless)); |
| 1644 |
| 1645 Play(); |
| 1646 |
| 1647 ASSERT_TRUE(WaitUntilOnEnded()); |
| 1648 } |
| 1649 #endif |
| 1650 |
2320 #if BUILDFLAG(USE_PROPRIETARY_CODECS) | 1651 #if BUILDFLAG(USE_PROPRIETARY_CODECS) |
| 1652 |
| 1653 TEST_F(PipelineIntegrationTest, BasicPlaybackHi10P) { |
| 1654 ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi10p.mp4", kClockless)); |
| 1655 |
| 1656 Play(); |
| 1657 |
| 1658 ASSERT_TRUE(WaitUntilOnEnded()); |
| 1659 } |
| 1660 |
| 1661 TEST_F(PipelineIntegrationTest, BasicFallback) { |
| 1662 ScopedVector<VideoDecoder> failing_video_decoder; |
| 1663 failing_video_decoder.push_back(new FailingVideoDecoder()); |
| 1664 |
| 1665 ASSERT_EQ(PIPELINE_OK, |
| 1666 Start("bear.mp4", kClockless, std::move(failing_video_decoder))); |
| 1667 |
| 1668 Play(); |
| 1669 |
| 1670 ASSERT_TRUE(WaitUntilOnEnded()); |
| 1671 }
| 1672 |
| 1673 TEST_F(PipelineIntegrationTest, MediaSource_ADTS) { |
| 1674 MockMediaSource source("sfx.adts", kADTS, kAppendWholeFile); |
| 1675 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 1676 source.EndOfStream(); |
| 1677 |
| 1678 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
| 1679 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
| 1680 EXPECT_EQ(325, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
| 1681 |
| 1682 Play(); |
| 1683 |
| 1684 EXPECT_TRUE(WaitUntilOnEnded()); |
| 1685 } |
| 1686 |
| 1687 TEST_F(PipelineIntegrationTest, MediaSource_ADTS_TimestampOffset) { |
| 1688 MockMediaSource source("sfx.adts", kADTS, kAppendWholeFile); |
| 1689 EXPECT_EQ(PIPELINE_OK, |
| 1690 StartPipelineWithMediaSource(&source, kHashed, nullptr)); |
| 1691 EXPECT_EQ(325, source.last_timestamp_offset().InMilliseconds()); |
| 1692 |
| 1693 // Trim multiple frames off the beginning of the segment, which will cause |
| 1694 // the first decoded frame to be incorrect if preroll isn't implemented. |
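| // An AAC frame is 1024 samples, so 2.5 frames at 44.1 kHz is roughly 58 ms
| // of preroll.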
| 1695 const base::TimeDelta adts_preroll_duration = |
| 1696 base::TimeDelta::FromSecondsD(2.5 * 1024 / 44100); |
| 1697 const base::TimeDelta append_time = |
| 1698 source.last_timestamp_offset() - adts_preroll_duration; |
| 1699 |
| 1700 scoped_refptr<DecoderBuffer> second_file = ReadTestDataFile("sfx.adts"); |
| 1701 source.AppendAtTimeWithWindow( |
| 1702 append_time, append_time + adts_preroll_duration, kInfiniteDuration, |
| 1703 second_file->data(), second_file->data_size()); |
| 1704 source.EndOfStream(); |
| 1705 |
| 1706 Play(); |
| 1707 EXPECT_TRUE(WaitUntilOnEnded()); |
| 1708 |
| 1709 EXPECT_EQ(592, source.last_timestamp_offset().InMilliseconds()); |
| 1710 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
| 1711 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
| 1712 EXPECT_EQ(592, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
| 1713 |
| 1714 // Verify preroll is stripped. |
| 1715 EXPECT_HASH_EQ("-0.25,0.67,0.04,0.14,-0.49,-0.41,", GetAudioHash()); |
| 1716 } |
| 1717 |
| 1718 TEST_F(PipelineIntegrationTest, BasicPlaybackHashed_MP3) { |
| 1719 ASSERT_EQ(PIPELINE_OK, Start("sfx.mp3", kHashed)); |
| 1720 |
| 1721 Play(); |
| 1722 |
| 1723 ASSERT_TRUE(WaitUntilOnEnded()); |
| 1724 |
| 1725 // Verify codec delay and preroll are stripped. |
| 1726 EXPECT_HASH_EQ("1.30,2.72,4.56,5.08,3.74,2.03,", GetAudioHash()); |
| 1727 } |
| 1728 |
2321 #if !defined(DISABLE_CLOCKLESS_TESTS) | 1729 #if !defined(DISABLE_CLOCKLESS_TESTS) |
2322 class Mp3FastSeekParams { | 1730 class Mp3FastSeekParams { |
2323 public: | 1731 public: |
2324 Mp3FastSeekParams(const char* filename, const char* hash) | 1732 Mp3FastSeekParams(const char* filename, const char* hash) |
2325 : filename(filename), hash(hash) {} | 1733 : filename(filename), hash(hash) {} |
2326 const char* filename; | 1734 const char* filename; |
2327 const char* hash; | 1735 const char* hash; |
2328 }; | 1736 }; |
2329 | 1737 |
2330 class Mp3FastSeekIntegrationTest | 1738 class Mp3FastSeekIntegrationTest |
(...skipping 22 matching lines...)
2353 // but the numerator will be truncated in the TOC as 250, losing precision. | 1761 // but the numerator will be truncated in the TOC as 250, losing precision. |
2354 base::TimeDelta seek_time(0.98 * pipeline_->GetMediaDuration()); | 1762 base::TimeDelta seek_time(0.98 * pipeline_->GetMediaDuration()); |
2355 | 1763 |
2356 ASSERT_TRUE(Seek(seek_time)); | 1764 ASSERT_TRUE(Seek(seek_time)); |
2357 Play(); | 1765 Play(); |
2358 ASSERT_TRUE(WaitUntilOnEnded()); | 1766 ASSERT_TRUE(WaitUntilOnEnded()); |
2359 | 1767 |
2360 EXPECT_HASH_EQ(config.hash, GetAudioHash()); | 1768 EXPECT_HASH_EQ(config.hash, GetAudioHash()); |
2361 } | 1769 } |
2362 | 1770 |
| 1771 // CBR seeks should always be fast and accurate. |
2363 INSTANTIATE_TEST_CASE_P( | 1772 INSTANTIATE_TEST_CASE_P( |
2364 CBRSeek_HasTOC, | 1773 CBRSeek_HasTOC, |
2365 Mp3FastSeekIntegrationTest, | 1774 Mp3FastSeekIntegrationTest, |
2366 ::testing::Values(Mp3FastSeekParams("bear-audio-10s-CBR-has-TOC.mp3", | 1775 ::testing::Values(Mp3FastSeekParams("bear-audio-10s-CBR-has-TOC.mp3", |
2367 "-0.71,0.36,2.96,2.68,2.11,-1.08,"))); | 1776 "-0.71,0.36,2.96,2.68,2.11,-1.08,"))); |
| 1777 |
2368 INSTANTIATE_TEST_CASE_P( | 1778 INSTANTIATE_TEST_CASE_P( |
2369 CBRSeeks_NoTOC, | 1779 CBRSeeks_NoTOC, |
2370 Mp3FastSeekIntegrationTest, | 1780 Mp3FastSeekIntegrationTest, |
2371 ::testing::Values(Mp3FastSeekParams("bear-audio-10s-CBR-no-TOC.mp3", | 1781 ::testing::Values(Mp3FastSeekParams("bear-audio-10s-CBR-no-TOC.mp3", |
2372 "0.95,0.56,1.34,0.47,1.77,0.84,"))); | 1782 "0.95,0.56,1.34,0.47,1.77,0.84,"))); |
| 1783 |
2373 // VBR seeks can be fast *OR* accurate, but not both. We chose fast. | 1784 // VBR seeks can be fast *OR* accurate, but not both. We chose fast. |
2374 INSTANTIATE_TEST_CASE_P( | 1785 INSTANTIATE_TEST_CASE_P( |
2375 VBRSeeks_HasTOC, | 1786 VBRSeeks_HasTOC, |
2376 Mp3FastSeekIntegrationTest, | 1787 Mp3FastSeekIntegrationTest, |
2377 ::testing::Values(Mp3FastSeekParams("bear-audio-10s-VBR-has-TOC.mp3", | 1788 ::testing::Values(Mp3FastSeekParams("bear-audio-10s-VBR-has-TOC.mp3", |
2378 "-0.15,-0.83,0.54,1.00,1.94,0.93,"))); | 1789 "-0.15,-0.83,0.54,1.00,1.94,0.93,"))); |
| 1790 |
2379 INSTANTIATE_TEST_CASE_P( | 1791 INSTANTIATE_TEST_CASE_P( |
2380 VBRSeeks_NoTOC, | 1792 VBRSeeks_NoTOC, |
2381 Mp3FastSeekIntegrationTest, | 1793 Mp3FastSeekIntegrationTest, |
2382 ::testing::Values(Mp3FastSeekParams("bear-audio-10s-VBR-no-TOC.mp3", | 1794 ::testing::Values(Mp3FastSeekParams("bear-audio-10s-VBR-no-TOC.mp3", |
2383 "-0.22,0.80,1.19,0.73,-0.31,-1.12,"))); | 1795 "-0.22,0.80,1.19,0.73,-0.31,-1.12,"))); |
2384 #endif // !defined(DISABLE_CLOCKLESS_TESTS) | 1796 #endif // !defined(DISABLE_CLOCKLESS_TESTS) |
2385 | 1797 |
| 1798 TEST_F(PipelineIntegrationTest, MediaSource_MP3) { |
| 1799 MockMediaSource source("sfx.mp3", kMP3, kAppendWholeFile); |
| 1800 EXPECT_EQ(PIPELINE_OK, |
| 1801 StartPipelineWithMediaSource(&source, kHashed, nullptr)); |
| 1802 source.EndOfStream(); |
| 1803 |
| 1804 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
| 1805 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
| 1806 EXPECT_EQ(313, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
| 1807 |
| 1808 Play(); |
| 1809 |
| 1810 EXPECT_TRUE(WaitUntilOnEnded()); |
| 1811 |
| 1812 // Verify that codec delay was stripped. |
| 1813 EXPECT_HASH_EQ("1.01,2.71,4.18,4.32,3.04,1.12,", GetAudioHash()); |
| 1814 } |
| 1815 |
| 1816 TEST_F(PipelineIntegrationTest, MediaSource_MP3_TimestampOffset) { |
| 1817 MockMediaSource source("sfx.mp3", kMP3, kAppendWholeFile); |
| 1818 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 1819 EXPECT_EQ(313, source.last_timestamp_offset().InMilliseconds()); |
| 1820 |
| 1821 // There are 576 silent samples at the start of this mp3. The second append |
| 1822 // should trim them off. |
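| // (576 samples at 44.1 kHz is roughly 13 ms.)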
| 1823 const base::TimeDelta mp3_preroll_duration = |
| 1824 base::TimeDelta::FromSecondsD(576.0 / 44100); |
| 1825 const base::TimeDelta append_time = |
| 1826 source.last_timestamp_offset() - mp3_preroll_duration; |
| 1827 |
| 1828 scoped_refptr<DecoderBuffer> second_file = ReadTestDataFile("sfx.mp3"); |
| 1829 source.AppendAtTimeWithWindow(append_time, append_time + mp3_preroll_duration, |
| 1830 kInfiniteDuration, second_file->data(), |
| 1831 second_file->data_size()); |
| 1832 source.EndOfStream(); |
| 1833 |
| 1834 Play(); |
| 1835 EXPECT_TRUE(WaitUntilOnEnded()); |
| 1836 |
| 1837 EXPECT_EQ(613, source.last_timestamp_offset().InMilliseconds()); |
| 1838 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
| 1839 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
| 1840 EXPECT_EQ(613, pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
| 1841 } |
| 1842 |
| 1843 TEST_F(PipelineIntegrationTest, MediaSource_MP3_Icecast) { |
| 1844 MockMediaSource source("icy_sfx.mp3", kMP3, kAppendWholeFile); |
| 1845 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 1846 source.EndOfStream(); |
| 1847 |
| 1848 Play(); |
| 1849 |
| 1850 EXPECT_TRUE(WaitUntilOnEnded()); |
| 1851 } |
| 1852 |
| 1853 TEST_F(PipelineIntegrationTest, MediaSource_ConfigChange_MP4) { |
| 1854 MockMediaSource source("bear-640x360-av_frag.mp4", kMP4, kAppendWholeFile); |
| 1855 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 1856 |
| 1857 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1); |
| 1858 scoped_refptr<DecoderBuffer> second_file = |
| 1859 ReadTestDataFile("bear-1280x720-av_frag.mp4"); |
| 1860 ASSERT_TRUE(source.AppendAtTime(base::TimeDelta::FromSeconds(kAppendTimeSec), |
| 1861 second_file->data(), |
| 1862 second_file->data_size())); |
| 1863 source.EndOfStream(); |
| 1864 |
| 1865 Play(); |
| 1866 EXPECT_TRUE(WaitUntilOnEnded()); |
| 1867 |
| 1868 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
| 1869 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
| 1870 EXPECT_EQ(kAppendTimeMs + k1280IsoFileDurationMs, |
| 1871 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
| 1872 |
| 1873 source.Shutdown(); |
| 1874 Stop(); |
| 1875 } |
| 1876 |
2386 TEST_F(PipelineIntegrationTest, | 1877 TEST_F(PipelineIntegrationTest, |
2387 MAYBE_EME(MediaSource_ConfigChange_Encrypted_MP4_CENC_VideoOnly)) { | 1878 MAYBE_EME(MediaSource_ConfigChange_Encrypted_MP4_CENC_VideoOnly)) { |
2388 MockMediaSource source("bear-640x360-v_frag-cenc.mp4", kMP4Video, | 1879 MockMediaSource source("bear-640x360-v_frag-cenc.mp4", kMP4Video, |
2389 kAppendWholeFile); | 1880 kAppendWholeFile); |
2390 FakeEncryptedMedia encrypted_media(new KeyProvidingApp()); | 1881 FakeEncryptedMedia encrypted_media(new KeyProvidingApp()); |
2391 EXPECT_EQ(PIPELINE_OK, | 1882 EXPECT_EQ(PIPELINE_OK, |
2392 StartPipelineWithEncryptedMedia(&source, &encrypted_media)); | 1883 StartPipelineWithEncryptedMedia(&source, &encrypted_media)); |
2393 | 1884 |
2394 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1); | 1885 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(1280, 720))).Times(1); |
2395 scoped_refptr<DecoderBuffer> second_file = | 1886 scoped_refptr<DecoderBuffer> second_file = |
(...skipping 109 matching lines...)
2505 } | 1996 } |
2506 | 1997 |
2507 // Verify files which change configuration midstream fail gracefully. | 1998 // Verify files which change configuration midstream fail gracefully. |
2508 TEST_F(PipelineIntegrationTest, MidStreamConfigChangesFail) { | 1999 TEST_F(PipelineIntegrationTest, MidStreamConfigChangesFail) { |
2509 ASSERT_EQ(PIPELINE_OK, Start("midstream_config_change.mp3")); | 2000 ASSERT_EQ(PIPELINE_OK, Start("midstream_config_change.mp3")); |
2510 Play(); | 2001 Play(); |
2511 ASSERT_EQ(WaitUntilEndedOrError(), PIPELINE_ERROR_DECODE); | 2002 ASSERT_EQ(WaitUntilEndedOrError(), PIPELINE_ERROR_DECODE); |
2512 } | 2003 } |
2513 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) | 2004 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) |
2514 | 2005 |
| 2006 TEST_F(PipelineIntegrationTest, BasicPlayback_16x9AspectRatio) { |
| 2007 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-16x9-aspect.webm")); |
| 2008 Play(); |
| 2009 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2010 } |
| 2011 |
2515 TEST_F(PipelineIntegrationTest, MAYBE_EME(EncryptedPlayback_WebM)) { | 2012 TEST_F(PipelineIntegrationTest, MAYBE_EME(EncryptedPlayback_WebM)) { |
2516 MockMediaSource source("bear-320x240-av_enc-av.webm", kWebM, 219816); | 2013 MockMediaSource source("bear-320x240-av_enc-av.webm", kWebM, 219816); |
2517 FakeEncryptedMedia encrypted_media(new KeyProvidingApp()); | 2014 FakeEncryptedMedia encrypted_media(new KeyProvidingApp()); |
2518 EXPECT_EQ(PIPELINE_OK, | 2015 EXPECT_EQ(PIPELINE_OK, |
2519 StartPipelineWithEncryptedMedia(&source, &encrypted_media)); | 2016 StartPipelineWithEncryptedMedia(&source, &encrypted_media)); |
2520 | 2017 |
2521 source.EndOfStream(); | 2018 source.EndOfStream(); |
2522 ASSERT_EQ(PIPELINE_OK, pipeline_status_); | 2019 ASSERT_EQ(PIPELINE_OK, pipeline_status_); |
2523 | 2020 |
2524 Play(); | 2021 Play(); |
(...skipping 85 matching lines...)
2610 | 2107 |
2611 source.EndOfStream(); | 2108 source.EndOfStream(); |
2612 | 2109 |
2613 Play(); | 2110 Play(); |
2614 | 2111 |
2615 ASSERT_TRUE(WaitUntilOnEnded()); | 2112 ASSERT_TRUE(WaitUntilOnEnded()); |
2616 source.Shutdown(); | 2113 source.Shutdown(); |
2617 Stop(); | 2114 Stop(); |
2618 } | 2115 } |
2619 | 2116 |
| 2117 TEST_F(PipelineIntegrationTest, Mp2ts_AAC_HE_SBR_Audio) { |
| 2118 MockMediaSource source("bear-1280x720-aac_he.ts", kMP2AudioSBR, |
| 2119 kAppendWholeFile); |
| 2120 #if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER) |
| 2121 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 2122 source.EndOfStream(); |
| 2123 ASSERT_EQ(PIPELINE_OK, pipeline_status_); |
| 2124 |
| 2125 // Check that SBR is taken into account correctly by the mpeg2ts parser. If an |
| 2126 // SBR stream is parsed as a non-SBR stream, audio frame durations are |
| 2127 // calculated incorrectly, which leads to gaps in the buffered ranges (so this |
| 2128 // check will fail) and eventually to stalled playback. |
| 2129 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
| 2130 #else |
| 2131 EXPECT_EQ( |
| 2132 DEMUXER_ERROR_COULD_NOT_OPEN, |
| 2133 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); |
| 2134 #endif |
| 2135 } |
| 2136 |
| 2137 TEST_F(PipelineIntegrationTest, Mpeg2ts_MP3Audio_Mp4a_6B) { |
| 2138 MockMediaSource source("bear-audio-mp4a.6B.ts", |
| 2139 "video/mp2t; codecs=\"mp4a.6B\"", kAppendWholeFile); |
| 2140 #if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER) |
| 2141 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 2142 source.EndOfStream(); |
| 2143 ASSERT_EQ(PIPELINE_OK, pipeline_status_); |
| 2144 #else |
| 2145 EXPECT_EQ( |
| 2146 DEMUXER_ERROR_COULD_NOT_OPEN, |
| 2147 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); |
| 2148 #endif |
| 2149 } |
| 2150 |
| 2151 TEST_F(PipelineIntegrationTest, Mpeg2ts_MP3Audio_Mp4a_69) { |
| 2152 MockMediaSource source("bear-audio-mp4a.69.ts", |
| 2153 "video/mp2t; codecs=\"mp4a.69\"", kAppendWholeFile); |
| 2154 #if BUILDFLAG(ENABLE_MSE_MPEG2TS_STREAM_PARSER) |
| 2155 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 2156 source.EndOfStream(); |
| 2157 ASSERT_EQ(PIPELINE_OK, pipeline_status_); |
| 2158 #else |
| 2159 EXPECT_EQ( |
| 2160 DEMUXER_ERROR_COULD_NOT_OPEN, |
| 2161 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); |
| 2162 #endif |
| 2163 } |
| 2164 |
2620 TEST_F(PipelineIntegrationTest, | 2165 TEST_F(PipelineIntegrationTest, |
2621 MAYBE_EME(EncryptedPlayback_NoEncryptedFrames_MP4_CENC_AudioOnly)) { | 2166 MAYBE_EME(EncryptedPlayback_NoEncryptedFrames_MP4_CENC_AudioOnly)) { |
2622 MockMediaSource source("bear-1280x720-a_frag-cenc_clear-all.mp4", kMP4Audio, | 2167 MockMediaSource source("bear-1280x720-a_frag-cenc_clear-all.mp4", kMP4Audio, |
2623 kAppendWholeFile); | 2168 kAppendWholeFile); |
2624 FakeEncryptedMedia encrypted_media(new NoResponseApp()); | 2169 FakeEncryptedMedia encrypted_media(new NoResponseApp()); |
2625 EXPECT_EQ(PIPELINE_OK, | 2170 EXPECT_EQ(PIPELINE_OK, |
2626 StartPipelineWithEncryptedMedia(&source, &encrypted_media)); | 2171 StartPipelineWithEncryptedMedia(&source, &encrypted_media)); |
2627 | 2172 |
2628 source.EndOfStream(); | 2173 source.EndOfStream(); |
2629 | 2174 |
(...skipping 93 matching lines...)
2723 | 2268 |
2724 source.EndOfStream(); | 2269 source.EndOfStream(); |
2725 | 2270 |
2726 Play(); | 2271 Play(); |
2727 | 2272 |
2728 ASSERT_TRUE(WaitUntilOnEnded()); | 2273 ASSERT_TRUE(WaitUntilOnEnded()); |
2729 source.Shutdown(); | 2274 source.Shutdown(); |
2730 Stop(); | 2275 Stop(); |
2731 } | 2276 } |
2732 | 2277 |
| 2278 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_AVC3) { |
| 2279 MockMediaSource source("bear-1280x720-v_frag-avc3.mp4", kMP4VideoAVC3, |
| 2280 kAppendWholeFile); |
| 2281 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 2282 source.EndOfStream(); |
| 2283 |
| 2284 EXPECT_EQ(1u, pipeline_->GetBufferedTimeRanges().size()); |
| 2285 EXPECT_EQ(0, pipeline_->GetBufferedTimeRanges().start(0).InMilliseconds()); |
| 2286 EXPECT_EQ(k1280IsoAVC3FileDurationMs, |
| 2287 pipeline_->GetBufferedTimeRanges().end(0).InMilliseconds()); |
| 2288 |
| 2289 Play(); |
| 2290 |
| 2291 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2292 source.Shutdown(); |
| 2293 Stop(); |
| 2294 } |
| 2295 |
| 2296 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_VP9) { |
| 2297 MockMediaSource source("bear-320x240-v_frag-vp9.mp4", kMP4VideoVP9, |
| 2298 kAppendWholeFile); |
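| // VP9 in MP4 is gated behind a command line switch; without it the mime type
| // is rejected at AddId() time and the rest of the test is skipped.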
| 2299 if (!base::CommandLine::ForCurrentProcess()->HasSwitch( |
| 2300 switches::kEnableVp9InMp4)) { |
| 2301 ASSERT_EQ(ChunkDemuxer::kNotSupported, source.AddId()); |
| 2302 return; |
| 2303 } |
| 2304 |
| 2305 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 2306 source.EndOfStream(); |
| 2307 ASSERT_EQ(PIPELINE_OK, pipeline_status_); |
| 2308 |
| 2309 Play(); |
| 2310 |
| 2311 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2312 source.Shutdown(); |
| 2313 Stop(); |
| 2314 } |
| 2315 |
| 2316 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_HEVC1) { |
| 2317 // HEVC demuxing might be enabled even on platforms that don't support HEVC |
| 2318 // decoding. For those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which |
| 2319 // indicates that we did pass media mime type checks and attempted |
| 2320 // to actually demux and decode the stream. On platforms that support both |
| 2321 // demuxing and decoding we'll get PIPELINE_OK. |
| 2322 MockMediaSource source("bear-320x240-v_frag-hevc.mp4", kMP4VideoHEVC1, |
| 2323 kAppendWholeFile); |
| 2324 #if BUILDFLAG(ENABLE_HEVC_DEMUXING) |
| 2325 PipelineStatus status = StartPipelineWithMediaSource(&source); |
| 2326 EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED); |
| 2327 #else |
| 2328 EXPECT_EQ( |
| 2329 DEMUXER_ERROR_COULD_NOT_OPEN, |
| 2330 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); |
| 2331 #endif |
| 2332 } |
| 2333 |
| 2334 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_HEVC2) { |
| 2335 // HEVC demuxing might be enabled even on platforms that don't support HEVC |
| 2336 // decoding. For those cases we'll get DECODER_ERROR_NOT_SUPPORTED, which |
| 2337 // indicates that we did pass media mime type checks and attempted |
| 2338 // to actually demux and decode the stream. On platforms that support both |
| 2339 // demuxing and decoding we'll get PIPELINE_OK. |
| 2340 MockMediaSource source("bear-320x240-v_frag-hevc.mp4", kMP4VideoHEVC2, |
| 2341 kAppendWholeFile); |
| 2342 #if BUILDFLAG(ENABLE_HEVC_DEMUXING) |
| 2343 PipelineStatus status = StartPipelineWithMediaSource(&source); |
| 2344 EXPECT_TRUE(status == PIPELINE_OK || status == DECODER_ERROR_NOT_SUPPORTED); |
| 2345 #else |
| 2346 EXPECT_EQ( |
| 2347 DEMUXER_ERROR_COULD_NOT_OPEN, |
| 2348 StartPipelineWithMediaSource(&source, kExpectDemuxerFailure, nullptr)); |
| 2349 #endif |
| 2350 } |
| 2351 |
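| // The two HEVC tests above encode the same expectation. A minimal |
| // illustrative sketch, not code from this CL, of how that shared check |
| // could be written once inside each test (the lambda name is hypothetical): |
| // |
| //   auto expect_hevc_start = [&](MockMediaSource* source) { |
| // #if BUILDFLAG(ENABLE_HEVC_DEMUXING) |
| //     PipelineStatus status = StartPipelineWithMediaSource(source); |
| //     EXPECT_TRUE(status == PIPELINE_OK || |
| //                 status == DECODER_ERROR_NOT_SUPPORTED); |
| // #else |
| //     EXPECT_EQ(DEMUXER_ERROR_COULD_NOT_OPEN, |
| //               StartPipelineWithMediaSource(source, kExpectDemuxerFailure, |
| //                                            nullptr)); |
| // #endif |
| //   }; |
| |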
2733 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) | 2352 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) |
2734 | 2353 |
| 2354 TEST_F(PipelineIntegrationTest, SeekWhilePaused) { |
| 2355 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); |
| 2356 |
| 2357 base::TimeDelta duration(pipeline_->GetMediaDuration()); |
| 2358 base::TimeDelta start_seek_time(duration / 4); |
| 2359 base::TimeDelta seek_time(duration * 3 / 4); |
| 2360 |
| 2361 Play(); |
| 2362 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); |
| 2363 Pause(); |
| 2364 ASSERT_TRUE(Seek(seek_time)); |
| 2365 EXPECT_EQ(seek_time, pipeline_->GetMediaTime()); |
| 2366 Play(); |
| 2367 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2368 |
| 2369 // Make sure seeking after reaching the end works as expected. |
| 2370 Pause(); |
| 2371 ASSERT_TRUE(Seek(seek_time)); |
| 2372 EXPECT_EQ(seek_time, pipeline_->GetMediaTime()); |
| 2373 Play(); |
| 2374 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2375 } |
| 2376 |
| 2377 TEST_F(PipelineIntegrationTest, SeekWhilePlaying) { |
| 2378 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); |
| 2379 |
| 2380 base::TimeDelta duration(pipeline_->GetMediaDuration()); |
| 2381 base::TimeDelta start_seek_time(duration / 4); |
| 2382 base::TimeDelta seek_time(duration * 3 / 4); |
| 2383 |
| 2384 Play(); |
| 2385 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); |
| 2386 ASSERT_TRUE(Seek(seek_time)); |
| 2387 EXPECT_GE(pipeline_->GetMediaTime(), seek_time); |
| 2388 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2389 |
| 2390 // Make sure seeking after reaching the end works as expected. |
| 2391 ASSERT_TRUE(Seek(seek_time)); |
| 2392 EXPECT_GE(pipeline_->GetMediaTime(), seek_time); |
| 2393 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2394 } |
| 2395 |
| 2396 TEST_F(PipelineIntegrationTest, SuspendWhilePaused) { |
| 2397 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); |
| 2398 |
| 2399 base::TimeDelta duration(pipeline_->GetMediaDuration()); |
| 2400 base::TimeDelta start_seek_time(duration / 4); |
| 2401 base::TimeDelta seek_time(duration * 3 / 4); |
| 2402 |
| 2403 Play(); |
| 2404 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); |
| 2405 Pause(); |
| 2406 |
| 2407 // Suspend while paused. |
| 2408 ASSERT_TRUE(Suspend()); |
| 2409 |
| 2410 // Resuming the pipeline will create a new Renderer, |
| 2411 // which in turn will trigger video size and opacity notifications. |
| 2412 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))).Times(1); |
| 2413 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(1); |
| 2414 |
| 2415 ASSERT_TRUE(Resume(seek_time)); |
| 2416 EXPECT_GE(pipeline_->GetMediaTime(), seek_time); |
| 2417 Play(); |
| 2418 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2419 } |
| 2420 |
| 2421 TEST_F(PipelineIntegrationTest, SuspendWhilePlaying) { |
| 2422 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240.webm")); |
| 2423 |
| 2424 base::TimeDelta duration(pipeline_->GetMediaDuration()); |
| 2425 base::TimeDelta start_seek_time(duration / 4); |
| 2426 base::TimeDelta seek_time(duration * 3 / 4); |
| 2427 |
| 2428 Play(); |
| 2429 ASSERT_TRUE(WaitUntilCurrentTimeIsAfter(start_seek_time)); |
| 2430 ASSERT_TRUE(Suspend()); |
| 2431 |
| 2432 // Resuming the pipeline will create a new Renderer, |
| 2433 // which in turn will trigger video size and opacity notifications. |
| 2434 EXPECT_CALL(*this, OnVideoNaturalSizeChange(gfx::Size(320, 240))).Times(1); |
| 2435 EXPECT_CALL(*this, OnVideoOpacityChange(true)).Times(1); |
| 2436 |
| 2437 ASSERT_TRUE(Resume(seek_time)); |
| 2438 EXPECT_GE(pipeline_->GetMediaTime(), seek_time); |
| 2439 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2440 } |
| 2441 |
| 2442 #if BUILDFLAG(USE_PROPRIETARY_CODECS) |
| 2443 TEST_F(PipelineIntegrationTest, Rotated_Metadata_0) { |
| 2444 ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_0.mp4")); |
| 2445 ASSERT_EQ(VIDEO_ROTATION_0, metadata_.video_rotation); |
| 2446 } |
| 2447 |
| 2448 TEST_F(PipelineIntegrationTest, Rotated_Metadata_90) { |
| 2449 ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_90.mp4")); |
| 2450 ASSERT_EQ(VIDEO_ROTATION_90, metadata_.video_rotation); |
| 2451 } |
| 2452 |
| 2453 TEST_F(PipelineIntegrationTest, Rotated_Metadata_180) { |
| 2454 ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_180.mp4")); |
| 2455 ASSERT_EQ(VIDEO_ROTATION_180, metadata_.video_rotation); |
| 2456 } |
| 2457 |
| 2458 TEST_F(PipelineIntegrationTest, Rotated_Metadata_270) { |
| 2459 ASSERT_EQ(PIPELINE_OK, Start("bear_rotate_270.mp4")); |
| 2460 ASSERT_EQ(VIDEO_ROTATION_270, metadata_.video_rotation); |
| 2461 } |
| 2462 #endif // BUILDFLAG(USE_PROPRIETARY_CODECS) |
| 2463 |
| 2464 // Verify audio decoder & renderer can handle aborted demuxer reads. |
| 2465 TEST_F(PipelineIntegrationTest, ChunkDemuxerAbortRead_AudioOnly) { |
| 2466 ASSERT_TRUE(TestSeekDuringRead("bear-320x240-audio-only.webm", kAudioOnlyWebM, |
| 2467 16384, base::TimeDelta::FromMilliseconds(464), |
| 2468 base::TimeDelta::FromMilliseconds(617), 0x10CA, |
| 2469 19730)); |
| 2470 } |
| 2471 |
| 2472 // Verify video decoder & renderer can handle aborted demuxer reads. |
| 2473 TEST_F(PipelineIntegrationTest, ChunkDemuxerAbortRead_VideoOnly) { |
| 2474 ASSERT_TRUE(TestSeekDuringRead("bear-320x240-video-only.webm", kVideoOnlyWebM, |
| 2475 32768, base::TimeDelta::FromMilliseconds(167), |
| 2476 base::TimeDelta::FromMilliseconds(1668), |
| 2477 0x1C896, 65536)); |
| 2478 } |
| 2479 |
| 2480 // Verify that Opus audio in WebM containers can be played back. |
| 2481 TEST_F(PipelineIntegrationTest, BasicPlayback_AudioOnly_Opus_WebM) { |
| 2482 ASSERT_EQ(PIPELINE_OK, Start("bear-opus-end-trimming.webm")); |
| 2483 Play(); |
| 2484 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2485 } |
| 2486 |
| 2487 // Verify that VP9 video in WebM containers can be played back. |
| 2488 TEST_F(PipelineIntegrationTest, BasicPlayback_VideoOnly_VP9_WebM) { |
| 2489 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9.webm")); |
| 2490 Play(); |
| 2491 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2492 } |
| 2493 |
| 2494 // Verify that VP9 video and Opus audio in the same WebM container can be played |
| 2495 // back. |
| 2496 TEST_F(PipelineIntegrationTest, BasicPlayback_VP9_Opus_WebM) { |
| 2497 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-opus.webm")); |
| 2498 Play(); |
| 2499 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2500 } |
| 2501 |
| 2502 // Verify that VP8 video with alpha channel can be played back. |
| 2503 TEST_F(PipelineIntegrationTest, BasicPlayback_VP8A_WebM) { |
| 2504 ASSERT_EQ(PIPELINE_OK, Start("bear-vp8a.webm")); |
| 2505 Play(); |
| 2506 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2507 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A); |
| 2508 } |
| 2509 |
| 2510 // Verify that VP8A video with odd width/height can be played back. |
| 2511 TEST_F(PipelineIntegrationTest, BasicPlayback_VP8A_Odd_WebM) { |
| 2512 ASSERT_EQ(PIPELINE_OK, Start("bear-vp8a-odd-dimensions.webm")); |
| 2513 Play(); |
| 2514 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2515 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A); |
| 2516 } |
| 2517 |
| 2518 // Verify that VP9 video with odd width/height can be played back. |
| 2519 TEST_F(PipelineIntegrationTest, BasicPlayback_VP9_Odd_WebM) { |
| 2520 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-odd-dimensions.webm")); |
| 2521 Play(); |
| 2522 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2523 } |
| 2524 |
| 2525 // Verify that VP9 video with alpha channel can be played back. |
| 2526 TEST_F(PipelineIntegrationTest, BasicPlayback_VP9A_WebM) { |
| 2527 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9a.webm")); |
| 2528 Play(); |
| 2529 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2530 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A); |
| 2531 } |
| 2532 |
| 2533 // Verify that VP9A video with odd width/height can be played back. |
| 2534 TEST_F(PipelineIntegrationTest, BasicPlayback_VP9A_Odd_WebM) { |
| 2535 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9a-odd-dimensions.webm")); |
| 2536 Play(); |
| 2537 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2538 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12A); |
| 2539 } |
| 2540 |
2735 // Verify that VP8 video with inband text track can be played back. | 2541 // Verify that VP8 video with inband text track can be played back. |
2736 TEST_F(PipelineIntegrationTest, MAYBE_TEXT(BasicPlayback_VP8_WebVTT_WebM)) { | 2542 TEST_F(PipelineIntegrationTest, MAYBE_TEXT(BasicPlayback_VP8_WebVTT_WebM)) { |
2737 EXPECT_CALL(*this, OnAddTextTrack(_, _)); | 2543 EXPECT_CALL(*this, OnAddTextTrack(_, _)); |
2738 ASSERT_EQ(PIPELINE_OK, Start("bear-vp8-webvtt.webm")); | 2544 ASSERT_EQ(PIPELINE_OK, Start("bear-vp8-webvtt.webm")); |
2739 Play(); | 2545 Play(); |
2740 ASSERT_TRUE(WaitUntilOnEnded()); | 2546 ASSERT_TRUE(WaitUntilOnEnded()); |
2741 } | 2547 } |
2742 | 2548 |
| 2549 // Verify that VP9 video with 4:4:4 subsampling can be played back. |
| 2550 TEST_F(PipelineIntegrationTest, P444_VP9_WebM) { |
| 2551 ASSERT_EQ(PIPELINE_OK, Start("bear-320x240-P444.webm")); |
| 2552 Play(); |
| 2553 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2554 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV24); |
| 2555 } |
| 2556 |
| 2557 // Verify that frames of VP9 video in the BT.709 color space are reported |
| 2558 // with the YV12 format and the HD REC709 color space. |
| 2559 TEST_F(PipelineIntegrationTest, BT709_VP9_WebM) { |
| 2560 ASSERT_EQ(PIPELINE_OK, Start("bear-vp9-bt709.webm")); |
| 2561 Play(); |
| 2562 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2563 EXPECT_VIDEO_FORMAT_EQ(last_video_frame_format_, PIXEL_FORMAT_YV12); |
| 2564 EXPECT_COLOR_SPACE_EQ(last_video_frame_color_space_, COLOR_SPACE_HD_REC709); |
| 2565 } |
| 2566 |
| 2567 TEST_F(PipelineIntegrationTest, HD_VP9_WebM) { |
| 2568 ASSERT_EQ(PIPELINE_OK, Start("bear-1280x720.webm", kClockless)); |
| 2569 Play(); |
| 2570 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2571 } |
| 2572 |
| 2573 // Verify that videos with an odd frame size play back successfully. |
| 2574 TEST_F(PipelineIntegrationTest, BasicPlayback_OddVideoSize) { |
| 2575 ASSERT_EQ(PIPELINE_OK, Start("butterfly-853x480.webm")); |
| 2576 Play(); |
| 2577 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2578 } |
| 2579 |
| 2580 // Verify that Opus audio in a WebM file which reports a 44.1kHz sample rate |
| 2581 // plays correctly at 48kHz. |
| 2582 TEST_F(PipelineIntegrationTest, BasicPlayback_Opus441kHz) { |
| 2583 ASSERT_EQ(PIPELINE_OK, Start("sfx-opus-441.webm")); |
| 2584 Play(); |
| 2585 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2586 |
| 2587 EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO) |
| 2588 ->audio_decoder_config() |
| 2589 .samples_per_second()); |
| 2590 } |
| 2591 |
| 2592 // Same as above but using MediaSource. |
| 2593 TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_Opus441kHz) { |
| 2594 MockMediaSource source("sfx-opus-441.webm", kOpusAudioOnlyWebM, |
| 2595 kAppendWholeFile); |
| 2596 EXPECT_EQ(PIPELINE_OK, StartPipelineWithMediaSource(&source)); |
| 2597 source.EndOfStream(); |
| 2598 Play(); |
| 2599 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2600 source.Shutdown(); |
| 2601 Stop(); |
| 2602 EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO) |
| 2603 ->audio_decoder_config() |
| 2604 .samples_per_second()); |
| 2605 } |
| 2606 |
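| // Background for the two 44.1kHz Opus tests above: the Opus decoder outputs |
| // 48kHz regardless of the sample rate advertised by the container, so the |
| // 44.1kHz value in the WebM header is expected to be normalized to 48000 in |
| // the audio decoder config. A minimal illustrative sketch, not code from |
| // this CL, of that shared check written as a lambda (the name is |
| // hypothetical): |
| // |
| //   auto expect_opus_output_at_48khz = [&]() { |
| //     EXPECT_EQ(48000, demuxer_->GetFirstStream(DemuxerStream::AUDIO) |
| //                          ->audio_decoder_config() |
| //                          .samples_per_second()); |
| //   }; |
| |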
| 2607 // Ensures audio-only playback with missing or negative timestamps works. Tests |
| 2608 // the common live-streaming case for chained ogg. See http://crbug.com/396864. |
| 2609 TEST_F(PipelineIntegrationTest, BasicPlaybackChainedOgg) { |
| 2610 ASSERT_EQ(PIPELINE_OK, Start("double-sfx.ogg", kUnreliableDuration)); |
| 2611 Play(); |
| 2612 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2613 ASSERT_EQ(base::TimeDelta(), demuxer_->GetStartTime()); |
| 2614 } |
| 2615 |
2743 // Ensures audio-video playback with missing or negative timestamps fails softly | 2616 // Ensures audio-video playback with missing or negative timestamps fails softly |
2744 // instead of crashing. See http://crbug.com/396864. | 2617 // instead of crashing. See http://crbug.com/396864. |
2745 TEST_F(PipelineIntegrationTest, BasicPlaybackChainedOggVideo) { | 2618 TEST_F(PipelineIntegrationTest, BasicPlaybackChainedOggVideo) { |
2746 ASSERT_EQ(PIPELINE_OK, Start("double-bear.ogv", kUnreliableDuration)); | 2619 ASSERT_EQ(PIPELINE_OK, Start("double-bear.ogv", kUnreliableDuration)); |
2747 Play(); | 2620 Play(); |
2748 EXPECT_EQ(PIPELINE_ERROR_DECODE, WaitUntilEndedOrError()); | 2621 EXPECT_EQ(PIPELINE_ERROR_DECODE, WaitUntilEndedOrError()); |
2749 ASSERT_EQ(base::TimeDelta(), demuxer_->GetStartTime()); | 2622 ASSERT_EQ(base::TimeDelta(), demuxer_->GetStartTime()); |
2750 } | 2623 } |
2751 | 2624 |
| 2625 // Tests that we signal ended even when audio runs longer than the video. |
| 2626 TEST_F(PipelineIntegrationTest, BasicPlaybackAudioLongerThanVideo) { |
| 2627 ASSERT_EQ(PIPELINE_OK, Start("bear_audio_longer_than_video.ogv")); |
| 2628 // Audio track is 2000ms. Video track is 1001ms. Duration should be the |
| 2629 // higher of the two. |
| 2630 EXPECT_EQ(2000, pipeline_->GetMediaDuration().InMilliseconds()); |
| 2631 Play(); |
| 2632 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2633 } |
| 2634 |
| 2635 // Tests that we signal ended even when audio runs shorter than the video. |
| 2636 TEST_F(PipelineIntegrationTest, BasicPlaybackAudioShorterThanVideo) { |
| 2637 ASSERT_EQ(PIPELINE_OK, Start("bear_audio_shorter_than_video.ogv")); |
| 2638 // Audio track is 500ms. Video track is 1001ms. Duration should be the |
| 2639 // higher of the two. |
| 2640 EXPECT_EQ(1001, pipeline_->GetMediaDuration().InMilliseconds()); |
| 2641 Play(); |
| 2642 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2643 } |
| 2644 |
| 2645 TEST_F(PipelineIntegrationTest, BasicPlaybackPositiveStartTime) { |
| 2646 ASSERT_EQ(PIPELINE_OK, Start("nonzero-start-time.webm")); |
| 2647 Play(); |
| 2648 ASSERT_TRUE(WaitUntilOnEnded()); |
| 2649 ASSERT_EQ(base::TimeDelta::FromMicroseconds(396000), |
| 2650 demuxer_->GetStartTime()); |
| 2651 } |
| 2652 |
2752 } // namespace media | 2653 } // namespace media |