Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(364)

Unified Diff: media/blink/webmediaplayer_impl.cc

Issue 495353003: Move WebMediaPlayerImpl and its dependencies to media/blink. (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: . Created 6 years, 3 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View side-by-side diff with in-line comments
Download patch
« no previous file with comments | « media/blink/webmediaplayer_impl.h ('k') | media/blink/webmediaplayer_params.h » ('j') | no next file with comments »
Expand Comments ('e') | Collapse Comments ('c') | Show/Hide Comments ('s')
Index: media/blink/webmediaplayer_impl.cc
diff --git a/content/renderer/media/webmediaplayer_impl.cc b/media/blink/webmediaplayer_impl.cc
similarity index 89%
rename from content/renderer/media/webmediaplayer_impl.cc
rename to media/blink/webmediaplayer_impl.cc
index 83e620267aef10d15275c5b6bbbe34c546c8ad91..bed92998d3670c8370328271bc5d9f55147a2df3 100644
--- a/content/renderer/media/webmediaplayer_impl.cc
+++ b/media/blink/webmediaplayer_impl.cc
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#include "content/renderer/media/webmediaplayer_impl.h"
+#include "media/blink/webmediaplayer_impl.h"
#include <algorithm>
#include <limits>
@@ -21,15 +21,6 @@
#include "base/synchronization/waitable_event.h"
#include "cc/blink/web_layer_impl.h"
#include "cc/layers/video_layer.h"
-#include "content/renderer/media/buffered_data_source.h"
-#include "content/renderer/media/crypto/encrypted_media_player_support.h"
-#include "content/renderer/media/texttrack_impl.h"
-#include "content/renderer/media/webaudiosourceprovider_impl.h"
-#include "content/renderer/media/webinbandtexttrack_impl.h"
-#include "content/renderer/media/webmediaplayer_delegate.h"
-#include "content/renderer/media/webmediaplayer_params.h"
-#include "content/renderer/media/webmediaplayer_util.h"
-#include "content/renderer/media/webmediasource_impl.h"
#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/audio/null_audio_sink.h"
@@ -40,6 +31,15 @@
#include "media/base/pipeline.h"
#include "media/base/text_renderer.h"
#include "media/base/video_frame.h"
+#include "media/blink/buffered_data_source.h"
+#include "media/blink/encrypted_media_player_support.h"
+#include "media/blink/texttrack_impl.h"
+#include "media/blink/webaudiosourceprovider_impl.h"
+#include "media/blink/webinbandtexttrack_impl.h"
+#include "media/blink/webmediaplayer_delegate.h"
+#include "media/blink/webmediaplayer_params.h"
+#include "media/blink/webmediaplayer_util.h"
+#include "media/blink/webmediasource_impl.h"
#include "media/filters/audio_renderer_impl.h"
#include "media/filters/chunk_demuxer.h"
#include "media/filters/ffmpeg_audio_decoder.h"
@@ -65,7 +65,6 @@ using blink::WebMediaPlayer;
using blink::WebRect;
using blink::WebSize;
using blink::WebString;
-using media::PipelineStatus;
namespace {
@@ -107,7 +106,7 @@ class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
} // namespace
-namespace content {
+namespace media {
class BufferedDataSourceHostImpl;
@@ -122,13 +121,13 @@ COMPILE_ASSERT_MATCHING_ENUM(UseCredentials);
#define BIND_TO_RENDER_LOOP(function) \
(DCHECK(main_task_runner_->BelongsToCurrentThread()), \
- media::BindToCurrentLoop(base::Bind(function, AsWeakPtr())))
+ BindToCurrentLoop(base::Bind(function, AsWeakPtr())))
#define BIND_TO_RENDER_LOOP1(function, arg1) \
(DCHECK(main_task_runner_->BelongsToCurrentThread()), \
- media::BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1)))
+ BindToCurrentLoop(base::Bind(function, AsWeakPtr(), arg1)))
-static void LogMediaSourceError(const scoped_refptr<media::MediaLog>& media_log,
+static void LogMediaSourceError(const scoped_refptr<MediaLog>& media_log,
const std::string& error) {
media_log->AddEvent(media_log->CreateMediaSourceErrorEvent(error));
}
@@ -176,7 +175,7 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
compositor_task_runner_ = base::MessageLoopProxy::current();
media_log_->AddEvent(
- media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_CREATED));
+ media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_CREATED));
// |gpu_factories_| requires that its entry points be called on its
// |GetTaskRunner()|. Since |pipeline_| will own decoders created from the
@@ -188,7 +187,7 @@ WebMediaPlayerImpl::WebMediaPlayerImpl(
audio_source_provider_ = new WebAudioSourceProviderImpl(
params.audio_renderer_sink().get()
? params.audio_renderer_sink()
- : new media::NullAudioSink(media_task_runner_));
+ : new NullAudioSink(media_task_runner_));
}
WebMediaPlayerImpl::~WebMediaPlayerImpl() {
@@ -196,7 +195,7 @@ WebMediaPlayerImpl::~WebMediaPlayerImpl() {
DCHECK(main_task_runner_->BelongsToCurrentThread());
media_log_->AddEvent(
- media_log_->CreateEvent(media::MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));
+ media_log_->CreateEvent(MediaLogEvent::WEBMEDIAPLAYER_DESTROYED));
if (delegate_.get())
delegate_->PlayerGone(this);
@@ -280,7 +279,7 @@ void WebMediaPlayerImpl::play() {
if (data_source_)
data_source_->MediaIsPlaying();
- media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PLAY));
+ media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PLAY));
if (delegate_.get())
delegate_->DidPlay(this);
@@ -296,7 +295,7 @@ void WebMediaPlayerImpl::pause() {
data_source_->MediaIsPaused();
paused_time_ = pipeline_.GetMediaTime();
- media_log_->AddEvent(media_log_->CreateEvent(media::MediaLogEvent::PAUSE));
+ media_log_->AddEvent(media_log_->CreateEvent(MediaLogEvent::PAUSE));
if (delegate_.get())
delegate_->DidPause(this);
@@ -469,11 +468,11 @@ WebMediaPlayer::ReadyState WebMediaPlayerImpl::readyState() const {
blink::WebTimeRanges WebMediaPlayerImpl::buffered() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- media::Ranges<base::TimeDelta> buffered_time_ranges =
+ Ranges<base::TimeDelta> buffered_time_ranges =
pipeline_.GetBufferedTimeRanges();
const base::TimeDelta duration = pipeline_.GetMediaDuration();
- if (duration != media::kInfiniteDuration()) {
+ if (duration != kInfiniteDuration()) {
buffered_data_source_host_.AddBufferedTimeRanges(
&buffered_time_ranges, duration);
}
@@ -519,7 +518,7 @@ void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
// - We haven't reached HAVE_CURRENT_DATA and need to paint black
// - We're painting to a canvas
// See http://crbug.com/341225 http://crbug.com/342621 for details.
- scoped_refptr<media::VideoFrame> video_frame =
+ scoped_refptr<VideoFrame> video_frame =
GetCurrentFrameFromCompositor();
gfx::Rect gfx_rect(rect);
@@ -551,28 +550,28 @@ double WebMediaPlayerImpl::mediaTimeForTimeValue(double timeValue) const {
unsigned WebMediaPlayerImpl::decodedFrameCount() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- media::PipelineStatistics stats = pipeline_.GetStatistics();
+ PipelineStatistics stats = pipeline_.GetStatistics();
return stats.video_frames_decoded;
}
unsigned WebMediaPlayerImpl::droppedFrameCount() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- media::PipelineStatistics stats = pipeline_.GetStatistics();
+ PipelineStatistics stats = pipeline_.GetStatistics();
return stats.video_frames_dropped;
}
unsigned WebMediaPlayerImpl::audioDecodedByteCount() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- media::PipelineStatistics stats = pipeline_.GetStatistics();
+ PipelineStatistics stats = pipeline_.GetStatistics();
return stats.audio_bytes_decoded;
}
unsigned WebMediaPlayerImpl::videoDecodedByteCount() const {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- media::PipelineStatistics stats = pipeline_.GetStatistics();
+ PipelineStatistics stats = pipeline_.GetStatistics();
return stats.video_bytes_decoded;
}
@@ -586,12 +585,12 @@ bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
bool flip_y) {
TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");
- scoped_refptr<media::VideoFrame> video_frame =
+ scoped_refptr<VideoFrame> video_frame =
GetCurrentFrameFromCompositor();
if (!video_frame.get())
return false;
- if (video_frame->format() != media::VideoFrame::NATIVE_TEXTURE)
+ if (video_frame->format() != VideoFrame::NATIVE_TEXTURE)
return false;
const gpu::MailboxHolder* mailbox_holder = video_frame->mailbox_holder();
@@ -693,7 +692,7 @@ void WebMediaPlayerImpl::OnPipelineSeeked(bool time_changed,
return;
}
- if (status != media::PIPELINE_OK) {
+ if (status != PIPELINE_OK) {
OnPipelineError(status);
return;
}
@@ -719,7 +718,7 @@ void WebMediaPlayerImpl::OnPipelineEnded() {
void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
- DCHECK_NE(error, media::PIPELINE_OK);
+ DCHECK_NE(error, PIPELINE_OK);
if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing) {
// Any error that occurs before reaching ReadyStateHaveMetadata should
@@ -730,19 +729,19 @@ void WebMediaPlayerImpl::OnPipelineError(PipelineStatus error) {
SetNetworkState(PipelineErrorToNetworkState(error));
- if (error == media::PIPELINE_ERROR_DECRYPT)
+ if (error == PIPELINE_ERROR_DECRYPT)
encrypted_media_support_->OnPipelineDecryptError();
}
void WebMediaPlayerImpl::OnPipelineMetadata(
- media::PipelineMetadata metadata) {
+ PipelineMetadata metadata) {
DVLOG(1) << __FUNCTION__;
pipeline_metadata_ = metadata;
UMA_HISTOGRAM_ENUMERATION("Media.VideoRotation",
metadata.video_rotation,
- media::VIDEO_ROTATION_MAX + 1);
+ VIDEO_ROTATION_MAX + 1);
SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata);
if (hasVideo()) {
@@ -750,8 +749,8 @@ void WebMediaPlayerImpl::OnPipelineMetadata(
scoped_refptr<cc::VideoLayer> layer =
cc::VideoLayer::Create(compositor_, pipeline_metadata_.video_rotation);
- if (pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_90 ||
- pipeline_metadata_.video_rotation == media::VIDEO_ROTATION_270) {
+ if (pipeline_metadata_.video_rotation == VIDEO_ROTATION_90 ||
+ pipeline_metadata_.video_rotation == VIDEO_ROTATION_270) {
gfx::Size size = pipeline_metadata_.natural_size;
pipeline_metadata_.natural_size = gfx::Size(size.height(), size.width());
}
@@ -763,7 +762,7 @@ void WebMediaPlayerImpl::OnPipelineMetadata(
}
void WebMediaPlayerImpl::OnPipelineBufferingStateChanged(
- media::BufferingState buffering_state) {
+ BufferingState buffering_state) {
DVLOG(1) << __FUNCTION__ << "(" << buffering_state << ")";
// Ignore buffering state changes until we've completed all outstanding seeks.
@@ -772,7 +771,7 @@ void WebMediaPlayerImpl::OnPipelineBufferingStateChanged(
// TODO(scherkus): Handle other buffering states when Pipeline starts using
// them and translate them ready state changes http://crbug.com/144683
- DCHECK_EQ(buffering_state, media::BUFFERING_HAVE_ENOUGH);
+ DCHECK_EQ(buffering_state, BUFFERING_HAVE_ENOUGH);
SetReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);
// Blink expects a timeChanged() in response to a seek().
@@ -787,8 +786,8 @@ void WebMediaPlayerImpl::OnDemuxerOpened() {
}
void WebMediaPlayerImpl::OnAddTextTrack(
- const media::TextTrackConfig& config,
- const media::AddTextTrackDoneCB& done_cb) {
+ const TextTrackConfig& config,
+ const AddTextTrackDoneCB& done_cb) {
DCHECK(main_task_runner_->BelongsToCurrentThread());
const WebInbandTextTrackImpl::Kind web_kind =
@@ -804,7 +803,7 @@ void WebMediaPlayerImpl::OnAddTextTrack(
new WebInbandTextTrackImpl(web_kind, web_label, web_language, web_id,
text_track_index_++));
- scoped_ptr<media::TextTrack> text_track(new TextTrackImpl(
+ scoped_ptr<TextTrack> text_track(new TextTrackImpl(
main_task_runner_, client_, web_inband_text_track.Pass()));
done_cb.Run(text_track.Pass());
@@ -828,25 +827,24 @@ void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) {
SetNetworkState(WebMediaPlayer::NetworkStateLoading);
media_log_->AddEvent(
media_log_->CreateBooleanEvent(
- media::MediaLogEvent::NETWORK_ACTIVITY_SET,
+ MediaLogEvent::NETWORK_ACTIVITY_SET,
"is_downloading_data", is_downloading));
}
// TODO(xhwang): Move this to a factory class so that we can create different
// renderers.
-scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() {
- media::SetDecryptorReadyCB set_decryptor_ready_cb =
+scoped_ptr<Renderer> WebMediaPlayerImpl::CreateRenderer() {
+ SetDecryptorReadyCB set_decryptor_ready_cb =
encrypted_media_support_->CreateSetDecryptorReadyCB();
// Create our audio decoders and renderer.
- ScopedVector<media::AudioDecoder> audio_decoders;
+ ScopedVector<AudioDecoder> audio_decoders;
- media::LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_);
- audio_decoders.push_back(new media::FFmpegAudioDecoder(media_task_runner_,
- log_cb));
- audio_decoders.push_back(new media::OpusAudioDecoder(media_task_runner_));
+ LogCB log_cb = base::Bind(&LogMediaSourceError, media_log_);
+ audio_decoders.push_back(new FFmpegAudioDecoder(media_task_runner_, log_cb));
+ audio_decoders.push_back(new OpusAudioDecoder(media_task_runner_));
- scoped_ptr<media::AudioRenderer> audio_renderer(new media::AudioRendererImpl(
+ scoped_ptr<AudioRenderer> audio_renderer(new AudioRendererImpl(
media_task_runner_,
audio_source_provider_.get(),
audio_decoders.Pass(),
@@ -854,21 +852,21 @@ scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() {
audio_hardware_config_));
// Create our video decoders and renderer.
- ScopedVector<media::VideoDecoder> video_decoders;
+ ScopedVector<VideoDecoder> video_decoders;
if (gpu_factories_.get()) {
video_decoders.push_back(
- new media::GpuVideoDecoder(gpu_factories_, media_log_));
+ new GpuVideoDecoder(gpu_factories_, media_log_));
}
#if !defined(MEDIA_DISABLE_LIBVPX)
- video_decoders.push_back(new media::VpxVideoDecoder(media_task_runner_));
+ video_decoders.push_back(new VpxVideoDecoder(media_task_runner_));
#endif // !defined(MEDIA_DISABLE_LIBVPX)
- video_decoders.push_back(new media::FFmpegVideoDecoder(media_task_runner_));
+ video_decoders.push_back(new FFmpegVideoDecoder(media_task_runner_));
- scoped_ptr<media::VideoRenderer> video_renderer(
- new media::VideoRendererImpl(
+ scoped_ptr<VideoRenderer> video_renderer(
+ new VideoRendererImpl(
media_task_runner_,
video_decoders.Pass(),
set_decryptor_ready_cb,
@@ -876,7 +874,7 @@ scoped_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer() {
true));
// Create renderer.
- return scoped_ptr<media::Renderer>(new media::RendererImpl(
+ return scoped_ptr<Renderer>(new RendererImpl(
media_task_runner_,
demuxer_.get(),
audio_renderer.Pass(),
@@ -890,8 +888,8 @@ void WebMediaPlayerImpl::StartPipeline() {
UMA_HISTOGRAM_BOOLEAN("Media.MSE.Playback",
(load_type_ == LoadTypeMediaSource));
- media::LogCB mse_log_cb;
- media::Demuxer::NeedKeyCB need_key_cb =
+ LogCB mse_log_cb;
+ Demuxer::NeedKeyCB need_key_cb =
encrypted_media_support_->CreateNeedKeyCB();
// Figure out which demuxer to use.
@@ -899,7 +897,7 @@ void WebMediaPlayerImpl::StartPipeline() {
DCHECK(!chunk_demuxer_);
DCHECK(data_source_);
- demuxer_.reset(new media::FFmpegDemuxer(
+ demuxer_.reset(new FFmpegDemuxer(
media_task_runner_, data_source_.get(),
need_key_cb,
media_log_));
@@ -909,7 +907,7 @@ void WebMediaPlayerImpl::StartPipeline() {
mse_log_cb = base::Bind(&LogMediaSourceError, media_log_);
- chunk_demuxer_ = new media::ChunkDemuxer(
+ chunk_demuxer_ = new ChunkDemuxer(
BIND_TO_RENDER_LOOP(&WebMediaPlayerImpl::OnDemuxerOpened),
need_key_cb,
mse_log_cb,
@@ -962,7 +960,7 @@ double WebMediaPlayerImpl::GetPipelineDuration() const {
// Return positive infinity if the resource is unbounded.
// http://www.whatwg.org/specs/web-apps/current-work/multipage/video.html#dom-media-duration
- if (duration == media::kInfiniteDuration())
+ if (duration == kInfiniteDuration())
return std::numeric_limits<double>::infinity();
return duration.InSecondsF();
@@ -997,7 +995,7 @@ void WebMediaPlayerImpl::OnOpacityChanged(bool opaque) {
}
void WebMediaPlayerImpl::FrameReady(
- const scoped_refptr<media::VideoFrame>& frame) {
+ const scoped_refptr<VideoFrame>& frame) {
compositor_task_runner_->PostTask(
FROM_HERE,
base::Bind(&VideoFrameCompositor::UpdateCurrentFrame,
@@ -1007,14 +1005,14 @@ void WebMediaPlayerImpl::FrameReady(
static void GetCurrentFrameAndSignal(
VideoFrameCompositor* compositor,
- scoped_refptr<media::VideoFrame>* video_frame_out,
+ scoped_refptr<VideoFrame>* video_frame_out,
base::WaitableEvent* event) {
TRACE_EVENT0("media", "GetCurrentFrameAndSignal");
*video_frame_out = compositor->GetCurrentFrame();
event->Signal();
}
-scoped_refptr<media::VideoFrame>
+scoped_refptr<VideoFrame>
WebMediaPlayerImpl::GetCurrentFrameFromCompositor() {
TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
if (compositor_task_runner_->BelongsToCurrentThread())
@@ -1022,7 +1020,7 @@ WebMediaPlayerImpl::GetCurrentFrameFromCompositor() {
// Use a posted task and waitable event instead of a lock otherwise
// WebGL/Canvas can see different content than what the compositor is seeing.
- scoped_refptr<media::VideoFrame> video_frame;
+ scoped_refptr<VideoFrame> video_frame;
base::WaitableEvent event(false, false);
compositor_task_runner_->PostTask(FROM_HERE,
base::Bind(&GetCurrentFrameAndSignal,
@@ -1033,4 +1031,4 @@ WebMediaPlayerImpl::GetCurrentFrameFromCompositor() {
return video_frame;
}
-} // namespace content
+} // namespace media
« no previous file with comments | « media/blink/webmediaplayer_impl.h ('k') | media/blink/webmediaplayer_params.h » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698