Chromium Code Reviews| Index: media/blink/webmediaplayer_impl.cc |
| diff --git a/media/blink/webmediaplayer_impl.cc b/media/blink/webmediaplayer_impl.cc |
| index 5dd65d08719b7a279d88dca947fdb3f0d3ec82f9..d18bc7f262cb853d597c9f15cfefd6882de2671e 100644 |
| --- a/media/blink/webmediaplayer_impl.cc |
| +++ b/media/blink/webmediaplayer_impl.cc |
| @@ -59,11 +59,25 @@ |
| #include "third_party/WebKit/public/web/WebSecurityOrigin.h" |
| #include "third_party/WebKit/public/web/WebView.h" |
| +#if defined(WIMPI_CAST) |
| +#include "gpu/GLES2/gl2extchromium.h" |
| +#include "gpu/command_buffer/client/gles2_interface.h" |
| +#include "media/base/android/media_common_android.h" |
| +#include "third_party/skia/include/core/SkCanvas.h" |
| +#include "third_party/skia/include/core/SkPaint.h" |
| +#include "third_party/skia/include/core/SkTypeface.h" |
| +#include "third_party/skia/include/gpu/GrContext.h" |
| +#include "third_party/skia/include/gpu/SkGrPixelRef.h" |
| +#endif |
| + |
| using blink::WebCanvas; |
| using blink::WebMediaPlayer; |
| using blink::WebRect; |
| using blink::WebSize; |
| using blink::WebString; |
| +using gpu::gles2::GLES2Interface; |
| + |
| +namespace media { |
| namespace { |
| @@ -86,22 +100,39 @@ namespace { |
| const double kMinRate = 0.0625; |
| const double kMaxRate = 16.0; |
| -void SetSinkIdOnMediaThread( |
| - scoped_refptr<media::WebAudioSourceProviderImpl> sink, |
| - const std::string& device_id, |
| - const url::Origin& security_origin, |
| - const media::SwitchOutputDeviceCB& callback) { |
| +void SetSinkIdOnMediaThread(scoped_refptr<WebAudioSourceProviderImpl> sink, |
| + const std::string& device_id, |
| + const url::Origin& security_origin, |
| + const SwitchOutputDeviceCB& callback) { |
| if (sink->GetOutputDevice()) { |
| sink->GetOutputDevice()->SwitchOutputDevice(device_id, security_origin, |
| callback); |
| } else { |
| - callback.Run(media::OUTPUT_DEVICE_STATUS_ERROR_INTERNAL); |
| + callback.Run(OUTPUT_DEVICE_STATUS_ERROR_INTERNAL); |
| } |
| } |
| -} // namespace |
| +#if defined(WIMPI_CAST) |
| +// This function is file-static so that it can still run after the |
| +// WebMediaPlayerImpl (WMPA) that scheduled it has been deleted. |
| +void OnReleaseTexture(const WebMediaPlayerParams::Context3DCB& context_3d_cb, |
| + GLuint texture_id, |
| + const gpu::SyncToken& sync_token) { |
| + Context3D context_3d; |
| + if (!context_3d_cb.is_null()) |
| + context_3d = context_3d_cb.Run(); |
| + // A null GL interface means the GPU process crashed; there is no context |
| + // through which the texture could be deleted, so bail out. |
| + if (!context_3d.gl) |
| + return; |
| -namespace media { |
| + GLES2Interface* gl = context_3d.gl; |
| + gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData()); |
| + gl->DeleteTextures(1, &texture_id); |
| + // Flush to ensure that the stream texture gets deleted in a timely fashion. |
| + gl->ShallowFlushCHROMIUM(); |
| +} |
| +#endif // WIMPI_CAST |
| + |
| +} // namespace |
| class BufferedDataSourceHostImpl; |
| @@ -207,6 +238,15 @@ WebMediaPlayerImpl::WebMediaPlayerImpl( |
| } |
| WebMediaPlayerImpl::~WebMediaPlayerImpl() { |
| +#if defined(WIMPI_CAST) |
| + if (player_manager_) { |
| + if (is_player_initialized_) |
| + player_manager_->DestroyPlayer(player_id_); |
| + |
| + player_manager_->UnregisterMediaPlayer(player_id_); |
| + } |
| +#endif |
| + |
| client_->setWebLayer(NULL); |
| DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| @@ -300,13 +340,31 @@ void WebMediaPlayerImpl::DoLoad(LoadType load_type, |
| data_source_->SetBufferingStrategy(buffering_strategy_); |
| data_source_->Initialize( |
| base::Bind(&WebMediaPlayerImpl::DataSourceInitialized, AsWeakPtr())); |
| + |
| +#if defined(WIMPI_CAST) |
| + player_manager_->Initialize(MEDIA_PLAYER_TYPE_URL, player_id_, url, |
| + frame_->document().firstPartyForCookies(), 0, |
| + frame_->document().url(), true); |
| + |
| + is_player_initialized_ = true; |
| +#endif // defined(WIMPI_CAST) |
| } |
| void WebMediaPlayerImpl::play() { |
| DVLOG(1) << __FUNCTION__; |
| DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| +#if defined(WIMPI_CAST) |
| + if (is_remote_ && paused_) { |
| + player_manager_->Start(player_id_); |
| + remote_time_at_ = base::TimeTicks::Now(); |
| + paused_ = false; |
| + return; |
| + } |
| +#endif |
| + |
| paused_ = false; |
| + |
| pipeline_.SetPlaybackRate(playback_rate_); |
| if (data_source_) |
| data_source_->MediaIsPlaying(); |
| @@ -323,6 +381,14 @@ void WebMediaPlayerImpl::pause() { |
| const bool was_already_paused = paused_ || playback_rate_ == 0; |
| paused_ = true; |
| + |
| +#if defined(WIMPI_CAST) |
| + if (is_remote_) { |
| + player_manager_->Pause(player_id_, true); |
| + return; |
| + } |
| +#endif |
| + |
| pipeline_.SetPlaybackRate(0.0); |
| UpdatePausedTime(); |
| @@ -343,12 +409,20 @@ void WebMediaPlayerImpl::seek(double seconds) { |
| ended_ = false; |
| + base::TimeDelta new_seek_time = base::TimeDelta::FromSecondsD(seconds); |
| + |
| +#if defined(WIMPI_CAST) |
| + if (is_remote_) { |
| + should_notify_time_changed_ = true; |
| + player_manager_->Seek(player_id_, new_seek_time); |
| + return; |
| + } |
| +#endif |
| + |
| ReadyState old_state = ready_state_; |
| if (ready_state_ > WebMediaPlayer::ReadyStateHaveMetadata) |
| SetReadyState(WebMediaPlayer::ReadyStateHaveMetadata); |
| - base::TimeDelta new_seek_time = base::TimeDelta::FromSecondsD(seconds); |
| - |
| if (seeking_ || suspended_) { |
| // Once resuming, it's too late to change the resume time and so the |
| // implementation is a little different. |
| @@ -422,7 +496,6 @@ void WebMediaPlayerImpl::seek(double seconds) { |
| if (chunk_demuxer_) |
| chunk_demuxer_->StartWaitingForSeek(seek_time_); |
| - // Kick off the asynchronous seek! |
| pipeline_.Seek(seek_time_, BIND_TO_RENDER_LOOP1( |
| &WebMediaPlayerImpl::OnPipelineSeeked, true)); |
| } |
| @@ -539,6 +612,10 @@ blink::WebSize WebMediaPlayerImpl::naturalSize() const { |
| bool WebMediaPlayerImpl::paused() const { |
| DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| +#if defined(WIMPI_CAST) |
| + if (is_remote_) |
| + return paused_; |
| +#endif |
| return pipeline_.GetPlaybackRate() == 0.0f; |
| } |
| @@ -586,7 +663,21 @@ double WebMediaPlayerImpl::currentTime() const { |
| : seek_time_.InSecondsF(); |
| } |
| - return (paused_ ? paused_time_ : pipeline_.GetMediaTime()).InSecondsF(); |
| +#if defined(WIMPI_CAST) |
| + if (is_remote_) { |
| + base::TimeDelta ret = remote_time_; |
| + if (!paused_) { |
| + ret += base::TimeTicks::Now() - remote_time_at_; |
| + } |
| + return ret.InSecondsF(); |
| + } |
| +#endif |
| + |
| + if (paused_) { |
| + return paused_time_.InSecondsF(); |
| + } |
| + |
| + return pipeline_.GetMediaTime().InSecondsF(); |
| } |
| WebMediaPlayer::NetworkState WebMediaPlayerImpl::networkState() const { |
| @@ -1048,6 +1139,9 @@ void WebMediaPlayerImpl::OnHidden() { |
| if (!pipeline_.IsRunning()) |
| return; |
| + if (suspended_ || suspending_) |
| + return; |
| + |
| if (resuming_ || seeking_) { |
| pending_suspend_ = true; |
| return; |
| @@ -1101,7 +1195,7 @@ void WebMediaPlayerImpl::OnShown() { |
| // We may not be suspended if we were not yet subscribed or the pipeline was |
| // not yet started when OnHidden() fired. |
| - if (!suspended_) |
| + if (!suspended_ || resuming_) |
| return; |
| Resume(); |
| @@ -1140,6 +1234,273 @@ void WebMediaPlayerImpl::Resume() { |
| time_changed)); |
| } |
| +#if defined(WIMPI_CAST) |
| +void WebMediaPlayerImpl::set_media_player_manager( |
| + RendererMediaPlayerManagerInterface* media_player_manager) { |
| + player_manager_ = media_player_manager; |
| + player_id_ = player_manager_->RegisterMediaPlayer(this); |
| +} |
| + |
| +void WebMediaPlayerImpl::requestRemotePlayback() { |
| + player_manager_->Seek(player_id_, |
| + base::TimeDelta::FromSecondsD(currentTime())); |
| + player_manager_->RequestRemotePlayback(player_id_); |
| +} |
| + |
| +void WebMediaPlayerImpl::requestRemotePlaybackControl() { |
| + player_manager_->RequestRemotePlaybackControl(player_id_); |
| +} |
| + |
| +// RendererMediaPlayerInterface implementation |
| +void WebMediaPlayerImpl::OnMediaMetadataChanged(base::TimeDelta duration, |
| + int width, |
| + int height, |
| + bool success) {} |
| +void WebMediaPlayerImpl::OnPlaybackComplete() { |
| + DVLOG(1) << __FUNCTION__; |
| + ended_ = true; |
| + client_->timeChanged(); |
| +} |
| +void WebMediaPlayerImpl::OnBufferingUpdate(int percentage) { |
| + DVLOG(1) << __FUNCTION__; |
| +} |
| +void WebMediaPlayerImpl::OnSeekRequest(const base::TimeDelta& time_to_seek) { |
| + DVLOG(1) << __FUNCTION__; |
| + DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| + // DCHECK(main_thread_checker_.CalledOnValidThread()); |
| + client_->requestSeek(time_to_seek.InSecondsF()); |
| +} |
| +void WebMediaPlayerImpl::OnSeekComplete(const base::TimeDelta& current_time) { |
| + DVLOG(1) << __FUNCTION__; |
| + DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| + remote_time_at_ = base::TimeTicks::Now(); |
| + remote_time_ = current_time; |
| + OnPipelineSeeked(true, PIPELINE_OK); |
| +} |
| + |
| +void WebMediaPlayerImpl::OnMediaError(int error_type) { |
| + DVLOG(1) << __FUNCTION__; |
| +} |
| +void WebMediaPlayerImpl::OnVideoSizeChanged(int width, int height) { |
| + DVLOG(1) << __FUNCTION__; |
| +} |
| + |
| +void WebMediaPlayerImpl::OnTimeUpdate(base::TimeDelta current_timestamp, |
| + base::TimeTicks current_time_ticks) { |
| + DVLOG(1) << __FUNCTION__ << " " << current_timestamp.InSecondsF(); |
| + DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| + remote_time_at_ = current_time_ticks; |
| + remote_time_ = current_timestamp; |
| +} |
| + |
| +void WebMediaPlayerImpl::OnPlayerReleased() { |
| + DVLOG(1) << __FUNCTION__; |
| +} |
| + |
| +void WebMediaPlayerImpl::OnConnectedToRemoteDevice( |
| + const std::string& remote_playback_message) { |
| + DVLOG(1) << __FUNCTION__; |
| + DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| + remote_time_ = base::TimeDelta::FromSecondsD(currentTime()); |
| + // Set paused so that progress bar doesn't advance while remote playback |
| + // is starting. |
| + pause(); |
| + is_remote_ = true; |
| + DrawRemotePlaybackText(remote_playback_message); |
| + client_->connectedToRemoteDevice(); |
| +} |
| + |
| +void WebMediaPlayerImpl::OnDisconnectedFromRemoteDevice() { |
| + DVLOG(1) << __FUNCTION__; |
| + DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| + paused_time_ = base::TimeDelta::FromSecondsD(currentTime()); |
| + is_remote_ = false; |
| + if (suspended_ && !resuming_) |
| + Resume(); |
| + if (ended_ || |
| + paused_time_ + base::TimeDelta::FromMilliseconds( |
| + media::kTimeUpdateInterval * 2) > |
| + pipeline_.GetMediaDuration()) { |
| + paused_time_ = pipeline_.GetMediaDuration(); |
| + } |
| + |
| + // We already told the delegate we're paused when remoting started. |
| + pipeline_.SetPlaybackRate(0.0); |
| + client_->disconnectedFromRemoteDevice(); |
| + if (!paused_) { |
| + paused_ = true; |
| + client_->playbackStateChanged(); |
| + } |
| + seek(paused_time_.InSecondsF()); |
| + // Seek resets ended_, which is not really what we want. |
| + if (paused_time_ == pipeline_.GetMediaDuration()) { |
| + ended_ = true; |
| + } |
| +} |
| + |
| +void WebMediaPlayerImpl::OnDidExitFullscreen() { |
| + DVLOG(1) << __FUNCTION__; |
| +} |
| +void WebMediaPlayerImpl::OnMediaPlayerPlay() { |
| + DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_; |
| + if (is_remote_ && paused_) { |
| + paused_ = false; |
| + remote_time_at_ = base::TimeTicks::Now(); |
| + client_->playbackStateChanged(); |
| + } |
| + // Blink expects a timeChanged() in response to a seek(). |
| + if (should_notify_time_changed_) |
| + client_->timeChanged(); |
| +} |
| +void WebMediaPlayerImpl::OnMediaPlayerPause() { |
| + DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_; |
| + if (is_remote_ && !paused_) { |
| + paused_ = true; |
| + client_->playbackStateChanged(); |
| + } |
| +} |
| +void WebMediaPlayerImpl::OnRemoteRouteAvailabilityChanged( |
| + bool routes_available) { |
| + DVLOG(1) << __FUNCTION__; |
| + client_->remoteRouteAvailabilityChanged(routes_available); |
| +} |
| + |
| +void WebMediaPlayerImpl::ReleaseMediaResources() {} |
| + |
| +#if defined(VIDEO_HOLE) |
| +bool WebMediaPlayerImpl::UpdateBoundaryRectangle() { |
| + return false; |
| +} |
| +const gfx::RectF WebMediaPlayerImpl::GetBoundaryRectangle() { |
| + return gfx::RectF(); |
| +} |
| +#endif // defined(VIDEO_HOLE) |
| + |
| +void WebMediaPlayerImpl::DrawRemotePlaybackText( |
|
DaleCurtis
2016/01/07 21:28:39
Can you just abstract the existing code in WebMedi
hubbe
2016/01/11 22:47:24
I think it can, but it will not make it easier to
|
| + const std::string& remote_playback_message) { |
| + DVLOG(1) << __FUNCTION__; |
| + DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| + // DCHECK(main_thread_checker_.CalledOnValidThread()); |
| + if (!video_weblayer_) |
| + return; |
| + |
| + // TODO(johnme): Should redraw this frame if the layer bounds change; but |
| + // there seems no easy way to listen for the layer resizing (as opposed to |
| + // OnVideoSizeChanged, which is when the frame sizes of the video file |
| + // change). Perhaps have to poll (on main thread of course)? |
| + gfx::Size video_size_css_px = video_weblayer_->bounds(); |
| + float device_scale_factor = frame_->view()->deviceScaleFactor(); |
| + // canvas_size will be the size in device pixels when pageScaleFactor == 1 |
| + gfx::Size canvas_size( |
| + static_cast<int>(video_size_css_px.width() * device_scale_factor), |
| + static_cast<int>(video_size_css_px.height() * device_scale_factor)); |
| + |
| + SkBitmap bitmap; |
| + bitmap.allocN32Pixels(canvas_size.width(), canvas_size.height()); |
| + |
| + // Create the canvas and draw the "Casting to <Chromecast>" text on it. |
| + SkCanvas canvas(bitmap); |
| + canvas.drawColor(SK_ColorBLACK); |
| + |
| + const SkScalar kTextSize(40); |
| + const SkScalar kMinPadding(40); |
| + |
| + SkPaint paint; |
| + paint.setAntiAlias(true); |
| + paint.setFilterQuality(kHigh_SkFilterQuality); |
| + paint.setColor(SK_ColorWHITE); |
| + paint.setTypeface(SkTypeface::CreateFromName("sans", SkTypeface::kBold)); |
| + paint.setTextSize(kTextSize); |
| + |
| + // Calculate the vertical margin from the top |
| + SkPaint::FontMetrics font_metrics; |
| + paint.getFontMetrics(&font_metrics); |
| + SkScalar sk_vertical_margin = kMinPadding - font_metrics.fAscent; |
| + |
| + // Measure the width of the entire text to display |
| + size_t display_text_width = paint.measureText(remote_playback_message.c_str(), |
| + remote_playback_message.size()); |
| + std::string display_text(remote_playback_message); |
| + |
| + if (display_text_width + (kMinPadding * 2) > canvas_size.width()) { |
| + // The text is too long to fit in one line, truncate it and append ellipsis |
| + // to the end. |
| + |
| + // First, figure out how much of the canvas the '...' will take up. |
| + const std::string kTruncationEllipsis("\xE2\x80\xA6"); |
| + SkScalar sk_ellipse_width = paint.measureText(kTruncationEllipsis.c_str(), |
| + kTruncationEllipsis.size()); |
| + |
| + // Then calculate how much of the text can be drawn with the '...' appended |
| + // to the end of the string. |
| + SkScalar sk_max_original_text_width(canvas_size.width() - |
| + (kMinPadding * 2) - sk_ellipse_width); |
| + size_t sk_max_original_text_length = paint.breakText( |
| + remote_playback_message.c_str(), remote_playback_message.size(), |
| + sk_max_original_text_width); |
| + |
| + // Remove the part of the string that doesn't fit and append '...'. |
| + display_text.erase( |
| + sk_max_original_text_length, |
| + remote_playback_message.size() - sk_max_original_text_length); |
| + display_text.append(kTruncationEllipsis); |
| + display_text_width = |
| + paint.measureText(display_text.c_str(), display_text.size()); |
| + } |
| + |
| + // Center the text horizontally. |
| + SkScalar sk_horizontal_margin = |
| + (canvas_size.width() - display_text_width) / 2.0; |
| + canvas.drawText(display_text.c_str(), display_text.size(), |
| + sk_horizontal_margin, sk_vertical_margin, paint); |
| + |
| + Context3D context_3d; |
| + if (!context_3d_cb_.is_null()) |
| + context_3d = context_3d_cb_.Run(); |
| + // A null GL interface means the GPU process crashed; we cannot create the |
| + // texture for the remote-playback frame, so skip drawing it. |
| + if (!context_3d.gl) |
| + return; |
| + |
| + GLES2Interface* gl = context_3d.gl; |
| + GLuint remote_playback_texture_id = 0; |
| + gl->GenTextures(1, &remote_playback_texture_id); |
| + GLuint texture_target = GL_TEXTURE_2D; |
| + gl->BindTexture(texture_target, remote_playback_texture_id); |
| + gl->TexParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR); |
| + gl->TexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR); |
| + gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); |
| + gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); |
| + |
| + { |
| + SkAutoLockPixels lock(bitmap); |
| + gl->TexImage2D(texture_target, 0 /* level */, GL_RGBA /* internalformat */, |
| + bitmap.width(), bitmap.height(), 0 /* border */, |
| + GL_RGBA /* format */, GL_UNSIGNED_BYTE /* type */, |
| + bitmap.getPixels()); |
| + } |
| + |
| + gpu::Mailbox texture_mailbox; |
| + gl->GenMailboxCHROMIUM(texture_mailbox.name); |
| + gl->ProduceTextureCHROMIUM(texture_target, texture_mailbox.name); |
| + gl->Flush(); |
| + gpu::SyncToken texture_mailbox_sync_token(gl->InsertSyncPointCHROMIUM()); |
| + |
| + scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture( |
| + media::PIXEL_FORMAT_ARGB, |
| + gpu::MailboxHolder(texture_mailbox, texture_mailbox_sync_token, |
| + texture_target), |
| + media::BindToCurrentLoop(base::Bind(&OnReleaseTexture, context_3d_cb_, |
| + remote_playback_texture_id)), |
| + canvas_size /* coded_size */, gfx::Rect(canvas_size) /* visible_rect */, |
| + canvas_size /* natural_size */, base::TimeDelta() /* timestamp */); |
| + |
| + if (!suspended_ && !suspending_) |
| + Suspend(); |
| + compositor_->PaintFrameUsingOldRenderingPath(new_frame); |
| +} |
| + |
| +#endif // defined(WIMPI_CAST) |
| + |
| void WebMediaPlayerImpl::DataSourceInitialized(bool success) { |
| DVLOG(1) << __FUNCTION__; |
| DCHECK(main_task_runner_->BelongsToCurrentThread()); |
| @@ -1321,6 +1682,12 @@ void WebMediaPlayerImpl::UpdatePausedTime() { |
| } |
| void WebMediaPlayerImpl::NotifyPlaybackStarted() { |
| +#if defined(WIMPI_CAST) |
| + // We do not tell our delegates about remote playback, because that would |
| + // keep the device awake, which is not what we want. |
| + if (!is_remote_) |
|
liberato (no reviews please)
2016/01/08 16:37:58
the comment makes me expect that it should check f
hubbe
2016/01/11 22:47:24
Good catch, fixed.
(Testing...)
|
| + return; |
| +#endif |
| if (delegate_) |
| delegate_->DidPlay(this); |
| if (!memory_usage_reporting_timer_.IsRunning()) { |
| @@ -1331,6 +1698,10 @@ void WebMediaPlayerImpl::NotifyPlaybackStarted() { |
| } |
| void WebMediaPlayerImpl::NotifyPlaybackPaused() { |
| +#if defined(WIMPI_CAST) |
| + if (!is_remote_) |
| + return; |
| +#endif |
| if (delegate_) |
| delegate_->DidPause(this); |
| memory_usage_reporting_timer_.Stop(); |