// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/blink/webmediaplayer_cast_android.h"

#include "gpu/GLES2/gl2extchromium.h"
#include "gpu/blink/webgraphicscontext3d_impl.h"
#include "gpu/command_buffer/client/gles2_interface.h"
#include "gpu/command_buffer/common/sync_token.h"
#include "media/base/android/media_common_android.h"
#include "media/base/bind_to_current_loop.h"
#include "media/blink/webmediaplayer_impl.h"
#include "media/blink/webmediaplayer_params.h"
#include "third_party/WebKit/public/platform/WebMediaPlayerClient.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "third_party/skia/include/core/SkCanvas.h"
#include "third_party/skia/include/core/SkPaint.h"
#include "third_party/skia/include/core/SkTypeface.h"
#include "third_party/skia/include/gpu/GrContext.h"
#include "third_party/skia/include/gpu/SkGrPixelRef.h"

using gpu::gles2::GLES2Interface;

namespace media {

namespace {
// This function is file-static so that it can still run even after WMPI is
// deleted.
void OnReleaseTexture(const WebMediaPlayerCast::GLContextCB& context_3d_cb,
                      GLuint texture_id,
                      const gpu::SyncToken& sync_token) {
  GLES2Interface* gl = context_3d_cb.Run();
  if (!gl)
    return;

  gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
  gl->DeleteTextures(1, &texture_id);
  // Flush to ensure that the texture gets deleted in a timely fashion.
  gl->ShallowFlushCHROMIUM();
}

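// Adapts a WebMediaPlayerParams::Context3DCB to the GLES2Interface-returning
// callback shape (GLContextCB) used by OnReleaseTexture() and
// MakeTextFrameForCast().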
GLES2Interface* GLCBShim(
    const WebMediaPlayerParams::Context3DCB& context_3d_cb) {
  return context_3d_cb.Run().gl;
}

}  // namespace

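// Renders |remote_playback_message| onto a black bitmap of |canvas_size|,
// uploads the bitmap into a GL texture, and wraps that texture in a
// VideoFrame that can be displayed while the media plays remotely.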
scoped_refptr<VideoFrame> WebMediaPlayerCast::MakeTextFrameForCast(
    const std::string& remote_playback_message,
    gfx::Size canvas_size,
    gfx::Size natural_size,
    const WebMediaPlayerCast::GLContextCB& context_3d_cb) {
  SkBitmap bitmap;
  bitmap.allocN32Pixels(canvas_size.width(), canvas_size.height());

  // Create the canvas and draw the "Casting to <Chromecast>" text on it.
  SkCanvas canvas(bitmap);
  canvas.drawColor(SK_ColorBLACK);

  const SkScalar kTextSize(40);
  const SkScalar kMinPadding(40);

  SkPaint paint;
  paint.setAntiAlias(true);
  paint.setFilterQuality(kHigh_SkFilterQuality);
  paint.setColor(SK_ColorWHITE);
  paint.setTypeface(SkTypeface::CreateFromName("sans", SkTypeface::kBold));
  paint.setTextSize(kTextSize);

  // Calculate the vertical margin from the top.
  SkPaint::FontMetrics font_metrics;
  paint.getFontMetrics(&font_metrics);
  SkScalar sk_vertical_margin = kMinPadding - font_metrics.fAscent;

  // Measure the width of the entire text to display.
  size_t display_text_width = paint.measureText(remote_playback_message.c_str(),
                                                remote_playback_message.size());
  std::string display_text(remote_playback_message);

  if (display_text_width + (kMinPadding * 2) > canvas_size.width()) {
    // The text is too long to fit on one line; truncate it and append an
    // ellipsis to the end.

    // First, figure out how much of the canvas the '...' will take up.
    const std::string kTruncationEllipsis("\xE2\x80\xA6");
    SkScalar sk_ellipse_width = paint.measureText(kTruncationEllipsis.c_str(),
                                                  kTruncationEllipsis.size());

    // Then calculate how much of the text can be drawn with the '...' appended
    // to the end of the string.
    SkScalar sk_max_original_text_width(canvas_size.width() -
                                        (kMinPadding * 2) - sk_ellipse_width);
    size_t sk_max_original_text_length = paint.breakText(
        remote_playback_message.c_str(), remote_playback_message.size(),
        sk_max_original_text_width);

    // Remove the part of the string that doesn't fit and append '...'.
    display_text.erase(
        sk_max_original_text_length,
        remote_playback_message.size() - sk_max_original_text_length);
    display_text.append(kTruncationEllipsis);
    display_text_width =
        paint.measureText(display_text.c_str(), display_text.size());
  }

  // Center the text horizontally.
  SkScalar sk_horizontal_margin =
      (canvas_size.width() - display_text_width) / 2.0;
  canvas.drawText(display_text.c_str(), display_text.size(),
                  sk_horizontal_margin, sk_vertical_margin, paint);

  GLES2Interface* gl = context_3d_cb.Run();

  // A null context means the GPU process crashed; give up on the frame.
  if (!gl)
    return nullptr;
  GLuint remote_playback_texture_id = 0;
  gl->GenTextures(1, &remote_playback_texture_id);
  GLuint texture_target = GL_TEXTURE_2D;
  gl->BindTexture(texture_target, remote_playback_texture_id);
  gl->TexParameteri(texture_target, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  gl->TexParameteri(texture_target, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  gl->TexParameteri(texture_target, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

  {
    SkAutoLockPixels lock(bitmap);
    gl->TexImage2D(texture_target, 0 /* level */, GL_RGBA /* internalformat */,
                   bitmap.width(), bitmap.height(), 0 /* border */,
                   GL_RGBA /* format */, GL_UNSIGNED_BYTE /* type */,
                   bitmap.getPixels());
  }

  gpu::Mailbox texture_mailbox;
  gl->GenMailboxCHROMIUM(texture_mailbox.name);
  gl->ProduceTextureCHROMIUM(texture_target, texture_mailbox.name);
  gl->Flush();
  gpu::SyncToken texture_mailbox_sync_token(gl->InsertSyncPointCHROMIUM());

  return VideoFrame::WrapNativeTexture(
      media::PIXEL_FORMAT_ARGB,
      gpu::MailboxHolder(texture_mailbox, texture_mailbox_sync_token,
                         texture_target),
      media::BindToCurrentLoop(base::Bind(&OnReleaseTexture, context_3d_cb,
                                          remote_playback_texture_id)),
      canvas_size /* coded_size */, gfx::Rect(canvas_size) /* visible_rect */,
      natural_size /* natural_size */, base::TimeDelta() /* timestamp */);
}

WebMediaPlayerCast::WebMediaPlayerCast(
    WebMediaPlayerImpl* impl,
    blink::WebMediaPlayerClient* client,
    const WebMediaPlayerParams::Context3DCB& context_3d_cb,
    base::WeakPtr<WebMediaPlayerDelegate> delegate)
    : webmediaplayer_(impl),
      client_(client),
      context_3d_cb_(context_3d_cb),
      delegate_(delegate) {}

WebMediaPlayerCast::~WebMediaPlayerCast() {
  if (player_manager_) {
    if (is_player_initialized_)
      player_manager_->DestroyPlayer(player_id_);

    player_manager_->UnregisterMediaPlayer(player_id_);
  }
}

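// Sets up the remote-only player for |url| through the player manager and
// marks the player as initialized.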
void WebMediaPlayerCast::Initialize(const GURL& url,
                                    blink::WebLocalFrame* frame) {
  player_manager_->Initialize(MEDIA_PLAYER_TYPE_REMOTE_ONLY, player_id_, url,
                              frame->document().firstPartyForCookies(), 0,
                              frame->document().url(), true);
  is_player_initialized_ = true;
}

void WebMediaPlayerCast::SetMediaPlayerManager(
    RendererMediaPlayerManagerInterface* media_player_manager) {
  player_manager_ = media_player_manager;
  player_id_ = player_manager_->RegisterMediaPlayer(this);
}

void WebMediaPlayerCast::requestRemotePlayback() {
  player_manager_->Seek(player_id_, base::TimeDelta::FromSecondsD(
                                        webmediaplayer_->currentTime()));
  player_manager_->RequestRemotePlayback(player_id_);
}

void WebMediaPlayerCast::requestRemotePlaybackControl() {
  player_manager_->RequestRemotePlaybackControl(player_id_);
}

void WebMediaPlayerCast::OnMediaMetadataChanged(base::TimeDelta duration,
                                                int width,
                                                int height,
                                                bool success) {}

void WebMediaPlayerCast::OnPlaybackComplete() {
  DVLOG(1) << __FUNCTION__;
  webmediaplayer_->OnRemotePlaybackEnded();
}

void WebMediaPlayerCast::OnBufferingUpdate(int percentage) {
  DVLOG(1) << __FUNCTION__;
}

void WebMediaPlayerCast::OnSeekRequest(const base::TimeDelta& time_to_seek) {
  DVLOG(1) << __FUNCTION__;
  client_->requestSeek(time_to_seek.InSecondsF());
}

void WebMediaPlayerCast::OnSeekComplete(const base::TimeDelta& current_time) {
  DVLOG(1) << __FUNCTION__;
  remote_time_at_ = base::TimeTicks::Now();
  remote_time_ = current_time;
  webmediaplayer_->OnPipelineSeeked(true, PIPELINE_OK);
}

void WebMediaPlayerCast::OnMediaError(int error_type) {
  DVLOG(1) << __FUNCTION__;
}

void WebMediaPlayerCast::OnVideoSizeChanged(int width, int height) {
  DVLOG(1) << __FUNCTION__;
}

void WebMediaPlayerCast::OnTimeUpdate(base::TimeDelta current_timestamp,
                                      base::TimeTicks current_time_ticks) {
  DVLOG(1) << __FUNCTION__ << " " << current_timestamp.InSecondsF();
  remote_time_at_ = current_time_ticks;
  remote_time_ = current_timestamp;
}

void WebMediaPlayerCast::OnPlayerReleased() {
  DVLOG(1) << __FUNCTION__;
}

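// Called when remote playback starts: record the handoff position, flip the
// player into remote mode, suspend local playback, and notify Blink so that
// the casting UI can be shown.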
void WebMediaPlayerCast::OnConnectedToRemoteDevice(
    const std::string& remote_playback_message) {
  DVLOG(1) << __FUNCTION__;
  remote_time_ = base::TimeDelta::FromSecondsD(webmediaplayer_->currentTime());
  is_remote_ = true;
  initializing_ = true;
  paused_ = false;
  if (delegate_)
    delegate_->DidPlay(webmediaplayer_);
  client_->playbackStateChanged();

  remote_playback_message_ = remote_playback_message;
  webmediaplayer_->SuspendForRemote();
  client_->connectedToRemoteDevice();
}

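// Returns the current media time. Unless paused or still initializing, the
// last position reported by the remote device (|remote_time_|) is extrapolated
// by the wall-clock time elapsed since it was received (|remote_time_at_|).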
double WebMediaPlayerCast::currentTime() const {
  base::TimeDelta ret = remote_time_;
  if (!paused_ && !initializing_) {
    ret += base::TimeTicks::Now() - remote_time_at_;
  }
  return ret.InSecondsF();
}

void WebMediaPlayerCast::play() {
  if (!paused_)
    return;

  player_manager_->Start(player_id_);
  remote_time_at_ = base::TimeTicks::Now();
  paused_ = false;
  if (delegate_)
    delegate_->DidPlay(webmediaplayer_);
}

void WebMediaPlayerCast::pause() {
  player_manager_->Pause(player_id_, true);
}

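// Seeks the remote player. |should_notify_time_changed_| ensures that Blink
// receives the timeChanged() it expects once the remote player resumes (see
// OnMediaPlayerPlay()).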
void WebMediaPlayerCast::seek(base::TimeDelta t) {
  should_notify_time_changed_ = true;
  player_manager_->Seek(player_id_, t);
}

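// Called when remote playback ends: pause locally and hand the last known
// position back to WebMediaPlayerImpl, snapping to the duration if the remote
// device stopped within two time-update intervals of the end.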
void WebMediaPlayerCast::OnDisconnectedFromRemoteDevice() {
  DVLOG(1) << __FUNCTION__;
  if (!paused_) {
    paused_ = true;
    if (delegate_)
      delegate_->DidPause(webmediaplayer_);
  }
  is_remote_ = false;
  double t = currentTime();
  if (t + media::kTimeUpdateInterval * 2 / 1000 > webmediaplayer_->duration()) {
    t = webmediaplayer_->duration();
  }
  webmediaplayer_->OnDisconnectedFromRemoteDevice(t);
}

void WebMediaPlayerCast::OnDidExitFullscreen() {
  DVLOG(1) << __FUNCTION__;
}

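// Called when the remote device reports that playback has started or resumed.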
void WebMediaPlayerCast::OnMediaPlayerPlay() {
  DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_;
  initializing_ = false;
  if (is_remote_ && paused_) {
    paused_ = false;
    if (delegate_)
      delegate_->DidPlay(webmediaplayer_);
    remote_time_at_ = base::TimeTicks::Now();
    client_->playbackStateChanged();
  }
  // Blink expects a timeChanged() in response to a seek().
  if (should_notify_time_changed_)
    client_->timeChanged();
}

void WebMediaPlayerCast::OnMediaPlayerPause() {
  DVLOG(1) << __FUNCTION__ << " is_remote_ = " << is_remote_;
  if (is_remote_ && !paused_) {
    paused_ = true;
    if (delegate_)
      delegate_->DidPause(webmediaplayer_);
    client_->playbackStateChanged();
  }
}

void WebMediaPlayerCast::OnRemoteRouteAvailabilityChanged(
    bool routes_available) {
  DVLOG(1) << __FUNCTION__;
  client_->remoteRouteAvailabilityChanged(routes_available);
}

void WebMediaPlayerCast::SuspendAndReleaseResources() {}

void WebMediaPlayerCast::OnWaitingForDecryptionKey() {}

bool WebMediaPlayerCast::hasVideo() const {
  return true;
}

bool WebMediaPlayerCast::paused() const {
  return paused_;
}

#if defined(VIDEO_HOLE)
bool WebMediaPlayerCast::UpdateBoundaryRectangle() {
  return false;
}

const gfx::RectF WebMediaPlayerCast::GetBoundaryRectangle() {
  return gfx::RectF();
}
#endif  // defined(VIDEO_HOLE)

void WebMediaPlayerCast::SetDeviceScaleFactor(float scale_factor) {
  device_scale_factor_ = scale_factor;
}

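// Builds the "Casting to <device>" banner frame shown while the media plays
// remotely. The banner canvas is sized to the element's CSS size scaled by
// the device scale factor.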
scoped_refptr<VideoFrame> WebMediaPlayerCast::GetCastingBanner() {
  DVLOG(1) << __FUNCTION__;

  // TODO(johnme): Should redraw this frame if the layer bounds change; but
  // there seems no easy way to listen for the layer resizing (as opposed to
  // OnVideoSizeChanged, which is when the frame sizes of the video file
  // change). Perhaps have to poll (on main thread of course)?
  gfx::Size video_size_css_px = webmediaplayer_->GetCanvasSize();
  if (!video_size_css_px.width())
    return nullptr;

  // canvas_size will be the size in device pixels when pageScaleFactor == 1.
  gfx::Size canvas_size(
      static_cast<int>(video_size_css_px.width() * device_scale_factor_),
      static_cast<int>(video_size_css_px.height() * device_scale_factor_));

  if (!canvas_size.width())
    return nullptr;

  return MakeTextFrameForCast(remote_playback_message_, canvas_size,
                              webmediaplayer_->naturalSize(),
                              base::Bind(&GLCBShim, context_3d_cb_));
}

}  // namespace media