| OLD | NEW |
| (Empty) |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 #include "content/renderer/media/android/webmediaplayer_android.h" | |
| 6 | |
| 7 #include <stddef.h> | |
| 8 | |
| 9 #include <limits> | |
| 10 | |
| 11 #include "base/android/build_info.h" | |
| 12 #include "base/bind.h" | |
| 13 #include "base/callback_helpers.h" | |
| 14 #include "base/command_line.h" | |
| 15 #include "base/files/file_path.h" | |
| 16 #include "base/logging.h" | |
| 17 #include "base/metrics/histogram_macros.h" | |
| 18 #include "base/single_thread_task_runner.h" | |
| 19 #include "base/strings/string_number_conversions.h" | |
| 20 #include "base/strings/utf_string_conversions.h" | |
| 21 #include "base/threading/thread_task_runner_handle.h" | |
| 22 #include "cc/blink/web_layer_impl.h" | |
| 23 #include "cc/layers/video_layer.h" | |
| 24 #include "cc/paint/paint_flags.h" | |
| 25 #include "content/public/common/content_client.h" | |
| 26 #include "content/public/common/content_switches.h" | |
| 27 #include "content/public/common/renderer_preferences.h" | |
| 28 #include "content/public/renderer/render_frame.h" | |
| 29 #include "content/renderer/media/android/renderer_media_player_manager.h" | |
| 30 #include "content/renderer/render_frame_impl.h" | |
| 31 #include "content/renderer/render_thread_impl.h" | |
| 32 #include "content/renderer/render_view_impl.h" | |
| 33 #include "gpu/GLES2/gl2extchromium.h" | |
| 34 #include "gpu/command_buffer/client/gles2_interface.h" | |
| 35 #include "gpu/command_buffer/common/constants.h" | |
| 36 #include "gpu/command_buffer/common/mailbox_holder.h" | |
| 37 #include "media/base/android/media_codec_util.h" | |
| 38 #include "media/base/android/media_common_android.h" | |
| 39 #include "media/base/android/media_player_android.h" | |
| 40 #include "media/base/bind_to_current_loop.h" | |
| 41 #include "media/base/media_content_type.h" | |
| 42 #include "media/base/media_log.h" | |
| 43 #include "media/base/media_switches.h" | |
| 44 #include "media/base/timestamp_constants.h" | |
| 45 #include "media/base/video_frame.h" | |
| 46 #include "media/blink/webcontentdecryptionmodule_impl.h" | |
| 47 #include "media/blink/webmediaplayer_cast_android.h" | |
| 48 #include "media/blink/webmediaplayer_delegate.h" | |
| 49 #include "media/blink/webmediaplayer_util.h" | |
| 50 #include "net/base/mime_util.h" | |
| 51 #include "skia/ext/texture_handle.h" | |
| 52 #include "third_party/WebKit/public/platform/Platform.h" | |
| 53 #include "third_party/WebKit/public/platform/URLConversion.h" | |
| 54 #include "third_party/WebKit/public/platform/WebContentDecryptionModuleResult.h" | |
| 55 #include "third_party/WebKit/public/platform/WebEncryptedMediaTypes.h" | |
| 56 #include "third_party/WebKit/public/platform/WebGraphicsContext3DProvider.h" | |
| 57 #include "third_party/WebKit/public/platform/WebMediaPlayerClient.h" | |
| 58 #include "third_party/WebKit/public/platform/WebMediaPlayerEncryptedMediaClient.
h" | |
| 59 #include "third_party/WebKit/public/platform/WebMediaPlayerSource.h" | |
| 60 #include "third_party/WebKit/public/platform/WebSecurityOrigin.h" | |
| 61 #include "third_party/WebKit/public/platform/WebString.h" | |
| 62 #include "third_party/WebKit/public/platform/WebURL.h" | |
| 63 #include "third_party/WebKit/public/web/WebDocument.h" | |
| 64 #include "third_party/WebKit/public/web/WebFrame.h" | |
| 65 #include "third_party/WebKit/public/web/WebView.h" | |
| 66 #include "third_party/skia/include/core/SkCanvas.h" | |
| 67 #include "third_party/skia/include/core/SkImage.h" | |
| 68 #include "third_party/skia/include/core/SkTypeface.h" | |
| 69 #include "ui/gfx/image/image.h" | |
| 70 #include "url/gurl.h" | |
| 71 #include "url/origin.h" | |
| 72 | |
| 73 static const uint32_t kGLTextureExternalOES = 0x8D65; | |
| 74 static const int kSDKVersionToSupportSecurityOriginCheck = 20; | |
| 75 | |
| 76 using blink::WebMediaPlayer; | |
| 77 using blink::WebSize; | |
| 78 using blink::WebString; | |
| 79 using blink::WebURL; | |
| 80 using gpu::gles2::GLES2Interface; | |
| 81 using media::LogHelper; | |
| 82 using media::MediaLog; | |
| 83 using media::MediaPlayerAndroid; | |
| 84 using media::VideoFrame; | |
| 85 | |
namespace {

// Values for Media.Android.IsHttpLiveStreamingMediaPredictionResult UMA.
// Never reuse values!
enum MediaTypePredictionResult {
  PREDICTION_RESULT_ALL_CORRECT,
  PREDICTION_RESULT_ALL_INCORRECT,
  PREDICTION_RESULT_PATH_BASED_WAS_BETTER,
  PREDICTION_RESULT_URL_BASED_WAS_BETTER,
  // Must always be larger than the largest logged value.
  PREDICTION_RESULT_MAX
};

// File-static function is to allow it to run even after WMPA is deleted.
// Waits on |sync_token| so the consumer is finished with the texture, then
// deletes |texture_id| on the context owned by |factories|.
void OnReleaseTexture(
    const scoped_refptr<content::StreamTextureFactory>& factories,
    uint32_t texture_id,
    const gpu::SyncToken& sync_token) {
  GLES2Interface* gl = factories->ContextGL();
  gl->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
  gl->DeleteTextures(1, &texture_id);
  // Flush to ensure that the stream texture gets deleted in a timely fashion.
  gl->ShallowFlushCHROMIUM();
}

// Adapter that lets media::VideoFrame generate and wait on GPU sync tokens
// through a raw GLES2 interface. |gl| is borrowed and must outlive this
// object.
class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient {
 public:
  explicit SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl) : gl_(gl) {}
  ~SyncTokenClientImpl() override {}
  void GenerateSyncToken(gpu::SyncToken* sync_token) override {
    // Insert a fence then flush so the resulting token is valid for other
    // contexts to wait on.
    const GLuint64 fence_sync = gl_->InsertFenceSyncCHROMIUM();
    gl_->ShallowFlushCHROMIUM();
    gl_->GenSyncTokenCHROMIUM(fence_sync, sync_token->GetData());
  }
  void WaitSyncToken(const gpu::SyncToken& sync_token) override {
    gl_->WaitSyncTokenCHROMIUM(sync_token.GetConstData());
  }

 private:
  gpu::gles2::GLES2Interface* gl_;  // Not owned.
};

}  // namespace
| 129 | |
| 130 namespace content { | |
| 131 | |
// Constructed on the renderer main thread. Registers itself with both the
// delegate (for lifecycle/visibility callbacks) and the player manager (which
// brokers IPC to the browser-side MediaPlayerAndroid).
// NOTE(review): |encrypted_client| is not referenced in this constructor body
// or init list — presumably consumed elsewhere; confirm against the header.
WebMediaPlayerAndroid::WebMediaPlayerAndroid(
    blink::WebFrame* frame,
    blink::WebMediaPlayerClient* client,
    blink::WebMediaPlayerEncryptedMediaClient* encrypted_client,
    media::WebMediaPlayerDelegate* delegate,
    RendererMediaPlayerManager* player_manager,
    scoped_refptr<StreamTextureFactory> factory,
    int frame_id,
    bool enable_texture_copy,
    const media::WebMediaPlayerParams& params)
    : frame_(frame),
      client_(client),
      delegate_(delegate),
      delegate_id_(0),
      defer_load_cb_(params.defer_load_cb()),
      // A single pre-allocated buffered range; its end is updated as data
      // arrives (see OnBufferingUpdate / OnMediaMetadataChanged).
      buffered_(static_cast<size_t>(1)),
      media_task_runner_(params.media_task_runner()),
      pending_seek_(false),
      seeking_(false),
      did_loading_progress_(false),
      player_manager_(player_manager),
      network_state_(WebMediaPlayer::NetworkStateEmpty),
      ready_state_(WebMediaPlayer::ReadyStateHaveNothing),
      texture_id_(0),
      is_player_initialized_(false),
      is_playing_(false),
      is_play_pending_(false),
      needs_establish_peer_(true),
      has_size_info_(false),
      // Threaded compositing isn't enabled universally yet.
      compositor_task_runner_(params.compositor_task_runner()
                                  ? params.compositor_task_runner()
                                  : base::ThreadTaskRunnerHandle::Get()),
      stream_texture_factory_(factory),
      is_fullscreen_(false),
      video_frame_provider_client_(nullptr),
      player_type_(MEDIA_PLAYER_TYPE_URL),
      is_remote_(false),
      media_log_(params.media_log()),
      allow_stored_credentials_(false),
      is_local_resource_(false),
      interpolator_(&default_tick_clock_),
      frame_id_(frame_id),
      enable_texture_copy_(enable_texture_copy),
      suppress_deleting_texture_(false),
      playback_completed_(false),
      volume_(1.0),
      volume_multiplier_(1.0),
      weak_factory_(this) {
  DCHECK(player_manager_);
  DCHECK(delegate_);

  DCHECK(main_thread_checker_.CalledOnValidThread());

  // Register for delegate callbacks; start out idle (nothing is playing).
  delegate_id_ = delegate_->AddObserver(this);
  delegate_->SetIdle(delegate_id_, true);

  player_id_ = player_manager_->RegisterMediaPlayer(this);

  TryCreateStreamTextureProxyIfNeeded();
  // Nothing has loaded yet, so clamp the interpolated media time to zero.
  interpolator_.SetUpperBound(base::TimeDelta());
}
| 194 | |
WebMediaPlayerAndroid::~WebMediaPlayerAndroid() {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  // Detach from the compositor and from Blink's layer tree before tearing
  // down player state.
  SetVideoFrameProviderClient(NULL);
  client_->setWebLayer(NULL);

  // Only tear down the browser-side player if it was actually created.
  if (is_player_initialized_)
    player_manager_->DestroyPlayer(player_id_);

  player_manager_->UnregisterMediaPlayer(player_id_);

  if (texture_id_) {
    GLES2Interface* gl = stream_texture_factory_->ContextGL();
    gl->DeleteTextures(1, &texture_id_);
    // Flush to ensure that the stream texture gets deleted in a timely fashion.
    gl->ShallowFlushCHROMIUM();
    texture_id_ = 0;
    texture_mailbox_ = gpu::Mailbox();
  }

  {
    // |current_frame_| is guarded by |current_frame_lock_| (it is shared
    // with non-main-thread readers), so release it under the lock.
    base::AutoLock auto_lock(current_frame_lock_);
    current_frame_ = NULL;
  }

  // Finally tell the delegate this player is gone and stop observing.
  delegate_->PlayerGone(delegate_id_);
  delegate_->RemoveObserver(delegate_id_);
}
| 222 | |
| 223 void WebMediaPlayerAndroid::load(LoadType load_type, | |
| 224 const blink::WebMediaPlayerSource& source, | |
| 225 CORSMode cors_mode) { | |
| 226 // Only URL or MSE blob URL is supported. | |
| 227 DCHECK(source.isURL()); | |
| 228 blink::WebURL url = source.getAsURL(); | |
| 229 if (!defer_load_cb_.is_null()) { | |
| 230 defer_load_cb_.Run(base::Bind(&WebMediaPlayerAndroid::DoLoad, | |
| 231 weak_factory_.GetWeakPtr(), load_type, url, | |
| 232 cors_mode)); | |
| 233 return; | |
| 234 } | |
| 235 DoLoad(load_type, url, cors_mode); | |
| 236 } | |
| 237 | |
// Performs the actual (possibly deferred) load: records UMA, then starts an
// asynchronous MediaInfoLoader probe whose completion continues player
// initialization in DidLoadMediaInfo().
void WebMediaPlayerAndroid::DoLoad(LoadType load_type,
                                   const blink::WebURL& url,
                                   CORSMode cors_mode) {
  DCHECK(main_thread_checker_.CalledOnValidThread());

  media::ReportMetrics(load_type, GURL(url), frame_->getSecurityOrigin());
  DCHECK_EQ(load_type, LoadTypeURL)
      << "WebMediaPlayerAndroid doesn't support MediaStream or "
         "MediaSource on this platform";

  url_ = url;
  is_local_resource_ = IsLocalResource();
  // The loader calls back with redirect/cookie/credential info needed before
  // the browser-side player can be initialized.
  info_loader_.reset(new MediaInfoLoader(
      url, cors_mode, base::Bind(&WebMediaPlayerAndroid::DidLoadMediaInfo,
                                 weak_factory_.GetWeakPtr())));
  info_loader_->Start(frame_);

  UpdateNetworkState(WebMediaPlayer::NetworkStateLoading);
  UpdateReadyState(WebMediaPlayer::ReadyStateHaveNothing);
}
| 258 | |
| 259 void WebMediaPlayerAndroid::DidLoadMediaInfo( | |
| 260 MediaInfoLoader::Status status, | |
| 261 const GURL& redirected_url, | |
| 262 const GURL& first_party_for_cookies, | |
| 263 bool allow_stored_credentials) { | |
| 264 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 265 if (status == MediaInfoLoader::kFailed) { | |
| 266 info_loader_.reset(); | |
| 267 UpdateNetworkState(WebMediaPlayer::NetworkStateNetworkError); | |
| 268 return; | |
| 269 } | |
| 270 redirected_url_ = redirected_url; | |
| 271 InitializePlayer(redirected_url, first_party_for_cookies, | |
| 272 allow_stored_credentials); | |
| 273 | |
| 274 UpdateNetworkState(WebMediaPlayer::NetworkStateIdle); | |
| 275 } | |
| 276 | |
| 277 bool WebMediaPlayerAndroid::IsLocalResource() { | |
| 278 if (url_.SchemeIsFile() || url_.SchemeIsBlob()) | |
| 279 return true; | |
| 280 | |
| 281 std::string host = url_.host(); | |
| 282 if (!host.compare("localhost") || !host.compare("127.0.0.1") || | |
| 283 !host.compare("[::1]")) { | |
| 284 return true; | |
| 285 } | |
| 286 | |
| 287 return false; | |
| 288 } | |
| 289 | |
void WebMediaPlayerAndroid::play() {
  DCHECK(main_thread_checker_.CalledOnValidThread());

  // When the frame is hidden, video may only start if media suspend is
  // disabled or background video playback is explicitly unlocked; otherwise
  // defer the play until the frame is shown again.
  if (hasVideo() && player_manager_->render_frame()->IsHidden()) {
    bool can_video_play_in_background =
        base::CommandLine::ForCurrentProcess()->HasSwitch(
            switches::kDisableMediaSuspend) ||
        (IsBackgroundVideoCandidate() &&
         delegate_->IsBackgroundVideoPlaybackUnlocked());
    if (!can_video_play_in_background) {
      is_play_pending_ = true;
      return;
    }
  }
  is_play_pending_ = false;

  TryCreateStreamTextureProxyIfNeeded();
  // There is no need to establish the surface texture peer for fullscreen
  // video.
  if ((hasVideo() || IsHLSStream()) && needs_establish_peer_ &&
      !is_fullscreen_) {
    EstablishSurfaceTexturePeer();
  }

  // UpdatePlayingState() must be run before calling Start() to ensure that the
  // browser side MediaPlayerAndroid values for hasAudio() and hasVideo() take
  // precedent over the guesses that we make based on mime type.
  const bool is_paused = paused();
  UpdatePlayingState(true);
  if (is_paused)
    player_manager_->Start(player_id_);
  UpdateNetworkState(WebMediaPlayer::NetworkStateLoading);
}
| 323 | |
void WebMediaPlayerAndroid::pause() {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  // NOTE(review): the boolean presumably distinguishes media-initiated from
  // internal pauses — confirm against Pause()'s declaration.
  Pause(true);
}

// The three methods below simply forward remote-playback (casting) requests
// to the browser-side player identified by |player_id_|.
void WebMediaPlayerAndroid::requestRemotePlayback() {
  player_manager_->RequestRemotePlayback(player_id_);
}

void WebMediaPlayerAndroid::requestRemotePlaybackControl() {
  player_manager_->RequestRemotePlaybackControl(player_id_);
}

void WebMediaPlayerAndroid::requestRemotePlaybackStop() {
  player_manager_->RequestRemotePlaybackStop(player_id_);
}
| 340 | |
// Starts (or queues) a seek. Only one seek may be in flight with the player
// at a time; a second request while seeking is remembered in
// |pending_seek_time_| and issued from OnSeekComplete().
void WebMediaPlayerAndroid::seek(double seconds) {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  DVLOG(1) << __func__ << "(" << seconds << ")";

  playback_completed_ = false;
  base::TimeDelta new_seek_time = base::TimeDelta::FromSecondsD(seconds);

  if (seeking_) {
    if (new_seek_time == seek_time_) {
      // Same target as the in-flight seek; any queued seek is now redundant.
      pending_seek_ = false;
      return;
    }

    pending_seek_ = true;
    pending_seek_time_ = new_seek_time;

    // Later, OnSeekComplete will trigger the pending seek.
    return;
  }

  seeking_ = true;
  seek_time_ = new_seek_time;

  // Kick off the asynchronous seek!
  player_manager_->Seek(player_id_, seek_time_);
}
| 367 | |
bool WebMediaPlayerAndroid::supportsSave() const {
  // "Save media as..." is not supported for this player.
  return false;
}

// Playback rate changes are not supported; the request is ignored.
void WebMediaPlayerAndroid::setRate(double rate) {}

void WebMediaPlayerAndroid::setVolume(double volume) {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  volume_ = volume;
  // The effective volume sent to the player is the page volume scaled by
  // |volume_multiplier_|.
  player_manager_->SetVolume(player_id_, volume_ * volume_multiplier_);
}
| 379 | |
| 380 void WebMediaPlayerAndroid::setSinkId( | |
| 381 const blink::WebString& sink_id, | |
| 382 const blink::WebSecurityOrigin& security_origin, | |
| 383 blink::WebSetSinkIdCallbacks* web_callback) { | |
| 384 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 385 std::unique_ptr<blink::WebSetSinkIdCallbacks> callback(web_callback); | |
| 386 callback->onError(blink::WebSetSinkIdError::NotSupported); | |
| 387 } | |
| 388 | |
| 389 bool WebMediaPlayerAndroid::hasVideo() const { | |
| 390 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 391 // If we have obtained video size information before, use it. | |
| 392 if (has_size_info_) | |
| 393 return !natural_size_.isEmpty(); | |
| 394 | |
| 395 // TODO(qinmin): need a better method to determine whether the current media | |
| 396 // content contains video. Android does not provide any function to do | |
| 397 // this. | |
| 398 // We don't know whether the current media content has video unless | |
| 399 // the player is prepared. If the player is not prepared, we fall back | |
| 400 // to the mime-type. There may be no mime-type on a redirect URL. | |
| 401 // In that case, we conservatively assume it contains video so that | |
| 402 // enterfullscreen call will not fail. | |
| 403 if (!url_.has_path()) | |
| 404 return false; | |
| 405 std::string mime; | |
| 406 if (!net::GetMimeTypeFromFile(base::FilePath(url_.path()), &mime)) | |
| 407 return true; | |
| 408 return mime.find("audio/") == std::string::npos; | |
| 409 } | |
| 410 | |
| 411 bool WebMediaPlayerAndroid::hasAudio() const { | |
| 412 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 413 if (!url_.has_path()) | |
| 414 return false; | |
| 415 std::string mime; | |
| 416 if (!net::GetMimeTypeFromFile(base::FilePath(url_.path()), &mime)) | |
| 417 return true; | |
| 418 | |
| 419 if (mime.find("audio/") != std::string::npos || | |
| 420 mime.find("video/") != std::string::npos || | |
| 421 mime.find("application/ogg") != std::string::npos || | |
| 422 mime.find("application/x-mpegurl") != std::string::npos) { | |
| 423 return true; | |
| 424 } | |
| 425 return false; | |
| 426 } | |
| 427 | |
bool WebMediaPlayerAndroid::isRemote() const {
  // True while playback is handed off to a remote device.
  return is_remote_;
}

bool WebMediaPlayerAndroid::paused() const {
  return !is_playing_;
}

bool WebMediaPlayerAndroid::seeking() const {
  return seeking_;
}

double WebMediaPlayerAndroid::duration() const {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  // Map the media-internal infinite-duration sentinel to JS +Infinity.
  if (duration_ == media::kInfiniteDuration)
    return std::numeric_limits<double>::infinity();

  return duration_.InSecondsF();
}
| 447 | |
| 448 double WebMediaPlayerAndroid::timelineOffset() const { | |
| 449 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 450 base::Time timeline_offset; | |
| 451 if (timeline_offset.is_null()) | |
| 452 return std::numeric_limits<double>::quiet_NaN(); | |
| 453 | |
| 454 return timeline_offset.ToJsTime(); | |
| 455 } | |
| 456 | |
double WebMediaPlayerAndroid::currentTime() const {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  // If the player is processing a seek, return the seek time.
  // Blink may still query us if updatePlaybackState() occurs while seeking.
  if (seeking()) {
    return pending_seek_ ?
        pending_seek_time_.InSecondsF() : seek_time_.InSecondsF();
  }

  // The interpolated clock time is clamped to |duration_|. The const_cast is
  // needed because GetInterpolatedTime() is presumably non-const — TODO
  // confirm against TimeDeltaInterpolator's declaration.
  return std::min(
      (const_cast<media::TimeDeltaInterpolator*>(
          &interpolator_))->GetInterpolatedTime(), duration_).InSecondsF();
}
| 470 | |
WebSize WebMediaPlayerAndroid::naturalSize() const {
  return natural_size_;
}

WebMediaPlayer::NetworkState WebMediaPlayerAndroid::getNetworkState() const {
  return network_state_;
}

WebMediaPlayer::ReadyState WebMediaPlayerAndroid::getReadyState() const {
  return ready_state_;
}

blink::WebString WebMediaPlayerAndroid::getErrorMessage() {
  // Surface the most recent error recorded in the media log.
  return blink::WebString::fromUTF8(media_log_->GetLastErrorMessage());
}

blink::WebTimeRanges WebMediaPlayerAndroid::buffered() const {
  return buffered_;
}

blink::WebTimeRanges WebMediaPlayerAndroid::seekable() const {
  // Nothing is seekable before the media's metadata is known.
  if (ready_state_ < WebMediaPlayer::ReadyStateHaveMetadata)
    return blink::WebTimeRanges();

  // TODO(dalecurtis): Technically this allows seeking on media which return an
  // infinite duration. While not expected, disabling this breaks semi-live
  // players, http://crbug.com/427412.
  const blink::WebTimeRange seekable_range(0.0, duration());
  return blink::WebTimeRanges(&seekable_range, 1);
}
| 501 | |
| 502 bool WebMediaPlayerAndroid::didLoadingProgress() { | |
| 503 bool ret = did_loading_progress_; | |
| 504 did_loading_progress_ = false; | |
| 505 return ret; | |
| 506 } | |
| 507 | |
// Draws the current video frame into |rect| on |canvas| by wrapping the
// frame's mailboxed GL texture in an SkImage and letting Skia copy (GPU
// canvas) or read back (software canvas) the pixels.
void WebMediaPlayerAndroid::paint(blink::WebCanvas* canvas,
                                  const blink::WebRect& rect,
                                  cc::PaintFlags& paint) {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  // A shared offscreen GL context is required to consume the frame's
  // mailbox; without one there is nothing we can draw.
  std::unique_ptr<blink::WebGraphicsContext3DProvider> provider(
      blink::Platform::current()
          ->createSharedOffscreenGraphicsContext3DProvider());
  if (!provider)
    return;
  gpu::gles2::GLES2Interface* gl = provider->contextGL();

  // Grab a ref to the current frame under the lock; it may be swapped by
  // another thread.
  scoped_refptr<VideoFrame> video_frame;
  {
    base::AutoLock auto_lock(current_frame_lock_);
    video_frame = current_frame_;
  }

  if (!video_frame.get() || !video_frame->HasTextures())
    return;
  DCHECK_EQ(1u, media::VideoFrame::NumPlanes(video_frame->format()));
  const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(0);

  // Wait until the producer is done writing the texture before sampling it.
  gl->WaitSyncTokenCHROMIUM(mailbox_holder.sync_token.GetConstData());

  uint32_t src_texture = gl->CreateAndConsumeTextureCHROMIUM(
      mailbox_holder.texture_target, mailbox_holder.mailbox.name);

  // Wrap the consumed texture as a Skia backend texture.
  GrGLTextureInfo texture_info;
  texture_info.fID = src_texture;
  texture_info.fTarget = mailbox_holder.texture_target;

  GrBackendTextureDesc desc;
  desc.fWidth = naturalSize().width;
  desc.fHeight = naturalSize().height;
  desc.fOrigin = kTopLeft_GrSurfaceOrigin;
  desc.fConfig = kRGBA_8888_GrPixelConfig;
  desc.fSampleCnt = 0;
  desc.fTextureHandle = skia::GrGLTextureInfoToGrBackendObject(texture_info);

  sk_sp<SkImage> image(SkImage::MakeFromTexture(provider->grContext(), desc,
                                                kOpaque_SkAlphaType));
  if (!image)
    return;

  // Draw the texture based image onto the Canvas. If the canvas is
  // hardware based, this will do a GPU-GPU texture copy.
  // If the canvas is software based, the texture based bitmap will be
  // readbacked to system memory then draw onto the canvas.
  SkRect dest;
  dest.set(rect.x, rect.y, rect.x + rect.width, rect.y + rect.height);
  cc::PaintFlags video_paint;
  // Only alpha and blend mode are honored from the caller's paint flags.
  video_paint.setAlpha(paint.getAlpha());
  video_paint.setBlendMode(paint.getBlendMode());
  // It is not necessary to pass the dest into the drawBitmap call since all
  // the context have been set up before calling paintCurrentFrameInContext.
  canvas->drawImageRect(image, dest, &video_paint);

  // Ensure the Skia draw of the GL texture is flushed to GL, delete the
  // mailboxed texture from this context, and then signal that we're done with
  // the video frame.
  canvas->flush();
  gl->DeleteTextures(1, &src_texture);
  gl->Flush();
  SyncTokenClientImpl client(gl);
  video_frame->UpdateReleaseSyncToken(&client);
}
| 574 | |
// Copies the current video frame's texture into the caller-provided
// |texture| on |gl| (used for WebGL texImage2D of a video element). Returns
// false when there is no textured frame to copy.
bool WebMediaPlayerAndroid::copyVideoTextureToPlatformTexture(
    gpu::gles2::GLES2Interface* gl,
    unsigned int texture,
    bool premultiply_alpha,
    bool flip_y) {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  // Grab a ref to the current frame under the lock.
  scoped_refptr<VideoFrame> video_frame;
  {
    base::AutoLock auto_lock(current_frame_lock_);
    video_frame = current_frame_;
  }

  if (!video_frame.get() || !video_frame->HasTextures())
    return false;
  DCHECK_EQ(1u, media::VideoFrame::NumPlanes(video_frame->format()));
  const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(0);
  // Local frames are external OES textures; remote (cast) frames are 2D.
  DCHECK((!is_remote_ &&
          mailbox_holder.texture_target == GL_TEXTURE_EXTERNAL_OES) ||
         (is_remote_ && mailbox_holder.texture_target == GL_TEXTURE_2D));

  gl->WaitSyncTokenCHROMIUM(mailbox_holder.sync_token.GetConstData());

  // Ensure the target of texture is set before copyTextureCHROMIUM, otherwise
  // an invalid texture target may be used for copy texture.
  uint32_t src_texture = gl->CreateAndConsumeTextureCHROMIUM(
      mailbox_holder.texture_target, mailbox_holder.mailbox.name);

  // Application itself needs to take care of setting the right flip_y
  // value down to get the expected result.
  // flip_y==true means to reverse the video orientation while
  // flip_y==false means to keep the intrinsic orientation.

  // The video's texture might be larger than the natural size because
  // the encoder might have had to round up to the size of a macroblock.
  // Make sure to only copy the natural size to avoid putting garbage
  // into the bottom of the destination texture.
  const gfx::Size& natural_size = video_frame->natural_size();
  gl->CopySubTextureCHROMIUM(src_texture, 0, GL_TEXTURE_2D, texture, 0, 0, 0, 0,
                             0, natural_size.width(), natural_size.height(),
                             flip_y, premultiply_alpha, false);
  gl->DeleteTextures(1, &src_texture);
  gl->Flush();

  // Publish a release sync token so the producer knows when we are done.
  SyncTokenClientImpl client(gl);
  video_frame->UpdateReleaseSyncToken(&client);
  return true;
}
| 622 | |
// Conservatively reports whether all media data came from a single origin,
// which gates whether the page may read pixels/data from the media.
bool WebMediaPlayerAndroid::hasSingleSecurityOrigin() const {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  // Non-URL player types don't load cross-origin data through this path.
  if (player_type_ != MEDIA_PLAYER_TYPE_URL)
    return true;

  if (!info_loader_ || !info_loader_->HasSingleOrigin())
    return false;

  // TODO(qinmin): After fixing crbug.com/592017, remove this command line.
  base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
  if (command_line->HasSwitch(switches::kReduceSecurityForTesting))
    return true;

  // TODO(qinmin): The url might be redirected when android media player
  // requests the stream. As a result, we cannot guarantee there is only
  // a single origin. Only if the HTTP request was made without credentials,
  // we will honor the return value from HasSingleSecurityOriginInternal()
  // in pre-L android versions.
  // Check http://crbug.com/334204.
  if (!allow_stored_credentials_)
    return true;

  return base::android::BuildInfo::GetInstance()->sdk_int() >=
         kSDKVersionToSupportSecurityOriginCheck;
}
| 648 | |
| 649 bool WebMediaPlayerAndroid::didPassCORSAccessCheck() const { | |
| 650 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 651 if (info_loader_) | |
| 652 return info_loader_->DidPassCORSAccessCheck(); | |
| 653 return false; | |
| 654 } | |
| 655 | |
double WebMediaPlayerAndroid::mediaTimeForTimeValue(double timeValue) const {
  // Round-trip through TimeDelta so the value receives the same precision
  // truncation as the rest of the time pipeline.
  return base::TimeDelta::FromSecondsD(timeValue).InSecondsF();
}

// Decoder statistics are not available from this player, so the four
// counters below all report zero.
unsigned WebMediaPlayerAndroid::decodedFrameCount() const {
  NOTIMPLEMENTED();
  return 0;
}

unsigned WebMediaPlayerAndroid::droppedFrameCount() const {
  NOTIMPLEMENTED();
  return 0;
}

size_t WebMediaPlayerAndroid::audioDecodedByteCount() const {
  NOTIMPLEMENTED();
  return 0;
}

size_t WebMediaPlayerAndroid::videoDecodedByteCount() const {
  NOTIMPLEMENTED();
  return 0;
}
| 679 | |
// Called when the platform player reports media metadata (duration and, when
// |success|, video dimensions). Updates duration, ready state, and signals
// durationchanged to Blink where required.
void WebMediaPlayerAndroid::OnMediaMetadataChanged(
    base::TimeDelta duration, int width, int height, bool success) {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  bool need_to_signal_duration_changed = false;

  if (is_local_resource_)
    UpdateNetworkState(WebMediaPlayer::NetworkStateLoaded);

  // For HLS streams, the reported duration may be zero for infinite streams.
  // See http://crbug.com/501213.
  if (duration.is_zero() && IsHLSStream())
    duration = media::kInfiniteDuration;

  // Update duration, if necessary, prior to ready state updates that may
  // cause duration() query.
  if (duration_ != duration) {
    duration_ = duration;
    // Local resources are fully buffered, so extend the buffered range to
    // the new duration.
    if (is_local_resource_)
      buffered_[0].end = duration_.InSecondsF();
    // Client readyState transition from HAVE_NOTHING to HAVE_METADATA
    // already triggers a durationchanged event. If this is a different
    // transition, remember to signal durationchanged.
    if (ready_state_ > WebMediaPlayer::ReadyStateHaveNothing)
      need_to_signal_duration_changed = true;
  }

  if (ready_state_ != WebMediaPlayer::ReadyStateHaveEnoughData) {
    UpdateReadyState(WebMediaPlayer::ReadyStateHaveMetadata);
    UpdateReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);
  }

  // TODO(wolenetz): Should we just abort early and set network state to an
  // error if success == false? See http://crbug.com/248399
  if (success)
    OnVideoSizeChanged(width, height);

  if (need_to_signal_duration_changed)
    client_->durationChanged();
}
| 719 | |
void WebMediaPlayerAndroid::OnPlaybackComplete() {
  // When playback is about to finish, android media player often stops
  // at a time which is smaller than the duration. This makes webkit never
  // know that the playback has finished. To solve this, we set the
  // current time to media duration when OnPlaybackComplete() get called.
  interpolator_.SetBounds(duration_, duration_, default_tick_clock_.NowTicks());
  client_->timeChanged();

  // If the loop attribute is set, timeChanged() will update the current time
  // to 0. It will perform a seek to 0. Issue a command to the player to start
  // playing after seek completes.
  if (is_playing_ && seeking_ && seek_time_.is_zero())
    player_manager_->Start(player_id_);
  else
    playback_completed_ = true;
}
| 736 | |
// Called with the percentage of the media that has been buffered; maps it
// onto the single buffered range and marks loading progress for Blink.
void WebMediaPlayerAndroid::OnBufferingUpdate(int percentage) {
  // inf * 0 == nan which is not an acceptable WebTimeRange.
  const double d = duration();
  buffered_[0].end =
      d == std::numeric_limits<double>::infinity() ? d : d * percentage / 100;
  did_loading_progress_ = true;

  // Fully buffered: promote the network state (never demote it).
  if (percentage == 100 && network_state_ < WebMediaPlayer::NetworkStateLoaded)
    UpdateNetworkState(WebMediaPlayer::NetworkStateLoaded);
}
| 747 | |
| 748 void WebMediaPlayerAndroid::OnSeekRequest(const base::TimeDelta& time_to_seek) { | |
| 749 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 750 client_->requestSeek(time_to_seek.InSecondsF()); | |
| 751 } | |
| 752 | |
// Called when the browser-side player finishes a seek at |current_time|.
void WebMediaPlayerAndroid::OnSeekComplete(
    const base::TimeDelta& current_time) {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  seeking_ = false;
  // If another seek arrived while this one was in flight, service it now.
  // seek() re-enters this method when it completes, so return early and let
  // that invocation do the state update below.
  if (pending_seek_) {
    pending_seek_ = false;
    seek(pending_seek_time_.InSecondsF());
    return;
  }
  // Pin the media clock to the seeked-to position before notifying Blink.
  interpolator_.SetBounds(current_time, current_time,
                          default_tick_clock_.NowTicks());

  UpdateReadyState(WebMediaPlayer::ReadyStateHaveEnoughData);

  client_->timeChanged();
}
| 769 | |
| 770 void WebMediaPlayerAndroid::OnMediaError(int error_type) { | |
| 771 switch (error_type) { | |
| 772 case MediaPlayerAndroid::MEDIA_ERROR_FORMAT: | |
| 773 UpdateNetworkState(WebMediaPlayer::NetworkStateFormatError); | |
| 774 break; | |
| 775 case MediaPlayerAndroid::MEDIA_ERROR_DECODE: | |
| 776 UpdateNetworkState(WebMediaPlayer::NetworkStateDecodeError); | |
| 777 break; | |
| 778 case MediaPlayerAndroid::MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK: | |
| 779 UpdateNetworkState(WebMediaPlayer::NetworkStateFormatError); | |
| 780 break; | |
| 781 case MediaPlayerAndroid::MEDIA_ERROR_INVALID_CODE: | |
| 782 break; | |
| 783 } | |
| 784 client_->repaint(); | |
| 785 } | |
| 786 | |
| 787 void WebMediaPlayerAndroid::OnVideoSizeChanged(int width, int height) { | |
| 788 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 789 | |
| 790 // For HLS streams, a bogus empty size may be reported at first, followed by | |
| 791 // the actual size only once playback begins. See http://crbug.com/509972. | |
| 792 if (!has_size_info_ && width == 0 && height == 0 && IsHLSStream()) | |
| 793 return; | |
| 794 | |
| 795 has_size_info_ = true; | |
| 796 if (natural_size_.width == width && natural_size_.height == height) | |
| 797 return; | |
| 798 | |
| 799 natural_size_.width = width; | |
| 800 natural_size_.height = height; | |
| 801 | |
| 802 // When play() gets called, |natural_size_| may still be empty and | |
| 803 // EstablishSurfaceTexturePeer() will not get called. As a result, the video | |
| 804 // may play without a surface texture. When we finally get the valid video | |
| 805 // size here, we should call EstablishSurfaceTexturePeer() if it has not been | |
| 806 // previously called. | |
| 807 if (!paused() && needs_establish_peer_) | |
| 808 EstablishSurfaceTexturePeer(); | |
| 809 | |
| 810 ReallocateVideoFrame(); | |
| 811 | |
| 812 // For hidden video element (with style "display:none"), ensure the texture | |
| 813 // size is set. | |
| 814 if (!is_remote_ && cached_stream_texture_size_ != natural_size_) { | |
| 815 stream_texture_proxy_->SetStreamTextureSize( | |
| 816 gfx::Size(natural_size_.width, natural_size_.height)); | |
| 817 cached_stream_texture_size_ = natural_size_; | |
| 818 } | |
| 819 | |
| 820 // Lazily allocate compositing layer. | |
| 821 if (!video_weblayer_) { | |
| 822 video_weblayer_.reset(new cc_blink::WebLayerImpl( | |
| 823 cc::VideoLayer::Create(this, media::VIDEO_ROTATION_0))); | |
| 824 client_->setWebLayer(video_weblayer_.get()); | |
| 825 | |
| 826 // If we're paused after we receive metadata for the first time, tell the | |
| 827 // delegate we can now be safely suspended due to inactivity if a subsequent | |
| 828 // play event does not occur. | |
| 829 if (paused()) { | |
| 830 delegate_->DidPause(delegate_id_); | |
| 831 delegate_->SetIdle(delegate_id_, true); | |
| 832 } | |
| 833 } | |
| 834 } | |
| 835 | |
// Updates the media-clock interpolator from a periodic browser-process time
// update. |current_timestamp| is the media time the player reported at
// |current_time_ticks| (wall-clock ticks taken browser-side).
void WebMediaPlayerAndroid::OnTimeUpdate(base::TimeDelta current_timestamp,
                                         base::TimeTicks current_time_ticks) {
  DCHECK(main_thread_checker_.CalledOnValidThread());

  // While seeking, OnSeekComplete() owns the interpolator bounds.
  if (seeking())
    return;

  // Compensate the current_timestamp with the IPC latency.
  base::TimeTicks now_ticks = default_tick_clock_.NowTicks();
  base::TimeDelta lower_bound =
      now_ticks - current_time_ticks + current_timestamp;

  base::TimeDelta upper_bound = lower_bound;
  // We should get another time update in about |kTimeUpdateInterval|
  // milliseconds.
  if (is_playing_) {
    upper_bound += base::TimeDelta::FromMilliseconds(
        media::kTimeUpdateInterval);
  }
  // if the lower_bound is smaller than the current time, just use the current
  // time so that the timer is always progressing.
  lower_bound =
      std::max(lower_bound, base::TimeDelta::FromSecondsD(currentTime()));
  // Keep the bounds ordered; clamping above may have pushed lower past upper.
  if (lower_bound > upper_bound)
    upper_bound = lower_bound;
  interpolator_.SetBounds(lower_bound, upper_bound, now_ticks);
}
| 863 | |
| 864 void WebMediaPlayerAndroid::OnConnectedToRemoteDevice( | |
| 865 const std::string& remote_playback_message) { | |
| 866 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 867 DrawRemotePlaybackText(remote_playback_message); | |
| 868 is_remote_ = true; | |
| 869 SetNeedsEstablishPeer(false); | |
| 870 client_->connectedToRemoteDevice(); | |
| 871 } | |
| 872 | |
// Restores local rendering after a remote (casting) session ends.
void WebMediaPlayerAndroid::OnDisconnectedFromRemoteDevice() {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  SetNeedsEstablishPeer(true);
  if (!paused())
    EstablishSurfaceTexturePeer();
  // NOTE(review): |is_remote_| is still true during the
  // EstablishSurfaceTexturePeer() call above, so its deferred texture-size
  // update is skipped there — confirm this ordering is intentional (the size
  // is applied later, e.g. on the next EstablishSurfaceTexturePeer()).
  is_remote_ = false;
  ReallocateVideoFrame();
  client_->disconnectedFromRemoteDevice();
}
| 882 | |
| 883 void WebMediaPlayerAndroid::OnCancelledRemotePlaybackRequest() { | |
| 884 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 885 client_->cancelledRemotePlaybackRequest(); | |
| 886 } | |
| 887 | |
| 888 void WebMediaPlayerAndroid::OnRemotePlaybackStarted() { | |
| 889 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 890 client_->remotePlaybackStarted(); | |
| 891 } | |
| 892 | |
| 893 void WebMediaPlayerAndroid::OnDidExitFullscreen() { | |
| 894 SetNeedsEstablishPeer(true); | |
| 895 // We had the fullscreen surface connected to Android MediaPlayer, | |
| 896 // so reconnect our surface texture for embedded playback. | |
| 897 if (!paused() && needs_establish_peer_) { | |
| 898 TryCreateStreamTextureProxyIfNeeded(); | |
| 899 EstablishSurfaceTexturePeer(); | |
| 900 suppress_deleting_texture_ = true; | |
| 901 } | |
| 902 | |
| 903 is_fullscreen_ = false; | |
| 904 ReallocateVideoFrame(); | |
| 905 client_->repaint(); | |
| 906 } | |
| 907 | |
| 908 void WebMediaPlayerAndroid::OnMediaPlayerPlay() { | |
| 909 // The MediaPlayer might request the video to be played after it lost its | |
| 910 // stream texture proxy or the peer connection, for example, if the video was | |
| 911 // paused while fullscreen then fullscreen state was left. | |
| 912 TryCreateStreamTextureProxyIfNeeded(); | |
| 913 if (needs_establish_peer_) | |
| 914 EstablishSurfaceTexturePeer(); | |
| 915 | |
| 916 UpdatePlayingState(true); | |
| 917 client_->playbackStateChanged(); | |
| 918 } | |
| 919 | |
| 920 void WebMediaPlayerAndroid::OnMediaPlayerPause() { | |
| 921 UpdatePlayingState(false); | |
| 922 client_->playbackStateChanged(); | |
| 923 } | |
| 924 | |
| 925 void WebMediaPlayerAndroid::OnRemoteRouteAvailabilityChanged( | |
| 926 blink::WebRemotePlaybackAvailability availability) { | |
| 927 client_->remoteRouteAvailabilityChanged(availability); | |
| 928 } | |
| 929 | |
| 930 void WebMediaPlayerAndroid::UpdateNetworkState( | |
| 931 WebMediaPlayer::NetworkState state) { | |
| 932 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 933 if (ready_state_ == WebMediaPlayer::ReadyStateHaveNothing && | |
| 934 (state == WebMediaPlayer::NetworkStateNetworkError || | |
| 935 state == WebMediaPlayer::NetworkStateDecodeError)) { | |
| 936 // Any error that occurs before reaching ReadyStateHaveMetadata should | |
| 937 // be considered a format error. | |
| 938 network_state_ = WebMediaPlayer::NetworkStateFormatError; | |
| 939 } else { | |
| 940 network_state_ = state; | |
| 941 } | |
| 942 client_->networkStateChanged(); | |
| 943 } | |
| 944 | |
| 945 void WebMediaPlayerAndroid::UpdateReadyState( | |
| 946 WebMediaPlayer::ReadyState state) { | |
| 947 ready_state_ = state; | |
| 948 client_->readyStateChanged(); | |
| 949 } | |
| 950 | |
| 951 void WebMediaPlayerAndroid::OnPlayerReleased() { | |
| 952 needs_establish_peer_ = true; | |
| 953 | |
| 954 if (is_playing_) | |
| 955 OnMediaPlayerPause(); | |
| 956 | |
| 957 delegate_->PlayerGone(delegate_id_); | |
| 958 } | |
| 959 | |
// Pauses playback (when safe) and releases the browser-side player's
// resources so the renderer can be suspended.
void WebMediaPlayerAndroid::SuspendAndReleaseResources() {
  switch (network_state_) {
    // Pause the media player and inform WebKit if the player is in a good
    // shape.
    case WebMediaPlayer::NetworkStateIdle:
    case WebMediaPlayer::NetworkStateLoading:
    case WebMediaPlayer::NetworkStateLoaded:
      Pause(false);
      client_->playbackStateChanged();
      delegate_->PlayerGone(delegate_id_);
      break;
    // If a WebMediaPlayer instance has entered into one of these states,
    // the internal network state in HTMLMediaElement could be set to empty.
    // And calling playbackStateChanged() could get this object deleted.
    case WebMediaPlayer::NetworkStateEmpty:
    case WebMediaPlayer::NetworkStateFormatError:
    case WebMediaPlayer::NetworkStateNetworkError:
    case WebMediaPlayer::NetworkStateDecodeError:
      break;
  }
  // Safe to touch members here: the deletion hazard above only applies to
  // the states where playbackStateChanged() was deliberately not called.
  player_manager_->SuspendAndReleaseResources(player_id_);
  SetNeedsEstablishPeer(true);
}
| 983 | |
| 984 void WebMediaPlayerAndroid::InitializePlayer( | |
| 985 const GURL& url, | |
| 986 const GURL& first_party_for_cookies, | |
| 987 bool allow_stored_credentials) { | |
| 988 ReportHLSMetrics(); | |
| 989 | |
| 990 allow_stored_credentials_ = allow_stored_credentials; | |
| 991 player_manager_->Initialize(player_type_, player_id_, url, | |
| 992 first_party_for_cookies, frame_->document().url(), | |
| 993 allow_stored_credentials, delegate_id_); | |
| 994 is_player_initialized_ = true; | |
| 995 | |
| 996 if (is_fullscreen_) | |
| 997 player_manager_->EnterFullscreen(player_id_); | |
| 998 } | |
| 999 | |
| 1000 void WebMediaPlayerAndroid::Pause(bool is_media_related_action) { | |
| 1001 player_manager_->Pause(player_id_, is_media_related_action); | |
| 1002 UpdatePlayingState(false); | |
| 1003 } | |
| 1004 | |
// Renders |remote_playback_message| into a text frame sized to the video
// layer and installs it as the current frame (shown locally while casting).
void WebMediaPlayerAndroid::DrawRemotePlaybackText(
    const std::string& remote_playback_message) {
  DCHECK(main_thread_checker_.CalledOnValidThread());
  // Without a compositing layer there is nothing to size the text frame to.
  if (!video_weblayer_)
    return;

  // TODO(johnme): Should redraw this frame if the layer bounds change; but
  // there seems no easy way to listen for the layer resizing (as opposed to
  // OnVideoSizeChanged, which is when the frame sizes of the video file
  // change). Perhaps have to poll (on main thread of course)?
  gfx::Size video_size_css_px = video_weblayer_->bounds();
  RenderView* render_view = RenderView::FromWebView(frame_->view());
  float device_scale_factor = render_view->GetDeviceScaleFactor();
  // canvas_size will be the size in device pixels when pageScaleFactor == 1
  gfx::Size canvas_size(
      static_cast<int>(video_size_css_px.width() * device_scale_factor),
      static_cast<int>(video_size_css_px.height() * device_scale_factor));

  scoped_refptr<VideoFrame> new_frame(media::MakeTextFrameForCast(
      remote_playback_message,
      canvas_size,
      canvas_size,
      base::Bind(&StreamTextureFactory::ContextGL,
                 stream_texture_factory_)));
  // Frame creation can fail (e.g. no GL context); keep the old frame then.
  if (!new_frame)
    return;
  SetCurrentFrameInternal(new_frame);
}
| 1033 | |
// Rebuilds the VideoFrame that wraps the external stream texture so it
// reflects the current |natural_size_|. No-op in fullscreen (a transparent
// frame is used there) and in remote mode or before a texture exists.
void WebMediaPlayerAndroid::ReallocateVideoFrame() {
  DCHECK(main_thread_checker_.CalledOnValidThread());

  if (is_fullscreen_) return;
  if (!is_remote_ && texture_id_) {
    GLES2Interface* gl = stream_texture_factory_->ContextGL();
    GLuint texture_target = kGLTextureExternalOES;
    // Take a local reference on the mailbox texture so the frame's release
    // callback can drop it when the frame is destroyed.
    GLuint texture_id_ref = gl->CreateAndConsumeTextureCHROMIUM(
        texture_target, texture_mailbox_.name);
    // Fence + flush so the consume above is ordered before any use of the
    // sync token generated below.
    const GLuint64 fence_sync = gl->InsertFenceSyncCHROMIUM();
    gl->Flush();

    gpu::SyncToken texture_mailbox_sync_token;
    gl->GenUnverifiedSyncTokenCHROMIUM(fence_sync,
                                       texture_mailbox_sync_token.GetData());
    if (texture_mailbox_sync_token.namespace_id() ==
        gpu::CommandBufferNamespace::IN_PROCESS) {
      // TODO(boliu): Remove this once Android WebView switches to IPC-based
      // command buffer for video.
      GLbyte* sync_tokens[] = {texture_mailbox_sync_token.GetData()};
      gl->VerifySyncTokensCHROMIUM(sync_tokens, arraysize(sync_tokens));
    }

    // Wrap the mailbox (plus its sync token) as a single-plane ARGB frame.
    gpu::MailboxHolder holders[media::VideoFrame::kMaxPlanes] = {
        gpu::MailboxHolder(texture_mailbox_, texture_mailbox_sync_token,
                           texture_target)};
    scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTextures(
        media::PIXEL_FORMAT_ARGB, holders,
        media::BindToCurrentLoop(base::Bind(
            &OnReleaseTexture, stream_texture_factory_, texture_id_ref)),
        natural_size_, gfx::Rect(natural_size_), natural_size_,
        base::TimeDelta());
    if (new_frame.get()) {
      // NOTE(review): presumably set so WebView-style compositors copy the
      // external texture rather than sampling it directly — confirm.
      new_frame->metadata()->SetBoolean(
          media::VideoFrameMetadata::COPY_REQUIRED, enable_texture_copy_);
    }
    SetCurrentFrameInternal(new_frame);
  }
}
| 1073 | |
// Registers (or clears, when |client| is null) the compositor-side consumer
// of this provider's frames.
void WebMediaPlayerAndroid::SetVideoFrameProviderClient(
    cc::VideoFrameProvider::Client* client) {
  // This is called from both the main renderer thread and the compositor
  // thread (when the main thread is blocked).

  // Set the callback target when a frame is produced. Need to do this before
  // StopUsingProvider to ensure we really stop using the client.
  if (stream_texture_proxy_)
    UpdateStreamTextureProxyCallback(client);

  // Detach the previous client before swapping in the new one.
  if (video_frame_provider_client_ && video_frame_provider_client_ != client)
    video_frame_provider_client_->StopUsingProvider();
  video_frame_provider_client_ = client;
}
| 1088 | |
| 1089 void WebMediaPlayerAndroid::SetCurrentFrameInternal( | |
| 1090 scoped_refptr<media::VideoFrame>& video_frame) { | |
| 1091 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 1092 base::AutoLock auto_lock(current_frame_lock_); | |
| 1093 current_frame_ = video_frame; | |
| 1094 } | |
| 1095 | |
| 1096 bool WebMediaPlayerAndroid::UpdateCurrentFrame(base::TimeTicks deadline_min, | |
| 1097 base::TimeTicks deadline_max) { | |
| 1098 NOTIMPLEMENTED(); | |
| 1099 return false; | |
| 1100 } | |
| 1101 | |
// Returns true if a frame has been published. May be called off the main
// thread, hence the lock.
bool WebMediaPlayerAndroid::HasCurrentFrame() {
  base::AutoLock auto_lock(current_frame_lock_);
  return static_cast<bool>(current_frame_);
}
| 1106 | |
| 1107 scoped_refptr<media::VideoFrame> WebMediaPlayerAndroid::GetCurrentFrame() { | |
| 1108 scoped_refptr<VideoFrame> video_frame; | |
| 1109 { | |
| 1110 base::AutoLock auto_lock(current_frame_lock_); | |
| 1111 video_frame = current_frame_; | |
| 1112 } | |
| 1113 | |
| 1114 return video_frame; | |
| 1115 } | |
| 1116 | |
| 1117 void WebMediaPlayerAndroid::PutCurrentFrame() { | |
| 1118 } | |
| 1119 | |
| 1120 void WebMediaPlayerAndroid::UpdateStreamTextureProxyCallback( | |
| 1121 cc::VideoFrameProvider::Client* client) { | |
| 1122 base::Closure frame_received_cb; | |
| 1123 | |
| 1124 if (client) { | |
| 1125 // Unretained is safe here because: | |
| 1126 // - |client| is valid until we receive a call to | |
| 1127 // SetVideoFrameProviderClient(nullptr). | |
| 1128 // - SetVideoFrameProviderClient(nullptr) clears proxy's callback | |
| 1129 // guaranteeing it will no longer be run. | |
| 1130 frame_received_cb = | |
| 1131 base::Bind(&cc::VideoFrameProvider::Client::DidReceiveFrame, | |
| 1132 base::Unretained(client)); | |
| 1133 } | |
| 1134 | |
| 1135 stream_texture_proxy_->BindToTaskRunner(frame_received_cb, | |
| 1136 compositor_task_runner_); | |
| 1137 } | |
| 1138 | |
| 1139 void WebMediaPlayerAndroid::TryCreateStreamTextureProxyIfNeeded() { | |
| 1140 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 1141 // Already created. | |
| 1142 if (stream_texture_proxy_) | |
| 1143 return; | |
| 1144 | |
| 1145 // No factory to create proxy. | |
| 1146 if (!stream_texture_factory_.get()) | |
| 1147 return; | |
| 1148 | |
| 1149 // Not needed for hole punching. | |
| 1150 if (!needs_establish_peer_) | |
| 1151 return; | |
| 1152 | |
| 1153 DCHECK(!texture_id_); | |
| 1154 stream_texture_proxy_ = stream_texture_factory_->CreateProxy( | |
| 1155 kGLTextureExternalOES, &texture_id_, &texture_mailbox_); | |
| 1156 if (!stream_texture_proxy_) | |
| 1157 return; | |
| 1158 ReallocateVideoFrame(); | |
| 1159 if (video_frame_provider_client_) | |
| 1160 UpdateStreamTextureProxyCallback(video_frame_provider_client_); | |
| 1161 } | |
| 1162 | |
| 1163 void WebMediaPlayerAndroid::EstablishSurfaceTexturePeer() { | |
| 1164 DCHECK(main_thread_checker_.CalledOnValidThread()); | |
| 1165 if (!stream_texture_proxy_) | |
| 1166 return; | |
| 1167 | |
| 1168 stream_texture_proxy_->EstablishPeer(player_id_, frame_id_); | |
| 1169 | |
| 1170 // Set the deferred size because the size was changed in remote mode. | |
| 1171 if (!is_remote_ && cached_stream_texture_size_ != natural_size_) { | |
| 1172 stream_texture_proxy_->SetStreamTextureSize( | |
| 1173 gfx::Size(natural_size_.width, natural_size_.height)); | |
| 1174 cached_stream_texture_size_ = natural_size_; | |
| 1175 } | |
| 1176 | |
| 1177 needs_establish_peer_ = false; | |
| 1178 } | |
| 1179 | |
| 1180 void WebMediaPlayerAndroid::SetNeedsEstablishPeer(bool needs_establish_peer) { | |
| 1181 needs_establish_peer_ = needs_establish_peer; | |
| 1182 } | |
| 1183 | |
| 1184 void WebMediaPlayerAndroid::setPoster(const blink::WebURL& poster) { | |
| 1185 player_manager_->SetPoster(player_id_, poster); | |
| 1186 } | |
| 1187 | |
| 1188 void WebMediaPlayerAndroid::UpdatePlayingState(bool is_playing) { | |
| 1189 if (is_playing == is_playing_) | |
| 1190 return; | |
| 1191 | |
| 1192 is_playing_ = is_playing; | |
| 1193 | |
| 1194 if (is_playing) | |
| 1195 interpolator_.StartInterpolating(); | |
| 1196 else | |
| 1197 interpolator_.StopInterpolating(); | |
| 1198 | |
| 1199 if (is_playing) { | |
| 1200 // We must specify either video or audio to the delegate, but neither may | |
| 1201 // be known at this point -- there are no video only containers, so only | |
| 1202 // send audio if we know for sure its audio. The browser side player will | |
| 1203 // fill in the correct value later for media sessions. | |
| 1204 if (isRemote()) { | |
| 1205 delegate_->PlayerGone(delegate_id_); | |
| 1206 } else { | |
| 1207 delegate_->DidPlay(delegate_id_, hasVideo(), !hasVideo(), | |
| 1208 media::DurationToMediaContentType(duration_)); | |
| 1209 } | |
| 1210 delegate_->SetIdle(delegate_id_, false); | |
| 1211 } else { | |
| 1212 // Even if OnPlaybackComplete() has not been called yet, Blink may have | |
| 1213 // already fired the ended event based on current time relative to | |
| 1214 // duration -- so we need to check both possibilities here. | |
| 1215 if (playback_completed_ || currentTime() >= duration()) { | |
| 1216 delegate_->PlayerGone(delegate_id_); | |
| 1217 } else { | |
| 1218 delegate_->DidPause(delegate_id_); | |
| 1219 } | |
| 1220 delegate_->SetIdle(delegate_id_, true); | |
| 1221 } | |
| 1222 } | |
| 1223 | |
| 1224 void WebMediaPlayerAndroid::OnFrameHidden() { | |
| 1225 // Pause audible video preserving its session. | |
| 1226 if (hasVideo() && IsBackgroundVideoCandidate() && !paused()) { | |
| 1227 Pause(false); | |
| 1228 is_play_pending_ = true; | |
| 1229 return; | |
| 1230 } | |
| 1231 | |
| 1232 OnIdleTimeout(); | |
| 1233 } | |
| 1234 | |
| 1235 void WebMediaPlayerAndroid::OnFrameClosed() { | |
| 1236 SuspendAndReleaseResources(); | |
| 1237 } | |
| 1238 | |
| 1239 void WebMediaPlayerAndroid::OnFrameShown() { | |
| 1240 if (is_play_pending_) | |
| 1241 play(); | |
| 1242 } | |
| 1243 | |
| 1244 void WebMediaPlayerAndroid::OnIdleTimeout() { | |
| 1245 if (base::CommandLine::ForCurrentProcess()->HasSwitch( | |
| 1246 switches::kDisableMediaSuspend)) { | |
| 1247 return; | |
| 1248 } | |
| 1249 | |
| 1250 // If we're playing video or ended, pause and release resources; audio only | |
| 1251 // players are allowed to continue. | |
| 1252 if ((hasVideo() && !IsBackgroundVideoCandidate()) || | |
| 1253 (paused() && playback_completed_)) { | |
| 1254 SuspendAndReleaseResources(); | |
| 1255 } | |
| 1256 } | |
| 1257 | |
| 1258 void WebMediaPlayerAndroid::OnPlay() { | |
| 1259 play(); | |
| 1260 client_->playbackStateChanged(); | |
| 1261 } | |
| 1262 | |
| 1263 void WebMediaPlayerAndroid::OnPause() { | |
| 1264 pause(); | |
| 1265 client_->playbackStateChanged(); | |
| 1266 } | |
| 1267 | |
| 1268 void WebMediaPlayerAndroid::OnVolumeMultiplierUpdate(double multiplier) { | |
| 1269 volume_multiplier_ = multiplier; | |
| 1270 setVolume(volume_); | |
| 1271 } | |
| 1272 | |
| 1273 bool WebMediaPlayerAndroid::supportsOverlayFullscreenVideo() { | |
| 1274 return true; | |
| 1275 } | |
| 1276 | |
| 1277 void WebMediaPlayerAndroid::enteredFullscreen() { | |
| 1278 if (is_player_initialized_) | |
| 1279 player_manager_->EnterFullscreen(player_id_); | |
| 1280 SetNeedsEstablishPeer(false); | |
| 1281 is_fullscreen_ = true; | |
| 1282 suppress_deleting_texture_ = false; | |
| 1283 | |
| 1284 // Create a transparent video frame. Blink will already have made the | |
| 1285 // background transparent because we returned true from | |
| 1286 // supportsOverlayFullscreenVideo(). By making the video frame transparent, | |
| 1287 // as well, everything in the LayerTreeView will be transparent except for | |
| 1288 // media controls. The video will be on visible on the underlaid surface. | |
| 1289 if (!fullscreen_frame_) | |
| 1290 fullscreen_frame_ = VideoFrame::CreateTransparentFrame(gfx::Size(1, 1)); | |
| 1291 SetCurrentFrameInternal(fullscreen_frame_); | |
| 1292 client_->repaint(); | |
| 1293 } | |
| 1294 | |
| 1295 bool WebMediaPlayerAndroid::IsHLSStream() const { | |
| 1296 const GURL& url = redirected_url_.is_empty() ? url_ : redirected_url_; | |
| 1297 return media::MediaCodecUtil::IsHLSURL(url); | |
| 1298 } | |
| 1299 | |
| 1300 void WebMediaPlayerAndroid::ReportHLSMetrics() const { | |
| 1301 if (player_type_ != MEDIA_PLAYER_TYPE_URL) | |
| 1302 return; | |
| 1303 | |
| 1304 bool is_hls = IsHLSStream(); | |
| 1305 UMA_HISTOGRAM_BOOLEAN("Media.Android.IsHttpLiveStreamingMedia", is_hls); | |
| 1306 if (is_hls) { | |
| 1307 media::RecordOriginOfHLSPlayback( | |
| 1308 url::Origin(frame_->getSecurityOrigin()).GetURL()); | |
| 1309 } | |
| 1310 | |
| 1311 // Assuming that |is_hls| is the ground truth, test predictions. | |
| 1312 bool is_hls_path = media::MediaCodecUtil::IsHLSPath(url_); | |
| 1313 bool is_hls_url = media::MediaCodecUtil::IsHLSURL(url_); | |
| 1314 MediaTypePredictionResult result = PREDICTION_RESULT_ALL_INCORRECT; | |
| 1315 if (is_hls_path == is_hls && is_hls_url == is_hls) { | |
| 1316 result = PREDICTION_RESULT_ALL_CORRECT; | |
| 1317 } else if (is_hls_path == is_hls) { | |
| 1318 result = PREDICTION_RESULT_PATH_BASED_WAS_BETTER; | |
| 1319 } else if (is_hls_url == is_hls) { | |
| 1320 result = PREDICTION_RESULT_URL_BASED_WAS_BETTER; | |
| 1321 } | |
| 1322 UMA_HISTOGRAM_ENUMERATION( | |
| 1323 "Media.Android.IsHttpLiveStreamingMediaPredictionResult", | |
| 1324 result, PREDICTION_RESULT_MAX); | |
| 1325 } | |
| 1326 | |
| 1327 bool WebMediaPlayerAndroid::IsBackgroundVideoCandidate() const { | |
| 1328 DCHECK(hasVideo()); | |
| 1329 | |
| 1330 if (base::CommandLine::ForCurrentProcess()->HasSwitch( | |
| 1331 switches::kDisableMediaSuspend)) { | |
| 1332 return false; | |
| 1333 } | |
| 1334 | |
| 1335 return base::FeatureList::IsEnabled(media::kResumeBackgroundVideo) && | |
| 1336 hasAudio() && !isRemote() && delegate_ && delegate_->IsFrameHidden() && | |
| 1337 !delegate_->IsFrameClosed(); | |
| 1338 } | |
| 1339 | |
| 1340 } // namespace content | |
| OLD | NEW |