Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2015 The Chromium Authors. All rights reserved. | 1 // Copyright 2015 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "content/renderer/media/webmediaplayer_ms_compositor.h" | 5 #include "content/renderer/media/webmediaplayer_ms_compositor.h" |
| 6 | 6 |
| 7 #include <stdint.h> | 7 #include <stdint.h> |
| 8 #include <string> | 8 #include <string> |
| 9 | 9 |
| 10 #include "base/command_line.h" | 10 #include "base/command_line.h" |
| 11 #include "base/hash.h" | 11 #include "base/hash.h" |
| 12 #include "base/single_thread_task_runner.h" | 12 #include "base/single_thread_task_runner.h" |
| 13 #include "base/values.h" | 13 #include "base/values.h" |
| 14 #include "cc/paint/skia_paint_canvas.h" | 14 #include "cc/paint/skia_paint_canvas.h" |
| 15 #include "content/renderer/media/webmediaplayer_ms.h" | 15 #include "content/renderer/media/webmediaplayer_ms.h" |
| 16 #include "content/renderer/render_thread_impl.h" | 16 #include "content/renderer/render_thread_impl.h" |
| 17 #include "media/base/bind_to_current_loop.h" | |
| 17 #include "media/base/media_switches.h" | 18 #include "media/base/media_switches.h" |
| 18 #include "media/base/video_frame.h" | 19 #include "media/base/video_frame.h" |
| 19 #include "media/base/video_util.h" | 20 #include "media/base/video_util.h" |
| 20 #include "media/filters/video_renderer_algorithm.h" | 21 #include "media/filters/video_renderer_algorithm.h" |
| 21 #include "media/renderers/skcanvas_video_renderer.h" | 22 #include "media/renderers/skcanvas_video_renderer.h" |
| 22 #include "services/ui/public/cpp/gpu/context_provider_command_buffer.h" | 23 #include "services/ui/public/cpp/gpu/context_provider_command_buffer.h" |
| 23 #include "skia/ext/platform_canvas.h" | 24 #include "skia/ext/platform_canvas.h" |
| 24 #include "third_party/WebKit/public/platform/WebMediaStream.h" | 25 #include "third_party/WebKit/public/platform/WebMediaStream.h" |
| 25 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" | 26 #include "third_party/WebKit/public/platform/WebMediaStreamSource.h" |
| 26 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" | 27 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h" |
| 27 #include "third_party/libyuv/include/libyuv/convert.h" | 28 #include "third_party/libyuv/include/libyuv/convert.h" |
| 28 #include "third_party/libyuv/include/libyuv/planar_functions.h" | 29 #include "third_party/libyuv/include/libyuv/planar_functions.h" |
| 29 #include "third_party/libyuv/include/libyuv/video_common.h" | 30 #include "third_party/libyuv/include/libyuv/video_common.h" |
| 30 #include "third_party/skia/include/core/SkSurface.h" | 31 #include "third_party/skia/include/core/SkSurface.h" |
| 31 | 32 |
| 32 namespace content { | 33 namespace content { |
| 33 | 34 |
| 34 namespace { | 35 namespace { |
| 35 | 36 |
| 36 // This function copies |frame| to a new I420 or YV12A media::VideoFrame. | 37 // This function copies |frame| to a new I420 or YV12A media::VideoFrame. |
| 37 scoped_refptr<media::VideoFrame> CopyFrame( | 38 scoped_refptr<media::VideoFrame> CopyFrame( |
| 38 const scoped_refptr<media::VideoFrame>& frame, | 39 const scoped_refptr<media::VideoFrame>& frame, |
| 39 media::SkCanvasVideoRenderer* video_renderer) { | 40 media::SkCanvasVideoRenderer* video_renderer) { |
| 40 scoped_refptr<media::VideoFrame> new_frame; | 41 // Frames originating from local video capture may have a finite number |
| 41 if (frame->HasTextures()) { | 42 // of buffers, so copy only those. |
| 42 DCHECK(frame->format() == media::PIXEL_FORMAT_ARGB || | 43 if (!frame->HasTextures()) { |
|
qiangchen
2017/05/04 16:25:59
What about frame with texture? This way, you will
emircan
2017/05/04 17:58:14
Yes, that is what I am intending to do. Look at th
qiangchen
2017/05/06 12:44:38
But there is buffer limit for hardware decoder, I
emircan
2017/05/08 17:14:02
That would be 4 frames per decoder though. So, it
| |
| 43 frame->format() == media::PIXEL_FORMAT_XRGB || | |
| 44 frame->format() == media::PIXEL_FORMAT_I420 || | |
| 45 frame->format() == media::PIXEL_FORMAT_UYVY || | |
| 46 frame->format() == media::PIXEL_FORMAT_NV12); | |
| 47 new_frame = media::VideoFrame::CreateFrame( | |
| 48 media::PIXEL_FORMAT_I420, frame->coded_size(), frame->visible_rect(), | |
| 49 frame->natural_size(), frame->timestamp()); | |
| 50 | |
| 51 ui::ContextProviderCommandBuffer* const provider = | |
| 52 RenderThreadImpl::current()->SharedMainThreadContextProvider().get(); | |
| 53 if (!provider) { | |
| 54 // Return a black frame (yuv = {0, 0x80, 0x80}). | |
| 55 return media::VideoFrame::CreateColorFrame( | |
| 56 frame->visible_rect().size(), 0u, 0x80, 0x80, frame->timestamp()); | |
| 57 } | |
| 58 | |
| 59 SkBitmap bitmap; | |
| 60 bitmap.allocPixels(SkImageInfo::MakeN32Premul( | |
| 61 frame->visible_rect().width(), frame->visible_rect().height())); | |
| 62 cc::SkiaPaintCanvas paint_canvas(bitmap); | |
| 63 | |
| 64 DCHECK(provider->ContextGL()); | |
| 65 video_renderer->Copy( | |
| 66 frame.get(), &paint_canvas, | |
| 67 media::Context3D(provider->ContextGL(), provider->GrContext())); | |
| 68 | |
| 69 SkPixmap pixmap; | |
| 70 const bool result = bitmap.peekPixels(&pixmap); | |
| 71 DCHECK(result) << "Error trying to access SkBitmap's pixels"; | |
| 72 | |
| 73 const uint32 source_pixel_format = | |
| 74 (kN32_SkColorType == kRGBA_8888_SkColorType) ? libyuv::FOURCC_ABGR | |
| 75 : libyuv::FOURCC_ARGB; | |
| 76 libyuv::ConvertToI420( | |
| 77 static_cast<const uint8*>(pixmap.addr(0, 0)), | |
| 78 pixmap.getSafeSize64(), | |
| 79 new_frame->visible_data(media::VideoFrame::kYPlane), | |
| 80 new_frame->stride(media::VideoFrame::kYPlane), | |
| 81 new_frame->visible_data(media::VideoFrame::kUPlane), | |
| 82 new_frame->stride(media::VideoFrame::kUPlane), | |
| 83 new_frame->visible_data(media::VideoFrame::kVPlane), | |
| 84 new_frame->stride(media::VideoFrame::kVPlane), | |
| 85 0 /* crop_x */, 0 /* crop_y */, | |
| 86 pixmap.width(), pixmap.height(), | |
| 87 new_frame->visible_rect().width(), new_frame->visible_rect().height(), | |
| 88 libyuv::kRotate0, source_pixel_format); | |
| 89 } else { | |
| 90 DCHECK(frame->IsMappable()); | 44 DCHECK(frame->IsMappable()); |
| 91 DCHECK(frame->format() == media::PIXEL_FORMAT_YV12 || | 45 DCHECK(frame->format() == media::PIXEL_FORMAT_YV12 || |
| 92 frame->format() == media::PIXEL_FORMAT_YV12A || | 46 frame->format() == media::PIXEL_FORMAT_YV12A || |
| 93 frame->format() == media::PIXEL_FORMAT_I420); | 47 frame->format() == media::PIXEL_FORMAT_I420); |
| 94 const gfx::Size& coded_size = frame->coded_size(); | 48 const gfx::Size& coded_size = frame->coded_size(); |
| 95 new_frame = media::VideoFrame::CreateFrame( | 49 scoped_refptr<media::VideoFrame> new_frame = media::VideoFrame::CreateFrame( |
| 96 media::IsOpaque(frame->format()) ? media::PIXEL_FORMAT_I420 | 50 media::IsOpaque(frame->format()) ? media::PIXEL_FORMAT_I420 |
| 97 : media::PIXEL_FORMAT_YV12A, | 51 : media::PIXEL_FORMAT_YV12A, |
| 98 coded_size, frame->visible_rect(), frame->natural_size(), | 52 coded_size, frame->visible_rect(), frame->natural_size(), |
| 99 frame->timestamp()); | 53 frame->timestamp()); |
| 100 libyuv::I420Copy(frame->data(media::VideoFrame::kYPlane), | 54 libyuv::I420Copy(frame->data(media::VideoFrame::kYPlane), |
| 101 frame->stride(media::VideoFrame::kYPlane), | 55 frame->stride(media::VideoFrame::kYPlane), |
| 102 frame->data(media::VideoFrame::kUPlane), | 56 frame->data(media::VideoFrame::kUPlane), |
| 103 frame->stride(media::VideoFrame::kUPlane), | 57 frame->stride(media::VideoFrame::kUPlane), |
| 104 frame->data(media::VideoFrame::kVPlane), | 58 frame->data(media::VideoFrame::kVPlane), |
| 105 frame->stride(media::VideoFrame::kVPlane), | 59 frame->stride(media::VideoFrame::kVPlane), |
| 106 new_frame->data(media::VideoFrame::kYPlane), | 60 new_frame->data(media::VideoFrame::kYPlane), |
| 107 new_frame->stride(media::VideoFrame::kYPlane), | 61 new_frame->stride(media::VideoFrame::kYPlane), |
| 108 new_frame->data(media::VideoFrame::kUPlane), | 62 new_frame->data(media::VideoFrame::kUPlane), |
| 109 new_frame->stride(media::VideoFrame::kUPlane), | 63 new_frame->stride(media::VideoFrame::kUPlane), |
| 110 new_frame->data(media::VideoFrame::kVPlane), | 64 new_frame->data(media::VideoFrame::kVPlane), |
| 111 new_frame->stride(media::VideoFrame::kVPlane), | 65 new_frame->stride(media::VideoFrame::kVPlane), |
| 112 coded_size.width(), coded_size.height()); | 66 coded_size.width(), coded_size.height()); |
| 113 if (frame->format() == media::PIXEL_FORMAT_YV12A) { | 67 if (frame->format() == media::PIXEL_FORMAT_YV12A) { |
| 114 libyuv::CopyPlane(frame->data(media::VideoFrame::kAPlane), | 68 libyuv::CopyPlane(frame->data(media::VideoFrame::kAPlane), |
| 115 frame->stride(media::VideoFrame::kAPlane), | 69 frame->stride(media::VideoFrame::kAPlane), |
| 116 new_frame->data(media::VideoFrame::kAPlane), | 70 new_frame->data(media::VideoFrame::kAPlane), |
| 117 new_frame->stride(media::VideoFrame::kAPlane), | 71 new_frame->stride(media::VideoFrame::kAPlane), |
| 118 coded_size.width(), coded_size.height()); | 72 coded_size.width(), coded_size.height()); |
| 119 } | 73 } |
| 74 // Transfer metadata keys. | |
| 75 new_frame->metadata()->MergeMetadataFrom(frame->metadata()); | |
| 76 return new_frame; | |
| 120 } | 77 } |
| 121 | 78 return frame; |
| 122 // Transfer metadata keys. | |
| 123 new_frame->metadata()->MergeMetadataFrom(frame->metadata()); | |
| 124 return new_frame; | |
| 125 } | 79 } |
| 126 | 80 |
| 127 } // anonymous namespace | 81 } // anonymous namespace |
| 128 | 82 |
| 129 WebMediaPlayerMSCompositor::WebMediaPlayerMSCompositor( | 83 WebMediaPlayerMSCompositor::WebMediaPlayerMSCompositor( |
| 130 const scoped_refptr<base::SingleThreadTaskRunner>& compositor_task_runner, | 84 scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner, |
| 85 scoped_refptr<base::SingleThreadTaskRunner> io_task_runner, | |
| 131 const blink::WebMediaStream& web_stream, | 86 const blink::WebMediaStream& web_stream, |
| 132 const base::WeakPtr<WebMediaPlayerMS>& player) | 87 const base::WeakPtr<WebMediaPlayerMS>& player) |
| 133 : compositor_task_runner_(compositor_task_runner), | 88 : compositor_task_runner_(compositor_task_runner), |
| 89 io_task_runner_(io_task_runner), | |
| 134 player_(player), | 90 player_(player), |
| 135 video_frame_provider_client_(nullptr), | 91 video_frame_provider_client_(nullptr), |
| 136 current_frame_used_by_compositor_(false), | 92 current_frame_used_by_compositor_(false), |
| 137 last_render_length_(base::TimeDelta::FromSecondsD(1.0 / 60.0)), | 93 last_render_length_(base::TimeDelta::FromSecondsD(1.0 / 60.0)), |
| 138 total_frame_count_(0), | 94 total_frame_count_(0), |
| 139 dropped_frame_count_(0), | 95 dropped_frame_count_(0), |
| 140 stopped_(true) { | 96 stopped_(true) { |
| 141 main_message_loop_ = base::MessageLoop::current(); | 97 main_message_loop_ = base::MessageLoop::current(); |
| 142 io_thread_checker_.DetachFromThread(); | |
| 143 | 98 |
| 144 blink::WebVector<blink::WebMediaStreamTrack> video_tracks; | 99 blink::WebVector<blink::WebMediaStreamTrack> video_tracks; |
| 145 if (!web_stream.IsNull()) | 100 if (!web_stream.IsNull()) |
| 146 web_stream.VideoTracks(video_tracks); | 101 web_stream.VideoTracks(video_tracks); |
| 147 | 102 |
| 148 const bool remote_video = | 103 const bool remote_video = |
| 149 video_tracks.size() && video_tracks[0].Source().Remote(); | 104 video_tracks.size() && video_tracks[0].Source().Remote(); |
| 150 | 105 |
| 151 if (remote_video && !base::CommandLine::ForCurrentProcess()->HasSwitch( | 106 if (remote_video && !base::CommandLine::ForCurrentProcess()->HasSwitch( |
| 152 switches::kDisableRTCSmoothnessAlgorithm)) { | 107 switches::kDisableRTCSmoothnessAlgorithm)) { |
| (...skipping 48 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 201 if (video_frame_provider_client_) | 156 if (video_frame_provider_client_) |
| 202 video_frame_provider_client_->StopUsingProvider(); | 157 video_frame_provider_client_->StopUsingProvider(); |
| 203 | 158 |
| 204 video_frame_provider_client_ = client; | 159 video_frame_provider_client_ = client; |
| 205 if (video_frame_provider_client_ && !stopped_) | 160 if (video_frame_provider_client_ && !stopped_) |
| 206 video_frame_provider_client_->StartRendering(); | 161 video_frame_provider_client_->StartRendering(); |
| 207 } | 162 } |
| 208 | 163 |
| 209 void WebMediaPlayerMSCompositor::EnqueueFrame( | 164 void WebMediaPlayerMSCompositor::EnqueueFrame( |
| 210 scoped_refptr<media::VideoFrame> frame) { | 165 scoped_refptr<media::VideoFrame> frame) { |
| 211 DCHECK(io_thread_checker_.CalledOnValidThread()); | 166 DCHECK(io_task_runner_->BelongsToCurrentThread()); |
| 212 base::AutoLock auto_lock(current_frame_lock_); | 167 base::AutoLock auto_lock(current_frame_lock_); |
| 213 ++total_frame_count_; | 168 ++total_frame_count_; |
| 214 | 169 |
| 215 // With algorithm off, just let |current_frame_| hold the incoming |frame|. | 170 // With algorithm off, just let |current_frame_| hold the incoming |frame|. |
| 216 if (!rendering_frame_buffer_) { | 171 if (!rendering_frame_buffer_) { |
| 217 SetCurrentFrame(frame); | 172 SetCurrentFrame(frame); |
| 218 return; | 173 return; |
| 219 } | 174 } |
| 220 | 175 |
| 221 // This is a signal frame saying that the stream is stopped. | 176 // This is a signal frame saying that the stream is stopped. |
| (...skipping 105 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 327 | 282 |
| 328 void WebMediaPlayerMSCompositor::StopRendering() { | 283 void WebMediaPlayerMSCompositor::StopRendering() { |
| 329 DCHECK(thread_checker_.CalledOnValidThread()); | 284 DCHECK(thread_checker_.CalledOnValidThread()); |
| 330 compositor_task_runner_->PostTask( | 285 compositor_task_runner_->PostTask( |
| 331 FROM_HERE, | 286 FROM_HERE, |
| 332 base::Bind(&WebMediaPlayerMSCompositor::StopRenderingInternal, this)); | 287 base::Bind(&WebMediaPlayerMSCompositor::StopRenderingInternal, this)); |
| 333 } | 288 } |
| 334 | 289 |
| 335 void WebMediaPlayerMSCompositor::ReplaceCurrentFrameWithACopy() { | 290 void WebMediaPlayerMSCompositor::ReplaceCurrentFrameWithACopy() { |
| 336 DCHECK(thread_checker_.CalledOnValidThread()); | 291 DCHECK(thread_checker_.CalledOnValidThread()); |
| 337 base::AutoLock auto_lock(current_frame_lock_); | 292 // Bounce this call off of the IO thread, since there might still be |
| 338 if (!current_frame_.get() || !player_) | 293 // frames being passed on the IO thread. |
| 339 return; | 294 io_task_runner_->PostTask( |
| 340 | 295 FROM_HERE, |
| 341 // Copy the frame so that rendering can show the last received frame. | 296 media::BindToCurrentLoop(base::Bind( |
| 342 // The original frame must not be referenced when the player is paused since | 297 &WebMediaPlayerMSCompositor::ReplaceCurrentFrameWithACopyInternal, |
| 343 // there might be a finite number of available buffers. E.g., video that | |
| 344 // originates from a video camera. | |
| 345 current_frame_ = | |
| 346 CopyFrame(current_frame_, player_->GetSkCanvasVideoRenderer()); | |
| 347 } | 299 } |
| 348 | 300 |
| 349 void WebMediaPlayerMSCompositor::StopUsingProvider() { | 301 void WebMediaPlayerMSCompositor::StopUsingProvider() { |
| 350 DCHECK(thread_checker_.CalledOnValidThread()); | 302 DCHECK(thread_checker_.CalledOnValidThread()); |
| 351 compositor_task_runner_->PostTask( | 303 compositor_task_runner_->PostTask( |
| 352 FROM_HERE, | 304 FROM_HERE, |
| 353 base::Bind(&WebMediaPlayerMSCompositor::StopUsingProviderInternal, this)); | 305 base::Bind(&WebMediaPlayerMSCompositor::StopUsingProviderInternal, this)); |
| 354 } | 306 } |
| 355 | 307 |
| 356 bool WebMediaPlayerMSCompositor::MapTimestampsToRenderTimeTicks( | 308 bool WebMediaPlayerMSCompositor::MapTimestampsToRenderTimeTicks( |
| 357 const std::vector<base::TimeDelta>& timestamps, | 309 const std::vector<base::TimeDelta>& timestamps, |
| 358 std::vector<base::TimeTicks>* wall_clock_times) { | 310 std::vector<base::TimeTicks>* wall_clock_times) { |
| 359 DCHECK(compositor_task_runner_->BelongsToCurrentThread() || | 311 DCHECK(compositor_task_runner_->BelongsToCurrentThread() || |
| 360 thread_checker_.CalledOnValidThread() || | 312 thread_checker_.CalledOnValidThread() || |
| 361 io_thread_checker_.CalledOnValidThread()); | 313 io_task_runner_->BelongsToCurrentThread()); |
| 362 for (const base::TimeDelta& timestamp : timestamps) { | 314 for (const base::TimeDelta& timestamp : timestamps) { |
| 363 DCHECK(timestamps_to_clock_times_.count(timestamp)); | 315 DCHECK(timestamps_to_clock_times_.count(timestamp)); |
| 364 wall_clock_times->push_back(timestamps_to_clock_times_[timestamp]); | 316 wall_clock_times->push_back(timestamps_to_clock_times_[timestamp]); |
| 365 } | 317 } |
| 366 return true; | 318 return true; |
| 367 } | 319 } |
| 368 | 320 |
| 369 void WebMediaPlayerMSCompositor::Render(base::TimeTicks deadline_min, | 321 void WebMediaPlayerMSCompositor::Render(base::TimeTicks deadline_min, |
| 370 base::TimeTicks deadline_max) { | 322 base::TimeTicks deadline_max) { |
| 371 DCHECK(compositor_task_runner_->BelongsToCurrentThread() || | 323 DCHECK(compositor_task_runner_->BelongsToCurrentThread() || |
| (...skipping 70 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 442 video_frame_provider_client_->StopRendering(); | 394 video_frame_provider_client_->StopRendering(); |
| 443 } | 395 } |
| 444 | 396 |
| 445 void WebMediaPlayerMSCompositor::StopUsingProviderInternal() { | 397 void WebMediaPlayerMSCompositor::StopUsingProviderInternal() { |
| 446 DCHECK(compositor_task_runner_->BelongsToCurrentThread()); | 398 DCHECK(compositor_task_runner_->BelongsToCurrentThread()); |
| 447 if (video_frame_provider_client_) | 399 if (video_frame_provider_client_) |
| 448 video_frame_provider_client_->StopUsingProvider(); | 400 video_frame_provider_client_->StopUsingProvider(); |
| 449 video_frame_provider_client_ = nullptr; | 401 video_frame_provider_client_ = nullptr; |
| 450 } | 402 } |
| 451 | 403 |
| 404 void WebMediaPlayerMSCompositor::ReplaceCurrentFrameWithACopyInternal() { | |
| 405 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 406 scoped_refptr<media::VideoFrame> current_frame; | |
| 407 { | |
| 408 base::AutoLock auto_lock(current_frame_lock_); | |
| 409 if (!current_frame_.get() || !player_) | |
|
DaleCurtis
2017/05/04 00:45:37
Can anyone else set current_frame_? Or is this cal
emircan
2017/05/04 17:58:14
I was thinking of releasing the lock so that basic
| |
| 410 return; | |
| 411 | |
| 412 current_frame = current_frame_; | |
| 413 } | |
| 414 // Copy the frame so that rendering can show the last received frame. | |
| 415 // The original frame must not be referenced when the player is paused since | |
| 416 // there might be a finite number of available buffers. E.g., video that | |
| 417 // originates from a video camera. | |
| 418 current_frame = CopyFrame(current_frame, player_->GetSkCanvasVideoRenderer()); | |
| 419 { | |
| 420 base::AutoLock auto_lock(current_frame_lock_); | |
| 421 current_frame_ = current_frame; | |
| 422 } | |
| 423 } | |
| 424 | |
| 452 void WebMediaPlayerMSCompositor::SetAlgorithmEnabledForTesting( | 425 void WebMediaPlayerMSCompositor::SetAlgorithmEnabledForTesting( |
| 453 bool algorithm_enabled) { | 426 bool algorithm_enabled) { |
| 454 if (!algorithm_enabled) { | 427 if (!algorithm_enabled) { |
| 455 rendering_frame_buffer_.reset(); | 428 rendering_frame_buffer_.reset(); |
| 456 return; | 429 return; |
| 457 } | 430 } |
| 458 | 431 |
| 459 if (!rendering_frame_buffer_) { | 432 if (!rendering_frame_buffer_) { |
| 460 rendering_frame_buffer_.reset(new media::VideoRendererAlgorithm( | 433 rendering_frame_buffer_.reset(new media::VideoRendererAlgorithm( |
| 461 base::Bind(&WebMediaPlayerMSCompositor::MapTimestampsToRenderTimeTicks, | 434 base::Bind(&WebMediaPlayerMSCompositor::MapTimestampsToRenderTimeTicks, |
| 462 base::Unretained(this)), | 435 base::Unretained(this)), |
| 463 &media_log_)); | 436 &media_log_)); |
| 464 } | 437 } |
| 465 } | 438 } |
| 466 | 439 |
| 467 } // namespace content | 440 } // namespace content |
| OLD | NEW |