Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(83)

Side by Side Diff: content/renderer/media/video_capture_impl.cc

Issue 2045813003: Decouple capture timestamp and reference time (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Nit Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 // 4 //
5 // Notes about usage of this object by VideoCaptureImplManager. 5 // Notes about usage of this object by VideoCaptureImplManager.
6 // 6 //
7 // VideoCaptureImplManager accesses this object by using a Unretained() 7 // VideoCaptureImplManager accesses this object by using a Unretained()
8 // binding and tasks on the IO thread. It is then important that 8 // binding and tasks on the IO thread. It is then important that
9 // VideoCaptureImpl never posts tasks to itself. All operations must be 9 // VideoCaptureImpl never posts tasks to itself. All operations must be
10 // synchronous. 10 // synchronous.
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after
184 if (state_ == VIDEO_CAPTURE_STATE_STARTED) 184 if (state_ == VIDEO_CAPTURE_STATE_STARTED)
185 return; 185 return;
186 params_ = params; 186 params_ = params;
187 if (params_.requested_format.frame_rate > 187 if (params_.requested_format.frame_rate >
188 media::limits::kMaxFramesPerSecond) { 188 media::limits::kMaxFramesPerSecond) {
189 params_.requested_format.frame_rate = 189 params_.requested_format.frame_rate =
190 media::limits::kMaxFramesPerSecond; 190 media::limits::kMaxFramesPerSecond;
191 } 191 }
192 DVLOG(1) << "StartCapture: starting with first resolution " 192 DVLOG(1) << "StartCapture: starting with first resolution "
193 << params_.requested_format.frame_size.ToString(); 193 << params_.requested_format.frame_size.ToString();
194 first_frame_timestamp_ = base::TimeTicks();
195 StartCaptureInternal(); 194 StartCaptureInternal();
196 } 195 }
197 } 196 }
198 } 197 }
199 198
200 void VideoCaptureImpl::StopCapture(int client_id) { 199 void VideoCaptureImpl::StopCapture(int client_id) {
201 DCHECK(io_task_runner_->BelongsToCurrentThread()); 200 DCHECK(io_task_runner_->BelongsToCurrentThread());
202 // A client ID can be in only one client list. 201 // A client ID can be in only one client list.
203 // If this ID is in any client list, we can just remove it from 202 // If this ID is in any client list, we can just remove it from
204 // that client list and don't have to run the other following RemoveClient(). 203 // that client list and don't have to run the other following RemoveClient().
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
297 if (cb2_iter != client_buffer2s_.end()) { 296 if (cb2_iter != client_buffer2s_.end()) {
298 DCHECK(!cb2_iter->second.get() || cb2_iter->second->HasOneRef()) 297 DCHECK(!cb2_iter->second.get() || cb2_iter->second->HasOneRef())
299 << "Instructed to delete buffer we are still using."; 298 << "Instructed to delete buffer we are still using.";
300 client_buffer2s_.erase(cb2_iter); 299 client_buffer2s_.erase(cb2_iter);
301 } 300 }
302 } 301 }
303 } 302 }
304 303
305 void VideoCaptureImpl::OnBufferReceived( 304 void VideoCaptureImpl::OnBufferReceived(
306 int buffer_id, 305 int buffer_id,
307 base::TimeTicks timestamp, 306 base::TimeDelta timestamp,
308 const base::DictionaryValue& metadata, 307 const base::DictionaryValue& metadata,
309 media::VideoPixelFormat pixel_format, 308 media::VideoPixelFormat pixel_format,
310 media::VideoFrame::StorageType storage_type, 309 media::VideoFrame::StorageType storage_type,
311 const gfx::Size& coded_size, 310 const gfx::Size& coded_size,
312 const gfx::Rect& visible_rect) { 311 const gfx::Rect& visible_rect) {
313 DCHECK(io_task_runner_->BelongsToCurrentThread()); 312 DCHECK(io_task_runner_->BelongsToCurrentThread());
314 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ || 313 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ ||
315 pixel_format != media::PIXEL_FORMAT_I420 || 314 pixel_format != media::PIXEL_FORMAT_I420 ||
316 (storage_type != media::VideoFrame::STORAGE_SHMEM && 315 (storage_type != media::VideoFrame::STORAGE_SHMEM &&
317 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) { 316 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) {
318 // Crash in debug builds since the host should not have provided a buffer 317 // Crash in debug builds since the host should not have provided a buffer
319 // with an unsupported pixel format or storage type. 318 // with an unsupported pixel format or storage type.
320 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); 319 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
321 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM || 320 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM ||
322 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS); 321 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS);
323 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 322 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
324 gpu::SyncToken(), -1.0)); 323 gpu::SyncToken(), -1.0));
325 return; 324 return;
326 } 325 }
327 if (first_frame_timestamp_.is_null())
328 first_frame_timestamp_ = timestamp;
329 326
327 base::TimeTicks reference_time;
328 media::VideoFrameMetadata frame_metadata;
329 frame_metadata.MergeInternalValuesFrom(metadata);
330 const bool success = frame_metadata.GetTimeTicks(
331 media::VideoFrameMetadata::REFERENCE_TIME, &reference_time);
332 DCHECK(success);
333
334 if (first_frame_ref_time_.is_null())
335 first_frame_ref_time_ = reference_time;
336
337 // If the timestamp is not set, use the reference time to make a rough
338 // estimate; e.g. ThreadSafeCaptureOracle::DidCaptureFrame() leaves it unset.
339 // TODO(miu): Fix upstream capturers to always set timestamp and reference
340 // time. See http://crbug.com/618407 for tracking.
341 if (timestamp.is_zero())
342 timestamp = reference_time - first_frame_ref_time_;
343
344 // TODO(qiangchen): Change the metric name to "reference_time" and
345 // "timestamp", so that we have consistent naming everywhere.
330 // Used by chrome/browser/extensions/api/cast_streaming/performance_test.cc 346 // Used by chrome/browser/extensions/api/cast_streaming/performance_test.cc
331 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived", 347 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived",
332 TRACE_EVENT_SCOPE_THREAD, "timestamp", 348 TRACE_EVENT_SCOPE_THREAD, "timestamp",
333 timestamp.ToInternalValue(), "time_delta", 349 (reference_time - base::TimeTicks()).InMicroseconds(),
334 (timestamp - first_frame_timestamp_).ToInternalValue()); 350 "time_delta", timestamp.InMicroseconds());
335 351
336 scoped_refptr<media::VideoFrame> frame; 352 scoped_refptr<media::VideoFrame> frame;
337 BufferFinishedCallback buffer_finished_callback; 353 BufferFinishedCallback buffer_finished_callback;
338 std::unique_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken); 354 std::unique_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken);
339 switch (storage_type) { 355 switch (storage_type) {
340 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: { 356 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: {
341 const auto& iter = client_buffer2s_.find(buffer_id); 357 const auto& iter = client_buffer2s_.find(buffer_id);
342 DCHECK(iter != client_buffer2s_.end()); 358 DCHECK(iter != client_buffer2s_.end());
343 scoped_refptr<ClientBuffer2> buffer = iter->second; 359 scoped_refptr<ClientBuffer2> buffer = iter->second;
344 const auto& handles = buffer->gpu_memory_buffer_handles(); 360 const auto& handles = buffer->gpu_memory_buffer_handles();
345 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers( 361 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers(
346 media::PIXEL_FORMAT_I420, 362 media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size),
347 coded_size, 363 coded_size, buffer->stride(media::VideoFrame::kYPlane),
348 gfx::Rect(coded_size),
349 coded_size,
350 buffer->stride(media::VideoFrame::kYPlane),
351 buffer->stride(media::VideoFrame::kUPlane), 364 buffer->stride(media::VideoFrame::kUPlane),
352 buffer->stride(media::VideoFrame::kVPlane), 365 buffer->stride(media::VideoFrame::kVPlane),
353 buffer->data(media::VideoFrame::kYPlane), 366 buffer->data(media::VideoFrame::kYPlane),
354 buffer->data(media::VideoFrame::kUPlane), 367 buffer->data(media::VideoFrame::kUPlane),
355 buffer->data(media::VideoFrame::kVPlane), 368 buffer->data(media::VideoFrame::kVPlane),
356 handles[media::VideoFrame::kYPlane], 369 handles[media::VideoFrame::kYPlane],
357 handles[media::VideoFrame::kUPlane], 370 handles[media::VideoFrame::kUPlane],
358 handles[media::VideoFrame::kVPlane], 371 handles[media::VideoFrame::kVPlane], timestamp);
359 timestamp - first_frame_timestamp_);
360 buffer_finished_callback = media::BindToCurrentLoop( 372 buffer_finished_callback = media::BindToCurrentLoop(
361 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2, 373 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2,
362 weak_factory_.GetWeakPtr(), buffer_id, buffer)); 374 weak_factory_.GetWeakPtr(), buffer_id, buffer));
363 break; 375 break;
364 } 376 }
365 case media::VideoFrame::STORAGE_SHMEM: { 377 case media::VideoFrame::STORAGE_SHMEM: {
366 const auto& iter = client_buffers_.find(buffer_id); 378 const auto& iter = client_buffers_.find(buffer_id);
367 DCHECK(iter != client_buffers_.end()); 379 DCHECK(iter != client_buffers_.end());
368 const scoped_refptr<ClientBuffer> buffer = iter->second; 380 const scoped_refptr<ClientBuffer> buffer = iter->second;
369 frame = media::VideoFrame::WrapExternalSharedMemory( 381 frame = media::VideoFrame::WrapExternalSharedMemory(
370 pixel_format, coded_size, visible_rect, 382 pixel_format, coded_size, visible_rect,
371 gfx::Size(visible_rect.width(), visible_rect.height()), 383 gfx::Size(visible_rect.width(), visible_rect.height()),
372 reinterpret_cast<uint8_t*>(buffer->buffer()->memory()), 384 reinterpret_cast<uint8_t*>(buffer->buffer()->memory()),
373 buffer->buffer_size(), buffer->buffer()->handle(), 385 buffer->buffer_size(), buffer->buffer()->handle(),
374 0 /* shared_memory_offset */, timestamp - first_frame_timestamp_); 386 0 /* shared_memory_offset */, timestamp);
375 buffer_finished_callback = media::BindToCurrentLoop( 387 buffer_finished_callback = media::BindToCurrentLoop(
376 base::Bind(&VideoCaptureImpl::OnClientBufferFinished, 388 base::Bind(&VideoCaptureImpl::OnClientBufferFinished,
377 weak_factory_.GetWeakPtr(), buffer_id, buffer)); 389 weak_factory_.GetWeakPtr(), buffer_id, buffer));
378 break; 390 break;
379 } 391 }
380 default: 392 default:
381 NOTREACHED(); 393 NOTREACHED();
382 break; 394 break;
383 } 395 }
384 if (!frame) { 396 if (!frame) {
385 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 397 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
386 gpu::SyncToken(), -1.0)); 398 gpu::SyncToken(), -1.0));
387 return; 399 return;
388 } 400 }
389 401
390 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
391 timestamp);
392 frame->AddDestructionObserver( 402 frame->AddDestructionObserver(
393 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(), 403 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(),
394 base::Passed(&release_sync_token), buffer_finished_callback)); 404 base::Passed(&release_sync_token), buffer_finished_callback));
395 405
396 frame->metadata()->MergeInternalValuesFrom(metadata); 406 frame->metadata()->MergeInternalValuesFrom(metadata);
397 407
408 // TODO(qiangchen): Dive into the full code path to let frame metadata hold
409 // reference time rather than using an extra parameter.
398 for (const auto& client : clients_) 410 for (const auto& client : clients_)
399 client.second.deliver_frame_cb.Run(frame, timestamp); 411 client.second.deliver_frame_cb.Run(frame, reference_time);
400 } 412 }
401 413
402 void VideoCaptureImpl::OnClientBufferFinished( 414 void VideoCaptureImpl::OnClientBufferFinished(
403 int buffer_id, 415 int buffer_id,
404 const scoped_refptr<ClientBuffer>& /* ignored_buffer */, 416 const scoped_refptr<ClientBuffer>& /* ignored_buffer */,
405 const gpu::SyncToken& release_sync_token, 417 const gpu::SyncToken& release_sync_token,
406 double consumer_resource_utilization) { 418 double consumer_resource_utilization) {
407 DCHECK(io_task_runner_->BelongsToCurrentThread()); 419 DCHECK(io_task_runner_->BelongsToCurrentThread());
408 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 420 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
409 release_sync_token, 421 release_sync_token,
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
558 double consumer_resource_utilization = -1.0; 570 double consumer_resource_utilization = -1.0;
559 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION, 571 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION,
560 &consumer_resource_utilization)) { 572 &consumer_resource_utilization)) {
561 consumer_resource_utilization = -1.0; 573 consumer_resource_utilization = -1.0;
562 } 574 }
563 575
564 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization); 576 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization);
565 } 577 }
566 578
567 } // namespace content 579 } // namespace content
OLDNEW
« no previous file with comments | « content/renderer/media/video_capture_impl.h ('k') | content/renderer/media/video_capture_impl_unittest.cc » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698