Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(10)

Side by Side Diff: content/renderer/media/video_capture_impl.cc

Issue 2045813003: Decouple capture timestamp and reference time (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Metadata over parameter Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 // 4 //
5 // Notes about usage of this object by VideoCaptureImplManager. 5 // Notes about usage of this object by VideoCaptureImplManager.
6 // 6 //
7 // VideoCaptureImplManager accesses this object by using an Unretained() 7 // VideoCaptureImplManager accesses this object by using an Unretained()
8 // binding and tasks on the IO thread. It is then important that 8 // binding and tasks on the IO thread. It is then important that
9 // VideoCaptureImpl never posts tasks to itself. All operations must be 9 // VideoCaptureImpl never posts tasks to itself. All operations must be
10 // synchronous. 10 // synchronous.
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after
184 if (state_ == VIDEO_CAPTURE_STATE_STARTED) 184 if (state_ == VIDEO_CAPTURE_STATE_STARTED)
185 return; 185 return;
186 params_ = params; 186 params_ = params;
187 if (params_.requested_format.frame_rate > 187 if (params_.requested_format.frame_rate >
188 media::limits::kMaxFramesPerSecond) { 188 media::limits::kMaxFramesPerSecond) {
189 params_.requested_format.frame_rate = 189 params_.requested_format.frame_rate =
190 media::limits::kMaxFramesPerSecond; 190 media::limits::kMaxFramesPerSecond;
191 } 191 }
192 DVLOG(1) << "StartCapture: starting with first resolution " 192 DVLOG(1) << "StartCapture: starting with first resolution "
193 << params_.requested_format.frame_size.ToString(); 193 << params_.requested_format.frame_size.ToString();
194 first_frame_timestamp_ = base::TimeTicks();
195 StartCaptureInternal(); 194 StartCaptureInternal();
196 } 195 }
197 } 196 }
198 } 197 }
199 198
200 void VideoCaptureImpl::StopCapture(int client_id) { 199 void VideoCaptureImpl::StopCapture(int client_id) {
201 DCHECK(io_task_runner_->BelongsToCurrentThread()); 200 DCHECK(io_task_runner_->BelongsToCurrentThread());
202 // A client ID can be in only one client list. 201 // A client ID can be in only one client list.
203 // If this ID is in any client list, we can just remove it from 202 // If this ID is in any client list, we can just remove it from
204 // that client list and don't have to run the other following RemoveClient(). 203 // that client list and don't have to run the other following RemoveClient().
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
297 if (cb2_iter != client_buffer2s_.end()) { 296 if (cb2_iter != client_buffer2s_.end()) {
298 DCHECK(!cb2_iter->second.get() || cb2_iter->second->HasOneRef()) 297 DCHECK(!cb2_iter->second.get() || cb2_iter->second->HasOneRef())
299 << "Instructed to delete buffer we are still using."; 298 << "Instructed to delete buffer we are still using.";
300 client_buffer2s_.erase(cb2_iter); 299 client_buffer2s_.erase(cb2_iter);
301 } 300 }
302 } 301 }
303 } 302 }
304 303
305 void VideoCaptureImpl::OnBufferReceived( 304 void VideoCaptureImpl::OnBufferReceived(
306 int buffer_id, 305 int buffer_id,
307 base::TimeTicks timestamp, 306 base::TimeDelta timestamp,
308 const base::DictionaryValue& metadata, 307 const base::DictionaryValue& metadata,
309 media::VideoPixelFormat pixel_format, 308 media::VideoPixelFormat pixel_format,
310 media::VideoFrame::StorageType storage_type, 309 media::VideoFrame::StorageType storage_type,
311 const gfx::Size& coded_size, 310 const gfx::Size& coded_size,
312 const gfx::Rect& visible_rect) { 311 const gfx::Rect& visible_rect) {
313 DCHECK(io_task_runner_->BelongsToCurrentThread()); 312 DCHECK(io_task_runner_->BelongsToCurrentThread());
314 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ || 313 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ ||
315 pixel_format != media::PIXEL_FORMAT_I420 || 314 pixel_format != media::PIXEL_FORMAT_I420 ||
316 (storage_type != media::VideoFrame::STORAGE_SHMEM && 315 (storage_type != media::VideoFrame::STORAGE_SHMEM &&
317 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) { 316 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) {
318 // Crash in debug builds since the host should not have provided a buffer 317 // Crash in debug builds since the host should not have provided a buffer
319 // with an unsupported pixel format or storage type. 318 // with an unsupported pixel format or storage type.
320 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); 319 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
321 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM || 320 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM ||
322 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS); 321 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS);
323 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 322 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
324 gpu::SyncToken(), -1.0)); 323 gpu::SyncToken(), -1.0));
325 return; 324 return;
326 } 325 }
327 if (first_frame_timestamp_.is_null())
328 first_frame_timestamp_ = timestamp;
329 326
327 base::TimeTicks reference_time;
328 media::VideoFrameMetadata frame_metadata;
329 frame_metadata.MergeInternalValuesFrom(metadata);
330 const bool success = frame_metadata.GetTimeTicks(
331 media::VideoFrameMetadata::REFERENCE_TIME, &reference_time);
332 DCHECK(success);
333
334 if (first_frame_ref_time_.is_null())
335 first_frame_ref_time_ = reference_time;
336
337 // If the timestamp is not prepared, we use reference time to make a rough
miu 2016/06/08 19:24:44 This wasn't in PS1. Looks like there are some capt
qiangchen 2016/06/08 20:08:54 Yeah, when I change the signature of OnIncomingCap
338 // estimate.
339 if (timestamp.is_zero())
340 timestamp = reference_time - first_frame_ref_time_;
341
342 // TODO(qiangchen): Change the metric name to "reference_time" and
343 // "timestamp", so that we have consistent naming everywhere.
330 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc 344 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
331 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived", 345 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived",
332 TRACE_EVENT_SCOPE_THREAD, "timestamp", 346 TRACE_EVENT_SCOPE_THREAD, "timestamp",
333 timestamp.ToInternalValue(), "time_delta", 347 (reference_time - base::TimeTicks()).InMicroseconds(),
334 (timestamp - first_frame_timestamp_).ToInternalValue()); 348 "time_delta", timestamp.InMicroseconds());
335 349
336 scoped_refptr<media::VideoFrame> frame; 350 scoped_refptr<media::VideoFrame> frame;
337 BufferFinishedCallback buffer_finished_callback; 351 BufferFinishedCallback buffer_finished_callback;
338 std::unique_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken); 352 std::unique_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken);
339 switch (storage_type) { 353 switch (storage_type) {
340 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: { 354 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: {
341 const auto& iter = client_buffer2s_.find(buffer_id); 355 const auto& iter = client_buffer2s_.find(buffer_id);
342 DCHECK(iter != client_buffer2s_.end()); 356 DCHECK(iter != client_buffer2s_.end());
343 scoped_refptr<ClientBuffer2> buffer = iter->second; 357 scoped_refptr<ClientBuffer2> buffer = iter->second;
344 const auto& handles = buffer->gpu_memory_buffer_handles(); 358 const auto& handles = buffer->gpu_memory_buffer_handles();
345 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers( 359 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers(
346 media::PIXEL_FORMAT_I420, 360 media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size),
347 coded_size, 361 coded_size, buffer->stride(media::VideoFrame::kYPlane),
348 gfx::Rect(coded_size),
349 coded_size,
350 buffer->stride(media::VideoFrame::kYPlane),
351 buffer->stride(media::VideoFrame::kUPlane), 362 buffer->stride(media::VideoFrame::kUPlane),
352 buffer->stride(media::VideoFrame::kVPlane), 363 buffer->stride(media::VideoFrame::kVPlane),
353 buffer->data(media::VideoFrame::kYPlane), 364 buffer->data(media::VideoFrame::kYPlane),
354 buffer->data(media::VideoFrame::kUPlane), 365 buffer->data(media::VideoFrame::kUPlane),
355 buffer->data(media::VideoFrame::kVPlane), 366 buffer->data(media::VideoFrame::kVPlane),
356 handles[media::VideoFrame::kYPlane], 367 handles[media::VideoFrame::kYPlane],
357 handles[media::VideoFrame::kUPlane], 368 handles[media::VideoFrame::kUPlane],
358 handles[media::VideoFrame::kVPlane], 369 handles[media::VideoFrame::kVPlane], timestamp);
359 timestamp - first_frame_timestamp_);
360 buffer_finished_callback = media::BindToCurrentLoop( 370 buffer_finished_callback = media::BindToCurrentLoop(
361 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2, 371 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2,
362 weak_factory_.GetWeakPtr(), buffer_id, buffer)); 372 weak_factory_.GetWeakPtr(), buffer_id, buffer));
363 break; 373 break;
364 } 374 }
365 case media::VideoFrame::STORAGE_SHMEM: { 375 case media::VideoFrame::STORAGE_SHMEM: {
366 const auto& iter = client_buffers_.find(buffer_id); 376 const auto& iter = client_buffers_.find(buffer_id);
367 DCHECK(iter != client_buffers_.end()); 377 DCHECK(iter != client_buffers_.end());
368 const scoped_refptr<ClientBuffer> buffer = iter->second; 378 const scoped_refptr<ClientBuffer> buffer = iter->second;
369 frame = media::VideoFrame::WrapExternalSharedMemory( 379 frame = media::VideoFrame::WrapExternalSharedMemory(
370 pixel_format, coded_size, visible_rect, 380 pixel_format, coded_size, visible_rect,
371 gfx::Size(visible_rect.width(), visible_rect.height()), 381 gfx::Size(visible_rect.width(), visible_rect.height()),
372 reinterpret_cast<uint8_t*>(buffer->buffer()->memory()), 382 reinterpret_cast<uint8_t*>(buffer->buffer()->memory()),
373 buffer->buffer_size(), buffer->buffer()->handle(), 383 buffer->buffer_size(), buffer->buffer()->handle(),
374 0 /* shared_memory_offset */, timestamp - first_frame_timestamp_); 384 0 /* shared_memory_offset */, timestamp);
375 buffer_finished_callback = media::BindToCurrentLoop( 385 buffer_finished_callback = media::BindToCurrentLoop(
376 base::Bind(&VideoCaptureImpl::OnClientBufferFinished, 386 base::Bind(&VideoCaptureImpl::OnClientBufferFinished,
377 weak_factory_.GetWeakPtr(), buffer_id, buffer)); 387 weak_factory_.GetWeakPtr(), buffer_id, buffer));
378 break; 388 break;
379 } 389 }
380 default: 390 default:
381 NOTREACHED(); 391 NOTREACHED();
382 break; 392 break;
383 } 393 }
384 if (!frame) { 394 if (!frame) {
385 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 395 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
386 gpu::SyncToken(), -1.0)); 396 gpu::SyncToken(), -1.0));
387 return; 397 return;
388 } 398 }
389 399
390 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
391 timestamp);
392 frame->AddDestructionObserver( 400 frame->AddDestructionObserver(
393 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(), 401 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(),
394 base::Passed(&release_sync_token), buffer_finished_callback)); 402 base::Passed(&release_sync_token), buffer_finished_callback));
395 403
396 frame->metadata()->MergeInternalValuesFrom(metadata); 404 frame->metadata()->MergeInternalValuesFrom(metadata);
397 405
406 // TODO(qiangchen): Dive into the full code path to let frame metadata hold
407 // reference time rather than using an extra parameter.
miu 2016/06/08 19:24:44 Yep, this "deliver callback" API was developed bef
qiangchen 2016/06/08 20:08:54 Acknowledged.
398 for (const auto& client : clients_) 408 for (const auto& client : clients_)
399 client.second.deliver_frame_cb.Run(frame, timestamp); 409 client.second.deliver_frame_cb.Run(frame, reference_time);
400 } 410 }
401 411
402 void VideoCaptureImpl::OnClientBufferFinished( 412 void VideoCaptureImpl::OnClientBufferFinished(
403 int buffer_id, 413 int buffer_id,
404 const scoped_refptr<ClientBuffer>& /* ignored_buffer */, 414 const scoped_refptr<ClientBuffer>& /* ignored_buffer */,
405 const gpu::SyncToken& release_sync_token, 415 const gpu::SyncToken& release_sync_token,
406 double consumer_resource_utilization) { 416 double consumer_resource_utilization) {
407 DCHECK(io_task_runner_->BelongsToCurrentThread()); 417 DCHECK(io_task_runner_->BelongsToCurrentThread());
408 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 418 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
409 release_sync_token, 419 release_sync_token,
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
558 double consumer_resource_utilization = -1.0; 568 double consumer_resource_utilization = -1.0;
559 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION, 569 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION,
560 &consumer_resource_utilization)) { 570 &consumer_resource_utilization)) {
561 consumer_resource_utilization = -1.0; 571 consumer_resource_utilization = -1.0;
562 } 572 }
563 573
564 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization); 574 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization);
565 } 575 }
566 576
567 } // namespace content 577 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698