Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(759)

Side by Side Diff: content/renderer/media/video_capture_impl.cc

Issue 2045813003: Decouple capture timestamp and reference time (Closed) Base URL: https://chromium.googlesource.com/chromium/src.git@master
Patch Set: Created 4 years, 6 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 // 4 //
5 // Notes about usage of this object by VideoCaptureImplManager. 5 // Notes about usage of this object by VideoCaptureImplManager.
6 // 6 //
7 // VideoCaptureImplManager accesses this object by using a Unretained() 7 // VideoCaptureImplManager accesses this object by using a Unretained()
8 // binding and tasks on the IO thread. It is then important that 8 // binding and tasks on the IO thread. It is then important that
9 // VideoCaptureImpl never posts tasks to itself. All operations must be 9 // VideoCaptureImpl never posts tasks to itself. All operations must be
10 // synchronous. 10 // synchronous.
(...skipping 173 matching lines...) Expand 10 before | Expand all | Expand 10 after
184 if (state_ == VIDEO_CAPTURE_STATE_STARTED) 184 if (state_ == VIDEO_CAPTURE_STATE_STARTED)
185 return; 185 return;
186 params_ = params; 186 params_ = params;
187 if (params_.requested_format.frame_rate > 187 if (params_.requested_format.frame_rate >
188 media::limits::kMaxFramesPerSecond) { 188 media::limits::kMaxFramesPerSecond) {
189 params_.requested_format.frame_rate = 189 params_.requested_format.frame_rate =
190 media::limits::kMaxFramesPerSecond; 190 media::limits::kMaxFramesPerSecond;
191 } 191 }
192 DVLOG(1) << "StartCapture: starting with first resolution " 192 DVLOG(1) << "StartCapture: starting with first resolution "
193 << params_.requested_format.frame_size.ToString(); 193 << params_.requested_format.frame_size.ToString();
194 first_frame_timestamp_ = base::TimeTicks();
195 StartCaptureInternal(); 194 StartCaptureInternal();
196 } 195 }
197 } 196 }
198 } 197 }
199 198
200 void VideoCaptureImpl::StopCapture(int client_id) { 199 void VideoCaptureImpl::StopCapture(int client_id) {
201 DCHECK(io_task_runner_->BelongsToCurrentThread()); 200 DCHECK(io_task_runner_->BelongsToCurrentThread());
202 // A client ID can be in only one client list. 201 // A client ID can be in only one client list.
203 // If this ID is in any client list, we can just remove it from 202 // If this ID is in any client list, we can just remove it from
204 // that client list and don't have to run the other following RemoveClient(). 203 // that client list and don't have to run the other following RemoveClient().
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after
297 if (cb2_iter != client_buffer2s_.end()) { 296 if (cb2_iter != client_buffer2s_.end()) {
298 DCHECK(!cb2_iter->second.get() || cb2_iter->second->HasOneRef()) 297 DCHECK(!cb2_iter->second.get() || cb2_iter->second->HasOneRef())
299 << "Instructed to delete buffer we are still using."; 298 << "Instructed to delete buffer we are still using.";
300 client_buffer2s_.erase(cb2_iter); 299 client_buffer2s_.erase(cb2_iter);
301 } 300 }
302 } 301 }
303 } 302 }
304 303
305 void VideoCaptureImpl::OnBufferReceived( 304 void VideoCaptureImpl::OnBufferReceived(
306 int buffer_id, 305 int buffer_id,
307 base::TimeTicks timestamp, 306 base::TimeTicks reference_time,
307 base::TimeDelta timestamp,
308 const base::DictionaryValue& metadata, 308 const base::DictionaryValue& metadata,
309 media::VideoPixelFormat pixel_format, 309 media::VideoPixelFormat pixel_format,
310 media::VideoFrame::StorageType storage_type, 310 media::VideoFrame::StorageType storage_type,
311 const gfx::Size& coded_size, 311 const gfx::Size& coded_size,
312 const gfx::Rect& visible_rect) { 312 const gfx::Rect& visible_rect) {
313 DCHECK(io_task_runner_->BelongsToCurrentThread()); 313 DCHECK(io_task_runner_->BelongsToCurrentThread());
314 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ || 314 if (state_ != VIDEO_CAPTURE_STATE_STARTED || suspended_ ||
315 pixel_format != media::PIXEL_FORMAT_I420 || 315 pixel_format != media::PIXEL_FORMAT_I420 ||
316 (storage_type != media::VideoFrame::STORAGE_SHMEM && 316 (storage_type != media::VideoFrame::STORAGE_SHMEM &&
317 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) { 317 storage_type != media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS)) {
318 // Crash in debug builds since the host should not have provided a buffer 318 // Crash in debug builds since the host should not have provided a buffer
319 // with an unsupported pixel format or storage type. 319 // with an unsupported pixel format or storage type.
320 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format); 320 DCHECK_EQ(media::PIXEL_FORMAT_I420, pixel_format);
321 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM || 321 DCHECK(storage_type == media::VideoFrame::STORAGE_SHMEM ||
322 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS); 322 storage_type == media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS);
323 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 323 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
324 gpu::SyncToken(), -1.0)); 324 gpu::SyncToken(), -1.0));
325 return; 325 return;
326 } 326 }
327 if (first_frame_timestamp_.is_null())
328 first_frame_timestamp_ = timestamp;
329 327
330 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc 328 // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc
331 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived", 329 TRACE_EVENT_INSTANT2("cast_perf_test", "OnBufferReceived",
332 TRACE_EVENT_SCOPE_THREAD, "timestamp", 330 TRACE_EVENT_SCOPE_THREAD, "reference_time",
333 timestamp.ToInternalValue(), "time_delta", 331 reference_time.ToInternalValue(), "timestamp",
miu 2016/06/07 20:03:47 Hmm...These uses of ToInternalValue() are questionable
qiangchen 2016/06/08 18:04:30 Change the names back to be consistent with other
334 (timestamp - first_frame_timestamp_).ToInternalValue()); 332 timestamp.ToInternalValue());
335 333
336 scoped_refptr<media::VideoFrame> frame; 334 scoped_refptr<media::VideoFrame> frame;
337 BufferFinishedCallback buffer_finished_callback; 335 BufferFinishedCallback buffer_finished_callback;
338 std::unique_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken); 336 std::unique_ptr<gpu::SyncToken> release_sync_token(new gpu::SyncToken);
339 switch (storage_type) { 337 switch (storage_type) {
340 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: { 338 case media::VideoFrame::STORAGE_GPU_MEMORY_BUFFERS: {
341 const auto& iter = client_buffer2s_.find(buffer_id); 339 const auto& iter = client_buffer2s_.find(buffer_id);
342 DCHECK(iter != client_buffer2s_.end()); 340 DCHECK(iter != client_buffer2s_.end());
343 scoped_refptr<ClientBuffer2> buffer = iter->second; 341 scoped_refptr<ClientBuffer2> buffer = iter->second;
344 const auto& handles = buffer->gpu_memory_buffer_handles(); 342 const auto& handles = buffer->gpu_memory_buffer_handles();
345 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers( 343 frame = media::VideoFrame::WrapExternalYuvGpuMemoryBuffers(
346 media::PIXEL_FORMAT_I420, 344 media::PIXEL_FORMAT_I420, coded_size, gfx::Rect(coded_size),
347 coded_size, 345 coded_size, buffer->stride(media::VideoFrame::kYPlane),
348 gfx::Rect(coded_size),
349 coded_size,
350 buffer->stride(media::VideoFrame::kYPlane),
351 buffer->stride(media::VideoFrame::kUPlane), 346 buffer->stride(media::VideoFrame::kUPlane),
352 buffer->stride(media::VideoFrame::kVPlane), 347 buffer->stride(media::VideoFrame::kVPlane),
353 buffer->data(media::VideoFrame::kYPlane), 348 buffer->data(media::VideoFrame::kYPlane),
354 buffer->data(media::VideoFrame::kUPlane), 349 buffer->data(media::VideoFrame::kUPlane),
355 buffer->data(media::VideoFrame::kVPlane), 350 buffer->data(media::VideoFrame::kVPlane),
356 handles[media::VideoFrame::kYPlane], 351 handles[media::VideoFrame::kYPlane],
357 handles[media::VideoFrame::kUPlane], 352 handles[media::VideoFrame::kUPlane],
358 handles[media::VideoFrame::kVPlane], 353 handles[media::VideoFrame::kVPlane], timestamp);
359 timestamp - first_frame_timestamp_);
360 buffer_finished_callback = media::BindToCurrentLoop( 354 buffer_finished_callback = media::BindToCurrentLoop(
361 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2, 355 base::Bind(&VideoCaptureImpl::OnClientBufferFinished2,
362 weak_factory_.GetWeakPtr(), buffer_id, buffer)); 356 weak_factory_.GetWeakPtr(), buffer_id, buffer));
363 break; 357 break;
364 } 358 }
365 case media::VideoFrame::STORAGE_SHMEM: { 359 case media::VideoFrame::STORAGE_SHMEM: {
366 const auto& iter = client_buffers_.find(buffer_id); 360 const auto& iter = client_buffers_.find(buffer_id);
367 DCHECK(iter != client_buffers_.end()); 361 DCHECK(iter != client_buffers_.end());
368 const scoped_refptr<ClientBuffer> buffer = iter->second; 362 const scoped_refptr<ClientBuffer> buffer = iter->second;
369 frame = media::VideoFrame::WrapExternalSharedMemory( 363 frame = media::VideoFrame::WrapExternalSharedMemory(
370 pixel_format, coded_size, visible_rect, 364 pixel_format, coded_size, visible_rect,
371 gfx::Size(visible_rect.width(), visible_rect.height()), 365 gfx::Size(visible_rect.width(), visible_rect.height()),
372 reinterpret_cast<uint8_t*>(buffer->buffer()->memory()), 366 reinterpret_cast<uint8_t*>(buffer->buffer()->memory()),
373 buffer->buffer_size(), buffer->buffer()->handle(), 367 buffer->buffer_size(), buffer->buffer()->handle(),
374 0 /* shared_memory_offset */, timestamp - first_frame_timestamp_); 368 0 /* shared_memory_offset */, timestamp);
375 buffer_finished_callback = media::BindToCurrentLoop( 369 buffer_finished_callback = media::BindToCurrentLoop(
376 base::Bind(&VideoCaptureImpl::OnClientBufferFinished, 370 base::Bind(&VideoCaptureImpl::OnClientBufferFinished,
377 weak_factory_.GetWeakPtr(), buffer_id, buffer)); 371 weak_factory_.GetWeakPtr(), buffer_id, buffer));
378 break; 372 break;
379 } 373 }
380 default: 374 default:
381 NOTREACHED(); 375 NOTREACHED();
382 break; 376 break;
383 } 377 }
384 if (!frame) { 378 if (!frame) {
385 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 379 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
386 gpu::SyncToken(), -1.0)); 380 gpu::SyncToken(), -1.0));
387 return; 381 return;
388 } 382 }
389 383
390 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME, 384 frame->metadata()->SetTimeTicks(media::VideoFrameMetadata::REFERENCE_TIME,
miu 2016/06/07 20:03:47 This can become: DCHECK(frame->metadata()->HasK
qiangchen 2016/06/08 18:04:30 It is removed, as the metadata is already prepared
391 timestamp); 385 reference_time);
392 frame->AddDestructionObserver( 386 frame->AddDestructionObserver(
393 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(), 387 base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(),
394 base::Passed(&release_sync_token), buffer_finished_callback)); 388 base::Passed(&release_sync_token), buffer_finished_callback));
395 389
396 frame->metadata()->MergeInternalValuesFrom(metadata); 390 frame->metadata()->MergeInternalValuesFrom(metadata);
397 391
398 for (const auto& client : clients_) 392 for (const auto& client : clients_)
399 client.second.deliver_frame_cb.Run(frame, timestamp); 393 client.second.deliver_frame_cb.Run(frame, reference_time);
400 } 394 }
401 395
402 void VideoCaptureImpl::OnClientBufferFinished( 396 void VideoCaptureImpl::OnClientBufferFinished(
403 int buffer_id, 397 int buffer_id,
404 const scoped_refptr<ClientBuffer>& /* ignored_buffer */, 398 const scoped_refptr<ClientBuffer>& /* ignored_buffer */,
405 const gpu::SyncToken& release_sync_token, 399 const gpu::SyncToken& release_sync_token,
406 double consumer_resource_utilization) { 400 double consumer_resource_utilization) {
407 DCHECK(io_task_runner_->BelongsToCurrentThread()); 401 DCHECK(io_task_runner_->BelongsToCurrentThread());
408 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id, 402 Send(new VideoCaptureHostMsg_BufferReady(device_id_, buffer_id,
409 release_sync_token, 403 release_sync_token,
(...skipping 148 matching lines...) Expand 10 before | Expand all | Expand 10 after
558 double consumer_resource_utilization = -1.0; 552 double consumer_resource_utilization = -1.0;
559 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION, 553 if (!metadata->GetDouble(media::VideoFrameMetadata::RESOURCE_UTILIZATION,
560 &consumer_resource_utilization)) { 554 &consumer_resource_utilization)) {
561 consumer_resource_utilization = -1.0; 555 consumer_resource_utilization = -1.0;
562 } 556 }
563 557
564 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization); 558 callback_to_io_thread.Run(*release_sync_token, consumer_resource_utilization);
565 } 559 }
566 560
567 } // namespace content 561 } // namespace content
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698