OLD | NEW |
1 // Copyright 2014 The Chromium Authors. All rights reserved. | 1 // Copyright 2014 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "content/renderer/media/media_stream_video_source.h" | 5 #include "content/renderer/media/media_stream_video_source.h" |
6 | 6 |
7 #include <algorithm> | 7 #include <algorithm> |
8 #include <limits> | 8 #include <limits> |
9 #include <string> | 9 #include <string> |
10 | 10 |
11 #include "base/logging.h" | 11 #include "base/logging.h" |
12 #include "base/strings/string_number_conversions.h" | 12 #include "base/strings/string_number_conversions.h" |
13 #include "content/renderer/media/media_stream_dependency_factory.h" | 13 #include "content/renderer/media/media_stream_dependency_factory.h" |
| 14 #include "content/renderer/media/media_stream_video_track.h" |
14 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" | 15 #include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h" |
15 | 16 |
16 namespace content { | 17 namespace content { |
17 | 18 |
18 // Constraint keys. Specified by draft-alvestrand-constraints-resolution-00b | 19 // Constraint keys. Specified by draft-alvestrand-constraints-resolution-00b |
19 const char MediaStreamVideoSource::kMinAspectRatio[] = "minAspectRatio"; | 20 const char MediaStreamVideoSource::kMinAspectRatio[] = "minAspectRatio"; |
20 const char MediaStreamVideoSource::kMaxAspectRatio[] = "maxAspectRatio"; | 21 const char MediaStreamVideoSource::kMaxAspectRatio[] = "maxAspectRatio"; |
21 const char MediaStreamVideoSource::kMaxWidth[] = "maxWidth"; | 22 const char MediaStreamVideoSource::kMaxWidth[] = "maxWidth"; |
22 const char MediaStreamVideoSource::kMinWidth[] = "minWidth"; | 23 const char MediaStreamVideoSource::kMinWidth[] = "minWidth"; |
23 const char MediaStreamVideoSource::kMaxHeight[] = "maxHeight"; | 24 const char MediaStreamVideoSource::kMaxHeight[] = "maxHeight"; |
(...skipping 245 matching lines...)
269 | 270 |
270 // Empty method used for keeping a reference to the original media::VideoFrame | 271 // Empty method used for keeping a reference to the original media::VideoFrame |
271 // in MediaStreamVideoSource::DeliverVideoFrame if cropping is needed. | 272 // in MediaStreamVideoSource::DeliverVideoFrame if cropping is needed. |
272 // The reference to |frame| is kept in the closure that calls this method. | 273 // The reference to |frame| is kept in the closure that calls this method. |
273 void ReleaseOriginalFrame( | 274 void ReleaseOriginalFrame( |
274 const scoped_refptr<media::VideoFrame>& frame) { | 275 const scoped_refptr<media::VideoFrame>& frame) { |
275 } | 276 } |
276 | 277 |
277 } // anonymous namespace | 278 } // anonymous namespace |
278 | 279 |
| 280 // static |
| 281 MediaStreamVideoSource* MediaStreamVideoSource::GetVideoSource( |
| 282 const blink::WebMediaStreamSource& source) { |
| 283 return static_cast<MediaStreamVideoSource*>(source.extraData()); |
| 284 } |
| 285 |
279 MediaStreamVideoSource::MediaStreamVideoSource( | 286 MediaStreamVideoSource::MediaStreamVideoSource( |
280 MediaStreamDependencyFactory* factory) | 287 MediaStreamDependencyFactory* factory) |
281 : state_(NEW), | 288 : state_(NEW), |
282 factory_(factory), | 289 factory_(factory), |
283 capture_adapter_(NULL) { | 290 capture_adapter_(NULL) { |
284 DCHECK(factory_); | 291 DCHECK(factory_); |
285 } | 292 } |
286 | 293 |
287 MediaStreamVideoSource::~MediaStreamVideoSource() { | 294 MediaStreamVideoSource::~MediaStreamVideoSource() { |
288 } | 295 } |
289 | 296 |
290 void MediaStreamVideoSource::AddTrack( | 297 void MediaStreamVideoSource::AddTrack( |
291 const blink::WebMediaStreamTrack& track, | 298 MediaStreamVideoTrack* track, |
292 const blink::WebMediaConstraints& constraints, | 299 const blink::WebMediaConstraints& constraints, |
293 const ConstraintsCallback& callback) { | 300 const ConstraintsCallback& callback) { |
294 DCHECK(CalledOnValidThread()); | 301 DCHECK(CalledOnValidThread()); |
295 requested_constraints_.push_back(RequestedConstraints(constraints, | 302 DCHECK(std::find(tracks_.begin(), tracks_.end(), |
296 callback)); | 303 track) == tracks_.end()); |
| 304 tracks_.push_back(track); |
| 305 |
| 306 requested_constraints_.push_back( |
| 307 RequestedConstraints(constraints, callback)); |
| 308 |
297 switch (state_) { | 309 switch (state_) { |
298 case NEW: { | 310 case NEW: { |
299 // Tab capture and Screen capture need the maximum requested height | 311 // Tab capture and Screen capture need the maximum requested height |
300 // and width to decide on the resolution. | 312 // and width to decide on the resolution. |
301 int max_requested_width = 0; | 313 int max_requested_width = 0; |
302 GetConstraintValue(constraints, true, kMaxWidth, &max_requested_width); | 314 GetConstraintValue(constraints, true, kMaxWidth, &max_requested_width); |
303 | 315 |
304 int max_requested_height = 0; | 316 int max_requested_height = 0; |
305 GetConstraintValue(constraints, true, kMaxHeight, &max_requested_height); | 317 GetConstraintValue(constraints, true, kMaxHeight, &max_requested_height); |
306 | 318 |
307 state_ = RETRIEVING_CAPABILITIES; | 319 state_ = RETRIEVING_CAPABILITIES; |
308 GetCurrentSupportedFormats(max_requested_width, | 320 GetCurrentSupportedFormats(max_requested_width, |
309 max_requested_height); | 321 max_requested_height); |
310 | 322 |
311 break; | 323 break; |
312 } | 324 } |
313 case STARTING: | 325 case STARTING: |
314 case RETRIEVING_CAPABILITIES: { | 326 case RETRIEVING_CAPABILITIES: { |
315 // The |callback| will be triggered once the delegate has started or | 327 // The |callback| will be triggered once the source has started or |
316 // the capabilities have been retrieved. | 328 // the capabilities have been retrieved. |
317 break; | 329 break; |
318 } | 330 } |
319 case ENDED: | 331 case ENDED: |
320 case STARTED: { | 332 case STARTED: { |
321 // Currently, reconfiguring the source is not supported. | 333 // Currently, reconfiguring the source is not supported. |
322 FinalizeAddTrack(); | 334 FinalizeAddTrack(); |
323 } | 335 } |
324 } | 336 } |
325 } | 337 } |
326 | 338 |
327 void MediaStreamVideoSource::RemoveTrack( | 339 void MediaStreamVideoSource::RemoveTrack(MediaStreamVideoTrack* video_track) { |
328 const blink::WebMediaStreamTrack& track) { | 340 std::vector<MediaStreamVideoTrack*>::iterator it = |
329 // TODO(ronghuawu): What should be done here? Do we really need RemoveTrack? | 341 std::find(tracks_.begin(), tracks_.end(), video_track); |
| 342 DCHECK(it != tracks_.end()); |
| 343 tracks_.erase(it); |
330 } | 344 } |
331 | 345 |
332 void MediaStreamVideoSource::InitAdapter() { | 346 void MediaStreamVideoSource::InitAdapter() { |
333 if (adapter_) | 347 if (adapter_) |
334 return; | 348 return; |
335 // Create the webrtc::MediaStreamVideoSourceInterface adapter. | 349 // Create the webrtc::MediaStreamVideoSourceInterface adapter. |
336 // It needs the constraints so that constraints used by a PeerConnection | 350 // It needs the constraints so that constraints used by a PeerConnection |
337 // will be available such as constraints for CPU adaptation and a tab | 351 // will be available such as constraints for CPU adaptation and a tab |
338 // capture. | 352 // capture. |
339 bool is_screencast = | 353 bool is_screencast = |
340 device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE || | 354 device_info().device.type == MEDIA_TAB_VIDEO_CAPTURE || |
341 device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; | 355 device_info().device.type == MEDIA_DESKTOP_VIDEO_CAPTURE; |
342 capture_adapter_ = factory_->CreateVideoCapturer(is_screencast); | 356 capture_adapter_ = factory_->CreateVideoCapturer(is_screencast); |
343 capture_adapter_->SetRequestedFormat(current_format_); | 357 capture_adapter_->SetRequestedFormat(current_format_); |
344 adapter_ = factory_->CreateVideoSource(capture_adapter_, | 358 adapter_ = factory_->CreateVideoSource(capture_adapter_, |
345 current_constraints_); | 359 current_constraints_); |
346 } | 360 } |
347 | 361 |
348 webrtc::VideoSourceInterface* MediaStreamVideoSource::GetAdapter() { | 362 webrtc::VideoSourceInterface* MediaStreamVideoSource::GetAdapter() { |
349 if (!adapter_) { | 363 if (!adapter_) { |
350 InitAdapter(); | 364 InitAdapter(); |
351 } | 365 } |
352 return adapter_; | 366 return adapter_; |
353 } | 367 } |
354 | 368 |
355 void MediaStreamVideoSource::DoStopSource() { | 369 void MediaStreamVideoSource::DoStopSource() { |
356 DVLOG(3) << "DoStopSource()"; | 370 DVLOG(3) << "DoStopSource()"; |
357 StopSourceImpl(); | 371 StopSourceImpl(); |
358 state_ = ENDED; | 372 state_ = ENDED; |
| 373 SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded); |
359 } | 374 } |
360 | 375 |
361 void MediaStreamVideoSource::DeliverVideoFrame( | 376 void MediaStreamVideoSource::DeliverVideoFrame( |
362 const scoped_refptr<media::VideoFrame>& frame) { | 377 const scoped_refptr<media::VideoFrame>& frame) { |
363 scoped_refptr<media::VideoFrame> video_frame(frame); | 378 scoped_refptr<media::VideoFrame> video_frame(frame); |
364 | 379 |
365 if (frame->visible_rect().size() != frame_output_size_) { | 380 if (frame->visible_rect().size() != frame_output_size_) { |
366 // If |frame| is not the size that is expected, we need to crop it by | 381 // If |frame| is not the size that is expected, we need to crop it by |
367 // providing a new |visible_rect|. The new visible rect must be within the | 382 // providing a new |visible_rect|. The new visible rect must be within the |
368 // original |visible_rect|. | 383 // original |visible_rect|. |
(...skipping 19 matching lines...)
388 gfx::Rect rect(horiz_crop, vert_crop, visible_width, visible_height); | 403 gfx::Rect rect(horiz_crop, vert_crop, visible_width, visible_height); |
389 video_frame = media::VideoFrame::WrapVideoFrame( | 404 video_frame = media::VideoFrame::WrapVideoFrame( |
390 frame, rect, base::Bind(&ReleaseOriginalFrame, frame)); | 405 frame, rect, base::Bind(&ReleaseOriginalFrame, frame)); |
391 } | 406 } |
392 | 407 |
393 if ((frame->format() == media::VideoFrame::I420 || | 408 if ((frame->format() == media::VideoFrame::I420 || |
394 frame->format() == media::VideoFrame::YV12) && | 409 frame->format() == media::VideoFrame::YV12) && |
395 capture_adapter_) { | 410 capture_adapter_) { |
396 capture_adapter_->OnFrameCaptured(video_frame); | 411 capture_adapter_->OnFrameCaptured(video_frame); |
397 } | 412 } |
| 413 |
| 414 for (std::vector<MediaStreamVideoTrack*>::iterator it = tracks_.begin(); |
| 415 it != tracks_.end(); ++it) { |
| 416 (*it)->OnVideoFrame(video_frame); |
| 417 } |
398 } | 418 } |
399 | 419 |
400 void MediaStreamVideoSource::OnSupportedFormats( | 420 void MediaStreamVideoSource::OnSupportedFormats( |
401 const media::VideoCaptureFormats& formats) { | 421 const media::VideoCaptureFormats& formats) { |
402 DCHECK(CalledOnValidThread()); | 422 DCHECK(CalledOnValidThread()); |
403 DCHECK_EQ(RETRIEVING_CAPABILITIES, state_); | 423 DCHECK_EQ(RETRIEVING_CAPABILITIES, state_); |
404 | 424 |
405 supported_formats_ = formats; | 425 supported_formats_ = formats; |
406 if (!FindBestFormatWithConstraints(supported_formats_, | 426 if (!FindBestFormatWithConstraints(supported_formats_, |
407 &current_format_, | 427 &current_format_, |
(...skipping 59 matching lines...)
467 } | 487 } |
468 | 488 |
469 void MediaStreamVideoSource::FinalizeAddTrack() { | 489 void MediaStreamVideoSource::FinalizeAddTrack() { |
470 media::VideoCaptureFormats formats; | 490 media::VideoCaptureFormats formats; |
471 formats.push_back(current_format_); | 491 formats.push_back(current_format_); |
472 | 492 |
473 std::vector<RequestedConstraints> callbacks; | 493 std::vector<RequestedConstraints> callbacks; |
474 callbacks.swap(requested_constraints_); | 494 callbacks.swap(requested_constraints_); |
475 for (std::vector<RequestedConstraints>::iterator it = callbacks.begin(); | 495 for (std::vector<RequestedConstraints>::iterator it = callbacks.begin(); |
476 it != callbacks.end(); ++it) { | 496 it != callbacks.end(); ++it) { |
| 497 |
477 bool success = state_ == STARTED && | 498 bool success = state_ == STARTED && |
478 !FilterFormats(it->constraints, formats).empty(); | 499 !FilterFormats(it->constraints, formats).empty(); |
479 DVLOG(3) << "FinalizeAddTrack() success " << success; | 500 DVLOG(3) << "FinalizeAddTrack() success " << success; |
480 if (!it->callback.is_null()) | 501 if (!it->callback.is_null()) |
481 it->callback.Run(this, success); | 502 it->callback.Run(this, success); |
482 } | 503 } |
483 } | 504 } |
484 | 505 |
485 void MediaStreamVideoSource::SetReadyState( | 506 void MediaStreamVideoSource::SetReadyState( |
486 blink::WebMediaStreamSource::ReadyState state) { | 507 blink::WebMediaStreamSource::ReadyState state) { |
487 if (!owner().isNull()) { | 508 if (!owner().isNull()) { |
488 owner().setReadyState(state); | 509 owner().setReadyState(state); |
489 } | 510 } |
490 // TODO(perkj): Notify all registered tracks. | 511 for (std::vector<MediaStreamVideoTrack*>::iterator it = tracks_.begin(); |
| 512 it != tracks_.end(); ++it) { |
| 513 (*it)->OnReadyStateChanged(state); |
| 514 } |
491 } | 515 } |
492 | 516 |
493 MediaStreamVideoSource::RequestedConstraints::RequestedConstraints( | 517 MediaStreamVideoSource::RequestedConstraints::RequestedConstraints( |
494 const blink::WebMediaConstraints& constraints, | 518 const blink::WebMediaConstraints& constraints, |
495 const ConstraintsCallback& callback) | 519 const ConstraintsCallback& callback) |
496 : constraints(constraints), callback(callback) { | 520 : constraints(constraints), callback(callback) { |
497 } | 521 } |
498 | 522 |
499 MediaStreamVideoSource::RequestedConstraints::~RequestedConstraints() { | 523 MediaStreamVideoSource::RequestedConstraints::~RequestedConstraints() { |
500 } | 524 } |
501 | 525 |
502 } // namespace content | 526 } // namespace content |
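Not part of the CL itself, but for readers following the new track fan-out: below is a minimal standalone sketch of the registration/delivery pattern that AddTrack, RemoveTrack, and DeliverVideoFrame implement above. Track, Source, and VideoFrame here are simplified stand-ins, not the real MediaStreamVideoTrack and media::VideoFrame types.

// Illustrative sketch only (not part of the patch): models the source-to-track
// fan-out using stand-in types so it compiles on its own.
#include <algorithm>
#include <cassert>
#include <iostream>
#include <vector>

struct VideoFrame {};  // stand-in for media::VideoFrame

class Track {
 public:
  // Counterpart of MediaStreamVideoTrack::OnVideoFrame in the patch.
  void OnVideoFrame(const VideoFrame& /*frame*/) { ++frames_received_; }
  int frames_received() const { return frames_received_; }

 private:
  int frames_received_ = 0;
};

class Source {
 public:
  void AddTrack(Track* track) {
    // Mirrors the DCHECK in the patch: a track may be registered only once.
    assert(std::find(tracks_.begin(), tracks_.end(), track) == tracks_.end());
    tracks_.push_back(track);
  }
  void RemoveTrack(Track* track) {
    std::vector<Track*>::iterator it =
        std::find(tracks_.begin(), tracks_.end(), track);
    assert(it != tracks_.end());
    tracks_.erase(it);
  }
  void DeliverVideoFrame(const VideoFrame& frame) {
    // Every delivered frame reaches every currently registered track.
    for (size_t i = 0; i < tracks_.size(); ++i)
      tracks_[i]->OnVideoFrame(frame);
  }

 private:
  std::vector<Track*> tracks_;
};

int main() {
  Source source;
  Track a, b;
  source.AddTrack(&a);
  source.AddTrack(&b);
  source.DeliverVideoFrame(VideoFrame());
  source.RemoveTrack(&b);
  source.DeliverVideoFrame(VideoFrame());
  std::cout << a.frames_received() << " " << b.frames_received() << "\n";  // 2 1
  return 0;
}

As in the patch, a track is added at most once, removal erases it from the vector, and frame delivery simply iterates the registered tracks.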