| Index: media/capture/video/fake_video_capture_device.cc
|
| diff --git a/media/capture/video/fake_video_capture_device.cc b/media/capture/video/fake_video_capture_device.cc
|
| index 333bca72ba09e6691058f4fcffa8a795adfc8962..d1e6ed2cded79889c7ece8452afec4eb9a029407 100644
|
| --- a/media/capture/video/fake_video_capture_device.cc
|
| +++ b/media/capture/video/fake_video_capture_device.cc
|
| @@ -28,11 +28,36 @@ namespace media {
|
| static const float kPacmanAngularVelocity = 600;
|
| // Beep every 500 ms.
|
| static const int kBeepInterval = 500;
|
| +// Gradient travels from bottom to top in 5 seconds.
|
| +static const float kGradientFrequency = 1.f / 5;
|
|
|
| static const uint32_t kMinZoom = 100;
|
| static const uint32_t kMaxZoom = 400;
|
| static const uint32_t kZoomStep = 1;
|
|
|
| +// Starting from top left, -45 deg gradient.
|
| +void Draw16BitGradient(uint8_t* const pixels,
|
| + base::TimeDelta elapsed_time,
|
| + const gfx::Size& frame_size) {
|
| + uint16_t* data = reinterpret_cast<uint16_t*>(pixels);
|
| + // We calculate the color difference to the neighbour on the right or below,
|
| + // so that the value change from top left to bottom right is 65535 (pixel
|
| + // values are wrapped around uint16_t).
|
| + const float color_step =
|
| + 65535 / static_cast<float>(frame_size.width() + frame_size.height());
|
| + float start =
|
| + fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536);
|
| + for (int j = 0; j < frame_size.height(); ++j) {
|
| + float value = start;
|
| + uint16_t* row = data + j * (frame_size.width());
|
| + for (int i = 0; i < frame_size.width(); ++i) {
|
| + *row++ = static_cast<unsigned>(value) & 0xFFFF;
|
| + value += color_step;
|
| + }
|
| + start += color_step;
|
| + }
|
| +}
|
| +
|
| void DrawPacman(bool use_argb,
|
| uint8_t* const data,
|
| base::TimeDelta elapsed_time,
|
| @@ -114,9 +139,11 @@ void DoTakeFakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
|
| }
|
|
|
| FakeVideoCaptureDevice::FakeVideoCaptureDevice(BufferOwnership buffer_ownership,
|
| - float fake_capture_rate)
|
| + float fake_capture_rate,
|
| + VideoPixelFormat pixel_format)
|
| : buffer_ownership_(buffer_ownership),
|
| fake_capture_rate_(fake_capture_rate),
|
| + pixel_format_(pixel_format),
|
| current_zoom_(kMinZoom),
|
| weak_factory_(this) {}
|
|
|
| @@ -141,22 +168,25 @@ void FakeVideoCaptureDevice::AllocateAndStart(
|
| capture_format_.frame_size.SetSize(1280, 720);
|
| else if (params.requested_format.frame_size.width() > 320)
|
| capture_format_.frame_size.SetSize(640, 480);
|
| - else
|
| + else if (params.requested_format.frame_size.width() > 96)
|
| capture_format_.frame_size.SetSize(320, 240);
|
| + else
|
| + capture_format_.frame_size.SetSize(96, 96);
|
|
|
| + capture_format_.pixel_format = pixel_format_;
|
| if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) {
|
| capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
|
| capture_format_.pixel_format = PIXEL_FORMAT_ARGB;
|
| DVLOG(1) << "starting with client argb buffers";
|
| } else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) {
|
| capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
|
| - capture_format_.pixel_format = PIXEL_FORMAT_I420;
|
| - DVLOG(1) << "starting with own I420 buffers";
|
| + DVLOG(1) << "starting with own" << VideoPixelFormatToString(pixel_format_)
|
| + << "buffers";
|
| }
|
|
|
| - if (capture_format_.pixel_format == PIXEL_FORMAT_I420) {
|
| + if (buffer_ownership_ != BufferOwnership::CLIENT_BUFFERS) {
|
| fake_frame_.reset(new uint8_t[VideoFrame::AllocationSize(
|
| - PIXEL_FORMAT_I420, capture_format_.frame_size)]);
|
| + pixel_format_, capture_format_.frame_size)]);
|
| }
|
|
|
| beep_time_ = base::TimeDelta();
|
| @@ -235,11 +265,15 @@ void FakeVideoCaptureDevice::CaptureUsingOwnBuffers(
|
| base::TimeTicks expected_execution_time) {
|
| DCHECK(thread_checker_.CalledOnValidThread());
|
| const size_t frame_size = capture_format_.ImageAllocationSize();
|
| - memset(fake_frame_.get(), 0, frame_size);
|
| -
|
| - DrawPacman(false /* use_argb */, fake_frame_.get(), elapsed_time_,
|
| - fake_capture_rate_, capture_format_.frame_size, current_zoom_);
|
|
|
| + if (capture_format_.pixel_format == media::PIXEL_FORMAT_Y16) {
|
| + Draw16BitGradient(fake_frame_.get(), elapsed_time_,
|
| + capture_format_.frame_size);
|
| + } else {
|
| + memset(fake_frame_.get(), 0, frame_size);
|
| + DrawPacman(false /* use_argb */, fake_frame_.get(), elapsed_time_,
|
| + fake_capture_rate_, capture_format_.frame_size, current_zoom_);
|
| + }
|
| // Give the captured frame to the client.
|
| base::TimeTicks now = base::TimeTicks::Now();
|
| if (first_ref_time_.is_null())
|
| @@ -264,12 +298,17 @@ void FakeVideoCaptureDevice::CaptureUsingClientBuffers(
|
| DLOG_IF(ERROR, !capture_buffer) << "Couldn't allocate Capture Buffer";
|
| DCHECK(capture_buffer->data()) << "Buffer has NO backing memory";
|
|
|
| - DCHECK_EQ(PIXEL_STORAGE_CPU, capture_format_.pixel_storage);
|
| - DCHECK_EQ(PIXEL_FORMAT_ARGB, capture_format_.pixel_format);
|
| - uint8_t* data_ptr = static_cast<uint8_t*>(capture_buffer->data());
|
| - memset(data_ptr, 0, capture_buffer->mapped_size());
|
| - DrawPacman(true /* use_argb */, data_ptr, elapsed_time_, fake_capture_rate_,
|
| - capture_format_.frame_size, current_zoom_);
|
| + if (capture_format_.pixel_format == media::PIXEL_FORMAT_Y16) {
|
| + Draw16BitGradient(static_cast<uint8_t*>(capture_buffer->data()),
|
| + elapsed_time_, capture_format_.frame_size);
|
| + } else {
|
| + DCHECK_EQ(PIXEL_STORAGE_CPU, capture_format_.pixel_storage);
|
| + DCHECK_EQ(PIXEL_FORMAT_ARGB, capture_format_.pixel_format);
|
| + uint8_t* data_ptr = static_cast<uint8_t*>(capture_buffer->data());
|
| + memset(data_ptr, 0, capture_buffer->mapped_size());
|
| + DrawPacman(true /* use_argb */, data_ptr, elapsed_time_, fake_capture_rate_,
|
| + capture_format_.frame_size, current_zoom_);
|
| + }
|
|
|
| // Give the captured frame to the client.
|
| base::TimeTicks now = base::TimeTicks::Now();
|
|
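The Y16 path added above boils down to a time-animated diagonal ramp: every pixel is a 16-bit value that grows by color_step both to the right and downward, and the starting value is shifted with elapsed time so the pattern cycles once every 5 seconds. The snippet below is a minimal standalone sketch of that arithmetic, not the Chromium code itself; it has no media/ dependencies, the 320x240 frame size and the 1.25 s timestamp are arbitrary example values, and gradient_frequency simply mirrors the kGradientFrequency constant introduced in this patch.

    // Standalone sketch of the Y16 gradient arithmetic (illustration only).
    #include <cmath>
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    int main() {
      const int width = 320, height = 240;       // example frame size
      const double elapsed_seconds = 1.25;       // example timestamp
      const float gradient_frequency = 1.f / 5;  // mirrors kGradientFrequency

      std::vector<uint16_t> frame(width * height);
      const float color_step = 65535 / static_cast<float>(width + height);
      // Time-dependent value of the top-left pixel, wrapped into [0, 65536).
      float start =
          std::fmod(65536 * elapsed_seconds * gradient_frequency, 65536);
      for (int j = 0; j < height; ++j) {
        float value = start;
        uint16_t* row = frame.data() + j * width;
        for (int i = 0; i < width; ++i) {
          *row++ = static_cast<unsigned>(value) & 0xFFFF;  // wrap at 2^16
          value += color_step;
        }
        start += color_step;  // next row starts one step further along the ramp
      }
      std::printf("(0,0)=%u (1,0)=%u (0,1)=%u\n",
                  static_cast<unsigned>(frame[0]),
                  static_cast<unsigned>(frame[1]),
                  static_cast<unsigned>(frame[width]));
      return 0;
    }

For a 320x240 frame, color_step is 65535 / 560, roughly 117, so adjacent pixels along either axis differ by about 117 and the top-left to bottom-right diagonal spans the full 16-bit range, as the comment in Draw16BitGradient describes. Because the ramp crosses the range exactly once per frame, any truncation to 8 bits downstream would show up as the gradient repeating 256 times instead of once. Callers opt into this pattern by passing media::PIXEL_FORMAT_Y16 as the new third constructor argument; the factory and test changes that actually pass it are not part of this hunk.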
|