Index: media/capture/video/fake_video_capture_device.cc |
diff --git a/media/capture/video/fake_video_capture_device.cc b/media/capture/video/fake_video_capture_device.cc |
index ba59bb4f2da8269cb7922d5faaa662b1f531626e..fe393982f9041a7a114497777fad97ed5421ca95 100644 |
--- a/media/capture/video/fake_video_capture_device.cc |
+++ b/media/capture/video/fake_video_capture_device.cc |
@@ -23,6 +23,7 @@ |
#include "third_party/skia/include/core/SkCanvas.h" |
#include "third_party/skia/include/core/SkMatrix.h" |
#include "third_party/skia/include/core/SkPaint.h" |
+#include "ui/gfx/codec/jpeg_codec.h" |
#include "ui/gfx/codec/png_codec.h" |
namespace media { |
@@ -47,7 +48,7 @@ static const int kSupportedSizesCount = |
arraysize(kSupportedSizesOrderedByIncreasingWidth); |
static const VideoPixelFormat kSupportedPixelFormats[] = { |
- PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB}; |
+ PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB, PIXEL_FORMAT_MJPEG}; |
static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) { |
for (const gfx::Size& supported_size : |
@@ -152,6 +153,28 @@ class ClientBufferFrameDeliverer : public FrameDeliverer { |
void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; |
}; |
+class CAPTURE_EXPORT JpegEncodingFrameDeliverer : public FrameDeliverer { |
+ public: |
+ JpegEncodingFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); |
+ ~JpegEncodingFrameDeliverer() override; |
+ |
+ // Implementation of FrameDeliveryStrategy |
+ void Initialize(VideoPixelFormat pixel_format, |
+ std::unique_ptr<VideoCaptureDevice::Client> client, |
+ const FakeDeviceState* device_state) override; |
+ void Uninitialize() override; |
+ void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; |
+ |
+ private: |
+ const FakeDeviceState* device_state_ = nullptr; |
+ std::unique_ptr<uint8_t[]> argb_buffer_; |
+ int argb_buffer_size_in_bytes_ = -1; |
+ std::vector<unsigned char> jpeg_buffer_; |
+ std::unique_ptr<VideoCaptureDevice::Client> client_; |
+ // The system time when we receive the first frame. |
+ base::TimeTicks first_ref_time_; |
chfremer
2017/02/17 18:07:38
I forgot to remove members |device_state_|, |client_| […remainder of comment lost in extraction]
chfremer
2017/02/22 17:28:41
Done.
|
+}; |
+ |
// Implements the photo functionality of a VideoCaptureDevice |
class FakePhotoDevice { |
public: |
@@ -242,15 +265,32 @@ std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance( |
auto device_state = |
base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format); |
- auto video_frame_painter = |
- base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get()); |
+ std::unique_ptr<PacmanFramePainter> video_frame_painter; |
+ if (pixel_format == PIXEL_FORMAT_MJPEG) { |
+ video_frame_painter = base::MakeUnique<PacmanFramePainter>( |
+ PIXEL_FORMAT_ARGB, device_state.get()); |
+ } else { |
+ video_frame_painter = |
+ base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get()); |
+ } |
mcasas
2017/02/17 18:35:47
I find this part hard to read, mostly because of the […remainder of comment lost in extraction]
chfremer
2017/02/22 17:28:42
Done.
|
+ |
std::unique_ptr<FrameDeliverer> frame_delivery_strategy; |
switch (delivery_mode) { |
case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS: |
- frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>( |
- std::move(video_frame_painter)); |
+ if (pixel_format == PIXEL_FORMAT_MJPEG) { |
+ frame_delivery_strategy = base::MakeUnique<JpegEncodingFrameDeliverer>( |
+ std::move(video_frame_painter)); |
+ } else { |
+ frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>( |
+ std::move(video_frame_painter)); |
+ } |
break; |
case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS: |
+ if (pixel_format == PIXEL_FORMAT_MJPEG) { |
+ DLOG(ERROR) << "PIXEL_FORMAT_MJPEG cannot be used in combination with " |
+ << "USE_CLIENT_BUFFERS."; |
+ return nullptr; |
+ } |
mcasas
2017/02/17 18:35:47
Maybe not in this CL, but I don't see any fundamental […remainder of comment lost in extraction]
chfremer
2017/02/22 17:28:41
I thought so, too, until I tried and ran into some […remainder of comment lost in extraction]
frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>( |
std::move(video_frame_painter)); |
break; |
@@ -514,6 +554,63 @@ void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { |
base::Passed(&callback), elapsed_time_)); |
} |
+JpegEncodingFrameDeliverer::JpegEncodingFrameDeliverer( |
+ std::unique_ptr<PacmanFramePainter> frame_painter) |
+ : FrameDeliverer(std::move(frame_painter)) {} |
+ |
+JpegEncodingFrameDeliverer::~JpegEncodingFrameDeliverer() = default; |
+ |
+void JpegEncodingFrameDeliverer::Initialize( |
+ VideoPixelFormat pixel_format, |
+ std::unique_ptr<VideoCaptureDevice::Client> client, |
+ const FakeDeviceState* device_state) { |
+ client_ = std::move(client); |
+ device_state_ = device_state; |
+} |
+ |
+void JpegEncodingFrameDeliverer::Uninitialize() { |
+ client_.reset(); |
+ device_state_ = nullptr; |
mcasas
2017/02/17 18:35:47
l.567-568 and l.572-573 are common among all FrameDeliverer subclasses […remainder of comment lost in extraction]
chfremer
2017/02/22 17:28:41
Agreed. Thanks.
I moved the common initialize and […remainder of comment lost in extraction]
+ argb_buffer_.reset(); |
+ argb_buffer_size_in_bytes_ = -1; |
+ jpeg_buffer_.clear(); |
+} |
+ |
+void JpegEncodingFrameDeliverer::PaintAndDeliverNextFrame( |
+ base::TimeDelta timestamp_to_paint) { |
+ if (client_ == nullptr) |
+ return; |
+ |
+ int required_argb_buffer_size_in_bytes = VideoFrame::AllocationSize( |
+ PIXEL_FORMAT_ARGB, device_state_->format.frame_size); |
+ if (argb_buffer_size_in_bytes_ < required_argb_buffer_size_in_bytes) { |
+ argb_buffer_ = |
+ base::MakeUnique<uint8_t[]>(required_argb_buffer_size_in_bytes); |
+ argb_buffer_size_in_bytes_ = required_argb_buffer_size_in_bytes; |
+ } |
mcasas
2017/02/17 18:35:47
Instead of having a |argb_buffer_| and |argb_buffer_size_in_bytes_| […remainder of comment lost in extraction]
chfremer
2017/02/22 17:28:41
Nice. Thanks.
Done.
|
+ |
+ frame_painter_->PaintFrame(timestamp_to_paint, argb_buffer_.get()); |
+ |
+ static const int kBytePerPixel = 4; |
mcasas
2017/02/17 18:35:47
s/kBytePer/kBytesPer/
But you can skip it altogether […remainder of comment lost in extraction]
chfremer
2017/02/22 17:28:41
Done.
|
+ static const int kQuality = 75; |
+ bool success = gfx::JPEGCodec::Encode( |
+ argb_buffer_.get(), gfx::JPEGCodec::FORMAT_RGBA, |
mcasas
2017/02/17 18:35:47
We seem to be using ARGB format internally, whereas […remainder of comment lost in extraction; likely refers to the FORMAT_RGBA argument passed to gfx::JPEGCodec::Encode]
chfremer
2017/02/22 17:28:41
Thanks for raising this.
Yes, this probably leads […remainder of comment lost in extraction]
|
+ device_state_->format.frame_size.width(), |
+ device_state_->format.frame_size.height(), |
+ device_state_->format.frame_size.width() * kBytePerPixel, kQuality, |
mcasas
2017/02/17 18:35:47
Use here
VideoFrame::RowBytes(1 /* plane */, PIXEL_FORMAT_ARGB, …) […remainder of comment lost in extraction]
chfremer
2017/02/22 17:28:42
Did you mean 0 instead of 1 for the plane index?
Done.
|
+ &jpeg_buffer_); |
+ if (!success) { |
+ DLOG(ERROR) << "Jpeg encoding failed"; |
+ return; |
+ } |
+ |
+ const size_t frame_size = jpeg_buffer_.size(); |
+ base::TimeTicks now = base::TimeTicks::Now(); |
+ client_->OnIncomingCapturedData(&jpeg_buffer_[0], frame_size, |
+ device_state_->format, 0 /* rotation */, now, |
+ CalculateTimeSinceFirstInvocation(now)); |
+} |
+ |
OwnBufferFrameDeliverer::OwnBufferFrameDeliverer( |
std::unique_ptr<PacmanFramePainter> frame_painter) |
: FrameDeliverer(std::move(frame_painter)) {} |
@@ -541,8 +638,6 @@ void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame( |
if (!client_) |
return; |
const size_t frame_size = device_state_->format.ImageAllocationSize(); |
- memset(buffer_.get(), 0, frame_size); |
- frame_painter_->PaintFrame(timestamp_to_paint, buffer_.get()); |
base::TimeTicks now = base::TimeTicks::Now(); |
client_->OnIncomingCapturedData(buffer_.get(), frame_size, |
device_state_->format, 0 /* rotation */, now, |