Chromium Code Reviews

Side by Side Diff: media/capture/video/fake_video_capture_device.cc

Issue 2712123003: Revert of Add MJPEG support to FakeVideoCaptureDevice (Closed)
Patch Set: Created 3 years, 9 months ago
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/capture/video/fake_video_capture_device.h" 5 #include "media/capture/video/fake_video_capture_device.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <algorithm> 8 #include <algorithm>
9 #include <utility> 9 #include <utility>
10 10
11 #include "base/bind.h" 11 #include "base/bind.h"
12 #include "base/location.h" 12 #include "base/location.h"
13 #include "base/macros.h" 13 #include "base/macros.h"
14 #include "base/memory/weak_ptr.h" 14 #include "base/memory/weak_ptr.h"
15 #include "base/single_thread_task_runner.h" 15 #include "base/single_thread_task_runner.h"
16 #include "base/strings/stringprintf.h" 16 #include "base/strings/stringprintf.h"
17 #include "base/threading/thread_checker.h" 17 #include "base/threading/thread_checker.h"
18 #include "base/threading/thread_task_runner_handle.h" 18 #include "base/threading/thread_task_runner_handle.h"
19 #include "base/time/time.h" 19 #include "base/time/time.h"
20 #include "media/audio/fake_audio_input_stream.h" 20 #include "media/audio/fake_audio_input_stream.h"
21 #include "media/base/video_frame.h" 21 #include "media/base/video_frame.h"
22 #include "third_party/skia/include/core/SkBitmap.h" 22 #include "third_party/skia/include/core/SkBitmap.h"
23 #include "third_party/skia/include/core/SkCanvas.h" 23 #include "third_party/skia/include/core/SkCanvas.h"
24 #include "third_party/skia/include/core/SkMatrix.h" 24 #include "third_party/skia/include/core/SkMatrix.h"
25 #include "third_party/skia/include/core/SkPaint.h" 25 #include "third_party/skia/include/core/SkPaint.h"
26 #include "ui/gfx/codec/jpeg_codec.h"
27 #include "ui/gfx/codec/png_codec.h" 26 #include "ui/gfx/codec/png_codec.h"
28 27
29 namespace media { 28 namespace media {
30 29
31 namespace { 30 namespace {
32 // Sweep at 600 deg/sec. 31 // Sweep at 600 deg/sec.
33 static const float kPacmanAngularVelocity = 600; 32 static const float kPacmanAngularVelocity = 600;
34 // Beep every 500 ms. 33 // Beep every 500 ms.
35 static const int kBeepInterval = 500; 34 static const int kBeepInterval = 500;
36 // Gradient travels from bottom to top in 5 seconds. 35 // Gradient travels from bottom to top in 5 seconds.
37 static const float kGradientFrequency = 1.f / 5; 36 static const float kGradientFrequency = 1.f / 5;
38 37
39 static const double kMinZoom = 100.0; 38 static const double kMinZoom = 100.0;
40 static const double kMaxZoom = 400.0; 39 static const double kMaxZoom = 400.0;
41 static const double kZoomStep = 1.0; 40 static const double kZoomStep = 1.0;
42 static const double kInitialZoom = 100.0; 41 static const double kInitialZoom = 100.0;
43 42
44 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = { 43 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = {
45 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480), 44 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480),
46 gfx::Size(1280, 720), gfx::Size(1920, 1080)}; 45 gfx::Size(1280, 720), gfx::Size(1920, 1080)};
47 static const int kSupportedSizesCount = 46 static const int kSupportedSizesCount =
48 arraysize(kSupportedSizesOrderedByIncreasingWidth); 47 arraysize(kSupportedSizesOrderedByIncreasingWidth);
49 48
49 static const VideoPixelFormat kSupportedPixelFormats[] = {
50 PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB};
51
50 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) { 52 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) {
51 for (const gfx::Size& supported_size : 53 for (const gfx::Size& supported_size :
52 kSupportedSizesOrderedByIncreasingWidth) { 54 kSupportedSizesOrderedByIncreasingWidth) {
53 if (requested_size.width() <= supported_size.width()) 55 if (requested_size.width() <= supported_size.width())
54 return supported_size; 56 return supported_size;
55 } 57 }
56 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1]; 58 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1];
57 } 59 }
58 60
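SnapToSupportedSize() above returns the smallest supported size whose width is at least the requested width, falling back to the largest entry. A minimal standalone sketch of that rule, with a plain struct standing in for gfx::Size (names are illustrative, not from this CL):

#include <array>
#include <cstdio>

struct Size { int width; int height; };

// Supported sizes, ordered by increasing width, mirroring the constants above.
constexpr std::array<Size, 5> kSupported = {{
    {96, 96}, {320, 240}, {640, 480}, {1280, 720}, {1920, 1080}}};

// Returns the first supported size at least as wide as the request, or the
// largest supported size if the request is wider than all of them.
Size SnapToSupportedSize(const Size& requested) {
  for (const Size& supported : kSupported) {
    if (requested.width <= supported.width)
      return supported;
  }
  return kSupported.back();
}

int main() {
  const Size snapped = SnapToSupportedSize({500, 500});
  std::printf("500x500 snaps to %dx%d\n", snapped.width, snapped.height);  // 640x480
  return 0;
}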
59 // Represents the current state of a FakeVideoCaptureDevice. 61 // Represents the current state of a FakeVideoCaptureDevice.
60 // This is a separate struct because read-access to it is shared with several 62 // This is a separate struct because read-access to it is shared with several
61 // collaborating classes. 63 // collaborating classes.
62 struct FakeDeviceState { 64 struct FakeDeviceState {
63 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format) 65 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format)
64 : zoom(zoom), 66 : zoom(zoom),
65 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {} 67 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {}
66 68
67 uint32_t zoom; 69 uint32_t zoom;
68 VideoCaptureFormat format; 70 VideoCaptureFormat format;
69 }; 71 };
70 72
71 // Paints a "pacman-like" animated circle including textual information such 73 // Paints a "pacman-like" animated circle including textual information such
72 // as a frame count and timer. 74 // as a frame count and timer.
73 class PacmanFramePainter { 75 class PacmanFramePainter {
74 public: 76 public:
75 enum class Format { I420, SK_N32, Y16 }; 77 // Currently, only the following values are supported for |pixel_format|:
76 PacmanFramePainter(Format pixel_format, 78 // PIXEL_FORMAT_I420
79 // PIXEL_FORMAT_Y16
80 // PIXEL_FORMAT_ARGB
81 PacmanFramePainter(VideoPixelFormat pixel_format,
77 const FakeDeviceState* fake_device_state); 82 const FakeDeviceState* fake_device_state);
78 83
79 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer); 84 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer);
80 85
81 private: 86 private:
82 void DrawGradientSquares(base::TimeDelta elapsed_time, 87 void DrawGradientSquares(base::TimeDelta elapsed_time,
83 uint8_t* target_buffer); 88 uint8_t* target_buffer);
84 89
85 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer); 90 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer);
86 91
87 const Format pixel_format_; 92 const VideoPixelFormat pixel_format_;
88 const FakeDeviceState* fake_device_state_ = nullptr; 93 const FakeDeviceState* fake_device_state_ = nullptr;
89 }; 94 };
90 95
91 // Paints and delivers frames to a client, which is set via Initialize(). 96 // Paints and delivers frames to a client, which is set via Initialize().
92 class FrameDeliverer { 97 class FrameDeliverer {
93 public: 98 public:
94 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter) 99 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter)
95 : frame_painter_(std::move(frame_painter)) {} 100 : frame_painter_(std::move(frame_painter)) {}
96 virtual ~FrameDeliverer() {} 101 virtual ~FrameDeliverer() {}
97 virtual void Initialize(VideoPixelFormat pixel_format, 102 virtual void Initialize(VideoPixelFormat pixel_format,
98 std::unique_ptr<VideoCaptureDevice::Client> client, 103 std::unique_ptr<VideoCaptureDevice::Client> client,
99 const FakeDeviceState* device_state) { 104 const FakeDeviceState* device_state) = 0;
100 client_ = std::move(client); 105 virtual void Uninitialize() = 0;
101 device_state_ = device_state;
102 }
103 virtual void Uninitialize() {
104 client_.reset();
105 device_state_ = nullptr;
106 }
107 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0; 106 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0;
108 107
109 protected: 108 protected:
110 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) { 109 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) {
111 if (first_ref_time_.is_null()) 110 if (first_ref_time_.is_null())
112 first_ref_time_ = now; 111 first_ref_time_ = now;
113 return now - first_ref_time_; 112 return now - first_ref_time_;
114 } 113 }
115 114
116 PacmanFramePainter* frame_painter() { return frame_painter_.get(); }
117 const FakeDeviceState* device_state() { return device_state_; }
118 VideoCaptureDevice::Client* client() { return client_.get(); }
119
120 private:
121 const std::unique_ptr<PacmanFramePainter> frame_painter_; 115 const std::unique_ptr<PacmanFramePainter> frame_painter_;
122 const FakeDeviceState* device_state_ = nullptr; 116 const FakeDeviceState* device_state_ = nullptr;
123 std::unique_ptr<VideoCaptureDevice::Client> client_; 117 std::unique_ptr<VideoCaptureDevice::Client> client_;
118
119 private:
124 base::TimeTicks first_ref_time_; 120 base::TimeTicks first_ref_time_;
125 }; 121 };
126 122
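The deliverer classes above form a small strategy hierarchy: an abstract FrameDeliverer whose concrete subclass is picked from the delivery mode. A minimal sketch of that shape only, with stand-in types rather than the Chromium classes:

#include <cstdio>
#include <memory>

class FrameDeliverer {
 public:
  virtual ~FrameDeliverer() = default;
  virtual void PaintAndDeliverNextFrame(double timestamp_seconds) = 0;
};

// Strategy that owns its pixel buffer and hands raw bytes to the client.
class OwnBufferDeliverer : public FrameDeliverer {
 public:
  void PaintAndDeliverNextFrame(double timestamp_seconds) override {
    std::printf("own-buffer frame at %.3f s\n", timestamp_seconds);
  }
};

// Strategy that paints into a buffer reserved by the client.
class ClientBufferDeliverer : public FrameDeliverer {
 public:
  void PaintAndDeliverNextFrame(double timestamp_seconds) override {
    std::printf("client-buffer frame at %.3f s\n", timestamp_seconds);
  }
};

enum class DeliveryMode { kUseDeviceInternalBuffers, kUseClientProvidedBuffers };

std::unique_ptr<FrameDeliverer> MakeDeliverer(DeliveryMode mode) {
  if (mode == DeliveryMode::kUseDeviceInternalBuffers)
    return std::make_unique<OwnBufferDeliverer>();
  return std::make_unique<ClientBufferDeliverer>();
}

int main() {
  MakeDeliverer(DeliveryMode::kUseDeviceInternalBuffers)
      ->PaintAndDeliverNextFrame(0.033);
  return 0;
}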
127 // Delivers frames using its own buffers via OnIncomingCapturedData(). 123 // Delivers frames using its own buffers via OnIncomingCapturedData().
128 class OwnBufferFrameDeliverer : public FrameDeliverer { 124 class OwnBufferFrameDeliverer : public FrameDeliverer {
129 public: 125 public:
130 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); 126 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
131 ~OwnBufferFrameDeliverer() override; 127 ~OwnBufferFrameDeliverer() override;
132 128
133 // Implementation of FrameDeliverer 129 // Implementation of FrameDeliverer
134 void Initialize(VideoPixelFormat pixel_format, 130 void Initialize(VideoPixelFormat pixel_format,
135 std::unique_ptr<VideoCaptureDevice::Client> client, 131 std::unique_ptr<VideoCaptureDevice::Client> client,
136 const FakeDeviceState* device_state) override; 132 const FakeDeviceState* device_state) override;
137 void Uninitialize() override; 133 void Uninitialize() override;
138 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 134 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
139 135
140 private: 136 private:
141 std::unique_ptr<uint8_t[]> buffer_; 137 std::unique_ptr<uint8_t[]> buffer_;
142 }; 138 };
143 139
144 // Delivers frames using buffers provided by the client via 140 // Delivers frames using buffers provided by the client via
145 // OnIncomingCapturedBuffer(). 141 // OnIncomingCapturedBuffer().
146 class ClientBufferFrameDeliverer : public FrameDeliverer { 142 class ClientBufferFrameDeliverer : public FrameDeliverer {
147 public: 143 public:
148 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); 144 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
149 ~ClientBufferFrameDeliverer() override; 145 ~ClientBufferFrameDeliverer() override;
150 146
151 // Implementation of FrameDeliverer 147 // Implementation of FrameDeliverer
152 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 148 void Initialize(VideoPixelFormat pixel_format,
153 }; 149 std::unique_ptr<VideoCaptureDevice::Client> client,
154 150 const FakeDeviceState* device_state) override;
155 class JpegEncodingFrameDeliverer : public FrameDeliverer {
156 public:
157 JpegEncodingFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
158 ~JpegEncodingFrameDeliverer() override;
159
160 // Implementation of FrameDeliveryStrategy
161 void Uninitialize() override; 151 void Uninitialize() override;
162 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 152 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
163
164 private:
165 std::vector<uint8_t> sk_n32_buffer_;
166 std::vector<unsigned char> jpeg_buffer_;
167 }; 153 };
168 154
169 // Implements the photo functionality of a VideoCaptureDevice 155 // Implements the photo functionality of a VideoCaptureDevice
170 class FakePhotoDevice { 156 class FakePhotoDevice {
171 public: 157 public:
172 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> painter, 158 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> argb_painter,
173 const FakeDeviceState* fake_device_state); 159 const FakeDeviceState* fake_device_state);
174 ~FakePhotoDevice(); 160 ~FakePhotoDevice();
175 161
176 void GetPhotoCapabilities( 162 void GetPhotoCapabilities(
177 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback); 163 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback);
178 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 164 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
179 base::TimeDelta elapsed_time); 165 base::TimeDelta elapsed_time);
180 166
181 private: 167 private:
182 const std::unique_ptr<PacmanFramePainter> painter_; 168 const std::unique_ptr<PacmanFramePainter> argb_painter_;
183 const FakeDeviceState* const fake_device_state_; 169 const FakeDeviceState* const fake_device_state_;
184 }; 170 };
185 171
186 // Implementation of VideoCaptureDevice that generates test frames. This is 172 // Implementation of VideoCaptureDevice that generates test frames. This is
187 // useful for testing the video capture components without having to use real 173 // useful for testing the video capture components without having to use real
188 // devices. The implementation schedules delayed tasks to itself to generate and 174 // devices. The implementation schedules delayed tasks to itself to generate and
189 // deliver frames at the requested rate. 175 // deliver frames at the requested rate.
190 class FakeVideoCaptureDevice : public VideoCaptureDevice { 176 class FakeVideoCaptureDevice : public VideoCaptureDevice {
191 public: 177 public:
192 FakeVideoCaptureDevice( 178 FakeVideoCaptureDevice(
(...skipping 38 matching lines...)
231 217
232 // static 218 // static
233 void FakeVideoCaptureDeviceMaker::GetSupportedSizes( 219 void FakeVideoCaptureDeviceMaker::GetSupportedSizes(
234 std::vector<gfx::Size>* supported_sizes) { 220 std::vector<gfx::Size>* supported_sizes) {
235 for (int i = 0; i < kSupportedSizesCount; i++) 221 for (int i = 0; i < kSupportedSizesCount; i++)
236 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]); 222 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]);
237 } 223 }
238 224
239 // static 225 // static
240 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance( 226 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance(
241 PixelFormat pixel_format, 227 VideoPixelFormat pixel_format,
242 DeliveryMode delivery_mode, 228 DeliveryMode delivery_mode,
243 float frame_rate) { 229 float frame_rate) {
244 auto device_state = base::MakeUnique<FakeDeviceState>( 230 bool pixel_format_supported = false;
245 kInitialZoom, frame_rate, 231 for (const auto& supported_pixel_format : kSupportedPixelFormats) {
246 static_cast<media::VideoPixelFormat>(pixel_format)); 232 if (pixel_format == supported_pixel_format) {
247 PacmanFramePainter::Format painter_format; 233 pixel_format_supported = true;
248 switch (pixel_format) {
249 case PixelFormat::I420:
250 painter_format = PacmanFramePainter::Format::I420;
251 break; 234 break;
252 case PixelFormat::Y16: 235 }
253 painter_format = PacmanFramePainter::Format::Y16;
254 break;
255 case PixelFormat::MJPEG:
256 painter_format = PacmanFramePainter::Format::SK_N32;
257 break;
258 } 236 }
237 if (!pixel_format_supported) {
238 DLOG(ERROR) << "Requested an unsupported pixel format "
239 << VideoPixelFormatToString(pixel_format);
240 return nullptr;
241 }
242
243 auto device_state =
244 base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format);
259 auto video_frame_painter = 245 auto video_frame_painter =
260 base::MakeUnique<PacmanFramePainter>(painter_format, device_state.get()); 246 base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get());
261
262 std::unique_ptr<FrameDeliverer> frame_delivery_strategy; 247 std::unique_ptr<FrameDeliverer> frame_delivery_strategy;
263 switch (delivery_mode) { 248 switch (delivery_mode) {
264 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS: 249 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS:
265 if (pixel_format == PixelFormat::MJPEG) { 250 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>(
266 frame_delivery_strategy = base::MakeUnique<JpegEncodingFrameDeliverer>( 251 std::move(video_frame_painter));
267 std::move(video_frame_painter));
268 } else {
269 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>(
270 std::move(video_frame_painter));
271 }
272 break; 252 break;
273 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS: 253 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS:
274 if (pixel_format == PixelFormat::MJPEG) {
275 DLOG(ERROR) << "PixelFormat::MJPEG cannot be used in combination with "
276 << "USE_CLIENT_PROVIDED_BUFFERS.";
277 return nullptr;
278 }
279 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>( 254 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>(
280 std::move(video_frame_painter)); 255 std::move(video_frame_painter));
281 break; 256 break;
282 } 257 }
283 258
284 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>( 259 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>(
285 PacmanFramePainter::Format::SK_N32, device_state.get()); 260 PIXEL_FORMAT_ARGB, device_state.get());
286 auto photo_device = base::MakeUnique<FakePhotoDevice>( 261 auto photo_device = base::MakeUnique<FakePhotoDevice>(
287 std::move(photo_frame_painter), device_state.get()); 262 std::move(photo_frame_painter), device_state.get());
288 263
289 return base::MakeUnique<FakeVideoCaptureDevice>( 264 return base::MakeUnique<FakeVideoCaptureDevice>(
290 std::move(frame_delivery_strategy), std::move(photo_device), 265 std::move(frame_delivery_strategy), std::move(photo_device),
291 std::move(device_state)); 266 std::move(device_state));
292 } 267 }
293 268
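On the new (right-hand) side, MakeInstance() rejects unsupported pixel formats with a linear scan over kSupportedPixelFormats before building the device. A standalone sketch of the same check, assuming an illustrative enum and using std::find in place of the hand-written loop:

#include <algorithm>
#include <array>
#include <cstdio>

enum class PixelFormat { kI420, kY16, kARGB, kMJPEG };

// Mirrors kSupportedPixelFormats above; MJPEG is deliberately absent.
constexpr std::array<PixelFormat, 3> kSupportedPixelFormats = {
    PixelFormat::kI420, PixelFormat::kY16, PixelFormat::kARGB};

bool IsSupported(PixelFormat format) {
  return std::find(kSupportedPixelFormats.begin(), kSupportedPixelFormats.end(),
                   format) != kSupportedPixelFormats.end();
}

int main() {
  std::printf("MJPEG supported: %d\n", IsSupported(PixelFormat::kMJPEG));  // 0
  std::printf("I420 supported: %d\n", IsSupported(PixelFormat::kI420));    // 1
  return 0;
}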
294 PacmanFramePainter::PacmanFramePainter(Format pixel_format, 269 PacmanFramePainter::PacmanFramePainter(VideoPixelFormat pixel_format,
295 const FakeDeviceState* fake_device_state) 270 const FakeDeviceState* fake_device_state)
296 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) {} 271 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) {
272 DCHECK(pixel_format == PIXEL_FORMAT_I420 ||
273 pixel_format == PIXEL_FORMAT_Y16 || pixel_format == PIXEL_FORMAT_ARGB);
274 }
297 275
298 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time, 276 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time,
299 uint8_t* target_buffer) { 277 uint8_t* target_buffer) {
300 DrawPacman(elapsed_time, target_buffer); 278 DrawPacman(elapsed_time, target_buffer);
301 DrawGradientSquares(elapsed_time, target_buffer); 279 DrawGradientSquares(elapsed_time, target_buffer);
302 } 280 }
303 281
304 // Starting from top left, -45 deg gradient. Value at point (row, column) is 282 // Starting from top left, -45 deg gradient. Value at point (row, column) is
305 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where 283 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where
306 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per 284 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per
(...skipping 13 matching lines...)
320 const float start = 298 const float start =
321 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); 299 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536);
322 const float color_step = 65535 / static_cast<float>(width + height); 300 const float color_step = 65535 / static_cast<float>(width + height);
323 for (const auto& corner : squares) { 301 for (const auto& corner : squares) {
324 for (int y = corner.y(); y < corner.y() + side; ++y) { 302 for (int y = corner.y(); y < corner.y() + side; ++y) {
325 for (int x = corner.x(); x < corner.x() + side; ++x) { 303 for (int x = corner.x(); x < corner.x() + side; ++x) {
326 const unsigned int value = 304 const unsigned int value =
327 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; 305 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF;
328 size_t offset = (y * width) + x; 306 size_t offset = (y * width) + x;
329 switch (pixel_format_) { 307 switch (pixel_format_) {
330 case Format::Y16: 308 case PIXEL_FORMAT_Y16:
331 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF; 309 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF;
332 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8; 310 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8;
333 break; 311 break;
334 case Format::SK_N32: 312 case PIXEL_FORMAT_ARGB:
335 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8; 313 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8;
336 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8; 314 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8;
337 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8; 315 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8;
338 break; 316 break;
339 case Format::I420: 317 default:
340 target_buffer[offset] = value >> 8; 318 target_buffer[offset] = value >> 8;
341 break; 319 break;
342 } 320 }
343 } 321 }
344 } 322 }
345 } 323 }
346 } 324 }
347 325
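The comment above defines the gradient: the value at (row, column) is (start + (row + column) * step) mod MAX_VALUE, with step = MAX_VALUE / (width + height) and a start offset that sweeps with elapsed time at kGradientFrequency. A standalone sketch of that formula for a single 16-bit sample; only the constants are taken from the snippet, everything else is illustrative:

#include <cmath>
#include <cstdio>

constexpr float kGradientFrequency = 1.f / 5;  // Full sweep every 5 seconds.

// 16-bit gradient value at pixel (x, y) for a frame of the given size.
unsigned GradientValue(int x, int y, int width, int height,
                       double elapsed_seconds) {
  const float start =
      std::fmod(65536 * elapsed_seconds * kGradientFrequency, 65536);
  const float color_step = 65535 / static_cast<float>(width + height);
  return static_cast<unsigned>(start + (x + y) * color_step) & 0xFFFF;
}

int main() {
  // At t=0 the top-left corner is 0 and the bottom-right corner is near 65535.
  std::printf("%u %u\n", GradientValue(0, 0, 640, 480, 0.0),
              GradientValue(639, 479, 640, 480, 0.0));
  return 0;
}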
348 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time, 326 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time,
349 uint8_t* target_buffer) { 327 uint8_t* target_buffer) {
350 const int width = fake_device_state_->format.frame_size.width(); 328 const int width = fake_device_state_->format.frame_size.width();
351 const int height = fake_device_state_->format.frame_size.height(); 329 const int height = fake_device_state_->format.frame_size.height();
352 330
353 SkColorType colorspace = kAlpha_8_SkColorType; 331 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format.
354 switch (pixel_format_) { 332 const SkColorType colorspace = (pixel_format_ == PIXEL_FORMAT_ARGB)
355 case Format::I420: 333 ? kN32_SkColorType
356 // Skia doesn't support painting in I420. Instead, paint an 8bpp 334 : kAlpha_8_SkColorType;
357 // monochrome image to the beginning of |target_buffer|. This section of 335 // Skia doesn't support 16 bit alpha rendering, so we paint 8 bit alpha and then use
358 // |target_buffer| corresponds to the Y-plane of the YUV image. Do not 336 // this as high byte values in 16 bit pixels.
359 // touch the U or V planes of |target_buffer|. Assuming they have been
360 // initialized to 0, which corresponds to a green color tone, the result
361 // will be an green-ish monochrome frame.
362 colorspace = kAlpha_8_SkColorType;
363 break;
364 case Format::SK_N32:
365 // SkColorType is RGBA on some platforms and BGRA on others.
366 colorspace = kN32_SkColorType;
367 break;
368 case Format::Y16:
369 // Skia doesn't support painting in Y16. Instead, paint an 8bpp monochrome
370 // image to the beginning of |target_buffer|. Later, move the 8bit pixel
371 // values to a position corresponding to the high byte values of 16bit
372 // pixel values (assuming the byte order is little-endian).
373 colorspace = kAlpha_8_SkColorType;
374 break;
375 }
376
377 const SkImageInfo info = 337 const SkImageInfo info =
378 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType); 338 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType);
379 SkBitmap bitmap; 339 SkBitmap bitmap;
380 bitmap.setInfo(info); 340 bitmap.setInfo(info);
381 bitmap.setPixels(target_buffer); 341 bitmap.setPixels(target_buffer);
382 SkPaint paint; 342 SkPaint paint;
383 paint.setStyle(SkPaint::kFill_Style); 343 paint.setStyle(SkPaint::kFill_Style);
384 SkCanvas canvas(bitmap); 344 SkCanvas canvas(bitmap);
385 345
386 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f; 346 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f;
387 SkMatrix matrix; 347 SkMatrix matrix;
388 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2); 348 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2);
389 canvas.setMatrix(matrix); 349 canvas.setMatrix(matrix);
390 350
391 // For the SK_N32 case, match the green color tone produced by the 351 // Equalize Alpha_8 that has light green background while RGBA has white.
392 // I420 case. 352 if (pixel_format_ == PIXEL_FORMAT_ARGB) {
393 if (pixel_format_ == Format::SK_N32) {
394 const SkRect full_frame = SkRect::MakeWH(width, height); 353 const SkRect full_frame = SkRect::MakeWH(width, height);
395 paint.setARGB(255, 0, 127, 0); 354 paint.setARGB(255, 0, 127, 0);
396 canvas.drawRect(full_frame, paint); 355 canvas.drawRect(full_frame, paint);
397 paint.setColor(SK_ColorGREEN);
398 } 356 }
357 paint.setColor(SK_ColorGREEN);
399 358
400 // Draw a sweeping circle to show an animation. 359 // Draw a sweeping circle to show an animation.
401 const float end_angle = 360 const float end_angle =
402 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); 361 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361);
403 const int radius = std::min(width, height) / 4; 362 const int radius = std::min(width, height) / 4;
404 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius, 363 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius,
405 2 * radius, 2 * radius); 364 2 * radius, 2 * radius);
406 canvas.drawArc(rect, 0, end_angle, true, paint); 365 canvas.drawArc(rect, 0, end_angle, true, paint);
407 366
408 // Draw current time. 367 // Draw current time.
409 const int milliseconds = elapsed_time.InMilliseconds() % 1000; 368 const int milliseconds = elapsed_time.InMilliseconds() % 1000;
410 const int seconds = elapsed_time.InSeconds() % 60; 369 const int seconds = elapsed_time.InSeconds() % 60;
411 const int minutes = elapsed_time.InMinutes() % 60; 370 const int minutes = elapsed_time.InMinutes() % 60;
412 const int hours = elapsed_time.InHours(); 371 const int hours = elapsed_time.InHours();
413 const int frame_count = elapsed_time.InMilliseconds() * 372 const int frame_count = elapsed_time.InMilliseconds() *
414 fake_device_state_->format.frame_rate / 1000; 373 fake_device_state_->format.frame_rate / 1000;
415 374
416 const std::string time_string = 375 const std::string time_string =
417 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, 376 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds,
418 milliseconds, frame_count); 377 milliseconds, frame_count);
419 canvas.scale(3, 3); 378 canvas.scale(3, 3);
420 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); 379 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint);
421 380
422 if (pixel_format_ == Format::Y16) { 381 if (pixel_format_ == PIXEL_FORMAT_Y16) {
423 // Use 8 bit bitmap rendered to first half of the buffer as high byte values 382 // Use 8 bit bitmap rendered to first half of the buffer as high byte values
424 // for the whole buffer. Low byte values are not important. 383 // for the whole buffer. Low byte values are not important.
425 for (int i = (width * height) - 1; i >= 0; --i) 384 for (int i = (width * height) - 1; i >= 0; --i)
426 target_buffer[i * 2 + 1] = target_buffer[i]; 385 target_buffer[i * 2 + 1] = target_buffer[i];
427 } 386 }
428 } 387 }
429 388
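For Y16 output, DrawPacman() renders an 8-bit image into the first width*height bytes of the buffer and then expands it in place so each painted value becomes the high byte of a little-endian 16-bit pixel; the low bytes are left untouched, matching the "low byte values are not important" comment above. A standalone sketch of that expansion (the buffer contents are made up for illustration):

#include <cstdint>
#include <cstdio>
#include <vector>

// Walk backwards so each source byte is copied before it can be overwritten.
void ExpandTo16BitHighBytes(uint8_t* buffer, int width, int height) {
  for (int i = (width * height) - 1; i >= 0; --i)
    buffer[i * 2 + 1] = buffer[i];
}

int main() {
  constexpr int kWidth = 4, kHeight = 1;
  // Buffer sized for 16-bit pixels; the "paint pass" fills only the first half.
  std::vector<uint8_t> buffer(kWidth * kHeight * 2, 0);
  for (int i = 0; i < kWidth * kHeight; ++i)
    buffer[i] = static_cast<uint8_t>(10 * (i + 1));
  ExpandTo16BitHighBytes(buffer.data(), kWidth, kHeight);
  // High bytes now carry the painted values: prints "10 20 30 40".
  for (int i = 0; i < kWidth * kHeight; ++i)
    std::printf("%d ", buffer[i * 2 + 1]);
  std::printf("\n");
  return 0;
}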
430 FakePhotoDevice::FakePhotoDevice(std::unique_ptr<PacmanFramePainter> painter, 389 FakePhotoDevice::FakePhotoDevice(
431 const FakeDeviceState* fake_device_state) 390 std::unique_ptr<PacmanFramePainter> argb_painter,
432 : painter_(std::move(painter)), fake_device_state_(fake_device_state) {} 391 const FakeDeviceState* fake_device_state)
392 : argb_painter_(std::move(argb_painter)),
393 fake_device_state_(fake_device_state) {}
433 394
434 FakePhotoDevice::~FakePhotoDevice() = default; 395 FakePhotoDevice::~FakePhotoDevice() = default;
435 396
436 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 397 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
437 base::TimeDelta elapsed_time) { 398 base::TimeDelta elapsed_time) {
438 // Create a PNG-encoded frame and send it back to |callback|. 399 // Create a PNG-encoded frame and send it back to |callback|.
439 auto required_sk_n32_buffer_size = VideoFrame::AllocationSize( 400 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize(
440 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size); 401 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size)]);
441 std::unique_ptr<uint8_t[]> buffer(new uint8_t[required_sk_n32_buffer_size]); 402 argb_painter_->PaintFrame(elapsed_time, buffer.get());
442 memset(buffer.get(), 0, required_sk_n32_buffer_size);
443 painter_->PaintFrame(elapsed_time, buffer.get());
444 mojom::BlobPtr blob = mojom::Blob::New(); 403 mojom::BlobPtr blob = mojom::Blob::New();
445 const gfx::PNGCodec::ColorFormat encoding_source_format = 404 const bool result =
446 (kN32_SkColorType == kRGBA_8888_SkColorType) ? gfx::PNGCodec::FORMAT_RGBA 405 gfx::PNGCodec::Encode(buffer.get(), gfx::PNGCodec::FORMAT_RGBA,
447 : gfx::PNGCodec::FORMAT_BGRA; 406 fake_device_state_->format.frame_size,
448 const bool result = gfx::PNGCodec::Encode( 407 fake_device_state_->format.frame_size.width() * 4,
449 buffer.get(), encoding_source_format, 408 true /* discard_transparency */,
450 fake_device_state_->format.frame_size, 409 std::vector<gfx::PNGCodec::Comment>(), &blob->data);
451 VideoFrame::RowBytes(0 /* plane */, PIXEL_FORMAT_ARGB,
452 fake_device_state_->format.frame_size.width()),
453 true /* discard_transparency */, std::vector<gfx::PNGCodec::Comment>(),
454 &blob->data);
455 DCHECK(result); 410 DCHECK(result);
456 411
457 blob->mime_type = "image/png"; 412 blob->mime_type = "image/png";
458 callback.Run(std::move(blob)); 413 callback.Run(std::move(blob));
459 } 414 }
460 415
461 FakeVideoCaptureDevice::FakeVideoCaptureDevice( 416 FakeVideoCaptureDevice::FakeVideoCaptureDevice(
462 std::unique_ptr<FrameDeliverer> frame_delivery_strategy, 417 std::unique_ptr<FrameDeliverer> frame_delivery_strategy,
463 std::unique_ptr<FakePhotoDevice> photo_device, 418 std::unique_ptr<FakePhotoDevice> photo_device,
464 std::unique_ptr<FakeDeviceState> device_state) 419 std::unique_ptr<FakeDeviceState> device_state)
(...skipping 97 matching lines...)
562 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer( 517 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer(
563 std::unique_ptr<PacmanFramePainter> frame_painter) 518 std::unique_ptr<PacmanFramePainter> frame_painter)
564 : FrameDeliverer(std::move(frame_painter)) {} 519 : FrameDeliverer(std::move(frame_painter)) {}
565 520
566 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default; 521 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default;
567 522
568 void OwnBufferFrameDeliverer::Initialize( 523 void OwnBufferFrameDeliverer::Initialize(
569 VideoPixelFormat pixel_format, 524 VideoPixelFormat pixel_format,
570 std::unique_ptr<VideoCaptureDevice::Client> client, 525 std::unique_ptr<VideoCaptureDevice::Client> client,
571 const FakeDeviceState* device_state) { 526 const FakeDeviceState* device_state) {
572 FrameDeliverer::Initialize(pixel_format, std::move(client), device_state); 527 client_ = std::move(client);
528 device_state_ = device_state;
573 buffer_.reset(new uint8_t[VideoFrame::AllocationSize( 529 buffer_.reset(new uint8_t[VideoFrame::AllocationSize(
574 pixel_format, device_state->format.frame_size)]); 530 pixel_format, device_state_->format.frame_size)]);
575 } 531 }
576 532
577 void OwnBufferFrameDeliverer::Uninitialize() { 533 void OwnBufferFrameDeliverer::Uninitialize() {
578 FrameDeliverer::Uninitialize(); 534 client_.reset();
535 device_state_ = nullptr;
579 buffer_.reset(); 536 buffer_.reset();
580 } 537 }
581 538
582 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame( 539 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame(
583 base::TimeDelta timestamp_to_paint) { 540 base::TimeDelta timestamp_to_paint) {
584 if (!client()) 541 if (!client_)
585 return; 542 return;
586 const size_t frame_size = device_state()->format.ImageAllocationSize(); 543 const size_t frame_size = device_state_->format.ImageAllocationSize();
587 memset(buffer_.get(), 0, frame_size); 544 memset(buffer_.get(), 0, frame_size);
588 frame_painter()->PaintFrame(timestamp_to_paint, buffer_.get()); 545 frame_painter_->PaintFrame(timestamp_to_paint, buffer_.get());
589 base::TimeTicks now = base::TimeTicks::Now(); 546 base::TimeTicks now = base::TimeTicks::Now();
590 client()->OnIncomingCapturedData(buffer_.get(), frame_size, 547 client_->OnIncomingCapturedData(buffer_.get(), frame_size,
591 device_state()->format, 0 /* rotation */, 548 device_state_->format, 0 /* rotation */, now,
592 now, CalculateTimeSinceFirstInvocation(now)); 549 CalculateTimeSinceFirstInvocation(now));
593 } 550 }
594 551
595 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer( 552 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer(
596 std::unique_ptr<PacmanFramePainter> frame_painter) 553 std::unique_ptr<PacmanFramePainter> frame_painter)
597 : FrameDeliverer(std::move(frame_painter)) {} 554 : FrameDeliverer(std::move(frame_painter)) {}
598 555
599 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default; 556 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default;
600 557
558 void ClientBufferFrameDeliverer::Initialize(
559 VideoPixelFormat,
560 std::unique_ptr<VideoCaptureDevice::Client> client,
561 const FakeDeviceState* device_state) {
562 client_ = std::move(client);
563 device_state_ = device_state;
564 }
565
566 void ClientBufferFrameDeliverer::Uninitialize() {
567 client_.reset();
568 device_state_ = nullptr;
569 }
570
601 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame( 571 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame(
602 base::TimeDelta timestamp_to_paint) { 572 base::TimeDelta timestamp_to_paint) {
603 if (!client()) 573 if (client_ == nullptr)
604 return; 574 return;
605 575
606 const int arbitrary_frame_feedback_id = 0; 576 const int arbitrary_frame_feedback_id = 0;
607 auto capture_buffer = client()->ReserveOutputBuffer( 577 auto capture_buffer = client_->ReserveOutputBuffer(
608 device_state()->format.frame_size, device_state()->format.pixel_format, 578 device_state_->format.frame_size, device_state_->format.pixel_format,
609 device_state()->format.pixel_storage, arbitrary_frame_feedback_id); 579 device_state_->format.pixel_storage, arbitrary_frame_feedback_id);
610 DLOG_IF(ERROR, !capture_buffer.is_valid()) 580 DLOG_IF(ERROR, !capture_buffer.is_valid())
611 << "Couldn't allocate Capture Buffer"; 581 << "Couldn't allocate Capture Buffer";
612 auto buffer_access = 582 auto buffer_access =
613 capture_buffer.handle_provider->GetHandleForInProcessAccess(); 583 capture_buffer.handle_provider->GetHandleForInProcessAccess();
614 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; 584 DCHECK(buffer_access->data()) << "Buffer has NO backing memory";
615 585
616 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state()->format.pixel_storage); 586 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state_->format.pixel_storage);
617 587
618 uint8_t* data_ptr = buffer_access->data(); 588 uint8_t* data_ptr = buffer_access->data();
619 memset(data_ptr, 0, buffer_access->mapped_size()); 589 memset(data_ptr, 0, buffer_access->mapped_size());
620 frame_painter()->PaintFrame(timestamp_to_paint, data_ptr); 590 frame_painter_->PaintFrame(timestamp_to_paint, data_ptr);
621 591
622 base::TimeTicks now = base::TimeTicks::Now(); 592 base::TimeTicks now = base::TimeTicks::Now();
623 client()->OnIncomingCapturedBuffer(std::move(capture_buffer), 593 client_->OnIncomingCapturedBuffer(std::move(capture_buffer),
624 device_state()->format, now, 594 device_state_->format, now,
625 CalculateTimeSinceFirstInvocation(now)); 595 CalculateTimeSinceFirstInvocation(now));
626 }
627
628 JpegEncodingFrameDeliverer::JpegEncodingFrameDeliverer(
629 std::unique_ptr<PacmanFramePainter> frame_painter)
630 : FrameDeliverer(std::move(frame_painter)) {}
631
632 JpegEncodingFrameDeliverer::~JpegEncodingFrameDeliverer() = default;
633
634 void JpegEncodingFrameDeliverer::Uninitialize() {
635 FrameDeliverer::Uninitialize();
636 sk_n32_buffer_.clear();
637 jpeg_buffer_.clear();
638 }
639
640 void JpegEncodingFrameDeliverer::PaintAndDeliverNextFrame(
641 base::TimeDelta timestamp_to_paint) {
642 if (!client())
643 return;
644
645 auto required_sk_n32_buffer_size = VideoFrame::AllocationSize(
646 PIXEL_FORMAT_ARGB, device_state()->format.frame_size);
647 sk_n32_buffer_.resize(required_sk_n32_buffer_size);
648 memset(&sk_n32_buffer_[0], 0, required_sk_n32_buffer_size);
649
650 frame_painter()->PaintFrame(timestamp_to_paint, &sk_n32_buffer_[0]);
651
652 static const int kQuality = 75;
653 const gfx::JPEGCodec::ColorFormat encoding_source_format =
654 (kN32_SkColorType == kRGBA_8888_SkColorType)
655 ? gfx::JPEGCodec::FORMAT_RGBA
656 : gfx::JPEGCodec::FORMAT_BGRA;
657 bool success = gfx::JPEGCodec::Encode(
658 &sk_n32_buffer_[0], encoding_source_format,
659 device_state()->format.frame_size.width(),
660 device_state()->format.frame_size.height(),
661 VideoFrame::RowBytes(0 /* plane */, PIXEL_FORMAT_ARGB,
662 device_state()->format.frame_size.width()),
663 kQuality, &jpeg_buffer_);
664 if (!success) {
665 DLOG(ERROR) << "Jpeg encoding failed";
666 return;
667 }
668
669 const size_t frame_size = jpeg_buffer_.size();
670 base::TimeTicks now = base::TimeTicks::Now();
671 client()->OnIncomingCapturedData(&jpeg_buffer_[0], frame_size,
672 device_state()->format, 0 /* rotation */,
673 now, CalculateTimeSinceFirstInvocation(now));
674 } 596 }
675 597
676 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( 598 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture(
677 base::TimeTicks expected_execution_time) { 599 base::TimeTicks expected_execution_time) {
678 DCHECK(thread_checker_.CalledOnValidThread()); 600 DCHECK(thread_checker_.CalledOnValidThread());
679 const base::TimeDelta beep_interval = 601 const base::TimeDelta beep_interval =
680 base::TimeDelta::FromMilliseconds(kBeepInterval); 602 base::TimeDelta::FromMilliseconds(kBeepInterval);
681 const base::TimeDelta frame_interval = 603 const base::TimeDelta frame_interval =
682 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate); 604 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate);
683 beep_time_ += frame_interval; 605 beep_time_ += frame_interval;
684 elapsed_time_ += frame_interval; 606 elapsed_time_ += frame_interval;
685 607
686 // Generate a synchronized beep twice per second. 608 // Generate a synchronized beep twice per second.
687 if (beep_time_ >= beep_interval) { 609 if (beep_time_ >= beep_interval) {
688 FakeAudioInputStream::BeepOnce(); 610 FakeAudioInputStream::BeepOnce();
689 beep_time_ -= beep_interval; 611 beep_time_ -= beep_interval;
690 } 612 }
691 613
692 // Reschedule next CaptureTask. 614 // Reschedule next CaptureTask.
693 const base::TimeTicks current_time = base::TimeTicks::Now(); 615 const base::TimeTicks current_time = base::TimeTicks::Now();
694 // Don't accumulate any debt if we are lagging behind - just post the next 616 // Don't accumulate any debt if we are lagging behind - just post the next
695 // frame immediately and continue as normal. 617 // frame immediately and continue as normal.
696 const base::TimeTicks next_execution_time = 618 const base::TimeTicks next_execution_time =
697 std::max(current_time, expected_execution_time + frame_interval); 619 std::max(current_time, expected_execution_time + frame_interval);
698 const base::TimeDelta delay = next_execution_time - current_time; 620 const base::TimeDelta delay = next_execution_time - current_time;
699 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( 621 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
700 FROM_HERE, 622 FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue,
701 base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue, 623 weak_factory_.GetWeakPtr(), next_execution_time,
702 weak_factory_.GetWeakPtr(), next_execution_time, 624 current_session_id_),
703 current_session_id_),
704 delay); 625 delay);
705 } 626 }
706 627
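BeepAndScheduleNextCapture() above schedules the next frame at max(now, expected + frame_interval), so a late frame is posted immediately instead of creating a backlog of catch-up frames. A standalone sketch of that rescheduling rule, using std::chrono in place of base::TimeTicks (names are illustrative, not the Chromium API):

#include <algorithm>
#include <chrono>
#include <cstdio>

using Clock = std::chrono::steady_clock;

// Next frame is due one interval after the previously expected time, but never
// in the past: if we are lagging, fire now rather than accumulating debt.
Clock::time_point NextExecutionTime(Clock::time_point now,
                                    Clock::time_point expected_execution_time,
                                    Clock::duration frame_interval) {
  return std::max(now, expected_execution_time + frame_interval);
}

int main() {
  // Frame interval for 30 fps, matching the 1e6 / frame_rate computation above.
  const Clock::duration frame_interval =
      std::chrono::duration_cast<Clock::duration>(
          std::chrono::microseconds(static_cast<long long>(1e6 / 30.0)));
  const Clock::time_point now = Clock::now();
  // On-time case: the delay until the next frame is one full interval.
  const Clock::time_point next = NextExecutionTime(now, now, frame_interval);
  const auto delay =
      std::chrono::duration_cast<std::chrono::microseconds>(next - now);
  std::printf("delay: %lld us\n", static_cast<long long>(delay.count()));
  return 0;
}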
707 void FakeVideoCaptureDevice::OnNextFrameDue( 628 void FakeVideoCaptureDevice::OnNextFrameDue(
708 base::TimeTicks expected_execution_time, 629 base::TimeTicks expected_execution_time,
709 int session_id) { 630 int session_id) {
710 DCHECK(thread_checker_.CalledOnValidThread()); 631 DCHECK(thread_checker_.CalledOnValidThread());
711 if (session_id != current_session_id_) 632 if (session_id != current_session_id_)
712 return; 633 return;
713 634
714 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_); 635 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_);
715 BeepAndScheduleNextCapture(expected_execution_time); 636 BeepAndScheduleNextCapture(expected_execution_time);
716 } 637 }
717 638
718 } // namespace media 639 } // namespace media