Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(91)

Side by Side Diff: media/capture/video/fake_video_capture_device.cc

Issue 2619503003: Split FakeVideoCaptureDevice into classes with single responsibility (Closed)
Patch Set: Created 3 years, 11 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/capture/video/fake_video_capture_device.h" 5 #include "media/capture/video/fake_video_capture_device.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <algorithm> 8 #include <algorithm>
9 #include <utility> 9 #include <utility>
10 10
11 #include "base/bind.h" 11 #include "base/bind.h"
12 #include "base/location.h" 12 #include "base/location.h"
13 #include "base/single_thread_task_runner.h" 13 #include "base/single_thread_task_runner.h"
14 #include "base/strings/stringprintf.h" 14 #include "base/strings/stringprintf.h"
15 #include "base/threading/thread_task_runner_handle.h" 15 #include "base/threading/thread_task_runner_handle.h"
16 #include "media/audio/fake_audio_input_stream.h" 16 #include "media/audio/fake_audio_input_stream.h"
17 #include "media/base/video_frame.h" 17 #include "media/base/video_frame.h"
18 #include "mojo/public/cpp/bindings/string.h" 18 #include "mojo/public/cpp/bindings/string.h"
19 #include "third_party/skia/include/core/SkBitmap.h" 19 #include "third_party/skia/include/core/SkBitmap.h"
20 #include "third_party/skia/include/core/SkCanvas.h" 20 #include "third_party/skia/include/core/SkCanvas.h"
21 #include "third_party/skia/include/core/SkMatrix.h" 21 #include "third_party/skia/include/core/SkMatrix.h"
22 #include "third_party/skia/include/core/SkPaint.h" 22 #include "third_party/skia/include/core/SkPaint.h"
23 #include "ui/gfx/codec/png_codec.h" 23 #include "ui/gfx/codec/png_codec.h"
24 24
25 namespace media { 25 namespace media {
26 26
27 namespace {
27 // Sweep at 600 deg/sec. 28 // Sweep at 600 deg/sec.
28 static const float kPacmanAngularVelocity = 600; 29 static const float kPacmanAngularVelocity = 600;
29 // Beep every 500 ms. 30 // Beep every 500 ms.
30 static const int kBeepInterval = 500; 31 static const int kBeepInterval = 500;
31 // Gradient travels from bottom to top in 5 seconds. 32 // Gradient travels from bottom to top in 5 seconds.
32 static const float kGradientFrequency = 1.f / 5; 33 static const float kGradientFrequency = 1.f / 5;
33 34
34 static const double kMinZoom = 100.0; 35 static const double kMinZoom = 100.0;
35 static const double kMaxZoom = 400.0; 36 static const double kMaxZoom = 400.0;
36 static const double kZoomStep = 1.0; 37 static const double kZoomStep = 1.0;
38 static const double kInitialZoom = 100.0;
39
40 static const gfx::Size kSupportedSizes[] = {
41 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480),
42 gfx::Size(1280, 720), gfx::Size(1920, 1080)};
43 static const int kSupportedSizesCount =
44 sizeof(kSupportedSizes) / sizeof(gfx::Size);
45
46 static const VideoPixelFormat kSupportedPixelFormats[] = {
47 PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB};
48
49 } // anonymous namespace
50
51 // static
52 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance(
53 VideoPixelFormat pixel_format,
54 DeliveryMode delivery_mode,
55 float frame_rate) {
56 bool pixel_format_supported = false;
57 for (const auto& supported_pixel_format : kSupportedPixelFormats) {
58 if (pixel_format == supported_pixel_format) {
59 pixel_format_supported = true;
60 break;
61 }
62 }
63 if (!pixel_format_supported) {
64 DLOG(ERROR) << "Requested an unsupported pixel format "
65 << VideoPixelFormatToString(pixel_format);
66 return nullptr;
67 }
68
69 auto device_state =
70 base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format);
71 auto video_frame_painter =
72 base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get());
73 std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy;
74 switch (delivery_mode) {
75 case DeliveryMode::USE_OWN_BUFFERS:
76 frame_delivery_strategy =
77 base::MakeUnique<OwnBufferFrameDeliveryStrategy>();
78 break;
79 case DeliveryMode::USE_CLIENT_BUFFERS:
80 frame_delivery_strategy =
81 base::MakeUnique<ClientBufferFrameDeliveryStrategy>();
82 break;
83 }
84
85 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>(
86 PIXEL_FORMAT_ARGB, device_state.get());
87 auto photo_device = base::MakeUnique<FakePhotoDevice>(
88 std::move(photo_frame_painter), device_state.get());
89
90 return base::MakeUnique<FakeVideoCaptureDevice>(
91 std::move(video_frame_painter), std::move(frame_delivery_strategy),
92 std::move(photo_device), std::move(device_state));
93 }
94
95 PacmanFramePainter::PacmanFramePainter(VideoPixelFormat pixel_format,
96 const FakeDeviceState* fake_device_state)
97 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) {}
98
99 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time,
100 uint8_t* target_buffer) {
101 DrawPacman(elapsed_time, target_buffer);
102 DrawGradientSquares(elapsed_time, target_buffer);
103 }
37 104
38 // Starting from top left, -45 deg gradient. Value at point (row, column) is 105 // Starting from top left, -45 deg gradient. Value at point (row, column) is
39 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where 106 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where
40 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per 107 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per
41 // component) or 65535 for Y16. 108 // component) or 65535 for Y16.
42 // This is handy for pixel tests where we use the squares to verify rendering. 109 // This is handy for pixel tests where we use the squares to verify rendering.
43 void DrawGradientSquares(VideoPixelFormat frame_format, 110 void PacmanFramePainter::DrawGradientSquares(base::TimeDelta elapsed_time,
44 uint8_t* const pixels, 111 uint8_t* target_buffer) {
45 base::TimeDelta elapsed_time, 112 const int width = fake_device_state_->format.frame_size.width();
46 const gfx::Size& frame_size) { 113 const int height = fake_device_state_->format.frame_size.height();
47 const int width = frame_size.width(); 114
48 const int height = frame_size.height();
49 const int side = width / 16; // square side length. 115 const int side = width / 16; // square side length.
50 DCHECK(side); 116 DCHECK(side);
51 const gfx::Point squares[] = {{0, 0}, 117 const gfx::Point squares[] = {{0, 0},
52 {width - side, 0}, 118 {width - side, 0},
53 {0, height - side}, 119 {0, height - side},
54 {width - side, height - side}}; 120 {width - side, height - side}};
55 const float start = 121 const float start =
56 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); 122 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536);
57 const float color_step = 65535 / static_cast<float>(width + height); 123 const float color_step = 65535 / static_cast<float>(width + height);
58 for (const auto& corner : squares) { 124 for (const auto& corner : squares) {
59 for (int y = corner.y(); y < corner.y() + side; ++y) { 125 for (int y = corner.y(); y < corner.y() + side; ++y) {
60 for (int x = corner.x(); x < corner.x() + side; ++x) { 126 for (int x = corner.x(); x < corner.x() + side; ++x) {
61 const unsigned int value = 127 const unsigned int value =
62 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; 128 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF;
63 size_t offset = (y * width) + x; 129 size_t offset = (y * width) + x;
64 switch (frame_format) { 130 switch (pixel_format_) {
65 case PIXEL_FORMAT_Y16: 131 case PIXEL_FORMAT_Y16:
66 pixels[offset * sizeof(uint16_t)] = value & 0xFF; 132 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF;
67 pixels[offset * sizeof(uint16_t) + 1] = value >> 8; 133 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8;
68 break; 134 break;
69 case PIXEL_FORMAT_ARGB: 135 case PIXEL_FORMAT_ARGB:
70 pixels[offset * sizeof(uint32_t) + 1] = value >> 8; 136 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8;
71 pixels[offset * sizeof(uint32_t) + 2] = value >> 8; 137 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8;
72 pixels[offset * sizeof(uint32_t) + 3] = value >> 8; 138 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8;
73 break; 139 break;
74 default: 140 default:
75 pixels[offset] = value >> 8; 141 target_buffer[offset] = value >> 8;
76 break; 142 break;
77 } 143 }
78 } 144 }
79 } 145 }
80 } 146 }
81 } 147 }
82 148
83 void DrawPacman(VideoPixelFormat frame_format, 149 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time,
84 uint8_t* const data, 150 uint8_t* target_buffer) {
85 base::TimeDelta elapsed_time, 151 const int width = fake_device_state_->format.frame_size.width();
86 float frame_rate, 152 const int height = fake_device_state_->format.frame_size.height();
87 const gfx::Size& frame_size, 153
88 double zoom) {
89 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. 154 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format.
90 const SkColorType colorspace = (frame_format == PIXEL_FORMAT_ARGB) 155 const SkColorType colorspace = (pixel_format_ == PIXEL_FORMAT_ARGB)
91 ? kN32_SkColorType 156 ? kN32_SkColorType
92 : kAlpha_8_SkColorType; 157 : kAlpha_8_SkColorType;
 93 // Skia doesn't support 16 bit alpha rendering, so we use 8 bit alpha and then use 158 // this as high byte values in 16 bit pixels.
94 // this as high byte values in 16 bit pixels. 159 // this as high byte values in 16 bit pixels.
95 const SkImageInfo info = SkImageInfo::Make( 160 const SkImageInfo info =
96 frame_size.width(), frame_size.height(), colorspace, kOpaque_SkAlphaType); 161 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType);
97 SkBitmap bitmap; 162 SkBitmap bitmap;
98 bitmap.setInfo(info); 163 bitmap.setInfo(info);
99 bitmap.setPixels(data); 164 bitmap.setPixels(target_buffer);
100 SkPaint paint; 165 SkPaint paint;
101 paint.setStyle(SkPaint::kFill_Style); 166 paint.setStyle(SkPaint::kFill_Style);
102 SkCanvas canvas(bitmap); 167 SkCanvas canvas(bitmap);
103 168
104 const SkScalar unscaled_zoom = zoom / 100.f; 169 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f;
105 SkMatrix matrix; 170 SkMatrix matrix;
106 matrix.setScale(unscaled_zoom, unscaled_zoom, frame_size.width() / 2, 171 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2);
107 frame_size.height() / 2);
108 canvas.setMatrix(matrix); 172 canvas.setMatrix(matrix);
109 173
110 // Equalize Alpha_8 that has light green background while RGBA has white. 174 // Equalize Alpha_8 that has light green background while RGBA has white.
111 if (frame_format == PIXEL_FORMAT_ARGB) { 175 if (pixel_format_ == PIXEL_FORMAT_ARGB) {
112 const SkRect full_frame = 176 const SkRect full_frame = SkRect::MakeWH(width, height);
113 SkRect::MakeWH(frame_size.width(), frame_size.height());
114 paint.setARGB(255, 0, 127, 0); 177 paint.setARGB(255, 0, 127, 0);
115 canvas.drawRect(full_frame, paint); 178 canvas.drawRect(full_frame, paint);
116 } 179 }
117 paint.setColor(SK_ColorGREEN); 180 paint.setColor(SK_ColorGREEN);
118 181
119 // Draw a sweeping circle to show an animation. 182 // Draw a sweeping circle to show an animation.
120 const float end_angle = 183 const float end_angle =
121 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); 184 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361);
122 const int radius = std::min(frame_size.width(), frame_size.height()) / 4; 185 const int radius = std::min(width, height) / 4;
123 const SkRect rect = SkRect::MakeXYWH(frame_size.width() / 2 - radius, 186 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius,
124 frame_size.height() / 2 - radius,
125 2 * radius, 2 * radius); 187 2 * radius, 2 * radius);
126 canvas.drawArc(rect, 0, end_angle, true, paint); 188 canvas.drawArc(rect, 0, end_angle, true, paint);
127 189
128 // Draw current time. 190 // Draw current time.
129 const int milliseconds = elapsed_time.InMilliseconds() % 1000; 191 const int milliseconds = elapsed_time.InMilliseconds() % 1000;
130 const int seconds = elapsed_time.InSeconds() % 60; 192 const int seconds = elapsed_time.InSeconds() % 60;
131 const int minutes = elapsed_time.InMinutes() % 60; 193 const int minutes = elapsed_time.InMinutes() % 60;
132 const int hours = elapsed_time.InHours(); 194 const int hours = elapsed_time.InHours();
133 const int frame_count = elapsed_time.InMilliseconds() * frame_rate / 1000; 195 const int frame_count = elapsed_time.InMilliseconds() *
196 fake_device_state_->format.frame_rate / 1000;
134 197
135 const std::string time_string = 198 const std::string time_string =
136 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, 199 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds,
137 milliseconds, frame_count); 200 milliseconds, frame_count);
138 canvas.scale(3, 3); 201 canvas.scale(3, 3);
139 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); 202 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint);
140 203
141 if (frame_format == PIXEL_FORMAT_Y16) { 204 if (pixel_format_ == PIXEL_FORMAT_Y16) {
142 // Use 8 bit bitmap rendered to first half of the buffer as high byte values 205 // Use 8 bit bitmap rendered to first half of the buffer as high byte values
143 // for the whole buffer. Low byte values are not important. 206 // for the whole buffer. Low byte values are not important.
144 for (int i = frame_size.GetArea() - 1; i >= 0; --i) 207 for (int i = (width * height) - 1; i >= 0; --i)
145 data[i * 2 + 1] = data[i]; 208 target_buffer[i * 2 + 1] = target_buffer[i];
146 } 209 }
147 DrawGradientSquares(frame_format, data, elapsed_time, frame_size);
148 } 210 }
149 211
150 // Creates a PNG-encoded frame and sends it back to |callback|. The other 212 FakePhotoDevice::FakePhotoDevice(std::unique_ptr<FramePainter> argb_painter,
151 // parameters are used to replicate the PacMan rendering. 213 const FakeDeviceState* fake_device_state)
152 void DoTakeFakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 214 : argb_painter_(std::move(argb_painter)),
153 const VideoCaptureFormat& capture_format, 215 fake_device_state_(fake_device_state) {}
154 base::TimeDelta elapsed_time, 216
155 float fake_capture_rate, 217 FakePhotoDevice::~FakePhotoDevice() = default;
156 uint32_t zoom) { 218
219 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
220 base::TimeDelta elapsed_time) {
221 // Create a PNG-encoded frame and send it back to |callback|.
157 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( 222 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize(
158 PIXEL_FORMAT_ARGB, capture_format.frame_size)]); 223 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size)]);
159 224 argb_painter_->PaintFrame(elapsed_time, buffer.get());
160 DrawPacman(PIXEL_FORMAT_ARGB, buffer.get(), elapsed_time, fake_capture_rate,
161 capture_format.frame_size, zoom);
162
163 mojom::BlobPtr blob = mojom::Blob::New(); 225 mojom::BlobPtr blob = mojom::Blob::New();
164 const bool result = gfx::PNGCodec::Encode( 226 const bool result =
165 buffer.get(), gfx::PNGCodec::FORMAT_RGBA, capture_format.frame_size, 227 gfx::PNGCodec::Encode(buffer.get(), gfx::PNGCodec::FORMAT_RGBA,
166 capture_format.frame_size.width() * 4, true /* discard_transparency */, 228 fake_device_state_->format.frame_size,
167 std::vector<gfx::PNGCodec::Comment>(), &blob->data); 229 fake_device_state_->format.frame_size.width() * 4,
230 true /* discard_transparency */,
231 std::vector<gfx::PNGCodec::Comment>(), &blob->data);
168 DCHECK(result); 232 DCHECK(result);
169 233
170 blob->mime_type = "image/png"; 234 blob->mime_type = "image/png";
171 callback.Run(std::move(blob)); 235 callback.Run(std::move(blob));
172 } 236 }
173 237
174 FakeVideoCaptureDevice::FakeVideoCaptureDevice(BufferOwnership buffer_ownership, 238 FakeVideoCaptureDevice::FakeVideoCaptureDevice(
175 float fake_capture_rate, 239 std::unique_ptr<FramePainter> frame_painter,
176 VideoPixelFormat pixel_format) 240 std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy,
177 : buffer_ownership_(buffer_ownership), 241 std::unique_ptr<FakePhotoDevice> photo_device,
178 fake_capture_rate_(fake_capture_rate), 242 std::unique_ptr<FakeDeviceState> device_state)
179 pixel_format_(pixel_format), 243 : frame_painter_(std::move(frame_painter)),
180 current_zoom_(kMinZoom), 244 frame_delivery_strategy_(std::move(frame_delivery_strategy)),
245 photo_device_(std::move(photo_device)),
246 device_state_(std::move(device_state)),
181 weak_factory_(this) {} 247 weak_factory_(this) {}
182 248
183 FakeVideoCaptureDevice::~FakeVideoCaptureDevice() { 249 FakeVideoCaptureDevice::~FakeVideoCaptureDevice() {
184 DCHECK(thread_checker_.CalledOnValidThread()); 250 DCHECK(thread_checker_.CalledOnValidThread());
185 } 251 }
186 252
253 // static
254 void FakeVideoCaptureDevice::GetSupportedSizes(
255 std::vector<gfx::Size>* supported_sizes) {
256 for (int i = 0; i < kSupportedSizesCount; i++) {
257 supported_sizes->push_back(kSupportedSizes[i]);
258 }
259 }
260
261 // static
262 gfx::Size FakeVideoCaptureDevice::SnapToSupportedSize(
263 const gfx::Size& requested_size) {
264 for (int i = 0; i < kSupportedSizesCount; i++) {
265 const gfx::Size& supported_size = kSupportedSizes[i];
266 if (requested_size.width() <= supported_size.width()) {
267 return supported_size;
268 }
269 }
270 return kSupportedSizes[kSupportedSizesCount - 1];
271 }
272
187 void FakeVideoCaptureDevice::AllocateAndStart( 273 void FakeVideoCaptureDevice::AllocateAndStart(
188 const VideoCaptureParams& params, 274 const VideoCaptureParams& params,
189 std::unique_ptr<VideoCaptureDevice::Client> client) { 275 std::unique_ptr<VideoCaptureDevice::Client> client) {
190 DCHECK(thread_checker_.CalledOnValidThread()); 276 DCHECK(thread_checker_.CalledOnValidThread());
191 277
192 client_ = std::move(client);
193
194 // Incoming |params| can be none of the supported formats, so we get the
195 // closest thing rounded up. TODO(mcasas): Use the |params|, if they belong to
196 // the supported ones, when http://crbug.com/309554 is verified.
197 capture_format_.frame_rate = fake_capture_rate_;
198 if (params.requested_format.frame_size.width() > 1280)
199 capture_format_.frame_size.SetSize(1920, 1080);
200 else if (params.requested_format.frame_size.width() > 640)
201 capture_format_.frame_size.SetSize(1280, 720);
202 else if (params.requested_format.frame_size.width() > 320)
203 capture_format_.frame_size.SetSize(640, 480);
204 else if (params.requested_format.frame_size.width() > 96)
205 capture_format_.frame_size.SetSize(320, 240);
206 else
207 capture_format_.frame_size.SetSize(96, 96);
208
209 capture_format_.pixel_format = pixel_format_;
210 if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) {
211 capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
212 capture_format_.pixel_format = PIXEL_FORMAT_ARGB;
213 DVLOG(1) << "starting with client argb buffers";
214 } else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) {
215 capture_format_.pixel_storage = PIXEL_STORAGE_CPU;
216 DVLOG(1) << "starting with own " << VideoPixelFormatToString(pixel_format_)
217 << " buffers";
218 }
219
220 if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) {
221 fake_frame_.reset(new uint8_t[VideoFrame::AllocationSize(
222 pixel_format_, capture_format_.frame_size)]);
223 }
224
225 beep_time_ = base::TimeDelta(); 278 beep_time_ = base::TimeDelta();
226 elapsed_time_ = base::TimeDelta(); 279 elapsed_time_ = base::TimeDelta();
227 280 device_state_->format.frame_size =
228 if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) 281 SnapToSupportedSize(params.requested_format.frame_size);
229 BeepAndScheduleNextCapture( 282 frame_delivery_strategy_->Initialize(device_state_->format.pixel_format,
230 base::TimeTicks::Now(), 283 std::move(client), device_state_.get());
231 base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers, 284 device_running_ = true;
232 weak_factory_.GetWeakPtr())); 285 current_session_id_++;
233 else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) 286 BeepAndScheduleNextCapture(base::TimeTicks::Now());
234 BeepAndScheduleNextCapture(
235 base::TimeTicks::Now(),
236 base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers,
237 weak_factory_.GetWeakPtr()));
238 } 287 }
239 288
240 void FakeVideoCaptureDevice::StopAndDeAllocate() { 289 void FakeVideoCaptureDevice::StopAndDeAllocate() {
241 DCHECK(thread_checker_.CalledOnValidThread()); 290 DCHECK(thread_checker_.CalledOnValidThread());
242 client_.reset(); 291
292 // Update flag to stop the perpetual scheduling of tasks.
293 device_running_ = false;
294 frame_delivery_strategy_->Uninitialize();
243 } 295 }
244 296
245 void FakeVideoCaptureDevice::GetPhotoCapabilities( 297 void FakeVideoCaptureDevice::GetPhotoCapabilities(
246 GetPhotoCapabilitiesCallback callback) { 298 GetPhotoCapabilitiesCallback callback) {
299 DCHECK(thread_checker_.CalledOnValidThread());
300 photo_device_->GetPhotoCapabilities(std::move(callback));
301 }
302
303 void FakePhotoDevice::GetPhotoCapabilities(
304 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback) {
247 mojom::PhotoCapabilitiesPtr photo_capabilities = 305 mojom::PhotoCapabilitiesPtr photo_capabilities =
248 mojom::PhotoCapabilities::New(); 306 mojom::PhotoCapabilities::New();
249 photo_capabilities->iso = mojom::Range::New(); 307 photo_capabilities->iso = mojom::Range::New();
250 photo_capabilities->iso->current = 100.0; 308 photo_capabilities->iso->current = 100.0;
251 photo_capabilities->iso->max = 100.0; 309 photo_capabilities->iso->max = 100.0;
252 photo_capabilities->iso->min = 100.0; 310 photo_capabilities->iso->min = 100.0;
253 photo_capabilities->iso->step = 0.0; 311 photo_capabilities->iso->step = 0.0;
254 photo_capabilities->height = mojom::Range::New(); 312 photo_capabilities->height = mojom::Range::New();
255 photo_capabilities->height->current = capture_format_.frame_size.height(); 313 photo_capabilities->height->current =
314 fake_device_state_->format.frame_size.height();
256 photo_capabilities->height->max = 1080.0; 315 photo_capabilities->height->max = 1080.0;
257 photo_capabilities->height->min = 96.0; 316 photo_capabilities->height->min = 96.0;
258 photo_capabilities->height->step = 1.0; 317 photo_capabilities->height->step = 1.0;
259 photo_capabilities->width = mojom::Range::New(); 318 photo_capabilities->width = mojom::Range::New();
260 photo_capabilities->width->current = capture_format_.frame_size.width(); 319 photo_capabilities->width->current =
320 fake_device_state_->format.frame_size.width();
261 photo_capabilities->width->max = 1920.0; 321 photo_capabilities->width->max = 1920.0;
262 photo_capabilities->width->min = 96.0; 322 photo_capabilities->width->min = 96.0;
263 photo_capabilities->width->step = 1; 323 photo_capabilities->width->step = 1.0;
264 photo_capabilities->zoom = mojom::Range::New(); 324 photo_capabilities->zoom = mojom::Range::New();
265 photo_capabilities->zoom->current = current_zoom_; 325 photo_capabilities->zoom->current = fake_device_state_->zoom;
266 photo_capabilities->zoom->max = kMaxZoom; 326 photo_capabilities->zoom->max = kMaxZoom;
267 photo_capabilities->zoom->min = kMinZoom; 327 photo_capabilities->zoom->min = kMinZoom;
268 photo_capabilities->zoom->step = kZoomStep; 328 photo_capabilities->zoom->step = kZoomStep;
269 photo_capabilities->focus_mode = mojom::MeteringMode::NONE; 329 photo_capabilities->focus_mode = mojom::MeteringMode::NONE;
270 photo_capabilities->exposure_mode = mojom::MeteringMode::NONE; 330 photo_capabilities->exposure_mode = mojom::MeteringMode::NONE;
271 photo_capabilities->exposure_compensation = mojom::Range::New(); 331 photo_capabilities->exposure_compensation = mojom::Range::New();
272 photo_capabilities->white_balance_mode = mojom::MeteringMode::NONE; 332 photo_capabilities->white_balance_mode = mojom::MeteringMode::NONE;
273 photo_capabilities->fill_light_mode = mojom::FillLightMode::NONE; 333 photo_capabilities->fill_light_mode = mojom::FillLightMode::NONE;
274 photo_capabilities->red_eye_reduction = false; 334 photo_capabilities->red_eye_reduction = false;
275 photo_capabilities->color_temperature = mojom::Range::New(); 335 photo_capabilities->color_temperature = mojom::Range::New();
276 photo_capabilities->brightness = media::mojom::Range::New(); 336 photo_capabilities->brightness = media::mojom::Range::New();
277 photo_capabilities->contrast = media::mojom::Range::New(); 337 photo_capabilities->contrast = media::mojom::Range::New();
278 photo_capabilities->saturation = media::mojom::Range::New(); 338 photo_capabilities->saturation = media::mojom::Range::New();
279 photo_capabilities->sharpness = media::mojom::Range::New(); 339 photo_capabilities->sharpness = media::mojom::Range::New();
280 callback.Run(std::move(photo_capabilities)); 340 callback.Run(std::move(photo_capabilities));
281 } 341 }
282 342
283 void FakeVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings, 343 void FakeVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings,
284 SetPhotoOptionsCallback callback) { 344 SetPhotoOptionsCallback callback) {
285 if (settings->has_zoom) 345 DCHECK(thread_checker_.CalledOnValidThread());
286 current_zoom_ = std::max(kMinZoom, std::min(settings->zoom, kMaxZoom)); 346 if (settings->has_zoom) {
347 device_state_->zoom =
348 std::max(kMinZoom, std::min(settings->zoom, kMaxZoom));
349 }
350
287 callback.Run(true); 351 callback.Run(true);
288 } 352 }
289 353
290 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { 354 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) {
355 DCHECK(thread_checker_.CalledOnValidThread());
291 base::ThreadTaskRunnerHandle::Get()->PostTask( 356 base::ThreadTaskRunnerHandle::Get()->PostTask(
292 FROM_HERE, 357 FROM_HERE, base::Bind(&FakePhotoDevice::TakePhoto,
293 base::Bind(&DoTakeFakePhoto, base::Passed(&callback), capture_format_, 358 base::Unretained(photo_device_.get()),
294 elapsed_time_, fake_capture_rate_, current_zoom_)); 359 base::Passed(&callback), elapsed_time_));
295 } 360 }
296 361
297 void FakeVideoCaptureDevice::CaptureUsingOwnBuffers( 362 OwnBufferFrameDeliveryStrategy::OwnBufferFrameDeliveryStrategy() = default;
298 base::TimeTicks expected_execution_time) {
299 DCHECK(thread_checker_.CalledOnValidThread());
300 const size_t frame_size = capture_format_.ImageAllocationSize();
301 363
302 memset(fake_frame_.get(), 0, frame_size); 364 OwnBufferFrameDeliveryStrategy::~OwnBufferFrameDeliveryStrategy() = default;
303 DrawPacman(capture_format_.pixel_format, fake_frame_.get(), elapsed_time_, 365
304 fake_capture_rate_, capture_format_.frame_size, current_zoom_); 366 void OwnBufferFrameDeliveryStrategy::Initialize(
305 // Give the captured frame to the client. 367 VideoPixelFormat pixel_format,
368 std::unique_ptr<VideoCaptureDevice::Client> client,
369 const FakeDeviceState* device_state) {
370 client_ = std::move(client);
371 device_state_ = device_state;
372 buffer_.reset(new uint8_t[VideoFrame::AllocationSize(
373 pixel_format, device_state_->format.frame_size)]);
374 }
375
376 void OwnBufferFrameDeliveryStrategy::Uninitialize() {
377 client_.reset();
378 device_state_ = nullptr;
379 buffer_.reset();
380 }
381
382 uint8_t* OwnBufferFrameDeliveryStrategy::PrepareBufferForNextFrame() {
383 if (client_ == nullptr)
384 return nullptr;
385
386 const size_t frame_size = device_state_->format.ImageAllocationSize();
387 memset(buffer_.get(), 0, frame_size);
388 return buffer_.get();
389 }
390
391 void OwnBufferFrameDeliveryStrategy::DeliverFrame() {
392 if (client_ == nullptr)
393 return;
394 const size_t frame_size = device_state_->format.ImageAllocationSize();
306 base::TimeTicks now = base::TimeTicks::Now(); 395 base::TimeTicks now = base::TimeTicks::Now();
307 if (first_ref_time_.is_null()) 396 if (first_ref_time_.is_null())
308 first_ref_time_ = now; 397 first_ref_time_ = now;
309 client_->OnIncomingCapturedData(fake_frame_.get(), frame_size, 398 client_->OnIncomingCapturedData(buffer_.get(), frame_size,
310 capture_format_, 0 /* rotation */, now, 399 device_state_->format, 0 /* rotation */, now,
311 now - first_ref_time_); 400 now - first_ref_time_);
312 BeepAndScheduleNextCapture(
313 expected_execution_time,
314 base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers,
315 weak_factory_.GetWeakPtr()));
316 } 401 }
317 402
318 void FakeVideoCaptureDevice::CaptureUsingClientBuffers( 403 ClientBufferFrameDeliveryStrategy::ClientBufferFrameDeliveryStrategy() =
319 base::TimeTicks expected_execution_time) { 404 default;
320 DCHECK(thread_checker_.CalledOnValidThread()); 405
406 ClientBufferFrameDeliveryStrategy::~ClientBufferFrameDeliveryStrategy() =
407 default;
408
409 void ClientBufferFrameDeliveryStrategy::Initialize(
410 VideoPixelFormat,
411 std::unique_ptr<VideoCaptureDevice::Client> client,
412 const FakeDeviceState* device_state) {
413 client_ = std::move(client);
414 device_state_ = device_state;
415 }
416
417 void ClientBufferFrameDeliveryStrategy::Uninitialize() {
418 client_.reset();
419 device_state_ = nullptr;
420 }
421
422 uint8_t* ClientBufferFrameDeliveryStrategy::PrepareBufferForNextFrame() {
423 if (client_ == nullptr)
424 return nullptr;
321 425
322 const int arbitrary_frame_feedback_id = 0; 426 const int arbitrary_frame_feedback_id = 0;
323 std::unique_ptr<VideoCaptureDevice::Client::Buffer> capture_buffer( 427 capture_buffer_ = client_->ReserveOutputBuffer(
324 client_->ReserveOutputBuffer( 428 device_state_->format.frame_size, device_state_->format.pixel_format,
325 capture_format_.frame_size, capture_format_.pixel_format, 429 device_state_->format.pixel_storage, arbitrary_frame_feedback_id);
326 capture_format_.pixel_storage, arbitrary_frame_feedback_id)); 430 DLOG_IF(ERROR, !capture_buffer_) << "Couldn't allocate Capture Buffer";
327 DLOG_IF(ERROR, !capture_buffer) << "Couldn't allocate Capture Buffer"; 431 DCHECK(capture_buffer_->data()) << "Buffer has NO backing memory";
328 DCHECK(capture_buffer->data()) << "Buffer has NO backing memory";
329 432
330 DCHECK_EQ(PIXEL_STORAGE_CPU, capture_format_.pixel_storage); 433 DCHECK_EQ(device_state_->format.pixel_storage, PIXEL_STORAGE_CPU);
331 uint8_t* data_ptr = static_cast<uint8_t*>(capture_buffer->data());
332 memset(data_ptr, 0, capture_buffer->mapped_size());
333 DrawPacman(capture_format_.pixel_format, data_ptr, elapsed_time_,
334 fake_capture_rate_, capture_format_.frame_size, current_zoom_);
335 434
336 // Give the captured frame to the client. 435 uint8_t* data_ptr = static_cast<uint8_t*>(capture_buffer_->data());
436 memset(data_ptr, 0, capture_buffer_->mapped_size());
437 return data_ptr;
438 }
439
440 void ClientBufferFrameDeliveryStrategy::DeliverFrame() {
441 if (client_ == nullptr)
442 return;
443
337 base::TimeTicks now = base::TimeTicks::Now(); 444 base::TimeTicks now = base::TimeTicks::Now();
338 if (first_ref_time_.is_null()) 445 if (first_ref_time_.is_null())
339 first_ref_time_ = now; 446 first_ref_time_ = now;
340 client_->OnIncomingCapturedBuffer(std::move(capture_buffer), capture_format_, 447 client_->OnIncomingCapturedBuffer(std::move(capture_buffer_),
341 now, now - first_ref_time_); 448 device_state_->format, now,
342 449 now - first_ref_time_);
343 BeepAndScheduleNextCapture(
344 expected_execution_time,
345 base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers,
346 weak_factory_.GetWeakPtr()));
347 } 450 }
348 451
349 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( 452 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture(
350 base::TimeTicks expected_execution_time, 453 base::TimeTicks expected_execution_time) {
351 const base::Callback<void(base::TimeTicks)>& next_capture) { 454 DCHECK(thread_checker_.CalledOnValidThread());
352 const base::TimeDelta beep_interval = 455 const base::TimeDelta beep_interval =
353 base::TimeDelta::FromMilliseconds(kBeepInterval); 456 base::TimeDelta::FromMilliseconds(kBeepInterval);
354 const base::TimeDelta frame_interval = 457 const base::TimeDelta frame_interval =
355 base::TimeDelta::FromMicroseconds(1e6 / fake_capture_rate_); 458 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate);
356 beep_time_ += frame_interval; 459 beep_time_ += frame_interval;
357 elapsed_time_ += frame_interval; 460 elapsed_time_ += frame_interval;
358 461
359 // Generate a synchronized beep twice per second. 462 // Generate a synchronized beep twice per second.
360 if (beep_time_ >= beep_interval) { 463 if (beep_time_ >= beep_interval) {
361 FakeAudioInputStream::BeepOnce(); 464 FakeAudioInputStream::BeepOnce();
362 beep_time_ -= beep_interval; 465 beep_time_ -= beep_interval;
363 } 466 }
364 467
365 // Reschedule next CaptureTask. 468 // Reschedule next CaptureTask.
366 const base::TimeTicks current_time = base::TimeTicks::Now(); 469 const base::TimeTicks current_time = base::TimeTicks::Now();
367 // Don't accumulate any debt if we are lagging behind - just post the next 470 // Don't accumulate any debt if we are lagging behind - just post the next
368 // frame immediately and continue as normal. 471 // frame immediately and continue as normal.
369 const base::TimeTicks next_execution_time = 472 const base::TimeTicks next_execution_time =
370 std::max(current_time, expected_execution_time + frame_interval); 473 std::max(current_time, expected_execution_time + frame_interval);
371 const base::TimeDelta delay = next_execution_time - current_time; 474 const base::TimeDelta delay = next_execution_time - current_time;
372 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( 475 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
373 FROM_HERE, base::Bind(next_capture, next_execution_time), delay); 476 FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue,
477 weak_factory_.GetWeakPtr(), next_execution_time,
478 current_session_id_),
479 delay);
480 }
481
482 void FakeVideoCaptureDevice::OnNextFrameDue(
483 base::TimeTicks expected_execution_time,
484 int session_id) {
485 DCHECK(thread_checker_.CalledOnValidThread());
486 if (!device_running_)
487 return;
488 if (session_id != current_session_id_)
489 return;
490
491 uint8_t* buffer = frame_delivery_strategy_->PrepareBufferForNextFrame();
492 frame_painter_->PaintFrame(elapsed_time_, buffer);
493 frame_delivery_strategy_->DeliverFrame();
494
495 BeepAndScheduleNextCapture(expected_execution_time);
374 } 496 }
375 497
376 } // namespace media 498 } // namespace media
OLDNEW

Powered by Google App Engine
This is Rietveld 408576698