Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/capture/video/fake_video_capture_device.h" | 5 #include "media/capture/video/fake_video_capture_device.h" |
| 6 | 6 |
| 7 #include <stddef.h> | 7 #include <stddef.h> |
| 8 #include <algorithm> | 8 #include <algorithm> |
| 9 #include <utility> | 9 #include <utility> |
| 10 | 10 |
| 11 #include "base/atomicops.h" | |
| 11 #include "base/bind.h" | 12 #include "base/bind.h" |
| 12 #include "base/location.h" | 13 #include "base/location.h" |
| 14 #include "base/macros.h" | |
| 15 #include "base/memory/weak_ptr.h" | |
| 13 #include "base/single_thread_task_runner.h" | 16 #include "base/single_thread_task_runner.h" |
| 14 #include "base/strings/stringprintf.h" | 17 #include "base/strings/stringprintf.h" |
| 18 #include "base/threading/thread_checker.h" | |
| 15 #include "base/threading/thread_task_runner_handle.h" | 19 #include "base/threading/thread_task_runner_handle.h" |
| 20 #include "base/time/time.h" | |
| 16 #include "media/audio/fake_audio_input_stream.h" | 21 #include "media/audio/fake_audio_input_stream.h" |
| 17 #include "media/base/video_frame.h" | 22 #include "media/base/video_frame.h" |
| 18 #include "third_party/skia/include/core/SkBitmap.h" | 23 #include "third_party/skia/include/core/SkBitmap.h" |
| 19 #include "third_party/skia/include/core/SkCanvas.h" | 24 #include "third_party/skia/include/core/SkCanvas.h" |
| 20 #include "third_party/skia/include/core/SkMatrix.h" | 25 #include "third_party/skia/include/core/SkMatrix.h" |
| 21 #include "third_party/skia/include/core/SkPaint.h" | 26 #include "third_party/skia/include/core/SkPaint.h" |
| 22 #include "ui/gfx/codec/png_codec.h" | 27 #include "ui/gfx/codec/png_codec.h" |
| 23 | 28 |
| 24 namespace media { | 29 namespace media { |
| 25 | 30 |
| 31 namespace { | |
| 26 // Sweep at 600 deg/sec. | 32 // Sweep at 600 deg/sec. |
| 27 static const float kPacmanAngularVelocity = 600; | 33 static const float kPacmanAngularVelocity = 600; |
| 28 // Beep every 500 ms. | 34 // Beep every 500 ms. |
| 29 static const int kBeepInterval = 500; | 35 static const int kBeepInterval = 500; |
| 30 // Gradient travels from bottom to top in 5 seconds. | 36 // Gradient travels from bottom to top in 5 seconds. |
| 31 static const float kGradientFrequency = 1.f / 5; | 37 static const float kGradientFrequency = 1.f / 5; |
| 32 | 38 |
| 33 static const double kMinZoom = 100.0; | 39 static const double kMinZoom = 100.0; |
| 34 static const double kMaxZoom = 400.0; | 40 static const double kMaxZoom = 400.0; |
| 35 static const double kZoomStep = 1.0; | 41 static const double kZoomStep = 1.0; |
| 42 static const double kInitialZoom = 100.0; | |
| 43 | |
| 44 static const gfx::Size kSupportedSizes[] = { | |
| 45 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480), | |
| 46 gfx::Size(1280, 720), gfx::Size(1920, 1080)}; | |
| 47 static const int kSupportedSizesCount = | |
| 48 sizeof(kSupportedSizes) / sizeof(gfx::Size); | |
| 49 | |
| 50 static const VideoPixelFormat kSupportedPixelFormats[] = { | |
| 51 PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB}; | |
| 52 | |
| 53 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) { | |
| 54 for (int i = 0; i < kSupportedSizesCount; i++) { | |
| 55 const gfx::Size& supported_size = kSupportedSizes[i]; | |
| 56 if (requested_size.width() <= supported_size.width()) { | |
| 57 return supported_size; | |
| 58 } | |
|
mcasas
2017/02/15 00:44:19
No {} in one-line bodies.
chfremer
2017/02/15 18:11:29
Done.
| |
| 59 } | |
|
mcasas
2017/02/15 00:44:19
for (const gfx::Size& supported_size : kSupportedSizes)
chfremer
2017/02/15 18:11:29
Done.
| |
| 60 return kSupportedSizes[kSupportedSizesCount - 1]; | |
| 61 } | |
| 62 | |
| 63 class FakeVideoCaptureDevice; | |
|
mcasas
2017/02/15 00:44:19
Probably not needed? I don't see any refs to it below.
chfremer
2017/02/15 18:11:29
Done.
| |
| 64 | |
| 65 // Represents the current state of a FakeVideoCaptureDevice. | |
| 66 // This is a separate struct because read-access to it is shared with several | |
| 67 // collaborating classes. | |
| 68 struct FakeDeviceState { | |
| 69 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format) | |
| 70 : zoom(zoom), | |
| 71 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {} | |
| 72 | |
| 73 uint32_t zoom; | |
| 74 VideoCaptureFormat format; | |
| 75 }; | |
| 76 | |
| 77 // Paints a frame into the given |target_buffer|. | |
| 78 class FramePainter { | |
| 79 public: | |
| 80 virtual void PaintFrame(base::TimeDelta elapsed_time, | |
| 81 uint8_t* target_buffer) = 0; | |
| 82 }; | |
|
mcasas
2017/02/15 00:44:19
Why a base class if there's only one class deriving from it?
chfremer
2017/02/15 18:11:29
Short answer: For abstraction and loose coupling.
mcasas
2017/02/15 18:23:47
That's all good, but in Chromium we only abstract when there is more than one implementation.
chfremer
2017/02/15 18:37:16
Done.
| |
| 83 | |
| 84 // Paints a "pacman-like" animated circle including textual information such | |
| 85 // as a frame count and timer. | |
| 86 class PacmanFramePainter : public FramePainter { | |
| 87 public: | |
| 88 // Currently, only the following values are supported for |pixel_format|: | |
| 89 // PIXEL_FORMAT_I420 | |
| 90 // PIXEL_FORMAT_Y16 | |
| 91 // PIXEL_FORMAT_ARGB | |
| 92 PacmanFramePainter(VideoPixelFormat pixel_format, | |
| 93 const FakeDeviceState* fake_device_state); | |
| 94 | |
| 95 // Implementation of FramePainter | |
| 96 void PaintFrame(base::TimeDelta elapsed_time, | |
| 97 uint8_t* target_buffer) override; | |
| 98 | |
| 99 private: | |
| 100 void DrawGradientSquares(base::TimeDelta elapsed_time, | |
| 101 uint8_t* target_buffer); | |
| 102 | |
| 103 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer); | |
| 104 | |
| 105 const VideoPixelFormat pixel_format_; | |
| 106 const FakeDeviceState* fake_device_state_ = nullptr; | |
| 107 }; | |
| 108 | |
| 109 // Delivers frames to a client, which is set via Initialize(). | |
| 110 class FrameDeliveryStrategy { | |
| 111 public: | |
| 112 virtual ~FrameDeliveryStrategy() {} | |
| 113 virtual void Initialize(VideoPixelFormat pixel_format, | |
| 114 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 115 const FakeDeviceState* device_state) = 0; | |
| 116 virtual void Uninitialize() = 0; | |
| 117 virtual uint8_t* PrepareBufferForNextFrame() = 0; | |
| 118 virtual void DeliverFrame() = 0; | |
| 119 | |
| 120 protected: | |
| 121 const FakeDeviceState* device_state_ = nullptr; | |
| 122 std::unique_ptr<VideoCaptureDevice::Client> client_; | |
| 123 // The system time when we receive the first frame. | |
| 124 base::TimeTicks first_ref_time_; | |
| 125 }; | |
| 126 | |
| 127 // Delivers frames using its own buffers via OnIncomingCapturedData(). | |
| 128 class OwnBufferFrameDeliveryStrategy : public FrameDeliveryStrategy { | |
| 129 public: | |
| 130 OwnBufferFrameDeliveryStrategy(); | |
| 131 ~OwnBufferFrameDeliveryStrategy() override; | |
| 132 | |
| 133 // Implementation of FrameDeliveryStrategy | |
| 134 void Initialize(VideoPixelFormat pixel_format, | |
| 135 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 136 const FakeDeviceState* device_state) override; | |
| 137 void Uninitialize() override; | |
| 138 uint8_t* PrepareBufferForNextFrame() override; | |
| 139 void DeliverFrame() override; | |
| 140 | |
| 141 private: | |
| 142 std::unique_ptr<uint8_t[]> buffer_; | |
| 143 }; | |
| 144 | |
| 145 // Delivers frames using buffers provided by the client via | |
| 146 // OnIncomingCapturedBuffer(). | |
| 147 class ClientBufferFrameDeliveryStrategy : public FrameDeliveryStrategy { | |
| 148 public: | |
| 149 ClientBufferFrameDeliveryStrategy(); | |
| 150 ~ClientBufferFrameDeliveryStrategy() override; | |
| 151 | |
| 152 // Implementation of FrameDeliveryStrategy | |
| 153 void Initialize(VideoPixelFormat pixel_format, | |
| 154 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 155 const FakeDeviceState* device_state) override; | |
| 156 void Uninitialize() override; | |
| 157 uint8_t* PrepareBufferForNextFrame() override; | |
| 158 void DeliverFrame() override; | |
| 159 | |
| 160 private: | |
| 161 VideoCaptureDevice::Client::Buffer capture_buffer_; | |
| 162 }; | |
| 163 | |
| 164 // Implements the photo functionality of a VideoCaptureDevice | |
| 165 class FakePhotoDevice { | |
| 166 public: | |
| 167 FakePhotoDevice(std::unique_ptr<FramePainter> argb_painter, | |
| 168 const FakeDeviceState* fake_device_state); | |
| 169 ~FakePhotoDevice(); | |
| 170 | |
| 171 void GetPhotoCapabilities( | |
| 172 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback); | |
| 173 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, | |
| 174 base::TimeDelta elapsed_time); | |
| 175 | |
| 176 private: | |
| 177 const std::unique_ptr<FramePainter> argb_painter_; | |
| 178 const FakeDeviceState* const fake_device_state_; | |
| 179 }; | |
| 180 | |
| 181 // Implementation of VideoCaptureDevice that generates test frames. This is | |
| 182 // useful for testing the video capture components without having to use real | |
| 183 // devices. The implementation schedules delayed tasks to itself to generate and | |
| 184 // deliver frames at the requested rate. | |
| 185 class FakeVideoCaptureDevice : public VideoCaptureDevice { | |
| 186 public: | |
| 187 FakeVideoCaptureDevice( | |
| 188 std::unique_ptr<FramePainter> frame_painter, | |
| 189 std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy, | |
| 190 std::unique_ptr<FakePhotoDevice> photo_device, | |
| 191 std::unique_ptr<FakeDeviceState> device_state); | |
| 192 ~FakeVideoCaptureDevice() override; | |
| 193 | |
| 194 // VideoCaptureDevice implementation. | |
| 195 void AllocateAndStart(const VideoCaptureParams& params, | |
| 196 std::unique_ptr<Client> client) override; | |
| 197 void StopAndDeAllocate() override; | |
| 198 void GetPhotoCapabilities(GetPhotoCapabilitiesCallback callback) override; | |
| 199 void SetPhotoOptions(mojom::PhotoSettingsPtr settings, | |
| 200 SetPhotoOptionsCallback callback) override; | |
| 201 void TakePhoto(TakePhotoCallback callback) override; | |
| 202 | |
| 203 private: | |
| 204 void BeepAndScheduleNextCapture(base::TimeTicks expected_execution_time); | |
| 205 void OnNextFrameDue(base::TimeTicks expected_execution_time, int session_id); | |
| 206 | |
| 207 const std::unique_ptr<FramePainter> frame_painter_; | |
| 208 const std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy_; | |
| 209 const std::unique_ptr<FakePhotoDevice> photo_device_; | |
| 210 const std::unique_ptr<FakeDeviceState> device_state_; | |
| 211 int current_session_id_ = 0; | |
| 212 | |
| 213 // Time when the next beep occurs. | |
| 214 base::TimeDelta beep_time_; | |
| 215 // Time since the fake video started rendering frames. | |
| 216 base::TimeDelta elapsed_time_; | |
| 217 | |
| 218 base::ThreadChecker thread_checker_; | |
| 219 | |
| 220 // FakeVideoCaptureDevice post tasks to itself for frame construction and | |
| 221 // needs to deal with asynchronous StopAndDeallocate(). | |
| 222 base::WeakPtrFactory<FakeVideoCaptureDevice> weak_factory_; | |
| 223 | |
| 224 DISALLOW_COPY_AND_ASSIGN(FakeVideoCaptureDevice); | |
| 225 }; | |
| 226 | |
| 227 } // anonymous namespace | |
| 228 | |
| 229 // static | |
| 230 void FakeVideoCaptureDeviceMaker::GetSupportedSizes( | |
| 231 std::vector<gfx::Size>* supported_sizes) { | |
| 232 for (int i = 0; i < kSupportedSizesCount; i++) | |
| 233 supported_sizes->push_back(kSupportedSizes[i]); | |
| 234 } | |
| 235 | |
| 236 // static | |
| 237 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance( | |
| 238 VideoPixelFormat pixel_format, | |
| 239 DeliveryMode delivery_mode, | |
| 240 float frame_rate) { | |
| 241 bool pixel_format_supported = false; | |
| 242 for (const auto& supported_pixel_format : kSupportedPixelFormats) { | |
| 243 if (pixel_format == supported_pixel_format) { | |
| 244 pixel_format_supported = true; | |
| 245 break; | |
| 246 } | |
| 247 } | |
| 248 if (!pixel_format_supported) { | |
| 249 DLOG(ERROR) << "Requested an unsupported pixel format " | |
| 250 << VideoPixelFormatToString(pixel_format); | |
| 251 return nullptr; | |
| 252 } | |
| 253 | |
| 254 auto device_state = | |
| 255 base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format); | |
| 256 auto video_frame_painter = | |
| 257 base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get()); | |
| 258 std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy; | |
| 259 switch (delivery_mode) { | |
| 260 case DeliveryMode::USE_OWN_BUFFERS: | |
| 261 frame_delivery_strategy = | |
| 262 base::MakeUnique<OwnBufferFrameDeliveryStrategy>(); | |
| 263 break; | |
| 264 case DeliveryMode::USE_CLIENT_BUFFERS: | |
| 265 frame_delivery_strategy = | |
| 266 base::MakeUnique<ClientBufferFrameDeliveryStrategy>(); | |
| 267 break; | |
| 268 } | |
| 269 | |
| 270 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>( | |
| 271 PIXEL_FORMAT_ARGB, device_state.get()); | |
| 272 auto photo_device = base::MakeUnique<FakePhotoDevice>( | |
| 273 std::move(photo_frame_painter), device_state.get()); | |
| 274 | |
| 275 return base::MakeUnique<FakeVideoCaptureDevice>( | |
| 276 std::move(video_frame_painter), std::move(frame_delivery_strategy), | |
| 277 std::move(photo_device), std::move(device_state)); | |
| 278 } | |
| 279 | |
| 280 PacmanFramePainter::PacmanFramePainter(VideoPixelFormat pixel_format, | |
| 281 const FakeDeviceState* fake_device_state) | |
| 282 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) {} | |
| 283 | |
| 284 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time, | |
| 285 uint8_t* target_buffer) { | |
| 286 DrawPacman(elapsed_time, target_buffer); | |
| 287 DrawGradientSquares(elapsed_time, target_buffer); | |
| 288 } | |
| 36 | 289 |
| 37 // Starting from top left, -45 deg gradient. Value at point (row, column) is | 290 // Starting from top left, -45 deg gradient. Value at point (row, column) is |
| 38 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where | 291 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where |
| 39 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per | 292 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per |
| 40 // component) or 65535 for Y16. | 293 // component) or 65535 for Y16. |
| 41 // This is handy for pixel tests where we use the squares to verify rendering. | 294 // This is handy for pixel tests where we use the squares to verify rendering. |
| 42 void DrawGradientSquares(VideoPixelFormat frame_format, | 295 void PacmanFramePainter::DrawGradientSquares(base::TimeDelta elapsed_time, |
| 43 uint8_t* const pixels, | 296 uint8_t* target_buffer) { |
| 44 base::TimeDelta elapsed_time, | 297 const int width = fake_device_state_->format.frame_size.width(); |
| 45 const gfx::Size& frame_size) { | 298 const int height = fake_device_state_->format.frame_size.height(); |
| 46 const int width = frame_size.width(); | 299 |
| 47 const int height = frame_size.height(); | |
| 48 const int side = width / 16; // square side length. | 300 const int side = width / 16; // square side length. |
| 49 DCHECK(side); | 301 DCHECK(side); |
| 50 const gfx::Point squares[] = {{0, 0}, | 302 const gfx::Point squares[] = {{0, 0}, |
| 51 {width - side, 0}, | 303 {width - side, 0}, |
| 52 {0, height - side}, | 304 {0, height - side}, |
| 53 {width - side, height - side}}; | 305 {width - side, height - side}}; |
| 54 const float start = | 306 const float start = |
| 55 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); | 307 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); |
| 56 const float color_step = 65535 / static_cast<float>(width + height); | 308 const float color_step = 65535 / static_cast<float>(width + height); |
| 57 for (const auto& corner : squares) { | 309 for (const auto& corner : squares) { |
| 58 for (int y = corner.y(); y < corner.y() + side; ++y) { | 310 for (int y = corner.y(); y < corner.y() + side; ++y) { |
| 59 for (int x = corner.x(); x < corner.x() + side; ++x) { | 311 for (int x = corner.x(); x < corner.x() + side; ++x) { |
| 60 const unsigned int value = | 312 const unsigned int value = |
| 61 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; | 313 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; |
| 62 size_t offset = (y * width) + x; | 314 size_t offset = (y * width) + x; |
| 63 switch (frame_format) { | 315 switch (pixel_format_) { |
| 64 case PIXEL_FORMAT_Y16: | 316 case PIXEL_FORMAT_Y16: |
| 65 pixels[offset * sizeof(uint16_t)] = value & 0xFF; | 317 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF; |
| 66 pixels[offset * sizeof(uint16_t) + 1] = value >> 8; | 318 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8; |
| 67 break; | 319 break; |
| 68 case PIXEL_FORMAT_ARGB: | 320 case PIXEL_FORMAT_ARGB: |
| 69 pixels[offset * sizeof(uint32_t) + 1] = value >> 8; | 321 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8; |
| 70 pixels[offset * sizeof(uint32_t) + 2] = value >> 8; | 322 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8; |
| 71 pixels[offset * sizeof(uint32_t) + 3] = value >> 8; | 323 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8; |
| 72 break; | 324 break; |
| 73 default: | 325 default: |
| 74 pixels[offset] = value >> 8; | 326 target_buffer[offset] = value >> 8; |
| 75 break; | 327 break; |
| 76 } | 328 } |
| 77 } | 329 } |
| 78 } | 330 } |
| 79 } | 331 } |
| 80 } | 332 } |
| 81 | 333 |
| 82 void DrawPacman(VideoPixelFormat frame_format, | 334 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time, |
| 83 uint8_t* const data, | 335 uint8_t* target_buffer) { |
| 84 base::TimeDelta elapsed_time, | 336 const int width = fake_device_state_->format.frame_size.width(); |
| 85 float frame_rate, | 337 const int height = fake_device_state_->format.frame_size.height(); |
| 86 const gfx::Size& frame_size, | 338 |
| 87 double zoom) { | |
| 88 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. | 339 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. |
| 89 const SkColorType colorspace = (frame_format == PIXEL_FORMAT_ARGB) | 340 const SkColorType colorspace = (pixel_format_ == PIXEL_FORMAT_ARGB) |
| 90 ? kN32_SkColorType | 341 ? kN32_SkColorType |
| 91 : kAlpha_8_SkColorType; | 342 : kAlpha_8_SkColorType; |
| 92 // Skia doesn't support 16 bit alpha rendering, so we 8 bit alpha and then use | 343 // Skia doesn't support 16 bit alpha rendering, so we 8 bit alpha and then use |
| 93 // this as high byte values in 16 bit pixels. | 344 // this as high byte values in 16 bit pixels. |
| 94 const SkImageInfo info = SkImageInfo::Make( | 345 const SkImageInfo info = |
| 95 frame_size.width(), frame_size.height(), colorspace, kOpaque_SkAlphaType); | 346 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType); |
| 96 SkBitmap bitmap; | 347 SkBitmap bitmap; |
| 97 bitmap.setInfo(info); | 348 bitmap.setInfo(info); |
| 98 bitmap.setPixels(data); | 349 bitmap.setPixels(target_buffer); |
| 99 SkPaint paint; | 350 SkPaint paint; |
| 100 paint.setStyle(SkPaint::kFill_Style); | 351 paint.setStyle(SkPaint::kFill_Style); |
| 101 SkCanvas canvas(bitmap); | 352 SkCanvas canvas(bitmap); |
| 102 | 353 |
| 103 const SkScalar unscaled_zoom = zoom / 100.f; | 354 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f; |
| 104 SkMatrix matrix; | 355 SkMatrix matrix; |
| 105 matrix.setScale(unscaled_zoom, unscaled_zoom, frame_size.width() / 2, | 356 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2); |
| 106 frame_size.height() / 2); | |
| 107 canvas.setMatrix(matrix); | 357 canvas.setMatrix(matrix); |
| 108 | 358 |
| 109 // Equalize Alpha_8 that has light green background while RGBA has white. | 359 // Equalize Alpha_8 that has light green background while RGBA has white. |
| 110 if (frame_format == PIXEL_FORMAT_ARGB) { | 360 if (pixel_format_ == PIXEL_FORMAT_ARGB) { |
| 111 const SkRect full_frame = | 361 const SkRect full_frame = SkRect::MakeWH(width, height); |
| 112 SkRect::MakeWH(frame_size.width(), frame_size.height()); | |
| 113 paint.setARGB(255, 0, 127, 0); | 362 paint.setARGB(255, 0, 127, 0); |
| 114 canvas.drawRect(full_frame, paint); | 363 canvas.drawRect(full_frame, paint); |
| 115 } | 364 } |
| 116 paint.setColor(SK_ColorGREEN); | 365 paint.setColor(SK_ColorGREEN); |
| 117 | 366 |
| 118 // Draw a sweeping circle to show an animation. | 367 // Draw a sweeping circle to show an animation. |
| 119 const float end_angle = | 368 const float end_angle = |
| 120 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); | 369 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); |
| 121 const int radius = std::min(frame_size.width(), frame_size.height()) / 4; | 370 const int radius = std::min(width, height) / 4; |
| 122 const SkRect rect = SkRect::MakeXYWH(frame_size.width() / 2 - radius, | 371 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius, |
| 123 frame_size.height() / 2 - radius, | |
| 124 2 * radius, 2 * radius); | 372 2 * radius, 2 * radius); |
| 125 canvas.drawArc(rect, 0, end_angle, true, paint); | 373 canvas.drawArc(rect, 0, end_angle, true, paint); |
| 126 | 374 |
| 127 // Draw current time. | 375 // Draw current time. |
| 128 const int milliseconds = elapsed_time.InMilliseconds() % 1000; | 376 const int milliseconds = elapsed_time.InMilliseconds() % 1000; |
| 129 const int seconds = elapsed_time.InSeconds() % 60; | 377 const int seconds = elapsed_time.InSeconds() % 60; |
| 130 const int minutes = elapsed_time.InMinutes() % 60; | 378 const int minutes = elapsed_time.InMinutes() % 60; |
| 131 const int hours = elapsed_time.InHours(); | 379 const int hours = elapsed_time.InHours(); |
| 132 const int frame_count = elapsed_time.InMilliseconds() * frame_rate / 1000; | 380 const int frame_count = elapsed_time.InMilliseconds() * |
| 381 fake_device_state_->format.frame_rate / 1000; | |
| 133 | 382 |
| 134 const std::string time_string = | 383 const std::string time_string = |
| 135 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, | 384 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, |
| 136 milliseconds, frame_count); | 385 milliseconds, frame_count); |
| 137 canvas.scale(3, 3); | 386 canvas.scale(3, 3); |
| 138 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); | 387 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); |
| 139 | 388 |
| 140 if (frame_format == PIXEL_FORMAT_Y16) { | 389 if (pixel_format_ == PIXEL_FORMAT_Y16) { |
| 141 // Use 8 bit bitmap rendered to first half of the buffer as high byte values | 390 // Use 8 bit bitmap rendered to first half of the buffer as high byte values |
| 142 // for the whole buffer. Low byte values are not important. | 391 // for the whole buffer. Low byte values are not important. |
| 143 for (int i = frame_size.GetArea() - 1; i >= 0; --i) | 392 for (int i = (width * height) - 1; i >= 0; --i) |
| 144 data[i * 2 + 1] = data[i]; | 393 target_buffer[i * 2 + 1] = target_buffer[i]; |
| 145 } | 394 } |
| 146 DrawGradientSquares(frame_format, data, elapsed_time, frame_size); | |
| 147 } | 395 } |
| 148 | 396 |
| 149 // Creates a PNG-encoded frame and sends it back to |callback|. The other | 397 FakePhotoDevice::FakePhotoDevice(std::unique_ptr<FramePainter> argb_painter, |
| 150 // parameters are used to replicate the PacMan rendering. | 398 const FakeDeviceState* fake_device_state) |
| 151 void DoTakeFakePhoto(VideoCaptureDevice::TakePhotoCallback callback, | 399 : argb_painter_(std::move(argb_painter)), |
| 152 const VideoCaptureFormat& capture_format, | 400 fake_device_state_(fake_device_state) {} |
| 153 base::TimeDelta elapsed_time, | 401 |
| 154 float fake_capture_rate, | 402 FakePhotoDevice::~FakePhotoDevice() = default; |
| 155 uint32_t zoom) { | 403 |
| 404 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, | |
| 405 base::TimeDelta elapsed_time) { | |
| 406 // Create a PNG-encoded frame and send it back to |callback|. | |
| 156 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( | 407 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( |
| 157 PIXEL_FORMAT_ARGB, capture_format.frame_size)]); | 408 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size)]); |
| 158 | 409 argb_painter_->PaintFrame(elapsed_time, buffer.get()); |
| 159 DrawPacman(PIXEL_FORMAT_ARGB, buffer.get(), elapsed_time, fake_capture_rate, | |
| 160 capture_format.frame_size, zoom); | |
| 161 | |
| 162 mojom::BlobPtr blob = mojom::Blob::New(); | 410 mojom::BlobPtr blob = mojom::Blob::New(); |
| 163 const bool result = gfx::PNGCodec::Encode( | 411 const bool result = |
| 164 buffer.get(), gfx::PNGCodec::FORMAT_RGBA, capture_format.frame_size, | 412 gfx::PNGCodec::Encode(buffer.get(), gfx::PNGCodec::FORMAT_RGBA, |
| 165 capture_format.frame_size.width() * 4, true /* discard_transparency */, | 413 fake_device_state_->format.frame_size, |
| 166 std::vector<gfx::PNGCodec::Comment>(), &blob->data); | 414 fake_device_state_->format.frame_size.width() * 4, |
| 415 true /* discard_transparency */, | |
| 416 std::vector<gfx::PNGCodec::Comment>(), &blob->data); | |
| 167 DCHECK(result); | 417 DCHECK(result); |
| 168 | 418 |
| 169 blob->mime_type = "image/png"; | 419 blob->mime_type = "image/png"; |
| 170 callback.Run(std::move(blob)); | 420 callback.Run(std::move(blob)); |
| 171 } | 421 } |
| 172 | 422 |
| 173 FakeVideoCaptureDevice::FakeVideoCaptureDevice(BufferOwnership buffer_ownership, | 423 FakeVideoCaptureDevice::FakeVideoCaptureDevice( |
| 174 float fake_capture_rate, | 424 std::unique_ptr<FramePainter> frame_painter, |
| 175 VideoPixelFormat pixel_format) | 425 std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy, |
| 176 : buffer_ownership_(buffer_ownership), | 426 std::unique_ptr<FakePhotoDevice> photo_device, |
| 177 fake_capture_rate_(fake_capture_rate), | 427 std::unique_ptr<FakeDeviceState> device_state) |
| 178 pixel_format_(pixel_format), | 428 : frame_painter_(std::move(frame_painter)), |
| 179 current_zoom_(kMinZoom), | 429 frame_delivery_strategy_(std::move(frame_delivery_strategy)), |
| 430 photo_device_(std::move(photo_device)), | |
| 431 device_state_(std::move(device_state)), | |
| 180 weak_factory_(this) {} | 432 weak_factory_(this) {} |
| 181 | 433 |
| 182 FakeVideoCaptureDevice::~FakeVideoCaptureDevice() { | 434 FakeVideoCaptureDevice::~FakeVideoCaptureDevice() { |
| 183 DCHECK(thread_checker_.CalledOnValidThread()); | 435 DCHECK(thread_checker_.CalledOnValidThread()); |
| 184 } | 436 } |
| 185 | 437 |
| 186 void FakeVideoCaptureDevice::AllocateAndStart( | 438 void FakeVideoCaptureDevice::AllocateAndStart( |
| 187 const VideoCaptureParams& params, | 439 const VideoCaptureParams& params, |
| 188 std::unique_ptr<VideoCaptureDevice::Client> client) { | 440 std::unique_ptr<VideoCaptureDevice::Client> client) { |
| 189 DCHECK(thread_checker_.CalledOnValidThread()); | 441 DCHECK(thread_checker_.CalledOnValidThread()); |
| 190 | 442 |
| 191 client_ = std::move(client); | |
| 192 | |
| 193 // Incoming |params| can be none of the supported formats, so we get the | |
| 194 // closest thing rounded up. TODO(mcasas): Use the |params|, if they belong to | |
| 195 // the supported ones, when http://crbug.com/309554 is verified. | |
| 196 capture_format_.frame_rate = fake_capture_rate_; | |
| 197 if (params.requested_format.frame_size.width() > 1280) | |
| 198 capture_format_.frame_size.SetSize(1920, 1080); | |
| 199 else if (params.requested_format.frame_size.width() > 640) | |
| 200 capture_format_.frame_size.SetSize(1280, 720); | |
| 201 else if (params.requested_format.frame_size.width() > 320) | |
| 202 capture_format_.frame_size.SetSize(640, 480); | |
| 203 else if (params.requested_format.frame_size.width() > 96) | |
| 204 capture_format_.frame_size.SetSize(320, 240); | |
| 205 else | |
| 206 capture_format_.frame_size.SetSize(96, 96); | |
| 207 | |
| 208 capture_format_.pixel_format = pixel_format_; | |
| 209 if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) { | |
| 210 capture_format_.pixel_storage = PIXEL_STORAGE_CPU; | |
| 211 capture_format_.pixel_format = PIXEL_FORMAT_ARGB; | |
| 212 DVLOG(1) << "starting with client argb buffers"; | |
| 213 } else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) { | |
| 214 capture_format_.pixel_storage = PIXEL_STORAGE_CPU; | |
| 215 DVLOG(1) << "starting with own " << VideoPixelFormatToString(pixel_format_) | |
| 216 << " buffers"; | |
| 217 } | |
| 218 | |
| 219 if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) { | |
| 220 fake_frame_.reset(new uint8_t[VideoFrame::AllocationSize( | |
| 221 pixel_format_, capture_format_.frame_size)]); | |
| 222 } | |
| 223 | |
| 224 beep_time_ = base::TimeDelta(); | 443 beep_time_ = base::TimeDelta(); |
| 225 elapsed_time_ = base::TimeDelta(); | 444 elapsed_time_ = base::TimeDelta(); |
| 226 | 445 device_state_->format.frame_size = |
| 227 if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) | 446 SnapToSupportedSize(params.requested_format.frame_size); |
| 228 BeepAndScheduleNextCapture( | 447 frame_delivery_strategy_->Initialize(device_state_->format.pixel_format, |
| 229 base::TimeTicks::Now(), | 448 std::move(client), device_state_.get()); |
| 230 base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers, | 449 current_session_id_++; |
| 231 weak_factory_.GetWeakPtr())); | 450 BeepAndScheduleNextCapture(base::TimeTicks::Now()); |
| 232 else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) | |
| 233 BeepAndScheduleNextCapture( | |
| 234 base::TimeTicks::Now(), | |
| 235 base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers, | |
| 236 weak_factory_.GetWeakPtr())); | |
| 237 } | 451 } |
| 238 | 452 |
| 239 void FakeVideoCaptureDevice::StopAndDeAllocate() { | 453 void FakeVideoCaptureDevice::StopAndDeAllocate() { |
| 240 DCHECK(thread_checker_.CalledOnValidThread()); | 454 DCHECK(thread_checker_.CalledOnValidThread()); |
| 241 client_.reset(); | 455 |
| 456 // Invalidate WeakPtr to stop the perpetual scheduling of tasks. | |
| 457 weak_factory_.InvalidateWeakPtrs(); | |
| 458 frame_delivery_strategy_->Uninitialize(); | |
| 242 } | 459 } |
| 243 | 460 |
| 244 void FakeVideoCaptureDevice::GetPhotoCapabilities( | 461 void FakeVideoCaptureDevice::GetPhotoCapabilities( |
| 245 GetPhotoCapabilitiesCallback callback) { | 462 GetPhotoCapabilitiesCallback callback) { |
| 463 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 464 photo_device_->GetPhotoCapabilities(std::move(callback)); | |
| 465 } | |
| 466 | |
| 467 void FakePhotoDevice::GetPhotoCapabilities( | |
| 468 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback) { | |
| 246 mojom::PhotoCapabilitiesPtr photo_capabilities = | 469 mojom::PhotoCapabilitiesPtr photo_capabilities = |
| 247 mojom::PhotoCapabilities::New(); | 470 mojom::PhotoCapabilities::New(); |
| 248 photo_capabilities->iso = mojom::Range::New(); | 471 photo_capabilities->iso = mojom::Range::New(); |
| 249 photo_capabilities->iso->current = 100.0; | 472 photo_capabilities->iso->current = 100.0; |
| 250 photo_capabilities->iso->max = 100.0; | 473 photo_capabilities->iso->max = 100.0; |
| 251 photo_capabilities->iso->min = 100.0; | 474 photo_capabilities->iso->min = 100.0; |
| 252 photo_capabilities->iso->step = 0.0; | 475 photo_capabilities->iso->step = 0.0; |
| 253 photo_capabilities->height = mojom::Range::New(); | 476 photo_capabilities->height = mojom::Range::New(); |
| 254 photo_capabilities->height->current = capture_format_.frame_size.height(); | 477 photo_capabilities->height->current = |
| 478 fake_device_state_->format.frame_size.height(); | |
| 255 photo_capabilities->height->max = 1080.0; | 479 photo_capabilities->height->max = 1080.0; |
| 256 photo_capabilities->height->min = 96.0; | 480 photo_capabilities->height->min = 96.0; |
| 257 photo_capabilities->height->step = 1.0; | 481 photo_capabilities->height->step = 1.0; |
| 258 photo_capabilities->width = mojom::Range::New(); | 482 photo_capabilities->width = mojom::Range::New(); |
| 259 photo_capabilities->width->current = capture_format_.frame_size.width(); | 483 photo_capabilities->width->current = |
| 484 fake_device_state_->format.frame_size.width(); | |
| 260 photo_capabilities->width->max = 1920.0; | 485 photo_capabilities->width->max = 1920.0; |
| 261 photo_capabilities->width->min = 96.0; | 486 photo_capabilities->width->min = 96.0; |
| 262 photo_capabilities->width->step = 1; | 487 photo_capabilities->width->step = 1.0; |
| 263 photo_capabilities->zoom = mojom::Range::New(); | 488 photo_capabilities->zoom = mojom::Range::New(); |
| 264 photo_capabilities->zoom->current = current_zoom_; | 489 photo_capabilities->zoom->current = fake_device_state_->zoom; |
| 265 photo_capabilities->zoom->max = kMaxZoom; | 490 photo_capabilities->zoom->max = kMaxZoom; |
| 266 photo_capabilities->zoom->min = kMinZoom; | 491 photo_capabilities->zoom->min = kMinZoom; |
| 267 photo_capabilities->zoom->step = kZoomStep; | 492 photo_capabilities->zoom->step = kZoomStep; |
| 268 photo_capabilities->focus_mode = mojom::MeteringMode::NONE; | 493 photo_capabilities->focus_mode = mojom::MeteringMode::NONE; |
| 269 photo_capabilities->exposure_mode = mojom::MeteringMode::NONE; | 494 photo_capabilities->exposure_mode = mojom::MeteringMode::NONE; |
| 270 photo_capabilities->exposure_compensation = mojom::Range::New(); | 495 photo_capabilities->exposure_compensation = mojom::Range::New(); |
| 271 photo_capabilities->white_balance_mode = mojom::MeteringMode::NONE; | 496 photo_capabilities->white_balance_mode = mojom::MeteringMode::NONE; |
| 272 photo_capabilities->fill_light_mode = mojom::FillLightMode::NONE; | 497 photo_capabilities->fill_light_mode = mojom::FillLightMode::NONE; |
| 273 photo_capabilities->red_eye_reduction = false; | 498 photo_capabilities->red_eye_reduction = false; |
| 274 photo_capabilities->color_temperature = mojom::Range::New(); | 499 photo_capabilities->color_temperature = mojom::Range::New(); |
| 275 photo_capabilities->brightness = media::mojom::Range::New(); | 500 photo_capabilities->brightness = media::mojom::Range::New(); |
| 276 photo_capabilities->contrast = media::mojom::Range::New(); | 501 photo_capabilities->contrast = media::mojom::Range::New(); |
| 277 photo_capabilities->saturation = media::mojom::Range::New(); | 502 photo_capabilities->saturation = media::mojom::Range::New(); |
| 278 photo_capabilities->sharpness = media::mojom::Range::New(); | 503 photo_capabilities->sharpness = media::mojom::Range::New(); |
| 279 callback.Run(std::move(photo_capabilities)); | 504 callback.Run(std::move(photo_capabilities)); |
| 280 } | 505 } |
| 281 | 506 |
| 282 void FakeVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings, | 507 void FakeVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings, |
| 283 SetPhotoOptionsCallback callback) { | 508 SetPhotoOptionsCallback callback) { |
| 284 if (settings->has_zoom) | 509 DCHECK(thread_checker_.CalledOnValidThread()); |
| 285 current_zoom_ = std::max(kMinZoom, std::min(settings->zoom, kMaxZoom)); | 510 if (settings->has_zoom) { |
| 511 device_state_->zoom = | |
| 512 std::max(kMinZoom, std::min(settings->zoom, kMaxZoom)); | |
| 513 } | |
| 514 | |
| 286 callback.Run(true); | 515 callback.Run(true); |
| 287 } | 516 } |
| 288 | 517 |
| 289 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { | 518 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { |
| 519 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 290 base::ThreadTaskRunnerHandle::Get()->PostTask( | 520 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 291 FROM_HERE, | 521 FROM_HERE, base::Bind(&FakePhotoDevice::TakePhoto, |
| 292 base::Bind(&DoTakeFakePhoto, base::Passed(&callback), capture_format_, | 522 base::Unretained(photo_device_.get()), |
| 293 elapsed_time_, fake_capture_rate_, current_zoom_)); | 523 base::Passed(&callback), elapsed_time_)); |
| 294 } | 524 } |
| 295 | 525 |
| 296 void FakeVideoCaptureDevice::CaptureUsingOwnBuffers( | 526 OwnBufferFrameDeliveryStrategy::OwnBufferFrameDeliveryStrategy() = default; |
| 297 base::TimeTicks expected_execution_time) { | |
| 298 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 299 const size_t frame_size = capture_format_.ImageAllocationSize(); | |
| 300 | 527 |
| 301 memset(fake_frame_.get(), 0, frame_size); | 528 OwnBufferFrameDeliveryStrategy::~OwnBufferFrameDeliveryStrategy() = default; |
| 302 DrawPacman(capture_format_.pixel_format, fake_frame_.get(), elapsed_time_, | 529 |
| 303 fake_capture_rate_, capture_format_.frame_size, current_zoom_); | 530 void OwnBufferFrameDeliveryStrategy::Initialize( |
| 304 // Give the captured frame to the client. | 531 VideoPixelFormat pixel_format, |
| 532 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 533 const FakeDeviceState* device_state) { | |
| 534 client_ = std::move(client); | |
| 535 device_state_ = device_state; | |
| 536 buffer_.reset(new uint8_t[VideoFrame::AllocationSize( | |
| 537 pixel_format, device_state_->format.frame_size)]); | |
| 538 } | |
| 539 | |
| 540 void OwnBufferFrameDeliveryStrategy::Uninitialize() { | |
| 541 client_.reset(); | |
| 542 device_state_ = nullptr; | |
| 543 buffer_.reset(); | |
| 544 } | |
| 545 | |
| 546 uint8_t* OwnBufferFrameDeliveryStrategy::PrepareBufferForNextFrame() { | |
| 547 if (client_ == nullptr) | |
|
mcasas
2017/02/15 00:44:19
if (!client_)
here and in l.556
chfremer
2017/02/15 18:11:29
Done.
| |
| 548 return nullptr; | |
| 549 | |
| 550 const size_t frame_size = device_state_->format.ImageAllocationSize(); | |
| 551 memset(buffer_.get(), 0, frame_size); | |
| 552 return buffer_.get(); | |
| 553 } | |
| 554 | |
| 555 void OwnBufferFrameDeliveryStrategy::DeliverFrame() { | |
| 556 if (client_ == nullptr) | |
| 557 return; | |
| 558 const size_t frame_size = device_state_->format.ImageAllocationSize(); | |
| 305 base::TimeTicks now = base::TimeTicks::Now(); | 559 base::TimeTicks now = base::TimeTicks::Now(); |
| 306 if (first_ref_time_.is_null()) | 560 if (first_ref_time_.is_null()) |
| 307 first_ref_time_ = now; | 561 first_ref_time_ = now; |
| 308 client_->OnIncomingCapturedData(fake_frame_.get(), frame_size, | 562 client_->OnIncomingCapturedData(buffer_.get(), frame_size, |
| 309 capture_format_, 0 /* rotation */, now, | 563 device_state_->format, 0 /* rotation */, now, |
| 310 now - first_ref_time_); | 564 now - first_ref_time_); |
| 311 BeepAndScheduleNextCapture( | |
| 312 expected_execution_time, | |
| 313 base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers, | |
| 314 weak_factory_.GetWeakPtr())); | |
| 315 } | 565 } |
| 316 | 566 |
| 317 void FakeVideoCaptureDevice::CaptureUsingClientBuffers( | 567 ClientBufferFrameDeliveryStrategy::ClientBufferFrameDeliveryStrategy() = |
| 318 base::TimeTicks expected_execution_time) { | 568 default; |
| 319 DCHECK(thread_checker_.CalledOnValidThread()); | 569 |
| 570 ClientBufferFrameDeliveryStrategy::~ClientBufferFrameDeliveryStrategy() = | |
| 571 default; | |
| 572 | |
| 573 void ClientBufferFrameDeliveryStrategy::Initialize( | |
| 574 VideoPixelFormat, | |
| 575 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 576 const FakeDeviceState* device_state) { | |
| 577 client_ = std::move(client); | |
| 578 device_state_ = device_state; | |
| 579 } | |
| 580 | |
| 581 void ClientBufferFrameDeliveryStrategy::Uninitialize() { | |
| 582 client_.reset(); | |
| 583 device_state_ = nullptr; | |
| 584 } | |
| 585 | |
| 586 uint8_t* ClientBufferFrameDeliveryStrategy::PrepareBufferForNextFrame() { | |
| 587 if (client_ == nullptr) | |
| 588 return nullptr; | |
| 320 | 589 |
| 321 const int arbitrary_frame_feedback_id = 0; | 590 const int arbitrary_frame_feedback_id = 0; |
| 322 VideoCaptureDevice::Client::Buffer capture_buffer = | 591 capture_buffer_ = client_->ReserveOutputBuffer( |
| 323 client_->ReserveOutputBuffer( | 592 device_state_->format.frame_size, device_state_->format.pixel_format, |
| 324 capture_format_.frame_size, capture_format_.pixel_format, | 593 device_state_->format.pixel_storage, arbitrary_frame_feedback_id); |
| 325 capture_format_.pixel_storage, arbitrary_frame_feedback_id); | 594 DLOG_IF(ERROR, !capture_buffer_.is_valid()) |
| 326 DLOG_IF(ERROR, !capture_buffer.is_valid()) | |
| 327 << "Couldn't allocate Capture Buffer"; | 595 << "Couldn't allocate Capture Buffer"; |
| 328 auto buffer_access = | 596 auto buffer_access = |
| 329 capture_buffer.handle_provider()->GetHandleForInProcessAccess(); | 597 capture_buffer_.handle_provider()->GetHandleForInProcessAccess(); |
| 330 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; | 598 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; |
| 331 | 599 |
| 332 DCHECK_EQ(PIXEL_STORAGE_CPU, capture_format_.pixel_storage); | 600 DCHECK_EQ(device_state_->format.pixel_storage, PIXEL_STORAGE_CPU); |
| 601 | |
| 333 uint8_t* data_ptr = buffer_access->data(); | 602 uint8_t* data_ptr = buffer_access->data(); |
| 334 memset(data_ptr, 0, buffer_access->mapped_size()); | 603 memset(data_ptr, 0, buffer_access->mapped_size()); |
| 335 DrawPacman(capture_format_.pixel_format, data_ptr, elapsed_time_, | 604 return data_ptr; |
| 336 fake_capture_rate_, capture_format_.frame_size, current_zoom_); | 605 } |
| 337 | 606 |
| 338 // Give the captured frame to the client. | 607 void ClientBufferFrameDeliveryStrategy::DeliverFrame() { |
| 608 if (client_ == nullptr) | |
| 609 return; | |
| 610 | |
| 339 base::TimeTicks now = base::TimeTicks::Now(); | 611 base::TimeTicks now = base::TimeTicks::Now(); |
| 340 if (first_ref_time_.is_null()) | 612 if (first_ref_time_.is_null()) |
| 341 first_ref_time_ = now; | 613 first_ref_time_ = now; |
| 342 client_->OnIncomingCapturedBuffer(std::move(capture_buffer), capture_format_, | 614 client_->OnIncomingCapturedBuffer(std::move(capture_buffer_), |
| 343 now, now - first_ref_time_); | 615 device_state_->format, now, |
| 344 | 616 now - first_ref_time_); |
| 345 BeepAndScheduleNextCapture( | |
| 346 expected_execution_time, | |
| 347 base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers, | |
| 348 weak_factory_.GetWeakPtr())); | |
| 349 } | 617 } |
| 350 | 618 |
| 351 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( | 619 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( |
| 352 base::TimeTicks expected_execution_time, | 620 base::TimeTicks expected_execution_time) { |
| 353 const base::Callback<void(base::TimeTicks)>& next_capture) { | 621 DCHECK(thread_checker_.CalledOnValidThread()); |
| 354 const base::TimeDelta beep_interval = | 622 const base::TimeDelta beep_interval = |
| 355 base::TimeDelta::FromMilliseconds(kBeepInterval); | 623 base::TimeDelta::FromMilliseconds(kBeepInterval); |
| 356 const base::TimeDelta frame_interval = | 624 const base::TimeDelta frame_interval = |
| 357 base::TimeDelta::FromMicroseconds(1e6 / fake_capture_rate_); | 625 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate); |
| 358 beep_time_ += frame_interval; | 626 beep_time_ += frame_interval; |
| 359 elapsed_time_ += frame_interval; | 627 elapsed_time_ += frame_interval; |
| 360 | 628 |
| 361 // Generate a synchronized beep twice per second. | 629 // Generate a synchronized beep twice per second. |
| 362 if (beep_time_ >= beep_interval) { | 630 if (beep_time_ >= beep_interval) { |
| 363 FakeAudioInputStream::BeepOnce(); | 631 FakeAudioInputStream::BeepOnce(); |
| 364 beep_time_ -= beep_interval; | 632 beep_time_ -= beep_interval; |
| 365 } | 633 } |
| 366 | 634 |
| 367 // Reschedule next CaptureTask. | 635 // Reschedule next CaptureTask. |
| 368 const base::TimeTicks current_time = base::TimeTicks::Now(); | 636 const base::TimeTicks current_time = base::TimeTicks::Now(); |
| 369 // Don't accumulate any debt if we are lagging behind - just post the next | 637 // Don't accumulate any debt if we are lagging behind - just post the next |
| 370 // frame immediately and continue as normal. | 638 // frame immediately and continue as normal. |
| 371 const base::TimeTicks next_execution_time = | 639 const base::TimeTicks next_execution_time = |
| 372 std::max(current_time, expected_execution_time + frame_interval); | 640 std::max(current_time, expected_execution_time + frame_interval); |
| 373 const base::TimeDelta delay = next_execution_time - current_time; | 641 const base::TimeDelta delay = next_execution_time - current_time; |
| 374 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 642 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
| 375 FROM_HERE, base::Bind(next_capture, next_execution_time), delay); | 643 FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue, |
| 644 weak_factory_.GetWeakPtr(), next_execution_time, | |
| 645 current_session_id_), | |
| 646 delay); | |
| 647 } | |
| 648 | |
| 649 void FakeVideoCaptureDevice::OnNextFrameDue( | |
| 650 base::TimeTicks expected_execution_time, | |
| 651 int session_id) { | |
| 652 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 653 if (session_id != current_session_id_) | |
| 654 return; | |
| 655 | |
| 656 uint8_t* const buffer = frame_delivery_strategy_->PrepareBufferForNextFrame(); | |
| 657 frame_painter_->PaintFrame(elapsed_time_, buffer); | |
| 658 frame_delivery_strategy_->DeliverFrame(); | |
| 659 | |
| 660 BeepAndScheduleNextCapture(expected_execution_time); | |
| 376 } | 661 } |
| 377 | 662 |
| 378 } // namespace media | 663 } // namespace media |
| OLD | NEW |