Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/capture/video/fake_video_capture_device.h" | 5 #include "media/capture/video/fake_video_capture_device.h" |
| 6 | 6 |
| 7 #include <stddef.h> | 7 #include <stddef.h> |
| 8 #include <algorithm> | 8 #include <algorithm> |
| 9 #include <utility> | 9 #include <utility> |
| 10 | 10 |
| 11 #include "base/atomicops.h" | |
| 11 #include "base/bind.h" | 12 #include "base/bind.h" |
| 12 #include "base/location.h" | 13 #include "base/location.h" |
| 14 #include "base/macros.h" | |
| 15 #include "base/memory/weak_ptr.h" | |
| 13 #include "base/single_thread_task_runner.h" | 16 #include "base/single_thread_task_runner.h" |
| 14 #include "base/strings/stringprintf.h" | 17 #include "base/strings/stringprintf.h" |
| 18 #include "base/threading/thread_checker.h" | |
| 15 #include "base/threading/thread_task_runner_handle.h" | 19 #include "base/threading/thread_task_runner_handle.h" |
| 20 #include "base/time/time.h" | |
| 16 #include "media/audio/fake_audio_input_stream.h" | 21 #include "media/audio/fake_audio_input_stream.h" |
| 17 #include "media/base/video_frame.h" | 22 #include "media/base/video_frame.h" |
| 18 #include "third_party/skia/include/core/SkBitmap.h" | 23 #include "third_party/skia/include/core/SkBitmap.h" |
| 19 #include "third_party/skia/include/core/SkCanvas.h" | 24 #include "third_party/skia/include/core/SkCanvas.h" |
| 20 #include "third_party/skia/include/core/SkMatrix.h" | 25 #include "third_party/skia/include/core/SkMatrix.h" |
| 21 #include "third_party/skia/include/core/SkPaint.h" | 26 #include "third_party/skia/include/core/SkPaint.h" |
| 22 #include "ui/gfx/codec/png_codec.h" | 27 #include "ui/gfx/codec/png_codec.h" |
| 23 | 28 |
| 24 namespace media { | 29 namespace media { |
| 25 | 30 |
| 31 namespace { | |
| 26 // Sweep at 600 deg/sec. | 32 // Sweep at 600 deg/sec. |
| 27 static const float kPacmanAngularVelocity = 600; | 33 static const float kPacmanAngularVelocity = 600; |
| 28 // Beep every 500 ms. | 34 // Beep every 500 ms. |
| 29 static const int kBeepInterval = 500; | 35 static const int kBeepInterval = 500; |
| 30 // Gradient travels from bottom to top in 5 seconds. | 36 // Gradient travels from bottom to top in 5 seconds. |
| 31 static const float kGradientFrequency = 1.f / 5; | 37 static const float kGradientFrequency = 1.f / 5; |
| 32 | 38 |
| 33 static const double kMinZoom = 100.0; | 39 static const double kMinZoom = 100.0; |
| 34 static const double kMaxZoom = 400.0; | 40 static const double kMaxZoom = 400.0; |
| 35 static const double kZoomStep = 1.0; | 41 static const double kZoomStep = 1.0; |
| 42 static const double kInitialZoom = 100.0; | |
| 43 | |
| 44 static const gfx::Size kSupportedSizes[] = { | |
| 45 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480), | |
| 46 gfx::Size(1280, 720), gfx::Size(1920, 1080)}; | |
| 47 static const int kSupportedSizesCount = | |
| 48 sizeof(kSupportedSizes) / sizeof(gfx::Size); | |
| 49 | |
| 50 static const VideoPixelFormat kSupportedPixelFormats[] = { | |
| 51 PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB}; | |
| 52 | |
| 53 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) { | |
| 54 for (int i = 0; i < kSupportedSizesCount; i++) { | |
| 55 const gfx::Size& supported_size = kSupportedSizes[i]; | |
| 56 if (requested_size.width() <= supported_size.width()) { | |
| 57 return supported_size; | |
| 58 } | |
| 59 } | |
| 60 return kSupportedSizes[kSupportedSizesCount - 1]; | |
| 61 } | |
| 62 | |
| 63 class FakeVideoCaptureDevice; | |
| 64 | |
| 65 // Represents the current state of a FakeVideoCaptureDevice. | |
| 66 // This is a separate struct because read-access to it is shared with several | |
| 67 // collaborating classes. | |
| 68 struct FakeDeviceState { | |
| 69 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format) | |
| 70 : zoom(zoom), | |
| 71 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {} | |
| 72 | |
| 73 uint32_t zoom; | |
| 74 VideoCaptureFormat format; | |
| 75 }; | |
| 76 | |
| 77 // Paints a frame into the given |target_buffer|. | |
| 78 class FramePainter { | |
| 79 public: | |
| 80 virtual void PaintFrame(base::TimeDelta elapsed_time, | |
| 81 uint8_t* target_buffer) = 0; | |
| 82 }; | |
| 83 | |
| 84 // Paints a "pacman-like" animated circle including textual information such | |
| 85 // as a frame count and timer. | |
| 86 class PacmanFramePainter : public FramePainter { | |
| 87 public: | |
| 88 // Currently, only the following values are supported for |pixel_format|: | |
| 89 // PIXEL_FORMAT_I420 | |
| 90 // PIXEL_FORMAT_Y16 | |
| 91 // PIXEL_FORMAT_ARGB | |
| 92 PacmanFramePainter(VideoPixelFormat pixel_format, | |
| 93 const FakeDeviceState* fake_device_state); | |
| 94 | |
| 95 // Implementation of FramePainter | |
| 96 void PaintFrame(base::TimeDelta elapsed_time, | |
| 97 uint8_t* target_buffer) override; | |
| 98 | |
| 99 private: | |
| 100 void DrawGradientSquares(base::TimeDelta elapsed_time, | |
| 101 uint8_t* target_buffer); | |
| 102 | |
| 103 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer); | |
| 104 | |
| 105 const VideoPixelFormat pixel_format_; | |
| 106 const FakeDeviceState* fake_device_state_ = nullptr; | |
| 107 }; | |
| 108 | |
| 109 // Delivers frames to a client, which is set via Initialize(). | |
| 110 class FrameDeliveryStrategy { | |
| 111 public: | |
| 112 virtual ~FrameDeliveryStrategy() {} | |
| 113 virtual void Initialize(VideoPixelFormat pixel_format, | |
| 114 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 115 const FakeDeviceState* device_state) = 0; | |
| 116 virtual void Uninitialize() = 0; | |
| 117 virtual uint8_t* PrepareBufferForNextFrame() = 0; | |
| 118 virtual void DeliverFrame() = 0; | |
| 119 | |
| 120 protected: | |
| 121 const FakeDeviceState* device_state_ = nullptr; | |
| 122 std::unique_ptr<VideoCaptureDevice::Client> client_; | |
| 123 // The system time when we receive the first frame. | |
| 124 base::TimeTicks first_ref_time_; | |
| 125 }; | |
| 126 | |
| 127 // Delivers frames using its own buffers via OnIncomingCapturedData(). | |
| 128 class OwnBufferFrameDeliveryStrategy : public FrameDeliveryStrategy { | |
| 129 public: | |
| 130 OwnBufferFrameDeliveryStrategy(); | |
| 131 ~OwnBufferFrameDeliveryStrategy() override; | |
| 132 | |
| 133 // Implementation of FrameDeliveryStrategy | |
| 134 void Initialize(VideoPixelFormat pixel_format, | |
| 135 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 136 const FakeDeviceState* device_state) override; | |
| 137 void Uninitialize() override; | |
| 138 uint8_t* PrepareBufferForNextFrame() override; | |
| 139 void DeliverFrame() override; | |
| 140 | |
| 141 private: | |
| 142 std::unique_ptr<uint8_t[]> buffer_; | |
| 143 }; | |
| 144 | |
| 145 // Delivers frames using buffers provided by the client via | |
| 146 // OnIncomingCapturedBuffer(). | |
| 147 class ClientBufferFrameDeliveryStrategy : public FrameDeliveryStrategy { | |
| 148 public: | |
| 149 ClientBufferFrameDeliveryStrategy(); | |
| 150 ~ClientBufferFrameDeliveryStrategy() override; | |
| 151 | |
| 152 // Implementation of FrameDeliveryStrategy | |
| 153 void Initialize(VideoPixelFormat pixel_format, | |
| 154 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 155 const FakeDeviceState* device_state) override; | |
| 156 void Uninitialize() override; | |
| 157 uint8_t* PrepareBufferForNextFrame() override; | |
| 158 void DeliverFrame() override; | |
| 159 | |
| 160 private: | |
| 161 VideoCaptureDevice::Client::Buffer capture_buffer_; | |
| 162 }; | |
| 163 | |
| 164 // Implements the photo functionality of a VideoCaptureDevice | |
| 165 class FakePhotoDevice { | |
| 166 public: | |
| 167 FakePhotoDevice(std::unique_ptr<FramePainter> argb_painter, | |
| 168 const FakeDeviceState* fake_device_state); | |
| 169 ~FakePhotoDevice(); | |
| 170 | |
| 171 void GetPhotoCapabilities( | |
| 172 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback); | |
| 173 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, | |
| 174 base::TimeDelta elapsed_time); | |
| 175 | |
| 176 private: | |
| 177 const std::unique_ptr<FramePainter> argb_painter_; | |
| 178 const FakeDeviceState* const fake_device_state_; | |
| 179 }; | |
| 180 | |
| 181 // Implementation of VideoCaptureDevice that generates test frames. This is | |
| 182 // useful for testing the video capture components without having to use real | |
| 183 // devices. The implementation schedules delayed tasks to itself to generate and | |
| 184 // deliver frames at the requested rate. | |
| 185 class FakeVideoCaptureDevice : public VideoCaptureDevice { | |
| 186 public: | |
| 187 FakeVideoCaptureDevice( | |
| 188 std::unique_ptr<FramePainter> frame_painter, | |
| 189 std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy, | |
| 190 std::unique_ptr<FakePhotoDevice> photo_device, | |
| 191 std::unique_ptr<FakeDeviceState> device_state); | |
| 192 ~FakeVideoCaptureDevice() override; | |
| 193 | |
| 194 // VideoCaptureDevice implementation. | |
| 195 void AllocateAndStart(const VideoCaptureParams& params, | |
| 196 std::unique_ptr<Client> client) override; | |
| 197 void StopAndDeAllocate() override; | |
| 198 void GetPhotoCapabilities(GetPhotoCapabilitiesCallback callback) override; | |
| 199 void SetPhotoOptions(mojom::PhotoSettingsPtr settings, | |
| 200 SetPhotoOptionsCallback callback) override; | |
| 201 void TakePhoto(TakePhotoCallback callback) override; | |
| 202 | |
| 203 private: | |
| 204 void BeepAndScheduleNextCapture(base::TimeTicks expected_execution_time); | |
| 205 void OnNextFrameDue(base::TimeTicks expected_execution_time, int session_id); | |
| 206 | |
| 207 const std::unique_ptr<FramePainter> frame_painter_; | |
| 208 const std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy_; | |
| 209 const std::unique_ptr<FakePhotoDevice> photo_device_; | |
| 210 const std::unique_ptr<FakeDeviceState> device_state_; | |
| 211 bool device_running_ = false; | |
| 212 int current_session_id_ = 0; | |
| 213 | |
| 214 // Time when the next beep occurs. | |
| 215 base::TimeDelta beep_time_; | |
| 216 // Time since the fake video started rendering frames. | |
| 217 base::TimeDelta elapsed_time_; | |
| 218 | |
| 219 base::ThreadChecker thread_checker_; | |
| 220 | |
| 221 // FakeVideoCaptureDevice post tasks to itself for frame construction and | |
| 222 // needs to deal with asynchronous StopAndDeallocate(). | |
| 223 base::WeakPtrFactory<FakeVideoCaptureDevice> weak_factory_; | |
| 224 | |
| 225 DISALLOW_COPY_AND_ASSIGN(FakeVideoCaptureDevice); | |
| 226 }; | |
| 227 | |
| 228 } // anonymous namespace | |
| 229 | |
| 230 // static | |
| 231 void FakeVideoCaptureDeviceMaker::GetSupportedSizes( | |
| 232 std::vector<gfx::Size>* supported_sizes) { | |
| 233 for (int i = 0; i < kSupportedSizesCount; i++) { | |
|
emircan
2017/01/31 18:47:39
One liner, you don't need {}.
chfremer
2017/02/01 00:21:18
Done.
| |
| 234 supported_sizes->push_back(kSupportedSizes[i]); | |
| 235 } | |
| 236 } | |
| 237 | |
| 238 // static | |
| 239 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance( | |
| 240 VideoPixelFormat pixel_format, | |
| 241 DeliveryMode delivery_mode, | |
| 242 float frame_rate) { | |
| 243 bool pixel_format_supported = false; | |
| 244 for (const auto& supported_pixel_format : kSupportedPixelFormats) { | |
| 245 if (pixel_format == supported_pixel_format) { | |
| 246 pixel_format_supported = true; | |
| 247 break; | |
| 248 } | |
| 249 } | |
| 250 if (!pixel_format_supported) { | |
| 251 DLOG(ERROR) << "Requested an unsupported pixel format " | |
| 252 << VideoPixelFormatToString(pixel_format); | |
| 253 return nullptr; | |
| 254 } | |
| 255 | |
| 256 auto device_state = | |
| 257 base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format); | |
| 258 auto video_frame_painter = | |
| 259 base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get()); | |
| 260 std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy; | |
| 261 switch (delivery_mode) { | |
| 262 case DeliveryMode::USE_OWN_BUFFERS: | |
| 263 frame_delivery_strategy = | |
| 264 base::MakeUnique<OwnBufferFrameDeliveryStrategy>(); | |
| 265 break; | |
| 266 case DeliveryMode::USE_CLIENT_BUFFERS: | |
| 267 frame_delivery_strategy = | |
| 268 base::MakeUnique<ClientBufferFrameDeliveryStrategy>(); | |
| 269 break; | |
| 270 } | |
| 271 | |
| 272 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>( | |
| 273 PIXEL_FORMAT_ARGB, device_state.get()); | |
| 274 auto photo_device = base::MakeUnique<FakePhotoDevice>( | |
| 275 std::move(photo_frame_painter), device_state.get()); | |
| 276 | |
| 277 return base::MakeUnique<FakeVideoCaptureDevice>( | |
| 278 std::move(video_frame_painter), std::move(frame_delivery_strategy), | |
| 279 std::move(photo_device), std::move(device_state)); | |
| 280 } | |
| 281 | |
| 282 PacmanFramePainter::PacmanFramePainter(VideoPixelFormat pixel_format, | |
| 283 const FakeDeviceState* fake_device_state) | |
| 284 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) {} | |
| 285 | |
| 286 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time, | |
| 287 uint8_t* target_buffer) { | |
| 288 DrawPacman(elapsed_time, target_buffer); | |
| 289 DrawGradientSquares(elapsed_time, target_buffer); | |
| 290 } | |
| 36 | 291 |
| 37 // Starting from top left, -45 deg gradient. Value at point (row, column) is | 292 // Starting from top left, -45 deg gradient. Value at point (row, column) is |
| 38 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where | 293 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where |
| 39 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per | 294 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per |
| 40 // component) or 65535 for Y16. | 295 // component) or 65535 for Y16. |
| 41 // This is handy for pixel tests where we use the squares to verify rendering. | 296 // This is handy for pixel tests where we use the squares to verify rendering. |
| 42 void DrawGradientSquares(VideoPixelFormat frame_format, | 297 void PacmanFramePainter::DrawGradientSquares(base::TimeDelta elapsed_time, |
| 43 uint8_t* const pixels, | 298 uint8_t* target_buffer) { |
| 44 base::TimeDelta elapsed_time, | 299 const int width = fake_device_state_->format.frame_size.width(); |
| 45 const gfx::Size& frame_size) { | 300 const int height = fake_device_state_->format.frame_size.height(); |
| 46 const int width = frame_size.width(); | 301 |
| 47 const int height = frame_size.height(); | |
| 48 const int side = width / 16; // square side length. | 302 const int side = width / 16; // square side length. |
| 49 DCHECK(side); | 303 DCHECK(side); |
| 50 const gfx::Point squares[] = {{0, 0}, | 304 const gfx::Point squares[] = {{0, 0}, |
| 51 {width - side, 0}, | 305 {width - side, 0}, |
| 52 {0, height - side}, | 306 {0, height - side}, |
| 53 {width - side, height - side}}; | 307 {width - side, height - side}}; |
| 54 const float start = | 308 const float start = |
| 55 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); | 309 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); |
| 56 const float color_step = 65535 / static_cast<float>(width + height); | 310 const float color_step = 65535 / static_cast<float>(width + height); |
| 57 for (const auto& corner : squares) { | 311 for (const auto& corner : squares) { |
| 58 for (int y = corner.y(); y < corner.y() + side; ++y) { | 312 for (int y = corner.y(); y < corner.y() + side; ++y) { |
| 59 for (int x = corner.x(); x < corner.x() + side; ++x) { | 313 for (int x = corner.x(); x < corner.x() + side; ++x) { |
| 60 const unsigned int value = | 314 const unsigned int value = |
| 61 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; | 315 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; |
| 62 size_t offset = (y * width) + x; | 316 size_t offset = (y * width) + x; |
| 63 switch (frame_format) { | 317 switch (pixel_format_) { |
| 64 case PIXEL_FORMAT_Y16: | 318 case PIXEL_FORMAT_Y16: |
| 65 pixels[offset * sizeof(uint16_t)] = value & 0xFF; | 319 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF; |
| 66 pixels[offset * sizeof(uint16_t) + 1] = value >> 8; | 320 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8; |
| 67 break; | 321 break; |
| 68 case PIXEL_FORMAT_ARGB: | 322 case PIXEL_FORMAT_ARGB: |
| 69 pixels[offset * sizeof(uint32_t) + 1] = value >> 8; | 323 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8; |
| 70 pixels[offset * sizeof(uint32_t) + 2] = value >> 8; | 324 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8; |
| 71 pixels[offset * sizeof(uint32_t) + 3] = value >> 8; | 325 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8; |
| 72 break; | 326 break; |
| 73 default: | 327 default: |
| 74 pixels[offset] = value >> 8; | 328 target_buffer[offset] = value >> 8; |
| 75 break; | 329 break; |
| 76 } | 330 } |
| 77 } | 331 } |
| 78 } | 332 } |
| 79 } | 333 } |
| 80 } | 334 } |
| 81 | 335 |
| 82 void DrawPacman(VideoPixelFormat frame_format, | 336 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time, |
| 83 uint8_t* const data, | 337 uint8_t* target_buffer) { |
| 84 base::TimeDelta elapsed_time, | 338 const int width = fake_device_state_->format.frame_size.width(); |
| 85 float frame_rate, | 339 const int height = fake_device_state_->format.frame_size.height(); |
| 86 const gfx::Size& frame_size, | 340 |
| 87 double zoom) { | |
| 88 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. | 341 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. |
| 89 const SkColorType colorspace = (frame_format == PIXEL_FORMAT_ARGB) | 342 const SkColorType colorspace = (pixel_format_ == PIXEL_FORMAT_ARGB) |
| 90 ? kN32_SkColorType | 343 ? kN32_SkColorType |
| 91 : kAlpha_8_SkColorType; | 344 : kAlpha_8_SkColorType; |
| 92 // Skia doesn't support 16 bit alpha rendering, so we 8 bit alpha and then use | 345 // Skia doesn't support 16 bit alpha rendering, so we 8 bit alpha and then use |
| 93 // this as high byte values in 16 bit pixels. | 346 // this as high byte values in 16 bit pixels. |
| 94 const SkImageInfo info = SkImageInfo::Make( | 347 const SkImageInfo info = |
| 95 frame_size.width(), frame_size.height(), colorspace, kOpaque_SkAlphaType); | 348 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType); |
| 96 SkBitmap bitmap; | 349 SkBitmap bitmap; |
| 97 bitmap.setInfo(info); | 350 bitmap.setInfo(info); |
| 98 bitmap.setPixels(data); | 351 bitmap.setPixels(target_buffer); |
| 99 SkPaint paint; | 352 SkPaint paint; |
| 100 paint.setStyle(SkPaint::kFill_Style); | 353 paint.setStyle(SkPaint::kFill_Style); |
| 101 SkCanvas canvas(bitmap); | 354 SkCanvas canvas(bitmap); |
| 102 | 355 |
| 103 const SkScalar unscaled_zoom = zoom / 100.f; | 356 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f; |
| 104 SkMatrix matrix; | 357 SkMatrix matrix; |
| 105 matrix.setScale(unscaled_zoom, unscaled_zoom, frame_size.width() / 2, | 358 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2); |
| 106 frame_size.height() / 2); | |
| 107 canvas.setMatrix(matrix); | 359 canvas.setMatrix(matrix); |
| 108 | 360 |
| 109 // Equalize Alpha_8 that has light green background while RGBA has white. | 361 // Equalize Alpha_8 that has light green background while RGBA has white. |
| 110 if (frame_format == PIXEL_FORMAT_ARGB) { | 362 if (pixel_format_ == PIXEL_FORMAT_ARGB) { |
| 111 const SkRect full_frame = | 363 const SkRect full_frame = SkRect::MakeWH(width, height); |
| 112 SkRect::MakeWH(frame_size.width(), frame_size.height()); | |
| 113 paint.setARGB(255, 0, 127, 0); | 364 paint.setARGB(255, 0, 127, 0); |
| 114 canvas.drawRect(full_frame, paint); | 365 canvas.drawRect(full_frame, paint); |
| 115 } | 366 } |
| 116 paint.setColor(SK_ColorGREEN); | 367 paint.setColor(SK_ColorGREEN); |
| 117 | 368 |
| 118 // Draw a sweeping circle to show an animation. | 369 // Draw a sweeping circle to show an animation. |
| 119 const float end_angle = | 370 const float end_angle = |
| 120 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); | 371 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); |
| 121 const int radius = std::min(frame_size.width(), frame_size.height()) / 4; | 372 const int radius = std::min(width, height) / 4; |
| 122 const SkRect rect = SkRect::MakeXYWH(frame_size.width() / 2 - radius, | 373 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius, |
| 123 frame_size.height() / 2 - radius, | |
| 124 2 * radius, 2 * radius); | 374 2 * radius, 2 * radius); |
| 125 canvas.drawArc(rect, 0, end_angle, true, paint); | 375 canvas.drawArc(rect, 0, end_angle, true, paint); |
| 126 | 376 |
| 127 // Draw current time. | 377 // Draw current time. |
| 128 const int milliseconds = elapsed_time.InMilliseconds() % 1000; | 378 const int milliseconds = elapsed_time.InMilliseconds() % 1000; |
| 129 const int seconds = elapsed_time.InSeconds() % 60; | 379 const int seconds = elapsed_time.InSeconds() % 60; |
| 130 const int minutes = elapsed_time.InMinutes() % 60; | 380 const int minutes = elapsed_time.InMinutes() % 60; |
| 131 const int hours = elapsed_time.InHours(); | 381 const int hours = elapsed_time.InHours(); |
| 132 const int frame_count = elapsed_time.InMilliseconds() * frame_rate / 1000; | 382 const int frame_count = elapsed_time.InMilliseconds() * |
| 383 fake_device_state_->format.frame_rate / 1000; | |
| 133 | 384 |
| 134 const std::string time_string = | 385 const std::string time_string = |
| 135 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, | 386 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, |
| 136 milliseconds, frame_count); | 387 milliseconds, frame_count); |
| 137 canvas.scale(3, 3); | 388 canvas.scale(3, 3); |
| 138 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); | 389 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); |
| 139 | 390 |
| 140 if (frame_format == PIXEL_FORMAT_Y16) { | 391 if (pixel_format_ == PIXEL_FORMAT_Y16) { |
| 141 // Use 8 bit bitmap rendered to first half of the buffer as high byte values | 392 // Use 8 bit bitmap rendered to first half of the buffer as high byte values |
| 142 // for the whole buffer. Low byte values are not important. | 393 // for the whole buffer. Low byte values are not important. |
| 143 for (int i = frame_size.GetArea() - 1; i >= 0; --i) | 394 for (int i = (width * height) - 1; i >= 0; --i) |
| 144 data[i * 2 + 1] = data[i]; | 395 target_buffer[i * 2 + 1] = target_buffer[i]; |
| 145 } | 396 } |
| 146 DrawGradientSquares(frame_format, data, elapsed_time, frame_size); | |
| 147 } | 397 } |
| 148 | 398 |
| 149 // Creates a PNG-encoded frame and sends it back to |callback|. The other | 399 FakePhotoDevice::FakePhotoDevice(std::unique_ptr<FramePainter> argb_painter, |
| 150 // parameters are used to replicate the PacMan rendering. | 400 const FakeDeviceState* fake_device_state) |
| 151 void DoTakeFakePhoto(VideoCaptureDevice::TakePhotoCallback callback, | 401 : argb_painter_(std::move(argb_painter)), |
| 152 const VideoCaptureFormat& capture_format, | 402 fake_device_state_(fake_device_state) {} |
| 153 base::TimeDelta elapsed_time, | 403 |
| 154 float fake_capture_rate, | 404 FakePhotoDevice::~FakePhotoDevice() = default; |
| 155 uint32_t zoom) { | 405 |
| 406 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, | |
| 407 base::TimeDelta elapsed_time) { | |
| 408 // Create a PNG-encoded frame and send it back to |callback|. | |
| 156 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( | 409 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( |
| 157 PIXEL_FORMAT_ARGB, capture_format.frame_size)]); | 410 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size)]); |
| 158 | 411 argb_painter_->PaintFrame(elapsed_time, buffer.get()); |
| 159 DrawPacman(PIXEL_FORMAT_ARGB, buffer.get(), elapsed_time, fake_capture_rate, | |
| 160 capture_format.frame_size, zoom); | |
| 161 | |
| 162 mojom::BlobPtr blob = mojom::Blob::New(); | 412 mojom::BlobPtr blob = mojom::Blob::New(); |
| 163 const bool result = gfx::PNGCodec::Encode( | 413 const bool result = |
| 164 buffer.get(), gfx::PNGCodec::FORMAT_RGBA, capture_format.frame_size, | 414 gfx::PNGCodec::Encode(buffer.get(), gfx::PNGCodec::FORMAT_RGBA, |
| 165 capture_format.frame_size.width() * 4, true /* discard_transparency */, | 415 fake_device_state_->format.frame_size, |
| 166 std::vector<gfx::PNGCodec::Comment>(), &blob->data); | 416 fake_device_state_->format.frame_size.width() * 4, |
| 417 true /* discard_transparency */, | |
| 418 std::vector<gfx::PNGCodec::Comment>(), &blob->data); | |
| 167 DCHECK(result); | 419 DCHECK(result); |
| 168 | 420 |
| 169 blob->mime_type = "image/png"; | 421 blob->mime_type = "image/png"; |
| 170 callback.Run(std::move(blob)); | 422 callback.Run(std::move(blob)); |
| 171 } | 423 } |
| 172 | 424 |
| 173 FakeVideoCaptureDevice::FakeVideoCaptureDevice(BufferOwnership buffer_ownership, | 425 FakeVideoCaptureDevice::FakeVideoCaptureDevice( |
| 174 float fake_capture_rate, | 426 std::unique_ptr<FramePainter> frame_painter, |
| 175 VideoPixelFormat pixel_format) | 427 std::unique_ptr<FrameDeliveryStrategy> frame_delivery_strategy, |
| 176 : buffer_ownership_(buffer_ownership), | 428 std::unique_ptr<FakePhotoDevice> photo_device, |
| 177 fake_capture_rate_(fake_capture_rate), | 429 std::unique_ptr<FakeDeviceState> device_state) |
| 178 pixel_format_(pixel_format), | 430 : frame_painter_(std::move(frame_painter)), |
| 179 current_zoom_(kMinZoom), | 431 frame_delivery_strategy_(std::move(frame_delivery_strategy)), |
| 432 photo_device_(std::move(photo_device)), | |
| 433 device_state_(std::move(device_state)), | |
| 180 weak_factory_(this) {} | 434 weak_factory_(this) {} |
| 181 | 435 |
| 182 FakeVideoCaptureDevice::~FakeVideoCaptureDevice() { | 436 FakeVideoCaptureDevice::~FakeVideoCaptureDevice() { |
| 183 DCHECK(thread_checker_.CalledOnValidThread()); | 437 DCHECK(thread_checker_.CalledOnValidThread()); |
| 184 } | 438 } |
| 185 | 439 |
| 186 void FakeVideoCaptureDevice::AllocateAndStart( | 440 void FakeVideoCaptureDevice::AllocateAndStart( |
| 187 const VideoCaptureParams& params, | 441 const VideoCaptureParams& params, |
| 188 std::unique_ptr<VideoCaptureDevice::Client> client) { | 442 std::unique_ptr<VideoCaptureDevice::Client> client) { |
| 189 DCHECK(thread_checker_.CalledOnValidThread()); | 443 DCHECK(thread_checker_.CalledOnValidThread()); |
| 190 | 444 |
| 191 client_ = std::move(client); | |
| 192 | |
| 193 // Incoming |params| can be none of the supported formats, so we get the | |
| 194 // closest thing rounded up. TODO(mcasas): Use the |params|, if they belong to | |
| 195 // the supported ones, when http://crbug.com/309554 is verified. | |
| 196 capture_format_.frame_rate = fake_capture_rate_; | |
| 197 if (params.requested_format.frame_size.width() > 1280) | |
| 198 capture_format_.frame_size.SetSize(1920, 1080); | |
| 199 else if (params.requested_format.frame_size.width() > 640) | |
| 200 capture_format_.frame_size.SetSize(1280, 720); | |
| 201 else if (params.requested_format.frame_size.width() > 320) | |
| 202 capture_format_.frame_size.SetSize(640, 480); | |
| 203 else if (params.requested_format.frame_size.width() > 96) | |
| 204 capture_format_.frame_size.SetSize(320, 240); | |
| 205 else | |
| 206 capture_format_.frame_size.SetSize(96, 96); | |
| 207 | |
| 208 capture_format_.pixel_format = pixel_format_; | |
| 209 if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) { | |
| 210 capture_format_.pixel_storage = PIXEL_STORAGE_CPU; | |
| 211 capture_format_.pixel_format = PIXEL_FORMAT_ARGB; | |
| 212 DVLOG(1) << "starting with client argb buffers"; | |
| 213 } else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) { | |
| 214 capture_format_.pixel_storage = PIXEL_STORAGE_CPU; | |
| 215 DVLOG(1) << "starting with own " << VideoPixelFormatToString(pixel_format_) | |
| 216 << " buffers"; | |
| 217 } | |
| 218 | |
| 219 if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) { | |
| 220 fake_frame_.reset(new uint8_t[VideoFrame::AllocationSize( | |
| 221 pixel_format_, capture_format_.frame_size)]); | |
| 222 } | |
| 223 | |
| 224 beep_time_ = base::TimeDelta(); | 445 beep_time_ = base::TimeDelta(); |
| 225 elapsed_time_ = base::TimeDelta(); | 446 elapsed_time_ = base::TimeDelta(); |
| 226 | 447 device_state_->format.frame_size = |
| 227 if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) | 448 SnapToSupportedSize(params.requested_format.frame_size); |
| 228 BeepAndScheduleNextCapture( | 449 frame_delivery_strategy_->Initialize(device_state_->format.pixel_format, |
| 229 base::TimeTicks::Now(), | 450 std::move(client), device_state_.get()); |
| 230 base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers, | 451 device_running_ = true; |
| 231 weak_factory_.GetWeakPtr())); | 452 current_session_id_++; |
| 232 else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) | 453 BeepAndScheduleNextCapture(base::TimeTicks::Now()); |
| 233 BeepAndScheduleNextCapture( | |
| 234 base::TimeTicks::Now(), | |
| 235 base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers, | |
| 236 weak_factory_.GetWeakPtr())); | |
| 237 } | 454 } |
| 238 | 455 |
| 239 void FakeVideoCaptureDevice::StopAndDeAllocate() { | 456 void FakeVideoCaptureDevice::StopAndDeAllocate() { |
| 240 DCHECK(thread_checker_.CalledOnValidThread()); | 457 DCHECK(thread_checker_.CalledOnValidThread()); |
| 241 client_.reset(); | 458 |
| 459 // Update flag to stop the perpetual scheduling of tasks. | |
| 460 device_running_ = false; | |
| 461 frame_delivery_strategy_->Uninitialize(); | |
| 242 } | 462 } |
| 243 | 463 |
| 244 void FakeVideoCaptureDevice::GetPhotoCapabilities( | 464 void FakeVideoCaptureDevice::GetPhotoCapabilities( |
| 245 GetPhotoCapabilitiesCallback callback) { | 465 GetPhotoCapabilitiesCallback callback) { |
| 466 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 467 photo_device_->GetPhotoCapabilities(std::move(callback)); | |
| 468 } | |
| 469 | |
| 470 void FakePhotoDevice::GetPhotoCapabilities( | |
| 471 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback) { | |
| 246 mojom::PhotoCapabilitiesPtr photo_capabilities = | 472 mojom::PhotoCapabilitiesPtr photo_capabilities = |
| 247 mojom::PhotoCapabilities::New(); | 473 mojom::PhotoCapabilities::New(); |
| 248 photo_capabilities->iso = mojom::Range::New(); | 474 photo_capabilities->iso = mojom::Range::New(); |
| 249 photo_capabilities->iso->current = 100.0; | 475 photo_capabilities->iso->current = 100.0; |
| 250 photo_capabilities->iso->max = 100.0; | 476 photo_capabilities->iso->max = 100.0; |
| 251 photo_capabilities->iso->min = 100.0; | 477 photo_capabilities->iso->min = 100.0; |
| 252 photo_capabilities->iso->step = 0.0; | 478 photo_capabilities->iso->step = 0.0; |
| 253 photo_capabilities->height = mojom::Range::New(); | 479 photo_capabilities->height = mojom::Range::New(); |
| 254 photo_capabilities->height->current = capture_format_.frame_size.height(); | 480 photo_capabilities->height->current = |
| 481 fake_device_state_->format.frame_size.height(); | |
| 255 photo_capabilities->height->max = 1080.0; | 482 photo_capabilities->height->max = 1080.0; |
| 256 photo_capabilities->height->min = 96.0; | 483 photo_capabilities->height->min = 96.0; |
| 257 photo_capabilities->height->step = 1.0; | 484 photo_capabilities->height->step = 1.0; |
| 258 photo_capabilities->width = mojom::Range::New(); | 485 photo_capabilities->width = mojom::Range::New(); |
| 259 photo_capabilities->width->current = capture_format_.frame_size.width(); | 486 photo_capabilities->width->current = |
| 487 fake_device_state_->format.frame_size.width(); | |
| 260 photo_capabilities->width->max = 1920.0; | 488 photo_capabilities->width->max = 1920.0; |
| 261 photo_capabilities->width->min = 96.0; | 489 photo_capabilities->width->min = 96.0; |
| 262 photo_capabilities->width->step = 1; | 490 photo_capabilities->width->step = 1.0; |
| 263 photo_capabilities->zoom = mojom::Range::New(); | 491 photo_capabilities->zoom = mojom::Range::New(); |
| 264 photo_capabilities->zoom->current = current_zoom_; | 492 photo_capabilities->zoom->current = fake_device_state_->zoom; |
| 265 photo_capabilities->zoom->max = kMaxZoom; | 493 photo_capabilities->zoom->max = kMaxZoom; |
| 266 photo_capabilities->zoom->min = kMinZoom; | 494 photo_capabilities->zoom->min = kMinZoom; |
| 267 photo_capabilities->zoom->step = kZoomStep; | 495 photo_capabilities->zoom->step = kZoomStep; |
| 268 photo_capabilities->focus_mode = mojom::MeteringMode::NONE; | 496 photo_capabilities->focus_mode = mojom::MeteringMode::NONE; |
| 269 photo_capabilities->exposure_mode = mojom::MeteringMode::NONE; | 497 photo_capabilities->exposure_mode = mojom::MeteringMode::NONE; |
| 270 photo_capabilities->exposure_compensation = mojom::Range::New(); | 498 photo_capabilities->exposure_compensation = mojom::Range::New(); |
| 271 photo_capabilities->white_balance_mode = mojom::MeteringMode::NONE; | 499 photo_capabilities->white_balance_mode = mojom::MeteringMode::NONE; |
| 272 photo_capabilities->fill_light_mode = mojom::FillLightMode::NONE; | 500 photo_capabilities->fill_light_mode = mojom::FillLightMode::NONE; |
| 273 photo_capabilities->red_eye_reduction = false; | 501 photo_capabilities->red_eye_reduction = false; |
| 274 photo_capabilities->color_temperature = mojom::Range::New(); | 502 photo_capabilities->color_temperature = mojom::Range::New(); |
| 275 photo_capabilities->brightness = media::mojom::Range::New(); | 503 photo_capabilities->brightness = media::mojom::Range::New(); |
| 276 photo_capabilities->contrast = media::mojom::Range::New(); | 504 photo_capabilities->contrast = media::mojom::Range::New(); |
| 277 photo_capabilities->saturation = media::mojom::Range::New(); | 505 photo_capabilities->saturation = media::mojom::Range::New(); |
| 278 photo_capabilities->sharpness = media::mojom::Range::New(); | 506 photo_capabilities->sharpness = media::mojom::Range::New(); |
| 279 callback.Run(std::move(photo_capabilities)); | 507 callback.Run(std::move(photo_capabilities)); |
| 280 } | 508 } |
| 281 | 509 |
| 282 void FakeVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings, | 510 void FakeVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings, |
| 283 SetPhotoOptionsCallback callback) { | 511 SetPhotoOptionsCallback callback) { |
| 284 if (settings->has_zoom) | 512 DCHECK(thread_checker_.CalledOnValidThread()); |
| 285 current_zoom_ = std::max(kMinZoom, std::min(settings->zoom, kMaxZoom)); | 513 if (settings->has_zoom) { |
| 514 device_state_->zoom = | |
| 515 std::max(kMinZoom, std::min(settings->zoom, kMaxZoom)); | |
| 516 } | |
| 517 | |
| 286 callback.Run(true); | 518 callback.Run(true); |
| 287 } | 519 } |
| 288 | 520 |
| 289 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { | 521 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { |
| 522 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 290 base::ThreadTaskRunnerHandle::Get()->PostTask( | 523 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 291 FROM_HERE, | 524 FROM_HERE, base::Bind(&FakePhotoDevice::TakePhoto, |
| 292 base::Bind(&DoTakeFakePhoto, base::Passed(&callback), capture_format_, | 525 base::Unretained(photo_device_.get()), |
| 293 elapsed_time_, fake_capture_rate_, current_zoom_)); | 526 base::Passed(&callback), elapsed_time_)); |
| 294 } | 527 } |
| 295 | 528 |
| 296 void FakeVideoCaptureDevice::CaptureUsingOwnBuffers( | 529 OwnBufferFrameDeliveryStrategy::OwnBufferFrameDeliveryStrategy() = default; |
| 297 base::TimeTicks expected_execution_time) { | |
| 298 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 299 const size_t frame_size = capture_format_.ImageAllocationSize(); | |
| 300 | 530 |
| 301 memset(fake_frame_.get(), 0, frame_size); | 531 OwnBufferFrameDeliveryStrategy::~OwnBufferFrameDeliveryStrategy() = default; |
| 302 DrawPacman(capture_format_.pixel_format, fake_frame_.get(), elapsed_time_, | 532 |
| 303 fake_capture_rate_, capture_format_.frame_size, current_zoom_); | 533 void OwnBufferFrameDeliveryStrategy::Initialize( |
| 304 // Give the captured frame to the client. | 534 VideoPixelFormat pixel_format, |
| 535 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 536 const FakeDeviceState* device_state) { | |
| 537 client_ = std::move(client); | |
| 538 device_state_ = device_state; | |
| 539 buffer_.reset(new uint8_t[VideoFrame::AllocationSize( | |
| 540 pixel_format, device_state_->format.frame_size)]); | |
| 541 } | |
| 542 | |
| 543 void OwnBufferFrameDeliveryStrategy::Uninitialize() { | |
| 544 client_.reset(); | |
| 545 device_state_ = nullptr; | |
| 546 buffer_.reset(); | |
| 547 } | |
| 548 | |
| 549 uint8_t* OwnBufferFrameDeliveryStrategy::PrepareBufferForNextFrame() { | |
| 550 if (client_ == nullptr) | |
| 551 return nullptr; | |
| 552 | |
| 553 const size_t frame_size = device_state_->format.ImageAllocationSize(); | |
| 554 memset(buffer_.get(), 0, frame_size); | |
| 555 return buffer_.get(); | |
| 556 } | |
| 557 | |
| 558 void OwnBufferFrameDeliveryStrategy::DeliverFrame() { | |
| 559 if (client_ == nullptr) | |
| 560 return; | |
| 561 const size_t frame_size = device_state_->format.ImageAllocationSize(); | |
| 305 base::TimeTicks now = base::TimeTicks::Now(); | 562 base::TimeTicks now = base::TimeTicks::Now(); |
| 306 if (first_ref_time_.is_null()) | 563 if (first_ref_time_.is_null()) |
| 307 first_ref_time_ = now; | 564 first_ref_time_ = now; |
| 308 client_->OnIncomingCapturedData(fake_frame_.get(), frame_size, | 565 client_->OnIncomingCapturedData(buffer_.get(), frame_size, |
| 309 capture_format_, 0 /* rotation */, now, | 566 device_state_->format, 0 /* rotation */, now, |
| 310 now - first_ref_time_); | 567 now - first_ref_time_); |
| 311 BeepAndScheduleNextCapture( | |
| 312 expected_execution_time, | |
| 313 base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers, | |
| 314 weak_factory_.GetWeakPtr())); | |
| 315 } | 568 } |
| 316 | 569 |
| 317 void FakeVideoCaptureDevice::CaptureUsingClientBuffers( | 570 ClientBufferFrameDeliveryStrategy::ClientBufferFrameDeliveryStrategy() = |
| 318 base::TimeTicks expected_execution_time) { | 571 default; |
| 319 DCHECK(thread_checker_.CalledOnValidThread()); | 572 |
| 573 ClientBufferFrameDeliveryStrategy::~ClientBufferFrameDeliveryStrategy() = | |
| 574 default; | |
| 575 | |
| 576 void ClientBufferFrameDeliveryStrategy::Initialize( | |
| 577 VideoPixelFormat, | |
| 578 std::unique_ptr<VideoCaptureDevice::Client> client, | |
| 579 const FakeDeviceState* device_state) { | |
| 580 client_ = std::move(client); | |
| 581 device_state_ = device_state; | |
| 582 } | |
| 583 | |
| 584 void ClientBufferFrameDeliveryStrategy::Uninitialize() { | |
| 585 client_.reset(); | |
| 586 device_state_ = nullptr; | |
| 587 } | |
| 588 | |
| 589 uint8_t* ClientBufferFrameDeliveryStrategy::PrepareBufferForNextFrame() { | |
| 590 if (client_ == nullptr) | |
| 591 return nullptr; | |
| 320 | 592 |
| 321 const int arbitrary_frame_feedback_id = 0; | 593 const int arbitrary_frame_feedback_id = 0; |
| 322 VideoCaptureDevice::Client::Buffer capture_buffer = | 594 capture_buffer_ = client_->ReserveOutputBuffer( |
| 323 client_->ReserveOutputBuffer( | 595 device_state_->format.frame_size, device_state_->format.pixel_format, |
| 324 capture_format_.frame_size, capture_format_.pixel_format, | 596 device_state_->format.pixel_storage, arbitrary_frame_feedback_id); |
| 325 capture_format_.pixel_storage, arbitrary_frame_feedback_id); | 597 DLOG_IF(ERROR, !capture_buffer_.is_valid()) |
| 326 DLOG_IF(ERROR, !capture_buffer.is_valid()) | |
| 327 << "Couldn't allocate Capture Buffer"; | 598 << "Couldn't allocate Capture Buffer"; |
| 328 auto buffer_access = | 599 auto buffer_access = |
| 329 capture_buffer.handle_provider()->GetHandleForInProcessAccess(); | 600 capture_buffer_.handle_provider()->GetHandleForInProcessAccess(); |
| 330 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; | 601 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; |
| 331 | 602 |
| 332 DCHECK_EQ(PIXEL_STORAGE_CPU, capture_format_.pixel_storage); | 603 DCHECK_EQ(device_state_->format.pixel_storage, PIXEL_STORAGE_CPU); |
| 604 | |
| 333 uint8_t* data_ptr = buffer_access->data(); | 605 uint8_t* data_ptr = buffer_access->data(); |
| 334 memset(data_ptr, 0, buffer_access->mapped_size()); | 606 memset(data_ptr, 0, buffer_access->mapped_size()); |
| 335 DrawPacman(capture_format_.pixel_format, data_ptr, elapsed_time_, | 607 return data_ptr; |
| 336 fake_capture_rate_, capture_format_.frame_size, current_zoom_); | 608 } |
| 337 | 609 |
| 338 // Give the captured frame to the client. | 610 void ClientBufferFrameDeliveryStrategy::DeliverFrame() { |
| 611 if (client_ == nullptr) | |
| 612 return; | |
| 613 | |
| 339 base::TimeTicks now = base::TimeTicks::Now(); | 614 base::TimeTicks now = base::TimeTicks::Now(); |
| 340 if (first_ref_time_.is_null()) | 615 if (first_ref_time_.is_null()) |
| 341 first_ref_time_ = now; | 616 first_ref_time_ = now; |
| 342 client_->OnIncomingCapturedBuffer(std::move(capture_buffer), capture_format_, | 617 client_->OnIncomingCapturedBuffer(std::move(capture_buffer_), |
| 343 now, now - first_ref_time_); | 618 device_state_->format, now, |
| 344 | 619 now - first_ref_time_); |
| 345 BeepAndScheduleNextCapture( | |
| 346 expected_execution_time, | |
| 347 base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers, | |
| 348 weak_factory_.GetWeakPtr())); | |
| 349 } | 620 } |
| 350 | 621 |
| 351 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( | 622 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( |
| 352 base::TimeTicks expected_execution_time, | 623 base::TimeTicks expected_execution_time) { |
| 353 const base::Callback<void(base::TimeTicks)>& next_capture) { | 624 DCHECK(thread_checker_.CalledOnValidThread()); |
| 354 const base::TimeDelta beep_interval = | 625 const base::TimeDelta beep_interval = |
| 355 base::TimeDelta::FromMilliseconds(kBeepInterval); | 626 base::TimeDelta::FromMilliseconds(kBeepInterval); |
| 356 const base::TimeDelta frame_interval = | 627 const base::TimeDelta frame_interval = |
| 357 base::TimeDelta::FromMicroseconds(1e6 / fake_capture_rate_); | 628 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate); |
|
emircan
2017/01/31 18:47:39
Nit for defensive code, you can clamp the frame rate …
chfremer
2017/02/01 00:21:18
Hmm, interesting ... but this may not be the best …
emircan
2017/02/01 18:06:04
I see, there is a kFakeCaptureMaxFrameRate used wh…
| |
| 358 beep_time_ += frame_interval; | 629 beep_time_ += frame_interval; |
| 359 elapsed_time_ += frame_interval; | 630 elapsed_time_ += frame_interval; |
| 360 | 631 |
| 361 // Generate a synchronized beep twice per second. | 632 // Generate a synchronized beep twice per second. |
| 362 if (beep_time_ >= beep_interval) { | 633 if (beep_time_ >= beep_interval) { |
| 363 FakeAudioInputStream::BeepOnce(); | 634 FakeAudioInputStream::BeepOnce(); |
| 364 beep_time_ -= beep_interval; | 635 beep_time_ -= beep_interval; |
| 365 } | 636 } |
| 366 | 637 |
| 367 // Reschedule next CaptureTask. | 638 // Reschedule next CaptureTask. |
| 368 const base::TimeTicks current_time = base::TimeTicks::Now(); | 639 const base::TimeTicks current_time = base::TimeTicks::Now(); |
| 369 // Don't accumulate any debt if we are lagging behind - just post the next | 640 // Don't accumulate any debt if we are lagging behind - just post the next |
| 370 // frame immediately and continue as normal. | 641 // frame immediately and continue as normal. |
| 371 const base::TimeTicks next_execution_time = | 642 const base::TimeTicks next_execution_time = |
| 372 std::max(current_time, expected_execution_time + frame_interval); | 643 std::max(current_time, expected_execution_time + frame_interval); |
| 373 const base::TimeDelta delay = next_execution_time - current_time; | 644 const base::TimeDelta delay = next_execution_time - current_time; |
| 374 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 645 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
| 375 FROM_HERE, base::Bind(next_capture, next_execution_time), delay); | 646 FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue, |
| 647 weak_factory_.GetWeakPtr(), next_execution_time, | |
| 648 current_session_id_), | |
| 649 delay); | |
| 650 } | |
| 651 | |
| 652 void FakeVideoCaptureDevice::OnNextFrameDue( | |
| 653 base::TimeTicks expected_execution_time, | |
| 654 int session_id) { | |
| 655 DCHECK(thread_checker_.CalledOnValidThread()); | |
| 656 if (!device_running_) | |
|
emircan
2017/01/31 18:47:39
Instead of using a bool and id to stop running tas
chfremer
2017/02/01 00:21:18
Done.
| |
| 657 return; | |
| 658 if (session_id != current_session_id_) | |
| 659 return; | |
| 660 | |
| 661 uint8_t* buffer = frame_delivery_strategy_->PrepareBufferForNextFrame(); | |
|
emircan
2017/01/31 18:47:38
uint8_t* const buffer
chfremer
2017/02/01 00:21:18
Done.
| |
| 662 frame_painter_->PaintFrame(elapsed_time_, buffer); | |
| 663 frame_delivery_strategy_->DeliverFrame(); | |
| 664 | |
| 665 BeepAndScheduleNextCapture(expected_execution_time); | |
| 376 } | 666 } |
| 377 | 667 |
| 378 } // namespace media | 668 } // namespace media |
| OLD | NEW |