| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/capture/video/fake_video_capture_device.h" | 5 #include "media/capture/video/fake_video_capture_device.h" |
| 6 | 6 |
| 7 #include <stddef.h> | 7 #include <stddef.h> |
| 8 #include <algorithm> | 8 #include <algorithm> |
| 9 #include <utility> | 9 #include <utility> |
| 10 | 10 |
| 11 #include "base/bind.h" | 11 #include "base/bind.h" |
| 12 #include "base/location.h" | 12 #include "base/location.h" |
| 13 #include "base/macros.h" |
| 14 #include "base/memory/weak_ptr.h" |
| 13 #include "base/single_thread_task_runner.h" | 15 #include "base/single_thread_task_runner.h" |
| 14 #include "base/strings/stringprintf.h" | 16 #include "base/strings/stringprintf.h" |
| 17 #include "base/threading/thread_checker.h" |
| 15 #include "base/threading/thread_task_runner_handle.h" | 18 #include "base/threading/thread_task_runner_handle.h" |
| 19 #include "base/time/time.h" |
| 16 #include "media/audio/fake_audio_input_stream.h" | 20 #include "media/audio/fake_audio_input_stream.h" |
| 17 #include "media/base/video_frame.h" | 21 #include "media/base/video_frame.h" |
| 18 #include "third_party/skia/include/core/SkBitmap.h" | 22 #include "third_party/skia/include/core/SkBitmap.h" |
| 19 #include "third_party/skia/include/core/SkCanvas.h" | 23 #include "third_party/skia/include/core/SkCanvas.h" |
| 20 #include "third_party/skia/include/core/SkMatrix.h" | 24 #include "third_party/skia/include/core/SkMatrix.h" |
| 21 #include "third_party/skia/include/core/SkPaint.h" | 25 #include "third_party/skia/include/core/SkPaint.h" |
| 22 #include "ui/gfx/codec/png_codec.h" | 26 #include "ui/gfx/codec/png_codec.h" |
| 23 | 27 |
| 24 namespace media { | 28 namespace media { |
| 25 | 29 |
| 30 namespace { |
| 26 // Sweep at 600 deg/sec. | 31 // Sweep at 600 deg/sec. |
| 27 static const float kPacmanAngularVelocity = 600; | 32 static const float kPacmanAngularVelocity = 600; |
| 28 // Beep every 500 ms. | 33 // Beep every 500 ms. |
| 29 static const int kBeepInterval = 500; | 34 static const int kBeepInterval = 500; |
| 30 // Gradient travels from bottom to top in 5 seconds. | 35 // Gradient travels from bottom to top in 5 seconds. |
| 31 static const float kGradientFrequency = 1.f / 5; | 36 static const float kGradientFrequency = 1.f / 5; |
| 32 | 37 |
| 33 static const double kMinZoom = 100.0; | 38 static const double kMinZoom = 100.0; |
| 34 static const double kMaxZoom = 400.0; | 39 static const double kMaxZoom = 400.0; |
| 35 static const double kZoomStep = 1.0; | 40 static const double kZoomStep = 1.0; |
| 41 static const double kInitialZoom = 100.0; |
| 42 |
| 43 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = { |
| 44 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480), |
| 45 gfx::Size(1280, 720), gfx::Size(1920, 1080)}; |
| 46 static const int kSupportedSizesCount = |
| 47 arraysize(kSupportedSizesOrderedByIncreasingWidth); |
| 48 |
| 49 static const VideoPixelFormat kSupportedPixelFormats[] = { |
| 50 PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB}; |
| 51 |
| 52 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) { |
| 53 for (const gfx::Size& supported_size : |
| 54 kSupportedSizesOrderedByIncreasingWidth) { |
| 55 if (requested_size.width() <= supported_size.width()) |
| 56 return supported_size; |
| 57 } |
| 58 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1]; |
| 59 } |
| 60 |
| 61 // Represents the current state of a FakeVideoCaptureDevice. |
| 62 // This is a separate struct because read-access to it is shared with several |
| 63 // collaborating classes. |
| 64 struct FakeDeviceState { |
| 65 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format) |
| 66 : zoom(zoom), |
| 67 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {} |
| 68 |
| 69 uint32_t zoom; |
| 70 VideoCaptureFormat format; |
| 71 }; |
| 72 |
| 73 // Paints a "pacman-like" animated circle including textual information such |
| 74 // as a frame count and timer. |
| 75 class PacmanFramePainter { |
| 76 public: |
| 77 // Currently, only the following values are supported for |pixel_format|: |
| 78 // PIXEL_FORMAT_I420 |
| 79 // PIXEL_FORMAT_Y16 |
| 80 // PIXEL_FORMAT_ARGB |
| 81 PacmanFramePainter(VideoPixelFormat pixel_format, |
| 82 const FakeDeviceState* fake_device_state); |
| 83 |
| 84 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer); |
| 85 |
| 86 private: |
| 87 void DrawGradientSquares(base::TimeDelta elapsed_time, |
| 88 uint8_t* target_buffer); |
| 89 |
| 90 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer); |
| 91 |
| 92 const VideoPixelFormat pixel_format_; |
| 93 const FakeDeviceState* fake_device_state_ = nullptr; |
| 94 }; |
| 95 |
| 96 // Paints and delivers frames to a client, which is set via Initialize(). |
| 97 class FrameDeliverer { |
| 98 public: |
| 99 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter) |
| 100 : frame_painter_(std::move(frame_painter)) {} |
| 101 virtual ~FrameDeliverer() {} |
| 102 virtual void Initialize(VideoPixelFormat pixel_format, |
| 103 std::unique_ptr<VideoCaptureDevice::Client> client, |
| 104 const FakeDeviceState* device_state) = 0; |
| 105 virtual void Uninitialize() = 0; |
| 106 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0; |
| 107 |
| 108 protected: |
| 109 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) { |
| 110 if (first_ref_time_.is_null()) |
| 111 first_ref_time_ = now; |
| 112 return now - first_ref_time_; |
| 113 } |
| 114 |
| 115 const std::unique_ptr<PacmanFramePainter> frame_painter_; |
| 116 const FakeDeviceState* device_state_ = nullptr; |
| 117 std::unique_ptr<VideoCaptureDevice::Client> client_; |
| 118 |
| 119 private: |
| 120 base::TimeTicks first_ref_time_; |
| 121 }; |
| 122 |
| 123 // Delivers frames using its own buffers via OnIncomingCapturedData(). |
| 124 class OwnBufferFrameDeliverer : public FrameDeliverer { |
| 125 public: |
| 126 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); |
| 127 ~OwnBufferFrameDeliverer() override; |
| 128 |
| 129 // Implementation of FrameDeliverer |
| 130 void Initialize(VideoPixelFormat pixel_format, |
| 131 std::unique_ptr<VideoCaptureDevice::Client> client, |
| 132 const FakeDeviceState* device_state) override; |
| 133 void Uninitialize() override; |
| 134 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; |
| 135 |
| 136 private: |
| 137 std::unique_ptr<uint8_t[]> buffer_; |
| 138 }; |
| 139 |
| 140 // Delivers frames using buffers provided by the client via |
| 141 // OnIncomingCapturedBuffer(). |
| 142 class ClientBufferFrameDeliverer : public FrameDeliverer { |
| 143 public: |
| 144 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); |
| 145 ~ClientBufferFrameDeliverer() override; |
| 146 |
| 147 // Implementation of FrameDeliverer |
| 148 void Initialize(VideoPixelFormat pixel_format, |
| 149 std::unique_ptr<VideoCaptureDevice::Client> client, |
| 150 const FakeDeviceState* device_state) override; |
| 151 void Uninitialize() override; |
| 152 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; |
| 153 }; |
| 154 |
| 155 // Implements the photo functionality of a VideoCaptureDevice |
| 156 class FakePhotoDevice { |
| 157 public: |
| 158 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> argb_painter, |
| 159 const FakeDeviceState* fake_device_state); |
| 160 ~FakePhotoDevice(); |
| 161 |
| 162 void GetPhotoCapabilities( |
| 163 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback); |
| 164 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, |
| 165 base::TimeDelta elapsed_time); |
| 166 |
| 167 private: |
| 168 const std::unique_ptr<PacmanFramePainter> argb_painter_; |
| 169 const FakeDeviceState* const fake_device_state_; |
| 170 }; |
| 171 |
| 172 // Implementation of VideoCaptureDevice that generates test frames. This is |
| 173 // useful for testing the video capture components without having to use real |
| 174 // devices. The implementation schedules delayed tasks to itself to generate and |
| 175 // deliver frames at the requested rate. |
| 176 class FakeVideoCaptureDevice : public VideoCaptureDevice { |
| 177 public: |
| 178 FakeVideoCaptureDevice( |
| 179 std::unique_ptr<FrameDeliverer> frame_delivery_strategy, |
| 180 std::unique_ptr<FakePhotoDevice> photo_device, |
| 181 std::unique_ptr<FakeDeviceState> device_state); |
| 182 ~FakeVideoCaptureDevice() override; |
| 183 |
| 184 // VideoCaptureDevice implementation. |
| 185 void AllocateAndStart(const VideoCaptureParams& params, |
| 186 std::unique_ptr<Client> client) override; |
| 187 void StopAndDeAllocate() override; |
| 188 void GetPhotoCapabilities(GetPhotoCapabilitiesCallback callback) override; |
| 189 void SetPhotoOptions(mojom::PhotoSettingsPtr settings, |
| 190 SetPhotoOptionsCallback callback) override; |
| 191 void TakePhoto(TakePhotoCallback callback) override; |
| 192 |
| 193 private: |
| 194 void BeepAndScheduleNextCapture(base::TimeTicks expected_execution_time); |
| 195 void OnNextFrameDue(base::TimeTicks expected_execution_time, int session_id); |
| 196 |
| 197 const std::unique_ptr<FrameDeliverer> frame_deliverer_; |
| 198 const std::unique_ptr<FakePhotoDevice> photo_device_; |
| 199 const std::unique_ptr<FakeDeviceState> device_state_; |
| 200 int current_session_id_ = 0; |
| 201 |
| 202 // Time when the next beep occurs. |
| 203 base::TimeDelta beep_time_; |
| 204 // Time since the fake video started rendering frames. |
| 205 base::TimeDelta elapsed_time_; |
| 206 |
| 207 base::ThreadChecker thread_checker_; |
| 208 |
| 209 // FakeVideoCaptureDevice posts tasks to itself for frame construction and |
| 210 // needs to deal with asynchronous StopAndDeAllocate(). |
| 211 base::WeakPtrFactory<FakeVideoCaptureDevice> weak_factory_; |
| 212 |
| 213 DISALLOW_COPY_AND_ASSIGN(FakeVideoCaptureDevice); |
| 214 }; |
| 215 |
| 216 } // anonymous namespace |
| 217 |
| 218 // static |
| 219 void FakeVideoCaptureDeviceMaker::GetSupportedSizes( |
| 220 std::vector<gfx::Size>* supported_sizes) { |
| 221 for (int i = 0; i < kSupportedSizesCount; i++) |
| 222 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]); |
| 223 } |
| 224 |
| 225 // static |
| 226 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance( |
| 227 VideoPixelFormat pixel_format, |
| 228 DeliveryMode delivery_mode, |
| 229 float frame_rate) { |
| 230 bool pixel_format_supported = false; |
| 231 for (const auto& supported_pixel_format : kSupportedPixelFormats) { |
| 232 if (pixel_format == supported_pixel_format) { |
| 233 pixel_format_supported = true; |
| 234 break; |
| 235 } |
| 236 } |
| 237 if (!pixel_format_supported) { |
| 238 DLOG(ERROR) << "Requested an unsupported pixel format " |
| 239 << VideoPixelFormatToString(pixel_format); |
| 240 return nullptr; |
| 241 } |
| 242 |
| 243 auto device_state = |
| 244 base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format); |
| 245 auto video_frame_painter = |
| 246 base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get()); |
| 247 std::unique_ptr<FrameDeliverer> frame_delivery_strategy; |
| 248 switch (delivery_mode) { |
| 249 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS: |
| 250 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>( |
| 251 std::move(video_frame_painter)); |
| 252 break; |
| 253 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS: |
| 254 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>( |
| 255 std::move(video_frame_painter)); |
| 256 break; |
| 257 } |
| 258 |
| 259 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>( |
| 260 PIXEL_FORMAT_ARGB, device_state.get()); |
| 261 auto photo_device = base::MakeUnique<FakePhotoDevice>( |
| 262 std::move(photo_frame_painter), device_state.get()); |
| 263 |
| 264 return base::MakeUnique<FakeVideoCaptureDevice>( |
| 265 std::move(frame_delivery_strategy), std::move(photo_device), |
| 266 std::move(device_state)); |
| 267 } |
| 268 |
| 269 PacmanFramePainter::PacmanFramePainter(VideoPixelFormat pixel_format, |
| 270 const FakeDeviceState* fake_device_state) |
| 271 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) { |
| 272 DCHECK(pixel_format == PIXEL_FORMAT_I420 || |
| 273 pixel_format == PIXEL_FORMAT_Y16 || pixel_format == PIXEL_FORMAT_ARGB); |
| 274 } |
| 275 |
| 276 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time, |
| 277 uint8_t* target_buffer) { |
| 278 DrawPacman(elapsed_time, target_buffer); |
| 279 DrawGradientSquares(elapsed_time, target_buffer); |
| 280 } |
| 36 | 281 |
| 37 // Starting from top left, -45 deg gradient. Value at point (row, column) is | 282 // Starting from top left, -45 deg gradient. Value at point (row, column) is |
| 38 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where | 283 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where |
| 39 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per | 284 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per |
| 40 // component) or 65535 for Y16. | 285 // component) or 65535 for Y16. |
| 41 // This is handy for pixel tests where we use the squares to verify rendering. | 286 // This is handy for pixel tests where we use the squares to verify rendering. |
| 42 void DrawGradientSquares(VideoPixelFormat frame_format, | 287 void PacmanFramePainter::DrawGradientSquares(base::TimeDelta elapsed_time, |
| 43 uint8_t* const pixels, | 288 uint8_t* target_buffer) { |
| 44 base::TimeDelta elapsed_time, | 289 const int width = fake_device_state_->format.frame_size.width(); |
| 45 const gfx::Size& frame_size) { | 290 const int height = fake_device_state_->format.frame_size.height(); |
| 46 const int width = frame_size.width(); | 291 |
| 47 const int height = frame_size.height(); | |
| 48 const int side = width / 16; // square side length. | 292 const int side = width / 16; // square side length. |
| 49 DCHECK(side); | 293 DCHECK(side); |
| 50 const gfx::Point squares[] = {{0, 0}, | 294 const gfx::Point squares[] = {{0, 0}, |
| 51 {width - side, 0}, | 295 {width - side, 0}, |
| 52 {0, height - side}, | 296 {0, height - side}, |
| 53 {width - side, height - side}}; | 297 {width - side, height - side}}; |
| 54 const float start = | 298 const float start = |
| 55 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); | 299 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); |
| 56 const float color_step = 65535 / static_cast<float>(width + height); | 300 const float color_step = 65535 / static_cast<float>(width + height); |
| 57 for (const auto& corner : squares) { | 301 for (const auto& corner : squares) { |
| 58 for (int y = corner.y(); y < corner.y() + side; ++y) { | 302 for (int y = corner.y(); y < corner.y() + side; ++y) { |
| 59 for (int x = corner.x(); x < corner.x() + side; ++x) { | 303 for (int x = corner.x(); x < corner.x() + side; ++x) { |
| 60 const unsigned int value = | 304 const unsigned int value = |
| 61 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; | 305 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; |
| 62 size_t offset = (y * width) + x; | 306 size_t offset = (y * width) + x; |
| 63 switch (frame_format) { | 307 switch (pixel_format_) { |
| 64 case PIXEL_FORMAT_Y16: | 308 case PIXEL_FORMAT_Y16: |
| 65 pixels[offset * sizeof(uint16_t)] = value & 0xFF; | 309 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF; |
| 66 pixels[offset * sizeof(uint16_t) + 1] = value >> 8; | 310 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8; |
| 67 break; | 311 break; |
| 68 case PIXEL_FORMAT_ARGB: | 312 case PIXEL_FORMAT_ARGB: |
| 69 pixels[offset * sizeof(uint32_t) + 1] = value >> 8; | 313 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8; |
| 70 pixels[offset * sizeof(uint32_t) + 2] = value >> 8; | 314 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8; |
| 71 pixels[offset * sizeof(uint32_t) + 3] = value >> 8; | 315 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8; |
| 72 break; | 316 break; |
| 73 default: | 317 default: |
| 74 pixels[offset] = value >> 8; | 318 target_buffer[offset] = value >> 8; |
| 75 break; | 319 break; |
| 76 } | 320 } |
| 77 } | 321 } |
| 78 } | 322 } |
| 79 } | 323 } |
| 80 } | 324 } |
| 81 | 325 |
| 82 void DrawPacman(VideoPixelFormat frame_format, | 326 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time, |
| 83 uint8_t* const data, | 327 uint8_t* target_buffer) { |
| 84 base::TimeDelta elapsed_time, | 328 const int width = fake_device_state_->format.frame_size.width(); |
| 85 float frame_rate, | 329 const int height = fake_device_state_->format.frame_size.height(); |
| 86 const gfx::Size& frame_size, | 330 |
| 87 double zoom) { | |
| 88 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. | 331 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. |
| 89 const SkColorType colorspace = (frame_format == PIXEL_FORMAT_ARGB) | 332 const SkColorType colorspace = (pixel_format_ == PIXEL_FORMAT_ARGB) |
| 90 ? kN32_SkColorType | 333 ? kN32_SkColorType |
| 91 : kAlpha_8_SkColorType; | 334 : kAlpha_8_SkColorType; |
| 92 // Skia doesn't support 16 bit alpha rendering, so we render 8 bit alpha and use | 335 // Skia doesn't support 16 bit alpha rendering, so we render 8 bit alpha and use |
| 93 // this as high byte values in 16 bit pixels. | 336 // this as high byte values in 16 bit pixels. |
| 94 const SkImageInfo info = SkImageInfo::Make( | 337 const SkImageInfo info = |
| 95 frame_size.width(), frame_size.height(), colorspace, kOpaque_SkAlphaType); | 338 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType); |
| 96 SkBitmap bitmap; | 339 SkBitmap bitmap; |
| 97 bitmap.setInfo(info); | 340 bitmap.setInfo(info); |
| 98 bitmap.setPixels(data); | 341 bitmap.setPixels(target_buffer); |
| 99 SkPaint paint; | 342 SkPaint paint; |
| 100 paint.setStyle(SkPaint::kFill_Style); | 343 paint.setStyle(SkPaint::kFill_Style); |
| 101 SkCanvas canvas(bitmap); | 344 SkCanvas canvas(bitmap); |
| 102 | 345 |
| 103 const SkScalar unscaled_zoom = zoom / 100.f; | 346 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f; |
| 104 SkMatrix matrix; | 347 SkMatrix matrix; |
| 105 matrix.setScale(unscaled_zoom, unscaled_zoom, frame_size.width() / 2, | 348 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2); |
| 106 frame_size.height() / 2); | |
| 107 canvas.setMatrix(matrix); | 349 canvas.setMatrix(matrix); |
| 108 | 350 |
| 109 // Equalize Alpha_8 that has light green background while RGBA has white. | 351 // Equalize Alpha_8 that has light green background while RGBA has white. |
| 110 if (frame_format == PIXEL_FORMAT_ARGB) { | 352 if (pixel_format_ == PIXEL_FORMAT_ARGB) { |
| 111 const SkRect full_frame = | 353 const SkRect full_frame = SkRect::MakeWH(width, height); |
| 112 SkRect::MakeWH(frame_size.width(), frame_size.height()); | |
| 113 paint.setARGB(255, 0, 127, 0); | 354 paint.setARGB(255, 0, 127, 0); |
| 114 canvas.drawRect(full_frame, paint); | 355 canvas.drawRect(full_frame, paint); |
| 115 } | 356 } |
| 116 paint.setColor(SK_ColorGREEN); | 357 paint.setColor(SK_ColorGREEN); |
| 117 | 358 |
| 118 // Draw a sweeping circle to show an animation. | 359 // Draw a sweeping circle to show an animation. |
| 119 const float end_angle = | 360 const float end_angle = |
| 120 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); | 361 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); |
| 121 const int radius = std::min(frame_size.width(), frame_size.height()) / 4; | 362 const int radius = std::min(width, height) / 4; |
| 122 const SkRect rect = SkRect::MakeXYWH(frame_size.width() / 2 - radius, | 363 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius, |
| 123 frame_size.height() / 2 - radius, | |
| 124 2 * radius, 2 * radius); | 364 2 * radius, 2 * radius); |
| 125 canvas.drawArc(rect, 0, end_angle, true, paint); | 365 canvas.drawArc(rect, 0, end_angle, true, paint); |
| 126 | 366 |
| 127 // Draw current time. | 367 // Draw current time. |
| 128 const int milliseconds = elapsed_time.InMilliseconds() % 1000; | 368 const int milliseconds = elapsed_time.InMilliseconds() % 1000; |
| 129 const int seconds = elapsed_time.InSeconds() % 60; | 369 const int seconds = elapsed_time.InSeconds() % 60; |
| 130 const int minutes = elapsed_time.InMinutes() % 60; | 370 const int minutes = elapsed_time.InMinutes() % 60; |
| 131 const int hours = elapsed_time.InHours(); | 371 const int hours = elapsed_time.InHours(); |
| 132 const int frame_count = elapsed_time.InMilliseconds() * frame_rate / 1000; | 372 const int frame_count = elapsed_time.InMilliseconds() * |
| 373 fake_device_state_->format.frame_rate / 1000; |
| 133 | 374 |
| 134 const std::string time_string = | 375 const std::string time_string = |
| 135 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, | 376 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, |
| 136 milliseconds, frame_count); | 377 milliseconds, frame_count); |
| 137 canvas.scale(3, 3); | 378 canvas.scale(3, 3); |
| 138 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); | 379 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); |
| 139 | 380 |
| 140 if (frame_format == PIXEL_FORMAT_Y16) { | 381 if (pixel_format_ == PIXEL_FORMAT_Y16) { |
| 141 // Use 8 bit bitmap rendered to first half of the buffer as high byte values | 382 // Use 8 bit bitmap rendered to first half of the buffer as high byte values |
| 142 // for the whole buffer. Low byte values are not important. | 383 // for the whole buffer. Low byte values are not important. |
| 143 for (int i = frame_size.GetArea() - 1; i >= 0; --i) | 384 for (int i = (width * height) - 1; i >= 0; --i) |
| 144 data[i * 2 + 1] = data[i]; | 385 target_buffer[i * 2 + 1] = target_buffer[i]; |
| 145 } | 386 } |
| 146 DrawGradientSquares(frame_format, data, elapsed_time, frame_size); | |
| 147 } | 387 } |
| 148 | 388 |
| 149 // Creates a PNG-encoded frame and sends it back to |callback|. The other | 389 FakePhotoDevice::FakePhotoDevice( |
| 150 // parameters are used to replicate the PacMan rendering. | 390 std::unique_ptr<PacmanFramePainter> argb_painter, |
| 151 void DoTakeFakePhoto(VideoCaptureDevice::TakePhotoCallback callback, | 391 const FakeDeviceState* fake_device_state) |
| 152 const VideoCaptureFormat& capture_format, | 392 : argb_painter_(std::move(argb_painter)), |
| 153 base::TimeDelta elapsed_time, | 393 fake_device_state_(fake_device_state) {} |
| 154 float fake_capture_rate, | 394 |
| 155 uint32_t zoom) { | 395 FakePhotoDevice::~FakePhotoDevice() = default; |
| 396 |
| 397 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, |
| 398 base::TimeDelta elapsed_time) { |
| 399 // Create a PNG-encoded frame and send it back to |callback|. |
| 156 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( | 400 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( |
| 157 PIXEL_FORMAT_ARGB, capture_format.frame_size)]); | 401 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size)]); |
| 158 | 402 argb_painter_->PaintFrame(elapsed_time, buffer.get()); |
| 159 DrawPacman(PIXEL_FORMAT_ARGB, buffer.get(), elapsed_time, fake_capture_rate, | |
| 160 capture_format.frame_size, zoom); | |
| 161 | |
| 162 mojom::BlobPtr blob = mojom::Blob::New(); | 403 mojom::BlobPtr blob = mojom::Blob::New(); |
| 163 const bool result = gfx::PNGCodec::Encode( | 404 const bool result = |
| 164 buffer.get(), gfx::PNGCodec::FORMAT_RGBA, capture_format.frame_size, | 405 gfx::PNGCodec::Encode(buffer.get(), gfx::PNGCodec::FORMAT_RGBA, |
| 165 capture_format.frame_size.width() * 4, true /* discard_transparency */, | 406 fake_device_state_->format.frame_size, |
| 166 std::vector<gfx::PNGCodec::Comment>(), &blob->data); | 407 fake_device_state_->format.frame_size.width() * 4, |
| 408 true /* discard_transparency */, |
| 409 std::vector<gfx::PNGCodec::Comment>(), &blob->data); |
| 167 DCHECK(result); | 410 DCHECK(result); |
| 168 | 411 |
| 169 blob->mime_type = "image/png"; | 412 blob->mime_type = "image/png"; |
| 170 callback.Run(std::move(blob)); | 413 callback.Run(std::move(blob)); |
| 171 } | 414 } |
| 172 | 415 |
| 173 FakeVideoCaptureDevice::FakeVideoCaptureDevice(BufferOwnership buffer_ownership, | 416 FakeVideoCaptureDevice::FakeVideoCaptureDevice( |
| 174 float fake_capture_rate, | 417 std::unique_ptr<FrameDeliverer> frame_delivery_strategy, |
| 175 VideoPixelFormat pixel_format) | 418 std::unique_ptr<FakePhotoDevice> photo_device, |
| 176 : buffer_ownership_(buffer_ownership), | 419 std::unique_ptr<FakeDeviceState> device_state) |
| 177 fake_capture_rate_(fake_capture_rate), | 420 : frame_deliverer_(std::move(frame_delivery_strategy)), |
| 178 pixel_format_(pixel_format), | 421 photo_device_(std::move(photo_device)), |
| 179 current_zoom_(kMinZoom), | 422 device_state_(std::move(device_state)), |
| 180 weak_factory_(this) {} | 423 weak_factory_(this) {} |
| 181 | 424 |
| 182 FakeVideoCaptureDevice::~FakeVideoCaptureDevice() { | 425 FakeVideoCaptureDevice::~FakeVideoCaptureDevice() { |
| 183 DCHECK(thread_checker_.CalledOnValidThread()); | 426 DCHECK(thread_checker_.CalledOnValidThread()); |
| 184 } | 427 } |
| 185 | 428 |
| 186 void FakeVideoCaptureDevice::AllocateAndStart( | 429 void FakeVideoCaptureDevice::AllocateAndStart( |
| 187 const VideoCaptureParams& params, | 430 const VideoCaptureParams& params, |
| 188 std::unique_ptr<VideoCaptureDevice::Client> client) { | 431 std::unique_ptr<VideoCaptureDevice::Client> client) { |
| 189 DCHECK(thread_checker_.CalledOnValidThread()); | 432 DCHECK(thread_checker_.CalledOnValidThread()); |
| 190 | 433 |
| 191 client_ = std::move(client); | |
| 192 | |
| 193 // Incoming |params| can be none of the supported formats, so we get the | |
| 194 // closest thing rounded up. TODO(mcasas): Use the |params|, if they belong to | |
| 195 // the supported ones, when http://crbug.com/309554 is verified. | |
| 196 capture_format_.frame_rate = fake_capture_rate_; | |
| 197 if (params.requested_format.frame_size.width() > 1280) | |
| 198 capture_format_.frame_size.SetSize(1920, 1080); | |
| 199 else if (params.requested_format.frame_size.width() > 640) | |
| 200 capture_format_.frame_size.SetSize(1280, 720); | |
| 201 else if (params.requested_format.frame_size.width() > 320) | |
| 202 capture_format_.frame_size.SetSize(640, 480); | |
| 203 else if (params.requested_format.frame_size.width() > 96) | |
| 204 capture_format_.frame_size.SetSize(320, 240); | |
| 205 else | |
| 206 capture_format_.frame_size.SetSize(96, 96); | |
| 207 | |
| 208 capture_format_.pixel_format = pixel_format_; | |
| 209 if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) { | |
| 210 capture_format_.pixel_storage = PIXEL_STORAGE_CPU; | |
| 211 capture_format_.pixel_format = PIXEL_FORMAT_ARGB; | |
| 212 DVLOG(1) << "starting with client argb buffers"; | |
| 213 } else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) { | |
| 214 capture_format_.pixel_storage = PIXEL_STORAGE_CPU; | |
| 215 DVLOG(1) << "starting with own " << VideoPixelFormatToString(pixel_format_) | |
| 216 << " buffers"; | |
| 217 } | |
| 218 | |
| 219 if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) { | |
| 220 fake_frame_.reset(new uint8_t[VideoFrame::AllocationSize( | |
| 221 pixel_format_, capture_format_.frame_size)]); | |
| 222 } | |
| 223 | |
| 224 beep_time_ = base::TimeDelta(); | 434 beep_time_ = base::TimeDelta(); |
| 225 elapsed_time_ = base::TimeDelta(); | 435 elapsed_time_ = base::TimeDelta(); |
| 226 | 436 device_state_->format.frame_size = |
| 227 if (buffer_ownership_ == BufferOwnership::CLIENT_BUFFERS) | 437 SnapToSupportedSize(params.requested_format.frame_size); |
| 228 BeepAndScheduleNextCapture( | 438 frame_deliverer_->Initialize(device_state_->format.pixel_format, |
| 229 base::TimeTicks::Now(), | 439 std::move(client), device_state_.get()); |
| 230 base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers, | 440 current_session_id_++; |
| 231 weak_factory_.GetWeakPtr())); | 441 BeepAndScheduleNextCapture(base::TimeTicks::Now()); |
| 232 else if (buffer_ownership_ == BufferOwnership::OWN_BUFFERS) | |
| 233 BeepAndScheduleNextCapture( | |
| 234 base::TimeTicks::Now(), | |
| 235 base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers, | |
| 236 weak_factory_.GetWeakPtr())); | |
| 237 } | 442 } |
| 238 | 443 |
| 239 void FakeVideoCaptureDevice::StopAndDeAllocate() { | 444 void FakeVideoCaptureDevice::StopAndDeAllocate() { |
| 240 DCHECK(thread_checker_.CalledOnValidThread()); | 445 DCHECK(thread_checker_.CalledOnValidThread()); |
| 241 client_.reset(); | 446 |
| 447 // Invalidate WeakPtr to stop the perpetual scheduling of tasks. |
| 448 weak_factory_.InvalidateWeakPtrs(); |
| 449 frame_deliverer_->Uninitialize(); |
| 242 } | 450 } |
| 243 | 451 |
| 244 void FakeVideoCaptureDevice::GetPhotoCapabilities( | 452 void FakeVideoCaptureDevice::GetPhotoCapabilities( |
| 245 GetPhotoCapabilitiesCallback callback) { | 453 GetPhotoCapabilitiesCallback callback) { |
| 454 DCHECK(thread_checker_.CalledOnValidThread()); |
| 455 photo_device_->GetPhotoCapabilities(std::move(callback)); |
| 456 } |
| 457 |
| 458 void FakePhotoDevice::GetPhotoCapabilities( |
| 459 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback) { |
| 246 mojom::PhotoCapabilitiesPtr photo_capabilities = | 460 mojom::PhotoCapabilitiesPtr photo_capabilities = |
| 247 mojom::PhotoCapabilities::New(); | 461 mojom::PhotoCapabilities::New(); |
| 248 photo_capabilities->iso = mojom::Range::New(); | 462 photo_capabilities->iso = mojom::Range::New(); |
| 249 photo_capabilities->iso->current = 100.0; | 463 photo_capabilities->iso->current = 100.0; |
| 250 photo_capabilities->iso->max = 100.0; | 464 photo_capabilities->iso->max = 100.0; |
| 251 photo_capabilities->iso->min = 100.0; | 465 photo_capabilities->iso->min = 100.0; |
| 252 photo_capabilities->iso->step = 0.0; | 466 photo_capabilities->iso->step = 0.0; |
| 253 photo_capabilities->height = mojom::Range::New(); | 467 photo_capabilities->height = mojom::Range::New(); |
| 254 photo_capabilities->height->current = capture_format_.frame_size.height(); | 468 photo_capabilities->height->current = |
| 469 fake_device_state_->format.frame_size.height(); |
| 255 photo_capabilities->height->max = 1080.0; | 470 photo_capabilities->height->max = 1080.0; |
| 256 photo_capabilities->height->min = 96.0; | 471 photo_capabilities->height->min = 96.0; |
| 257 photo_capabilities->height->step = 1.0; | 472 photo_capabilities->height->step = 1.0; |
| 258 photo_capabilities->width = mojom::Range::New(); | 473 photo_capabilities->width = mojom::Range::New(); |
| 259 photo_capabilities->width->current = capture_format_.frame_size.width(); | 474 photo_capabilities->width->current = |
| 475 fake_device_state_->format.frame_size.width(); |
| 260 photo_capabilities->width->max = 1920.0; | 476 photo_capabilities->width->max = 1920.0; |
| 261 photo_capabilities->width->min = 96.0; | 477 photo_capabilities->width->min = 96.0; |
| 262 photo_capabilities->width->step = 1; | 478 photo_capabilities->width->step = 1.0; |
| 263 photo_capabilities->zoom = mojom::Range::New(); | 479 photo_capabilities->zoom = mojom::Range::New(); |
| 264 photo_capabilities->zoom->current = current_zoom_; | 480 photo_capabilities->zoom->current = fake_device_state_->zoom; |
| 265 photo_capabilities->zoom->max = kMaxZoom; | 481 photo_capabilities->zoom->max = kMaxZoom; |
| 266 photo_capabilities->zoom->min = kMinZoom; | 482 photo_capabilities->zoom->min = kMinZoom; |
| 267 photo_capabilities->zoom->step = kZoomStep; | 483 photo_capabilities->zoom->step = kZoomStep; |
| 268 photo_capabilities->focus_mode = mojom::MeteringMode::NONE; | 484 photo_capabilities->focus_mode = mojom::MeteringMode::NONE; |
| 269 photo_capabilities->exposure_mode = mojom::MeteringMode::NONE; | 485 photo_capabilities->exposure_mode = mojom::MeteringMode::NONE; |
| 270 photo_capabilities->exposure_compensation = mojom::Range::New(); | 486 photo_capabilities->exposure_compensation = mojom::Range::New(); |
| 271 photo_capabilities->white_balance_mode = mojom::MeteringMode::NONE; | 487 photo_capabilities->white_balance_mode = mojom::MeteringMode::NONE; |
| 272 photo_capabilities->fill_light_mode = mojom::FillLightMode::NONE; | 488 photo_capabilities->fill_light_mode = mojom::FillLightMode::NONE; |
| 273 photo_capabilities->red_eye_reduction = false; | 489 photo_capabilities->red_eye_reduction = false; |
| 274 photo_capabilities->color_temperature = mojom::Range::New(); | 490 photo_capabilities->color_temperature = mojom::Range::New(); |
| 275 photo_capabilities->brightness = media::mojom::Range::New(); | 491 photo_capabilities->brightness = media::mojom::Range::New(); |
| 276 photo_capabilities->contrast = media::mojom::Range::New(); | 492 photo_capabilities->contrast = media::mojom::Range::New(); |
| 277 photo_capabilities->saturation = media::mojom::Range::New(); | 493 photo_capabilities->saturation = media::mojom::Range::New(); |
| 278 photo_capabilities->sharpness = media::mojom::Range::New(); | 494 photo_capabilities->sharpness = media::mojom::Range::New(); |
| 279 callback.Run(std::move(photo_capabilities)); | 495 callback.Run(std::move(photo_capabilities)); |
| 280 } | 496 } |
| 281 | 497 |
| 282 void FakeVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings, | 498 void FakeVideoCaptureDevice::SetPhotoOptions(mojom::PhotoSettingsPtr settings, |
| 283 SetPhotoOptionsCallback callback) { | 499 SetPhotoOptionsCallback callback) { |
| 284 if (settings->has_zoom) | 500 DCHECK(thread_checker_.CalledOnValidThread()); |
| 285 current_zoom_ = std::max(kMinZoom, std::min(settings->zoom, kMaxZoom)); | 501 if (settings->has_zoom) { |
| 502 device_state_->zoom = |
| 503 std::max(kMinZoom, std::min(settings->zoom, kMaxZoom)); |
| 504 } |
| 505 |
| 286 callback.Run(true); | 506 callback.Run(true); |
| 287 } | 507 } |
| 288 | 508 |
| 289 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { | 509 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { |
| 510 DCHECK(thread_checker_.CalledOnValidThread()); |
| 290 base::ThreadTaskRunnerHandle::Get()->PostTask( | 511 base::ThreadTaskRunnerHandle::Get()->PostTask( |
| 291 FROM_HERE, | 512 FROM_HERE, base::Bind(&FakePhotoDevice::TakePhoto, |
| 292 base::Bind(&DoTakeFakePhoto, base::Passed(&callback), capture_format_, | 513 base::Unretained(photo_device_.get()), |
| 293 elapsed_time_, fake_capture_rate_, current_zoom_)); | 514 base::Passed(&callback), elapsed_time_)); |
| 294 } | 515 } |
| 295 | 516 |
| 296 void FakeVideoCaptureDevice::CaptureUsingOwnBuffers( | 517 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer( |
| 297 base::TimeTicks expected_execution_time) { | 518 std::unique_ptr<PacmanFramePainter> frame_painter) |
| 298 DCHECK(thread_checker_.CalledOnValidThread()); | 519 : FrameDeliverer(std::move(frame_painter)) {} |
| 299 const size_t frame_size = capture_format_.ImageAllocationSize(); | |
| 300 | 520 |
| 301 memset(fake_frame_.get(), 0, frame_size); | 521 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default; |
| 302 DrawPacman(capture_format_.pixel_format, fake_frame_.get(), elapsed_time_, | 522 |
| 303 fake_capture_rate_, capture_format_.frame_size, current_zoom_); | 523 void OwnBufferFrameDeliverer::Initialize( |
| 304 // Give the captured frame to the client. | 524 VideoPixelFormat pixel_format, |
| 305 base::TimeTicks now = base::TimeTicks::Now(); | 525 std::unique_ptr<VideoCaptureDevice::Client> client, |
| 306 if (first_ref_time_.is_null()) | 526 const FakeDeviceState* device_state) { |
| 307 first_ref_time_ = now; | 527 client_ = std::move(client); |
| 308 client_->OnIncomingCapturedData(fake_frame_.get(), frame_size, | 528 device_state_ = device_state; |
| 309 capture_format_, 0 /* rotation */, now, | 529 buffer_.reset(new uint8_t[VideoFrame::AllocationSize( |
| 310 now - first_ref_time_); | 530 pixel_format, device_state_->format.frame_size)]); |
| 311 BeepAndScheduleNextCapture( | |
| 312 expected_execution_time, | |
| 313 base::Bind(&FakeVideoCaptureDevice::CaptureUsingOwnBuffers, | |
| 314 weak_factory_.GetWeakPtr())); | |
| 315 } | 531 } |
| 316 | 532 |
| 317 void FakeVideoCaptureDevice::CaptureUsingClientBuffers( | 533 void OwnBufferFrameDeliverer::Uninitialize() { |
| 318 base::TimeTicks expected_execution_time) { | 534 client_.reset(); |
| 319 DCHECK(thread_checker_.CalledOnValidThread()); | 535 device_state_ = nullptr; |
| 536 buffer_.reset(); |
| 537 } |
| 538 |
| 539 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame( |
| 540 base::TimeDelta timestamp_to_paint) { |
| 541 if (!client_) |
| 542 return; |
| 543 const size_t frame_size = device_state_->format.ImageAllocationSize(); |
| 544 memset(buffer_.get(), 0, frame_size); |
| 545 frame_painter_->PaintFrame(timestamp_to_paint, buffer_.get()); |
| 546 base::TimeTicks now = base::TimeTicks::Now(); |
| 547 client_->OnIncomingCapturedData(buffer_.get(), frame_size, |
| 548 device_state_->format, 0 /* rotation */, now, |
| 549 CalculateTimeSinceFirstInvocation(now)); |
| 550 } |
| 551 |
| 552 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer( |
| 553 std::unique_ptr<PacmanFramePainter> frame_painter) |
| 554 : FrameDeliverer(std::move(frame_painter)) {} |
| 555 |
| 556 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default; |
| 557 |
| 558 void ClientBufferFrameDeliverer::Initialize( |
| 559 VideoPixelFormat, |
| 560 std::unique_ptr<VideoCaptureDevice::Client> client, |
| 561 const FakeDeviceState* device_state) { |
| 562 client_ = std::move(client); |
| 563 device_state_ = device_state; |
| 564 } |
| 565 |
| 566 void ClientBufferFrameDeliverer::Uninitialize() { |
| 567 client_.reset(); |
| 568 device_state_ = nullptr; |
| 569 } |
| 570 |
| 571 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame( |
| 572 base::TimeDelta timestamp_to_paint) { |
| 573 if (client_ == nullptr) |
| 574 return; |
| 320 | 575 |
| 321 const int arbitrary_frame_feedback_id = 0; | 576 const int arbitrary_frame_feedback_id = 0; |
| 322 VideoCaptureDevice::Client::Buffer capture_buffer = | 577 auto capture_buffer = client_->ReserveOutputBuffer( |
| 323 client_->ReserveOutputBuffer( | 578 device_state_->format.frame_size, device_state_->format.pixel_format, |
| 324 capture_format_.frame_size, capture_format_.pixel_format, | 579 device_state_->format.pixel_storage, arbitrary_frame_feedback_id); |
| 325 capture_format_.pixel_storage, arbitrary_frame_feedback_id); | |
| 326 DLOG_IF(ERROR, !capture_buffer.is_valid()) | 580 DLOG_IF(ERROR, !capture_buffer.is_valid()) |
| 327 << "Couldn't allocate Capture Buffer"; | 581 << "Couldn't allocate Capture Buffer"; |
| 328 auto buffer_access = | 582 auto buffer_access = |
| 329 capture_buffer.handle_provider()->GetHandleForInProcessAccess(); | 583 capture_buffer.handle_provider()->GetHandleForInProcessAccess(); |
| 330 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; | 584 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; |
| 331 | 585 |
| 332 DCHECK_EQ(PIXEL_STORAGE_CPU, capture_format_.pixel_storage); | 586 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state_->format.pixel_storage); |
| 587 |
| 333 uint8_t* data_ptr = buffer_access->data(); | 588 uint8_t* data_ptr = buffer_access->data(); |
| 334 memset(data_ptr, 0, buffer_access->mapped_size()); | 589 memset(data_ptr, 0, buffer_access->mapped_size()); |
| 335 DrawPacman(capture_format_.pixel_format, data_ptr, elapsed_time_, | 590 frame_painter_->PaintFrame(timestamp_to_paint, data_ptr); |
| 336 fake_capture_rate_, capture_format_.frame_size, current_zoom_); | |
| 337 | 591 |
| 338 // Give the captured frame to the client. | |
| 339 base::TimeTicks now = base::TimeTicks::Now(); | 592 base::TimeTicks now = base::TimeTicks::Now(); |
| 340 if (first_ref_time_.is_null()) | 593 client_->OnIncomingCapturedBuffer(std::move(capture_buffer), |
| 341 first_ref_time_ = now; | 594 device_state_->format, now, |
| 342 client_->OnIncomingCapturedBuffer(std::move(capture_buffer), capture_format_, | 595 CalculateTimeSinceFirstInvocation(now)); |
| 343 now, now - first_ref_time_); | |
| 344 | |
| 345 BeepAndScheduleNextCapture( | |
| 346 expected_execution_time, | |
| 347 base::Bind(&FakeVideoCaptureDevice::CaptureUsingClientBuffers, | |
| 348 weak_factory_.GetWeakPtr())); | |
| 349 } | 596 } |
| 350 | 597 |
| 351 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( | 598 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( |
| 352 base::TimeTicks expected_execution_time, | 599 base::TimeTicks expected_execution_time) { |
| 353 const base::Callback<void(base::TimeTicks)>& next_capture) { | 600 DCHECK(thread_checker_.CalledOnValidThread()); |
| 354 const base::TimeDelta beep_interval = | 601 const base::TimeDelta beep_interval = |
| 355 base::TimeDelta::FromMilliseconds(kBeepInterval); | 602 base::TimeDelta::FromMilliseconds(kBeepInterval); |
| 356 const base::TimeDelta frame_interval = | 603 const base::TimeDelta frame_interval = |
| 357 base::TimeDelta::FromMicroseconds(1e6 / fake_capture_rate_); | 604 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate); |
| 358 beep_time_ += frame_interval; | 605 beep_time_ += frame_interval; |
| 359 elapsed_time_ += frame_interval; | 606 elapsed_time_ += frame_interval; |
| 360 | 607 |
| 361 // Generate a synchronized beep twice per second. | 608 // Generate a synchronized beep twice per second. |
| 362 if (beep_time_ >= beep_interval) { | 609 if (beep_time_ >= beep_interval) { |
| 363 FakeAudioInputStream::BeepOnce(); | 610 FakeAudioInputStream::BeepOnce(); |
| 364 beep_time_ -= beep_interval; | 611 beep_time_ -= beep_interval; |
| 365 } | 612 } |
| 366 | 613 |
| 367 // Reschedule next CaptureTask. | 614 // Reschedule next CaptureTask. |
| 368 const base::TimeTicks current_time = base::TimeTicks::Now(); | 615 const base::TimeTicks current_time = base::TimeTicks::Now(); |
| 369 // Don't accumulate any debt if we are lagging behind - just post the next | 616 // Don't accumulate any debt if we are lagging behind - just post the next |
| 370 // frame immediately and continue as normal. | 617 // frame immediately and continue as normal. |
| 371 const base::TimeTicks next_execution_time = | 618 const base::TimeTicks next_execution_time = |
| 372 std::max(current_time, expected_execution_time + frame_interval); | 619 std::max(current_time, expected_execution_time + frame_interval); |
| 373 const base::TimeDelta delay = next_execution_time - current_time; | 620 const base::TimeDelta delay = next_execution_time - current_time; |
| 374 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( | 621 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( |
| 375 FROM_HERE, base::Bind(next_capture, next_execution_time), delay); | 622 FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue, |
| 623 weak_factory_.GetWeakPtr(), next_execution_time, |
| 624 current_session_id_), |
| 625 delay); |
| 626 } |
| 627 |
| 628 void FakeVideoCaptureDevice::OnNextFrameDue( |
| 629 base::TimeTicks expected_execution_time, |
| 630 int session_id) { |
| 631 DCHECK(thread_checker_.CalledOnValidThread()); |
| 632 if (session_id != current_session_id_) |
| 633 return; |
| 634 |
| 635 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_); |
| 636 BeepAndScheduleNextCapture(expected_execution_time); |
| 376 } | 637 } |
| 377 | 638 |
| 378 } // namespace media | 639 } // namespace media |
| OLD | NEW |