Chromium Code Reviews

Side by Side Diff: media/capture/video/fake_video_capture_device.cc

Issue 2700173002: Add MJPEG support to FakeVideoCaptureDevice (Closed)
Patch Set: Fix for compilers on Win and Android (created 3 years, 10 months ago)
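For orientation before the diff, a minimal usage sketch (not part of the CL) of the maker API as it appears in this patch set: it requests the new MJPEG format together with device-internal buffers, since the diff below rejects MJPEG when combined with USE_CLIENT_PROVIDED_BUFFERS. The enum spellings follow the diff (and assume DeliveryMode is nested in the maker class, as its unqualified use in MakeInstance suggests); the wrapping helper and the frame rate are hypothetical.

// Hypothetical helper, assuming the maker API introduced in this patch set.
#include <memory>
#include "media/capture/video/fake_video_capture_device.h"

namespace media {

std::unique_ptr<VideoCaptureDevice> MakeMjpegFakeDeviceForTest() {
  // MJPEG frames are JPEG-encoded by the device itself, so per MakeInstance
  // below they can only be delivered through the device's own buffers.
  return FakeVideoCaptureDeviceMaker::MakeInstance(
      FakeVideoCaptureDeviceMaker::PixelFormat::MJPEG,
      FakeVideoCaptureDeviceMaker::DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS,
      30.0f /* frame_rate */);
}

}  // namespace media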
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "media/capture/video/fake_video_capture_device.h" 5 #include "media/capture/video/fake_video_capture_device.h"
6 6
7 #include <stddef.h> 7 #include <stddef.h>
8 #include <algorithm> 8 #include <algorithm>
9 #include <utility> 9 #include <utility>
10 10
11 #include "base/bind.h" 11 #include "base/bind.h"
12 #include "base/location.h" 12 #include "base/location.h"
13 #include "base/macros.h" 13 #include "base/macros.h"
14 #include "base/memory/weak_ptr.h" 14 #include "base/memory/weak_ptr.h"
15 #include "base/single_thread_task_runner.h" 15 #include "base/single_thread_task_runner.h"
16 #include "base/strings/stringprintf.h" 16 #include "base/strings/stringprintf.h"
17 #include "base/threading/thread_checker.h" 17 #include "base/threading/thread_checker.h"
18 #include "base/threading/thread_task_runner_handle.h" 18 #include "base/threading/thread_task_runner_handle.h"
19 #include "base/time/time.h" 19 #include "base/time/time.h"
20 #include "media/audio/fake_audio_input_stream.h" 20 #include "media/audio/fake_audio_input_stream.h"
21 #include "media/base/video_frame.h" 21 #include "media/base/video_frame.h"
22 #include "third_party/skia/include/core/SkBitmap.h" 22 #include "third_party/skia/include/core/SkBitmap.h"
23 #include "third_party/skia/include/core/SkCanvas.h" 23 #include "third_party/skia/include/core/SkCanvas.h"
24 #include "third_party/skia/include/core/SkMatrix.h" 24 #include "third_party/skia/include/core/SkMatrix.h"
25 #include "third_party/skia/include/core/SkPaint.h" 25 #include "third_party/skia/include/core/SkPaint.h"
26 #include "ui/gfx/codec/jpeg_codec.h"
26 #include "ui/gfx/codec/png_codec.h" 27 #include "ui/gfx/codec/png_codec.h"
27 28
28 namespace media { 29 namespace media {
29 30
30 namespace { 31 namespace {
31 // Sweep at 600 deg/sec. 32 // Sweep at 600 deg/sec.
32 static const float kPacmanAngularVelocity = 600; 33 static const float kPacmanAngularVelocity = 600;
33 // Beep every 500 ms. 34 // Beep every 500 ms.
34 static const int kBeepInterval = 500; 35 static const int kBeepInterval = 500;
35 // Gradient travels from bottom to top in 5 seconds. 36 // Gradient travels from bottom to top in 5 seconds.
36 static const float kGradientFrequency = 1.f / 5; 37 static const float kGradientFrequency = 1.f / 5;
37 38
38 static const double kMinZoom = 100.0; 39 static const double kMinZoom = 100.0;
39 static const double kMaxZoom = 400.0; 40 static const double kMaxZoom = 400.0;
40 static const double kZoomStep = 1.0; 41 static const double kZoomStep = 1.0;
41 static const double kInitialZoom = 100.0; 42 static const double kInitialZoom = 100.0;
42 43
43 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = { 44 static const gfx::Size kSupportedSizesOrderedByIncreasingWidth[] = {
44 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480), 45 gfx::Size(96, 96), gfx::Size(320, 240), gfx::Size(640, 480),
45 gfx::Size(1280, 720), gfx::Size(1920, 1080)}; 46 gfx::Size(1280, 720), gfx::Size(1920, 1080)};
46 static const int kSupportedSizesCount = 47 static const int kSupportedSizesCount =
47 arraysize(kSupportedSizesOrderedByIncreasingWidth); 48 arraysize(kSupportedSizesOrderedByIncreasingWidth);
48 49
49 static const VideoPixelFormat kSupportedPixelFormats[] = {
50 PIXEL_FORMAT_I420, PIXEL_FORMAT_Y16, PIXEL_FORMAT_ARGB};
51
52 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) { 50 static gfx::Size SnapToSupportedSize(const gfx::Size& requested_size) {
53 for (const gfx::Size& supported_size : 51 for (const gfx::Size& supported_size :
54 kSupportedSizesOrderedByIncreasingWidth) { 52 kSupportedSizesOrderedByIncreasingWidth) {
55 if (requested_size.width() <= supported_size.width()) 53 if (requested_size.width() <= supported_size.width())
56 return supported_size; 54 return supported_size;
57 } 55 }
58 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1]; 56 return kSupportedSizesOrderedByIncreasingWidth[kSupportedSizesCount - 1];
59 } 57 }
60 58
61 // Represents the current state of a FakeVideoCaptureDevice. 59 // Represents the current state of a FakeVideoCaptureDevice.
62 // This is a separate struct because read-access to it is shared with several 60 // This is a separate struct because read-access to it is shared with several
63 // collaborating classes. 61 // collaborating classes.
64 struct FakeDeviceState { 62 struct FakeDeviceState {
65 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format) 63 FakeDeviceState(float zoom, float frame_rate, VideoPixelFormat pixel_format)
66 : zoom(zoom), 64 : zoom(zoom),
67 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {} 65 format(gfx::Size(), frame_rate, pixel_format, PIXEL_STORAGE_CPU) {}
68 66
69 uint32_t zoom; 67 uint32_t zoom;
70 VideoCaptureFormat format; 68 VideoCaptureFormat format;
71 }; 69 };
72 70
73 // Paints a "pacman-like" animated circle including textual information such 71 // Paints a "pacman-like" animated circle including textual information such
74 // as a frame count and timer. 72 // as a frame count and timer.
75 class PacmanFramePainter { 73 class PacmanFramePainter {
76 public: 74 public:
77 // Currently, only the following values are supported for |pixel_format|: 75 enum class Format { I420, RGB, Y16 };
78 // PIXEL_FORMAT_I420 76 PacmanFramePainter(Format pixel_format,
79 // PIXEL_FORMAT_Y16
80 // PIXEL_FORMAT_ARGB
81 PacmanFramePainter(VideoPixelFormat pixel_format,
82 const FakeDeviceState* fake_device_state); 77 const FakeDeviceState* fake_device_state);
83 78
84 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer); 79 void PaintFrame(base::TimeDelta elapsed_time, uint8_t* target_buffer);
85 80
86 private: 81 private:
87 void DrawGradientSquares(base::TimeDelta elapsed_time, 82 void DrawGradientSquares(base::TimeDelta elapsed_time,
88 uint8_t* target_buffer); 83 uint8_t* target_buffer);
89 84
90 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer); 85 void DrawPacman(base::TimeDelta elapsed_time, uint8_t* target_buffer);
91 86
92 const VideoPixelFormat pixel_format_; 87 const Format pixel_format_;
93 const FakeDeviceState* fake_device_state_ = nullptr; 88 const FakeDeviceState* fake_device_state_ = nullptr;
94 }; 89 };
95 90
96 // Paints and delivers frames to a client, which is set via Initialize(). 91 // Paints and delivers frames to a client, which is set via Initialize().
97 class FrameDeliverer { 92 class FrameDeliverer {
98 public: 93 public:
99 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter) 94 FrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter)
100 : frame_painter_(std::move(frame_painter)) {} 95 : frame_painter_(std::move(frame_painter)) {}
101 virtual ~FrameDeliverer() {} 96 virtual ~FrameDeliverer() {}
102 virtual void Initialize(VideoPixelFormat pixel_format, 97 void Initialize(VideoPixelFormat pixel_format,
103 std::unique_ptr<VideoCaptureDevice::Client> client, 98 std::unique_ptr<VideoCaptureDevice::Client> client,
104 const FakeDeviceState* device_state) = 0; 99 const FakeDeviceState* device_state) {
105 virtual void Uninitialize() = 0; 100 client_ = std::move(client);
101 device_state_ = device_state;
102 InitializeDerived(pixel_format);
103 }
104 void Uninitialize() {
105 UninitializeDerived();
106 client_.reset();
107 device_state_ = nullptr;
108 }
106 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0; 109 virtual void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) = 0;
107 110
108 protected: 111 protected:
112 virtual void InitializeDerived(VideoPixelFormat pixel_format) {}
113 virtual void UninitializeDerived() {}
114
109 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) { 115 base::TimeDelta CalculateTimeSinceFirstInvocation(base::TimeTicks now) {
110 if (first_ref_time_.is_null()) 116 if (first_ref_time_.is_null())
111 first_ref_time_ = now; 117 first_ref_time_ = now;
112 return now - first_ref_time_; 118 return now - first_ref_time_;
113 } 119 }
114 120
121 PacmanFramePainter* frame_painter() { return frame_painter_.get(); }
122 const FakeDeviceState* device_state() { return device_state_; }
123 VideoCaptureDevice::Client* client() { return client_.get(); }
124 bool has_client() { return client_ != nullptr; }
mcasas 2017/02/22 20:26:33 bool HasClient() const { return !client_; } but I
chfremer 2017/02/23 01:14:28 Done.
125
126 private:
115 const std::unique_ptr<PacmanFramePainter> frame_painter_; 127 const std::unique_ptr<PacmanFramePainter> frame_painter_;
116 const FakeDeviceState* device_state_ = nullptr; 128 const FakeDeviceState* device_state_ = nullptr;
117 std::unique_ptr<VideoCaptureDevice::Client> client_; 129 std::unique_ptr<VideoCaptureDevice::Client> client_;
118
119 private:
120 base::TimeTicks first_ref_time_; 130 base::TimeTicks first_ref_time_;
121 }; 131 };
122 132
123 // Delivers frames using its own buffers via OnIncomingCapturedData(). 133 // Delivers frames using its own buffers via OnIncomingCapturedData().
124 class OwnBufferFrameDeliverer : public FrameDeliverer { 134 class OwnBufferFrameDeliverer : public FrameDeliverer {
125 public: 135 public:
126 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); 136 OwnBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
127 ~OwnBufferFrameDeliverer() override; 137 ~OwnBufferFrameDeliverer() override;
128 138
129 // Implementation of FrameDeliverer 139 // Implementation of FrameDeliverer
130 void Initialize(VideoPixelFormat pixel_format, 140 void InitializeDerived(VideoPixelFormat pixel_format) override;
131 std::unique_ptr<VideoCaptureDevice::Client> client, 141 void UninitializeDerived() override;
mcasas 2017/02/22 20:26:32 There's no need for these ...Derived() guys, inste
chfremer 2017/02/23 01:14:28 I am intentionally avoiding requiring the call to
132 const FakeDeviceState* device_state) override;
133 void Uninitialize() override;
134 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 142 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
135 143
136 private: 144 private:
137 std::unique_ptr<uint8_t[]> buffer_; 145 std::unique_ptr<uint8_t[]> buffer_;
138 }; 146 };
139 147
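The mcasas/chfremer exchange above about the ...Derived() hooks concerns the non-virtual-interface shape of FrameDeliverer: the public Initialize()/Uninitialize() stay non-virtual and always perform the shared client/state bookkeeping, then forward to a virtual hook with an empty default, so subclasses never have to remember to call the base implementation. A plain sketch of that pattern, with hypothetical names unrelated to this CL and using only standard C++:

#include <cstdint>
#include <vector>

class DelivererBase {
 public:
  virtual ~DelivererBase() = default;

  // Non-virtual entry points: the shared bookkeeping always runs, then the
  // subclass-specific hook is invoked.
  void Initialize(int buffer_size) {
    initialized_ = true;
    InitializeDerived(buffer_size);
  }
  void Uninitialize() {
    UninitializeDerived();
    initialized_ = false;
  }

 protected:
  // Optional hooks with empty defaults; overriders need not call the base.
  virtual void InitializeDerived(int /* buffer_size */) {}
  virtual void UninitializeDerived() {}

 private:
  bool initialized_ = false;
};

class BufferingDeliverer : public DelivererBase {
 protected:
  void InitializeDerived(int buffer_size) override {
    buffer_.resize(buffer_size);  // only the subclass-specific part
  }
  void UninitializeDerived() override { buffer_.clear(); }

 private:
  std::vector<std::uint8_t> buffer_;
};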
140 // Delivers frames using buffers provided by the client via 148 // Delivers frames using buffers provided by the client via
141 // OnIncomingCapturedBuffer(). 149 // OnIncomingCapturedBuffer().
142 class ClientBufferFrameDeliverer : public FrameDeliverer { 150 class ClientBufferFrameDeliverer : public FrameDeliverer {
143 public: 151 public:
144 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter); 152 ClientBufferFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
145 ~ClientBufferFrameDeliverer() override; 153 ~ClientBufferFrameDeliverer() override;
146 154
147 // Implementation of FrameDeliverer 155 // Implementation of FrameDeliverer
148 void Initialize(VideoPixelFormat pixel_format,
149 std::unique_ptr<VideoCaptureDevice::Client> client,
150 const FakeDeviceState* device_state) override;
151 void Uninitialize() override;
152 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override; 156 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
153 }; 157 };
154 158
159 class JpegEncodingFrameDeliverer : public FrameDeliverer {
160 public:
161 JpegEncodingFrameDeliverer(std::unique_ptr<PacmanFramePainter> frame_painter);
162 ~JpegEncodingFrameDeliverer() override;
163
164 // Implementation of FrameDeliverer
165 void UninitializeDerived() override;
166 void PaintAndDeliverNextFrame(base::TimeDelta timestamp_to_paint) override;
167
168 private:
169 std::vector<uint8_t> rgb_buffer_;
170 std::vector<unsigned char> jpeg_buffer_;
171 };
172
155 // Implements the photo functionality of a VideoCaptureDevice 173 // Implements the photo functionality of a VideoCaptureDevice
156 class FakePhotoDevice { 174 class FakePhotoDevice {
157 public: 175 public:
158 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> argb_painter, 176 FakePhotoDevice(std::unique_ptr<PacmanFramePainter> rgb_painter,
159 const FakeDeviceState* fake_device_state); 177 const FakeDeviceState* fake_device_state);
160 ~FakePhotoDevice(); 178 ~FakePhotoDevice();
161 179
162 void GetPhotoCapabilities( 180 void GetPhotoCapabilities(
163 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback); 181 VideoCaptureDevice::GetPhotoCapabilitiesCallback callback);
164 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 182 void TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
165 base::TimeDelta elapsed_time); 183 base::TimeDelta elapsed_time);
166 184
167 private: 185 private:
168 const std::unique_ptr<PacmanFramePainter> argb_painter_; 186 const std::unique_ptr<PacmanFramePainter> rgb_painter_;
169 const FakeDeviceState* const fake_device_state_; 187 const FakeDeviceState* const fake_device_state_;
170 }; 188 };
171 189
172 // Implementation of VideoCaptureDevice that generates test frames. This is 190 // Implementation of VideoCaptureDevice that generates test frames. This is
173 // useful for testing the video capture components without having to use real 191 // useful for testing the video capture components without having to use real
174 // devices. The implementation schedules delayed tasks to itself to generate and 192 // devices. The implementation schedules delayed tasks to itself to generate and
175 // deliver frames at the requested rate. 193 // deliver frames at the requested rate.
176 class FakeVideoCaptureDevice : public VideoCaptureDevice { 194 class FakeVideoCaptureDevice : public VideoCaptureDevice {
177 public: 195 public:
178 FakeVideoCaptureDevice( 196 FakeVideoCaptureDevice(
(...skipping 37 matching lines...)
216 } // anonymous namespace 234 } // anonymous namespace
217 235
218 // static 236 // static
219 void FakeVideoCaptureDeviceMaker::GetSupportedSizes( 237 void FakeVideoCaptureDeviceMaker::GetSupportedSizes(
220 std::vector<gfx::Size>* supported_sizes) { 238 std::vector<gfx::Size>* supported_sizes) {
221 for (int i = 0; i < kSupportedSizesCount; i++) 239 for (int i = 0; i < kSupportedSizesCount; i++)
222 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]); 240 supported_sizes->push_back(kSupportedSizesOrderedByIncreasingWidth[i]);
223 } 241 }
224 242
225 // static 243 // static
244 media::VideoPixelFormat
245 FakeVideoCaptureDeviceMaker::TranslateToMediaVideoPixelFormat(
246 FakeVideoCaptureDeviceMaker::PixelFormat format) {
247 switch (format) {
248 case FakeVideoCaptureDeviceMaker::PixelFormat::I420:
249 return media::PIXEL_FORMAT_I420;
250 case FakeVideoCaptureDeviceMaker::PixelFormat::Y16:
251 return media::PIXEL_FORMAT_Y16;
252 case FakeVideoCaptureDeviceMaker::PixelFormat::MJPEG:
253 return media::PIXEL_FORMAT_MJPEG;
254 }
255 // This is just to satisfy compilers that otherwise complain about control
256 // reaching end of non-void method.
mcasas 2017/02/22 20:26:33 This is a widespread pattern in Chromium, no need
chfremer 2017/02/23 01:14:28 Done.
257 NOTREACHED();
258 return media::PIXEL_FORMAT_UNKNOWN;
259 }
260
261 // static
226 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance( 262 std::unique_ptr<VideoCaptureDevice> FakeVideoCaptureDeviceMaker::MakeInstance(
227 VideoPixelFormat pixel_format, 263 PixelFormat pixel_format,
228 DeliveryMode delivery_mode, 264 DeliveryMode delivery_mode,
229 float frame_rate) { 265 float frame_rate) {
230 bool pixel_format_supported = false; 266 auto device_state = base::MakeUnique<FakeDeviceState>(
231 for (const auto& supported_pixel_format : kSupportedPixelFormats) { 267 kInitialZoom, frame_rate, TranslateToMediaVideoPixelFormat(pixel_format));
232 if (pixel_format == supported_pixel_format) { 268 PacmanFramePainter::Format painter_format;
233 pixel_format_supported = true; 269 switch (pixel_format) {
270 case PixelFormat::I420:
271 painter_format = PacmanFramePainter::Format::I420;
234 break; 272 break;
235 } 273 case PixelFormat::Y16:
274 painter_format = PacmanFramePainter::Format::Y16;
275 break;
276 case PixelFormat::MJPEG:
277 painter_format = PacmanFramePainter::Format::RGB;
278 break;
236 } 279 }
237 if (!pixel_format_supported) { 280 auto video_frame_painter =
238 DLOG(ERROR) << "Requested an unsupported pixel format " 281 base::MakeUnique<PacmanFramePainter>(painter_format, device_state.get());
239 << VideoPixelFormatToString(pixel_format);
240 return nullptr;
241 }
242 282
243 auto device_state =
244 base::MakeUnique<FakeDeviceState>(kInitialZoom, frame_rate, pixel_format);
245 auto video_frame_painter =
246 base::MakeUnique<PacmanFramePainter>(pixel_format, device_state.get());
247 std::unique_ptr<FrameDeliverer> frame_delivery_strategy; 283 std::unique_ptr<FrameDeliverer> frame_delivery_strategy;
248 switch (delivery_mode) { 284 switch (delivery_mode) {
249 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS: 285 case DeliveryMode::USE_DEVICE_INTERNAL_BUFFERS:
250 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>( 286 if (pixel_format == PixelFormat::MJPEG) {
251 std::move(video_frame_painter)); 287 frame_delivery_strategy = base::MakeUnique<JpegEncodingFrameDeliverer>(
288 std::move(video_frame_painter));
289 } else {
290 frame_delivery_strategy = base::MakeUnique<OwnBufferFrameDeliverer>(
291 std::move(video_frame_painter));
292 }
252 break; 293 break;
253 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS: 294 case DeliveryMode::USE_CLIENT_PROVIDED_BUFFERS:
295 if (pixel_format == PixelFormat::MJPEG) {
296 DLOG(ERROR) << "PixelFormat::MJPEG cannot be used in combination with "
297 << "USE_CLIENT_PROVIDED_BUFFERS.";
298 return nullptr;
299 }
254 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>( 300 frame_delivery_strategy = base::MakeUnique<ClientBufferFrameDeliverer>(
255 std::move(video_frame_painter)); 301 std::move(video_frame_painter));
256 break; 302 break;
257 } 303 }
258 304
259 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>( 305 auto photo_frame_painter = base::MakeUnique<PacmanFramePainter>(
260 PIXEL_FORMAT_ARGB, device_state.get()); 306 PacmanFramePainter::Format::RGB, device_state.get());
261 auto photo_device = base::MakeUnique<FakePhotoDevice>( 307 auto photo_device = base::MakeUnique<FakePhotoDevice>(
262 std::move(photo_frame_painter), device_state.get()); 308 std::move(photo_frame_painter), device_state.get());
263 309
264 return base::MakeUnique<FakeVideoCaptureDevice>( 310 return base::MakeUnique<FakeVideoCaptureDevice>(
265 std::move(frame_delivery_strategy), std::move(photo_device), 311 std::move(frame_delivery_strategy), std::move(photo_device),
266 std::move(device_state)); 312 std::move(device_state));
267 } 313 }
268 314
269 PacmanFramePainter::PacmanFramePainter(VideoPixelFormat pixel_format, 315 PacmanFramePainter::PacmanFramePainter(Format pixel_format,
270 const FakeDeviceState* fake_device_state) 316 const FakeDeviceState* fake_device_state)
271 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) { 317 : pixel_format_(pixel_format), fake_device_state_(fake_device_state) {}
272 DCHECK(pixel_format == PIXEL_FORMAT_I420 ||
273 pixel_format == PIXEL_FORMAT_Y16 || pixel_format == PIXEL_FORMAT_ARGB);
274 }
275 318
276 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time, 319 void PacmanFramePainter::PaintFrame(base::TimeDelta elapsed_time,
277 uint8_t* target_buffer) { 320 uint8_t* target_buffer) {
278 DrawPacman(elapsed_time, target_buffer); 321 DrawPacman(elapsed_time, target_buffer);
279 DrawGradientSquares(elapsed_time, target_buffer); 322 DrawGradientSquares(elapsed_time, target_buffer);
280 } 323 }
281 324
282 // Starting from top left, -45 deg gradient. Value at point (row, column) is 325 // Starting from top left, -45 deg gradient. Value at point (row, column) is
283 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where 326 // calculated as (top_left_value + (row + column) * step) % MAX_VALUE, where
284 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per 327 // step is MAX_VALUE / (width + height). MAX_VALUE is 255 (for 8 bit per
(...skipping 13 matching lines...)
298 const float start = 341 const float start =
299 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536); 342 fmod(65536 * elapsed_time.InSecondsF() * kGradientFrequency, 65536);
300 const float color_step = 65535 / static_cast<float>(width + height); 343 const float color_step = 65535 / static_cast<float>(width + height);
301 for (const auto& corner : squares) { 344 for (const auto& corner : squares) {
302 for (int y = corner.y(); y < corner.y() + side; ++y) { 345 for (int y = corner.y(); y < corner.y() + side; ++y) {
303 for (int x = corner.x(); x < corner.x() + side; ++x) { 346 for (int x = corner.x(); x < corner.x() + side; ++x) {
304 const unsigned int value = 347 const unsigned int value =
305 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF; 348 static_cast<unsigned int>(start + (x + y) * color_step) & 0xFFFF;
306 size_t offset = (y * width) + x; 349 size_t offset = (y * width) + x;
307 switch (pixel_format_) { 350 switch (pixel_format_) {
308 case PIXEL_FORMAT_Y16: 351 case Format::Y16:
309 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF; 352 target_buffer[offset * sizeof(uint16_t)] = value & 0xFF;
310 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8; 353 target_buffer[offset * sizeof(uint16_t) + 1] = value >> 8;
311 break; 354 break;
312 case PIXEL_FORMAT_ARGB: 355 case Format::RGB:
mcasas 2017/02/22 20:26:33 I'd advise against touching code in CLs where you
chfremer 2017/02/23 01:14:28 Agreed that mixing new stuff with changing old stu
313 target_buffer[offset * sizeof(uint32_t) + 1] = value >> 8; 356 target_buffer[offset * 3 + 0] = value >> 8;
314 target_buffer[offset * sizeof(uint32_t) + 2] = value >> 8; 357 target_buffer[offset * 3 + 1] = value >> 8;
315 target_buffer[offset * sizeof(uint32_t) + 3] = value >> 8; 358 target_buffer[offset * 3 + 2] = value >> 8;
316 break; 359 break;
317 default: 360 case Format::I420:
318 target_buffer[offset] = value >> 8; 361 target_buffer[offset] = value >> 8;
319 break; 362 break;
320 } 363 }
321 } 364 }
322 } 365 }
323 } 366 }
324 } 367 }
325 368
326 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time, 369 void PacmanFramePainter::DrawPacman(base::TimeDelta elapsed_time,
327 uint8_t* target_buffer) { 370 uint8_t* target_buffer) {
328 const int width = fake_device_state_->format.frame_size.width(); 371 const int width = fake_device_state_->format.frame_size.width();
329 const int height = fake_device_state_->format.frame_size.height(); 372 const int height = fake_device_state_->format.frame_size.height();
330 373
331 // |kN32_SkColorType| stands for the appropriate RGBA/BGRA format. 374 // Skia doesn't support painting in I420, RGB, or Y16.
332 const SkColorType colorspace = (pixel_format_ == PIXEL_FORMAT_ARGB) 375 // We paint an 8bpp monochrome image to the beginning of |target_buffer|.
333 ? kN32_SkColorType 376 // For I420, this section of |target_buffer| corresponds to the Y-plane of the
334 : kAlpha_8_SkColorType; 377 // YUV image. We do not touch the U or V planes of |target_buffer|.
335 // Skia doesn't support 16 bit alpha rendering, so we 8 bit alpha and then use 378 // Assuming they have been initialized to 0, which corresponds to a green
336 this as high byte values in 16 bit pixels. 379 // color tone, the result will be a green-ish monochrome frame.
380 // For RGB, we later move the bytes to fill the green channel of the RGB
381 // pixels. For Y16, we later move the 8bit pixel values to a position
382 // corresponding to the high byte values of 16bit pixel values (assuming the
383 // order is little-endian).
384 const SkColorType colorspace = kAlpha_8_SkColorType;
337 const SkImageInfo info = 385 const SkImageInfo info =
338 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType); 386 SkImageInfo::Make(width, height, colorspace, kOpaque_SkAlphaType);
339 SkBitmap bitmap; 387 SkBitmap bitmap;
340 bitmap.setInfo(info); 388 bitmap.setInfo(info);
341 bitmap.setPixels(target_buffer); 389 bitmap.setPixels(target_buffer);
342 SkPaint paint; 390 SkPaint paint;
343 paint.setStyle(SkPaint::kFill_Style); 391 paint.setStyle(SkPaint::kFill_Style);
344 SkCanvas canvas(bitmap); 392 SkCanvas canvas(bitmap);
345 393
346 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f; 394 const SkScalar unscaled_zoom = fake_device_state_->zoom / 100.f;
347 SkMatrix matrix; 395 SkMatrix matrix;
348 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2); 396 matrix.setScale(unscaled_zoom, unscaled_zoom, width / 2, height / 2);
349 canvas.setMatrix(matrix); 397 canvas.setMatrix(matrix);
350 398
351 // Equalize Alpha_8 that has light green background while RGBA has white.
352 if (pixel_format_ == PIXEL_FORMAT_ARGB) {
353 const SkRect full_frame = SkRect::MakeWH(width, height);
354 paint.setARGB(255, 0, 127, 0);
355 canvas.drawRect(full_frame, paint);
356 }
357 paint.setColor(SK_ColorGREEN);
358
359 // Draw a sweeping circle to show an animation. 399 // Draw a sweeping circle to show an animation.
360 const float end_angle = 400 const float end_angle =
361 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361); 401 fmod(kPacmanAngularVelocity * elapsed_time.InSecondsF(), 361);
362 const int radius = std::min(width, height) / 4; 402 const int radius = std::min(width, height) / 4;
363 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius, 403 const SkRect rect = SkRect::MakeXYWH(width / 2 - radius, height / 2 - radius,
364 2 * radius, 2 * radius); 404 2 * radius, 2 * radius);
365 canvas.drawArc(rect, 0, end_angle, true, paint); 405 canvas.drawArc(rect, 0, end_angle, true, paint);
366 406
367 // Draw current time. 407 // Draw current time.
368 const int milliseconds = elapsed_time.InMilliseconds() % 1000; 408 const int milliseconds = elapsed_time.InMilliseconds() % 1000;
369 const int seconds = elapsed_time.InSeconds() % 60; 409 const int seconds = elapsed_time.InSeconds() % 60;
370 const int minutes = elapsed_time.InMinutes() % 60; 410 const int minutes = elapsed_time.InMinutes() % 60;
371 const int hours = elapsed_time.InHours(); 411 const int hours = elapsed_time.InHours();
372 const int frame_count = elapsed_time.InMilliseconds() * 412 const int frame_count = elapsed_time.InMilliseconds() *
373 fake_device_state_->format.frame_rate / 1000; 413 fake_device_state_->format.frame_rate / 1000;
374 414
375 const std::string time_string = 415 const std::string time_string =
376 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds, 416 base::StringPrintf("%d:%02d:%02d:%03d %d", hours, minutes, seconds,
377 milliseconds, frame_count); 417 milliseconds, frame_count);
378 canvas.scale(3, 3); 418 canvas.scale(3, 3);
379 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint); 419 canvas.drawText(time_string.data(), time_string.length(), 30, 20, paint);
380 420
381 if (pixel_format_ == PIXEL_FORMAT_Y16) { 421 if (pixel_format_ == Format::Y16) {
382 // Use 8 bit bitmap rendered to first half of the buffer as high byte values 422 // Use 8 bit bitmap rendered to first half of the buffer as high byte values
383 // for the whole buffer. Low byte values are not important. 423 // for the whole buffer. Low byte values are not important.
384 for (int i = (width * height) - 1; i >= 0; --i) 424 for (int i = (width * height) - 1; i >= 0; --i)
385 target_buffer[i * 2 + 1] = target_buffer[i]; 425 target_buffer[i * 2 + 1] = target_buffer[i];
426 } else if (pixel_format_ == Format::RGB) {
427 // Move 8 bit pixel values to the green channel of 24bit RGB pixel values.
428 for (int i = (width * height) - 1; i >= 0; --i) {
429 target_buffer[i * 3 + 1] = target_buffer[i];
430 target_buffer[i] = 0u;
431 }
mcasas 2017/02/22 20:26:33 I'm not sure this pixel swizzling is clearer than
chfremer 2017/02/23 01:14:28 Definitely not clearer. Removed.
386 } 432 }
387 } 433 }
388 434
389 FakePhotoDevice::FakePhotoDevice( 435 FakePhotoDevice::FakePhotoDevice(
390 std::unique_ptr<PacmanFramePainter> argb_painter, 436 std::unique_ptr<PacmanFramePainter> rgb_painter,
391 const FakeDeviceState* fake_device_state) 437 const FakeDeviceState* fake_device_state)
392 : argb_painter_(std::move(argb_painter)), 438 : rgb_painter_(std::move(rgb_painter)),
393 fake_device_state_(fake_device_state) {} 439 fake_device_state_(fake_device_state) {}
394 440
395 FakePhotoDevice::~FakePhotoDevice() = default; 441 FakePhotoDevice::~FakePhotoDevice() = default;
396 442
397 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback, 443 void FakePhotoDevice::TakePhoto(VideoCaptureDevice::TakePhotoCallback callback,
398 base::TimeDelta elapsed_time) { 444 base::TimeDelta elapsed_time) {
399 // Create a PNG-encoded frame and send it back to |callback|. 445 // Create a PNG-encoded frame and send it back to |callback|.
400 std::unique_ptr<uint8_t[]> buffer(new uint8_t[VideoFrame::AllocationSize( 446 auto required_rgb_buffer_size = VideoFrame::AllocationSize(
401 PIXEL_FORMAT_ARGB, fake_device_state_->format.frame_size)]); 447 PIXEL_FORMAT_RGB24, fake_device_state_->format.frame_size);
402 argb_painter_->PaintFrame(elapsed_time, buffer.get()); 448 std::unique_ptr<uint8_t[]> buffer(new uint8_t[required_rgb_buffer_size]);
449 memset(buffer.get(), 0, required_rgb_buffer_size);
450 rgb_painter_->PaintFrame(elapsed_time, buffer.get());
403 mojom::BlobPtr blob = mojom::Blob::New(); 451 mojom::BlobPtr blob = mojom::Blob::New();
404 const bool result = 452 const bool result = gfx::PNGCodec::Encode(
405 gfx::PNGCodec::Encode(buffer.get(), gfx::PNGCodec::FORMAT_RGBA, 453 buffer.get(), gfx::PNGCodec::FORMAT_RGB,
406 fake_device_state_->format.frame_size, 454 fake_device_state_->format.frame_size,
407 fake_device_state_->format.frame_size.width() * 4, 455 VideoFrame::RowBytes(0 /* plane */, PIXEL_FORMAT_RGB24,
408 true /* discard_transparency */, 456 fake_device_state_->format.frame_size.width()),
409 std::vector<gfx::PNGCodec::Comment>(), &blob->data); 457 true /* discard_transparency */, std::vector<gfx::PNGCodec::Comment>(),
458 &blob->data);
410 DCHECK(result); 459 DCHECK(result);
411 460
412 blob->mime_type = "image/png"; 461 blob->mime_type = "image/png";
413 callback.Run(std::move(blob)); 462 callback.Run(std::move(blob));
414 } 463 }
415 464
416 FakeVideoCaptureDevice::FakeVideoCaptureDevice( 465 FakeVideoCaptureDevice::FakeVideoCaptureDevice(
417 std::unique_ptr<FrameDeliverer> frame_delivery_strategy, 466 std::unique_ptr<FrameDeliverer> frame_delivery_strategy,
418 std::unique_ptr<FakePhotoDevice> photo_device, 467 std::unique_ptr<FakePhotoDevice> photo_device,
419 std::unique_ptr<FakeDeviceState> device_state) 468 std::unique_ptr<FakeDeviceState> device_state)
(...skipping 87 matching lines...)
507 } 556 }
508 557
509 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) { 558 void FakeVideoCaptureDevice::TakePhoto(TakePhotoCallback callback) {
510 DCHECK(thread_checker_.CalledOnValidThread()); 559 DCHECK(thread_checker_.CalledOnValidThread());
511 base::ThreadTaskRunnerHandle::Get()->PostTask( 560 base::ThreadTaskRunnerHandle::Get()->PostTask(
512 FROM_HERE, base::Bind(&FakePhotoDevice::TakePhoto, 561 FROM_HERE, base::Bind(&FakePhotoDevice::TakePhoto,
513 base::Unretained(photo_device_.get()), 562 base::Unretained(photo_device_.get()),
514 base::Passed(&callback), elapsed_time_)); 563 base::Passed(&callback), elapsed_time_));
515 } 564 }
516 565
566 JpegEncodingFrameDeliverer::JpegEncodingFrameDeliverer(
mcasas 2017/02/22 20:26:33 JpegEncodingFrameDeliverer is declared after OwnB
chfremer 2017/02/23 01:14:28 Done.
567 std::unique_ptr<PacmanFramePainter> frame_painter)
568 : FrameDeliverer(std::move(frame_painter)) {}
569
570 JpegEncodingFrameDeliverer::~JpegEncodingFrameDeliverer() = default;
571
572 void JpegEncodingFrameDeliverer::UninitializeDerived() {
573 rgb_buffer_.clear();
574 jpeg_buffer_.clear();
575 }
576
577 void JpegEncodingFrameDeliverer::PaintAndDeliverNextFrame(
578 base::TimeDelta timestamp_to_paint) {
579 if (!has_client())
580 return;
581
582 auto required_rgb_buffer_size = VideoFrame::AllocationSize(
583 PIXEL_FORMAT_RGB24, device_state()->format.frame_size);
584 rgb_buffer_.resize(required_rgb_buffer_size);
585 memset(&rgb_buffer_[0], 0, required_rgb_buffer_size);
586
587 frame_painter()->PaintFrame(timestamp_to_paint, &rgb_buffer_[0]);
588
589 static const int kQuality = 75;
590 bool success = gfx::JPEGCodec::Encode(
591 &rgb_buffer_[0], gfx::JPEGCodec::FORMAT_RGB,
592 device_state()->format.frame_size.width(),
593 device_state()->format.frame_size.height(),
594 VideoFrame::RowBytes(0 /* plane */, PIXEL_FORMAT_RGB24,
595 device_state()->format.frame_size.width()),
596 kQuality, &jpeg_buffer_);
597 if (!success) {
598 DLOG(ERROR) << "Jpeg encoding failed";
599 return;
600 }
601
602 const size_t frame_size = jpeg_buffer_.size();
603 base::TimeTicks now = base::TimeTicks::Now();
604 client()->OnIncomingCapturedData(&jpeg_buffer_[0], frame_size,
605 device_state()->format, 0 /* rotation */,
606 now, CalculateTimeSinceFirstInvocation(now));
607 }
608
517 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer( 609 OwnBufferFrameDeliverer::OwnBufferFrameDeliverer(
518 std::unique_ptr<PacmanFramePainter> frame_painter) 610 std::unique_ptr<PacmanFramePainter> frame_painter)
519 : FrameDeliverer(std::move(frame_painter)) {} 611 : FrameDeliverer(std::move(frame_painter)) {}
520 612
521 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default; 613 OwnBufferFrameDeliverer::~OwnBufferFrameDeliverer() = default;
522 614
523 void OwnBufferFrameDeliverer::Initialize( 615 void OwnBufferFrameDeliverer::InitializeDerived(VideoPixelFormat pixel_format) {
524 VideoPixelFormat pixel_format,
525 std::unique_ptr<VideoCaptureDevice::Client> client,
526 const FakeDeviceState* device_state) {
527 client_ = std::move(client);
528 device_state_ = device_state;
529 buffer_.reset(new uint8_t[VideoFrame::AllocationSize( 616 buffer_.reset(new uint8_t[VideoFrame::AllocationSize(
530 pixel_format, device_state_->format.frame_size)]); 617 pixel_format, device_state()->format.frame_size)]);
531 } 618 }
532 619
533 void OwnBufferFrameDeliverer::Uninitialize() { 620 void OwnBufferFrameDeliverer::UninitializeDerived() {
534 client_.reset();
535 device_state_ = nullptr;
536 buffer_.reset(); 621 buffer_.reset();
537 } 622 }
538 623
539 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame( 624 void OwnBufferFrameDeliverer::PaintAndDeliverNextFrame(
540 base::TimeDelta timestamp_to_paint) { 625 base::TimeDelta timestamp_to_paint) {
541 if (!client_) 626 if (!has_client())
542 return; 627 return;
543 const size_t frame_size = device_state_->format.ImageAllocationSize(); 628 const size_t frame_size = device_state()->format.ImageAllocationSize();
544 memset(buffer_.get(), 0, frame_size); 629 memset(buffer_.get(), 0, frame_size);
545 frame_painter_->PaintFrame(timestamp_to_paint, buffer_.get()); 630 frame_painter()->PaintFrame(timestamp_to_paint, buffer_.get());
546 base::TimeTicks now = base::TimeTicks::Now(); 631 base::TimeTicks now = base::TimeTicks::Now();
547 client_->OnIncomingCapturedData(buffer_.get(), frame_size, 632 client()->OnIncomingCapturedData(buffer_.get(), frame_size,
548 device_state_->format, 0 /* rotation */, now, 633 device_state()->format, 0 /* rotation */,
549 CalculateTimeSinceFirstInvocation(now)); 634 now, CalculateTimeSinceFirstInvocation(now));
550 } 635 }
551 636
552 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer( 637 ClientBufferFrameDeliverer::ClientBufferFrameDeliverer(
553 std::unique_ptr<PacmanFramePainter> frame_painter) 638 std::unique_ptr<PacmanFramePainter> frame_painter)
554 : FrameDeliverer(std::move(frame_painter)) {} 639 : FrameDeliverer(std::move(frame_painter)) {}
555 640
556 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default; 641 ClientBufferFrameDeliverer::~ClientBufferFrameDeliverer() = default;
557 642
558 void ClientBufferFrameDeliverer::Initialize(
559 VideoPixelFormat,
560 std::unique_ptr<VideoCaptureDevice::Client> client,
561 const FakeDeviceState* device_state) {
562 client_ = std::move(client);
563 device_state_ = device_state;
564 }
565
566 void ClientBufferFrameDeliverer::Uninitialize() {
567 client_.reset();
568 device_state_ = nullptr;
569 }
570
571 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame( 643 void ClientBufferFrameDeliverer::PaintAndDeliverNextFrame(
572 base::TimeDelta timestamp_to_paint) { 644 base::TimeDelta timestamp_to_paint) {
573 if (client_ == nullptr) 645 if (!has_client())
574 return; 646 return;
575 647
576 const int arbitrary_frame_feedback_id = 0; 648 const int arbitrary_frame_feedback_id = 0;
577 auto capture_buffer = client_->ReserveOutputBuffer( 649 auto capture_buffer = client()->ReserveOutputBuffer(
578 device_state_->format.frame_size, device_state_->format.pixel_format, 650 device_state()->format.frame_size, device_state()->format.pixel_format,
579 device_state_->format.pixel_storage, arbitrary_frame_feedback_id); 651 device_state()->format.pixel_storage, arbitrary_frame_feedback_id);
580 DLOG_IF(ERROR, !capture_buffer.is_valid()) 652 DLOG_IF(ERROR, !capture_buffer.is_valid())
581 << "Couldn't allocate Capture Buffer"; 653 << "Couldn't allocate Capture Buffer";
582 auto buffer_access = 654 auto buffer_access =
583 capture_buffer.handle_provider->GetHandleForInProcessAccess(); 655 capture_buffer.handle_provider->GetHandleForInProcessAccess();
584 DCHECK(buffer_access->data()) << "Buffer has NO backing memory"; 656 DCHECK(buffer_access->data()) << "Buffer has NO backing memory";
585 657
586 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state_->format.pixel_storage); 658 DCHECK_EQ(PIXEL_STORAGE_CPU, device_state()->format.pixel_storage);
587 659
588 uint8_t* data_ptr = buffer_access->data(); 660 uint8_t* data_ptr = buffer_access->data();
589 memset(data_ptr, 0, buffer_access->mapped_size()); 661 memset(data_ptr, 0, buffer_access->mapped_size());
590 frame_painter_->PaintFrame(timestamp_to_paint, data_ptr); 662 frame_painter()->PaintFrame(timestamp_to_paint, data_ptr);
591 663
592 base::TimeTicks now = base::TimeTicks::Now(); 664 base::TimeTicks now = base::TimeTicks::Now();
593 client_->OnIncomingCapturedBuffer(std::move(capture_buffer), 665 client()->OnIncomingCapturedBuffer(std::move(capture_buffer),
594 device_state_->format, now, 666 device_state()->format, now,
595 CalculateTimeSinceFirstInvocation(now)); 667 CalculateTimeSinceFirstInvocation(now));
596 } 668 }
597 669
598 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture( 670 void FakeVideoCaptureDevice::BeepAndScheduleNextCapture(
599 base::TimeTicks expected_execution_time) { 671 base::TimeTicks expected_execution_time) {
600 DCHECK(thread_checker_.CalledOnValidThread()); 672 DCHECK(thread_checker_.CalledOnValidThread());
601 const base::TimeDelta beep_interval = 673 const base::TimeDelta beep_interval =
602 base::TimeDelta::FromMilliseconds(kBeepInterval); 674 base::TimeDelta::FromMilliseconds(kBeepInterval);
603 const base::TimeDelta frame_interval = 675 const base::TimeDelta frame_interval =
604 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate); 676 base::TimeDelta::FromMicroseconds(1e6 / device_state_->format.frame_rate);
605 beep_time_ += frame_interval; 677 beep_time_ += frame_interval;
606 elapsed_time_ += frame_interval; 678 elapsed_time_ += frame_interval;
607 679
608 // Generate a synchronized beep twice per second. 680 // Generate a synchronized beep twice per second.
609 if (beep_time_ >= beep_interval) { 681 if (beep_time_ >= beep_interval) {
610 FakeAudioInputStream::BeepOnce(); 682 FakeAudioInputStream::BeepOnce();
611 beep_time_ -= beep_interval; 683 beep_time_ -= beep_interval;
612 } 684 }
613 685
614 // Reschedule next CaptureTask. 686 // Reschedule next CaptureTask.
615 const base::TimeTicks current_time = base::TimeTicks::Now(); 687 const base::TimeTicks current_time = base::TimeTicks::Now();
616 // Don't accumulate any debt if we are lagging behind - just post the next 688 // Don't accumulate any debt if we are lagging behind - just post the next
617 // frame immediately and continue as normal. 689 // frame immediately and continue as normal.
618 const base::TimeTicks next_execution_time = 690 const base::TimeTicks next_execution_time =
619 std::max(current_time, expected_execution_time + frame_interval); 691 std::max(current_time, expected_execution_time + frame_interval);
620 const base::TimeDelta delay = next_execution_time - current_time; 692 const base::TimeDelta delay = next_execution_time - current_time;
621 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask( 693 base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
622 FROM_HERE, base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue, 694 FROM_HERE,
623 weak_factory_.GetWeakPtr(), next_execution_time, 695 base::Bind(&FakeVideoCaptureDevice::OnNextFrameDue,
624 current_session_id_), 696 weak_factory_.GetWeakPtr(), next_execution_time,
697 current_session_id_),
625 delay); 698 delay);
626 } 699 }
627 700
628 void FakeVideoCaptureDevice::OnNextFrameDue( 701 void FakeVideoCaptureDevice::OnNextFrameDue(
629 base::TimeTicks expected_execution_time, 702 base::TimeTicks expected_execution_time,
630 int session_id) { 703 int session_id) {
631 DCHECK(thread_checker_.CalledOnValidThread()); 704 DCHECK(thread_checker_.CalledOnValidThread());
632 if (session_id != current_session_id_) 705 if (session_id != current_session_id_)
633 return; 706 return;
634 707
635 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_); 708 frame_deliverer_->PaintAndDeliverNextFrame(elapsed_time_);
636 BeepAndScheduleNextCapture(expected_execution_time); 709 BeepAndScheduleNextCapture(expected_execution_time);
637 } 710 }
638 711
639 } // namespace media 712 } // namespace media