OLD | NEW |
1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "cc/resources/video_resource_updater.h" | 5 #include "cc/resources/video_resource_updater.h" |
6 | 6 |
7 #include <stddef.h> | 7 #include <stddef.h> |
8 #include <stdint.h> | 8 #include <stdint.h> |
9 | 9 |
10 #include <algorithm> | 10 #include <algorithm> |
(...skipping 49 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
60 switch (video_frame->mailbox_holder(0).texture_target) { | 60 switch (video_frame->mailbox_holder(0).texture_target) { |
61 case GL_TEXTURE_EXTERNAL_OES: | 61 case GL_TEXTURE_EXTERNAL_OES: |
62 return VideoFrameExternalResources::YUV_RESOURCE; | 62 return VideoFrameExternalResources::YUV_RESOURCE; |
63 case GL_TEXTURE_RECTANGLE_ARB: | 63 case GL_TEXTURE_RECTANGLE_ARB: |
64 return VideoFrameExternalResources::RGB_RESOURCE; | 64 return VideoFrameExternalResources::RGB_RESOURCE; |
65 default: | 65 default: |
66 NOTREACHED(); | 66 NOTREACHED(); |
67 break; | 67 break; |
68 } | 68 } |
69 break; | 69 break; |
| 70 case media::PIXEL_FORMAT_Y8: |
| 71 case media::PIXEL_FORMAT_Y16: |
| 72 return VideoFrameExternalResources::Y_RESOURCE; |
| 73 break; |
70 case media::PIXEL_FORMAT_YV12: | 74 case media::PIXEL_FORMAT_YV12: |
71 case media::PIXEL_FORMAT_YV16: | 75 case media::PIXEL_FORMAT_YV16: |
72 case media::PIXEL_FORMAT_YV24: | 76 case media::PIXEL_FORMAT_YV24: |
73 case media::PIXEL_FORMAT_YV12A: | 77 case media::PIXEL_FORMAT_YV12A: |
74 case media::PIXEL_FORMAT_NV21: | 78 case media::PIXEL_FORMAT_NV21: |
75 case media::PIXEL_FORMAT_YUY2: | 79 case media::PIXEL_FORMAT_YUY2: |
76 case media::PIXEL_FORMAT_RGB24: | 80 case media::PIXEL_FORMAT_RGB24: |
77 case media::PIXEL_FORMAT_RGB32: | 81 case media::PIXEL_FORMAT_RGB32: |
78 case media::PIXEL_FORMAT_MJPEG: | 82 case media::PIXEL_FORMAT_MJPEG: |
79 case media::PIXEL_FORMAT_MT21: | 83 case media::PIXEL_FORMAT_MT21: |
80 case media::PIXEL_FORMAT_YUV420P9: | 84 case media::PIXEL_FORMAT_YUV420P9: |
81 case media::PIXEL_FORMAT_YUV422P9: | 85 case media::PIXEL_FORMAT_YUV422P9: |
82 case media::PIXEL_FORMAT_YUV444P9: | 86 case media::PIXEL_FORMAT_YUV444P9: |
83 case media::PIXEL_FORMAT_YUV420P10: | 87 case media::PIXEL_FORMAT_YUV420P10: |
84 case media::PIXEL_FORMAT_YUV422P10: | 88 case media::PIXEL_FORMAT_YUV422P10: |
85 case media::PIXEL_FORMAT_YUV444P10: | 89 case media::PIXEL_FORMAT_YUV444P10: |
86 case media::PIXEL_FORMAT_UNKNOWN: | 90 case media::PIXEL_FORMAT_UNKNOWN: |
87 break; | 91 break; |
88 } | 92 } |
89 return VideoFrameExternalResources::NONE; | 93 return VideoFrameExternalResources::NONE; |
90 } | 94 } |
91 | 95 |
// Lookup tables giving the biased half-float (IEEE 754 binary16) exponent
// for a 16-bit unsigned value. For a value v whose highest set bit is b
// (v in [2^b, 2^(b+1))), the exponent that maps v to v * 2^-16 is b - 1.
// exp_lookup_table_lower[v] covers v in 0..255 (exponents 1..6);
// exp_lookup_table_upper[v >> 8] covers v >= 256 (exponents 7..14).
// The entries for v < 4 are unused: those values are encoded as half-float
// subnormals directly in ushort_to_half_float() below.
#define LT(n) n, n, n, n, n, n, n, n, n, n, n, n, n, n, n, n
static const uint8_t exp_lookup_table_lower[256] = {
    0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2,
    2, 2, 2, 2, 2, LT(3), LT(4), LT(4), LT(5), LT(5), LT(5),
    LT(5), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6)};

static const uint8_t exp_lookup_table_upper[256] = {
    7, 7, 8, 8, 9, 9, 9, 9,
    10, 10, 10, 10, 10, 10, 10, 10,
    LT(11), LT(12), LT(12), LT(13), LT(13), LT(13), LT(13), LT(14),
    LT(14), LT(14), LT(14), LT(14), LT(14), LT(14), LT(14)};
#undef LT

// Returns the bit pattern of the IEEE 754 half-float representing
// value * 2^-16, truncated to the half-float's 11 bits of precision.
// This maps the Y16 sample range [0, 65535] onto [0.0, ~1.0).
uint16_t ushort_to_half_float(uint16_t value) {
  if (value < 4) {
    // value * 2^-16 lies below the smallest normal half-float (2^-14) and is
    // exactly representable as a subnormal with mantissa 256 * value. The
    // general path below computes a wrong mantissa for these inputs (its
    // (value << 15) >> 6 result overflows the 0x3FF mask, mapping 2 to 0 and
    // both 1 and 3 to 2^-15), so handle them explicitly.
    return value << 8;
  }
  unsigned short upper = value >> 8;
  unsigned short exponent =
      upper ? exp_lookup_table_upper[upper] : exp_lookup_table_lower[value];
  // Shift the leading 1 bit of |value| up to bit 16, take the next 10 bits as
  // the fraction (the & 0x3FF drops the implicit leading 1), and OR in the
  // biased exponent.
  return (exponent << 10) | (((value << (15 - exponent)) >> 6) & 0x3FF);
}
| 115 |
92 class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient { | 116 class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient { |
93 public: | 117 public: |
94 SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl, | 118 SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl, |
95 const gpu::SyncToken& sync_token) | 119 const gpu::SyncToken& sync_token) |
96 : gl_(gl), sync_token_(sync_token) {} | 120 : gl_(gl), sync_token_(sync_token) {} |
97 ~SyncTokenClientImpl() override {} | 121 ~SyncTokenClientImpl() override {} |
98 void GenerateSyncToken(gpu::SyncToken* sync_token) override { | 122 void GenerateSyncToken(gpu::SyncToken* sync_token) override { |
99 if (sync_token_.HasData()) { | 123 if (sync_token_.HasData()) { |
100 *sync_token = sync_token_; | 124 *sync_token = sync_token_; |
101 } else { | 125 } else { |
(...skipping 141 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
243 plane_index, input_frame->format(), coded_size.width()); | 267 plane_index, input_frame->format(), coded_size.width()); |
244 int plane_height = media::VideoFrame::Rows(plane_index, input_frame->format(), | 268 int plane_height = media::VideoFrame::Rows(plane_index, input_frame->format(), |
245 coded_size.height()); | 269 coded_size.height()); |
246 return gfx::Size(plane_width, plane_height); | 270 return gfx::Size(plane_width, plane_height); |
247 } | 271 } |
248 | 272 |
249 VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes( | 273 VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes( |
250 scoped_refptr<media::VideoFrame> video_frame) { | 274 scoped_refptr<media::VideoFrame> video_frame) { |
251 TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes"); | 275 TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes"); |
252 const media::VideoPixelFormat input_frame_format = video_frame->format(); | 276 const media::VideoPixelFormat input_frame_format = video_frame->format(); |
253 | |
254 // TODO(hubbe): Make this a video frame method. | 277 // TODO(hubbe): Make this a video frame method. |
255 int bits_per_channel = 0; | 278 int bits_per_channel = 0; |
256 switch (input_frame_format) { | 279 switch (input_frame_format) { |
257 case media::PIXEL_FORMAT_UNKNOWN: | 280 case media::PIXEL_FORMAT_UNKNOWN: |
258 NOTREACHED(); | 281 NOTREACHED(); |
259 // Fall through! | 282 // Fall through! |
260 case media::PIXEL_FORMAT_I420: | 283 case media::PIXEL_FORMAT_I420: |
261 case media::PIXEL_FORMAT_YV12: | 284 case media::PIXEL_FORMAT_YV12: |
262 case media::PIXEL_FORMAT_YV16: | 285 case media::PIXEL_FORMAT_YV16: |
263 case media::PIXEL_FORMAT_YV12A: | 286 case media::PIXEL_FORMAT_YV12A: |
264 case media::PIXEL_FORMAT_YV24: | 287 case media::PIXEL_FORMAT_YV24: |
265 case media::PIXEL_FORMAT_NV12: | 288 case media::PIXEL_FORMAT_NV12: |
266 case media::PIXEL_FORMAT_NV21: | 289 case media::PIXEL_FORMAT_NV21: |
267 case media::PIXEL_FORMAT_UYVY: | 290 case media::PIXEL_FORMAT_UYVY: |
268 case media::PIXEL_FORMAT_YUY2: | 291 case media::PIXEL_FORMAT_YUY2: |
269 case media::PIXEL_FORMAT_ARGB: | 292 case media::PIXEL_FORMAT_ARGB: |
270 case media::PIXEL_FORMAT_XRGB: | 293 case media::PIXEL_FORMAT_XRGB: |
271 case media::PIXEL_FORMAT_RGB24: | 294 case media::PIXEL_FORMAT_RGB24: |
272 case media::PIXEL_FORMAT_RGB32: | 295 case media::PIXEL_FORMAT_RGB32: |
273 case media::PIXEL_FORMAT_MJPEG: | 296 case media::PIXEL_FORMAT_MJPEG: |
274 case media::PIXEL_FORMAT_MT21: | 297 case media::PIXEL_FORMAT_MT21: |
| 298 case media::PIXEL_FORMAT_Y8: |
275 bits_per_channel = 8; | 299 bits_per_channel = 8; |
276 break; | 300 break; |
277 case media::PIXEL_FORMAT_YUV420P9: | 301 case media::PIXEL_FORMAT_YUV420P9: |
278 case media::PIXEL_FORMAT_YUV422P9: | 302 case media::PIXEL_FORMAT_YUV422P9: |
279 case media::PIXEL_FORMAT_YUV444P9: | 303 case media::PIXEL_FORMAT_YUV444P9: |
280 bits_per_channel = 9; | 304 bits_per_channel = 9; |
281 break; | 305 break; |
282 case media::PIXEL_FORMAT_YUV420P10: | 306 case media::PIXEL_FORMAT_YUV420P10: |
283 case media::PIXEL_FORMAT_YUV422P10: | 307 case media::PIXEL_FORMAT_YUV422P10: |
284 case media::PIXEL_FORMAT_YUV444P10: | 308 case media::PIXEL_FORMAT_YUV444P10: |
285 bits_per_channel = 10; | 309 bits_per_channel = 10; |
286 break; | 310 break; |
| 311 case media::PIXEL_FORMAT_Y16: |
| 312 bits_per_channel = 16; |
| 313 break; |
287 } | 314 } |
288 | 315 |
289 // Only YUV software video frames are supported. | 316 // Only YUV, Y8 and Y16 software video frames are supported. |
290 if (!media::IsYuvPlanar(input_frame_format)) { | 317 const bool isYuvPlanar = media::IsYuvPlanar(input_frame_format); |
| 318 if (!(isYuvPlanar || input_frame_format == media::PIXEL_FORMAT_Y16 || |
| 319 input_frame_format == media::PIXEL_FORMAT_Y8)) { |
291 NOTREACHED() << media::VideoPixelFormatToString(input_frame_format); | 320 NOTREACHED() << media::VideoPixelFormatToString(input_frame_format); |
292 return VideoFrameExternalResources(); | 321 return VideoFrameExternalResources(); |
293 } | 322 } |
294 | 323 |
295 const bool software_compositor = context_provider_ == NULL; | 324 const bool software_compositor = context_provider_ == NULL; |
296 | 325 |
| 326 if ((input_frame_format == media::PIXEL_FORMAT_Y8 || |
| 327 input_frame_format == media::PIXEL_FORMAT_Y16) && |
| 328 software_compositor) { |
| 329 // TODO(astojilj) Y8 and Y16 software compositor support. |
| 330 NOTREACHED() << "Software compositor doesn't support PIXEL_FORMAT_Y8/Y16"; |
| 331 return VideoFrameExternalResources(); |
| 332 } |
| 333 |
297 ResourceFormat output_resource_format = | 334 ResourceFormat output_resource_format = |
298 resource_provider_->YuvResourceFormat(bits_per_channel); | 335 (input_frame_format == media::PIXEL_FORMAT_Y16) |
| 336 ? resource_provider_->Y16ResourceFormat() |
| 337 : resource_provider_->YuvResourceFormat(bits_per_channel); |
299 | 338 |
300 size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format); | 339 size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format); |
301 | 340 |
302 // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB | 341 // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB |
303 // conversion here. That involves an extra copy of each frame to a bitmap. | 342 // conversion here. That involves an extra copy of each frame to a bitmap. |
304 // Obviously, this is suboptimal and should be addressed once ubercompositor | 343 // Obviously, this is suboptimal and should be addressed once ubercompositor |
305 // starts shaping up. | 344 // starts shaping up. |
306 if (software_compositor) { | 345 if (software_compositor) { |
307 output_resource_format = kRGBResourceFormat; | 346 output_resource_format = kRGBResourceFormat; |
308 output_plane_count = 1; | 347 output_plane_count = 1; |
(...skipping 94 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
403 plane_resource.resource_id()); | 442 plane_resource.resource_id()); |
404 external_resources.software_release_callback = | 443 external_resources.software_release_callback = |
405 base::Bind(&RecycleResource, AsWeakPtr(), plane_resource.resource_id()); | 444 base::Bind(&RecycleResource, AsWeakPtr(), plane_resource.resource_id()); |
406 external_resources.type = VideoFrameExternalResources::SOFTWARE_RESOURCE; | 445 external_resources.type = VideoFrameExternalResources::SOFTWARE_RESOURCE; |
407 return external_resources; | 446 return external_resources; |
408 } | 447 } |
409 | 448 |
410 for (size_t i = 0; i < plane_resources.size(); ++i) { | 449 for (size_t i = 0; i < plane_resources.size(); ++i) { |
411 PlaneResource& plane_resource = *plane_resources[i]; | 450 PlaneResource& plane_resource = *plane_resources[i]; |
412 // Update each plane's resource id with its content. | 451 // Update each plane's resource id with its content. |
413 DCHECK_EQ(plane_resource.resource_format(), | 452 DCHECK_EQ(plane_resource.resource_format(), output_resource_format); |
414 resource_provider_->YuvResourceFormat(bits_per_channel)); | |
415 | 453 |
416 if (!plane_resource.Matches(video_frame->unique_id(), i)) { | 454 if (!plane_resource.Matches(video_frame->unique_id(), i)) { |
417 // We need to transfer data from |video_frame| to the plane resource. | 455 // We need to transfer data from |video_frame| to the plane resource. |
418 // TODO(reveman): Can use GpuMemoryBuffers here to improve performance. | 456 // TODO(reveman): Can use GpuMemoryBuffers here to improve performance. |
419 | 457 |
420 // The |resource_size_pixels| is the size of the resource we want to | 458 // The |resource_size_pixels| is the size of the resource we want to |
421 // upload to. | 459 // upload to. |
422 gfx::Size resource_size_pixels = plane_resource.resource_size(); | 460 gfx::Size resource_size_pixels = plane_resource.resource_size(); |
423 // The |video_stride_bytes| is the width of the video frame we are | 461 // The |video_stride_bytes| is the width of the video frame we are |
424 // uploading (including non-frame data to fill in the stride). | 462 // uploading (including non-frame data to fill in the stride). |
425 int video_stride_bytes = video_frame->stride(i); | 463 int video_stride_bytes = video_frame->stride(i); |
426 | 464 |
427 size_t bytes_per_row = ResourceUtil::UncheckedWidthInBytes<size_t>( | 465 size_t bytes_per_row = ResourceUtil::UncheckedWidthInBytes<size_t>( |
428 resource_size_pixels.width(), plane_resource.resource_format()); | 466 resource_size_pixels.width(), plane_resource.resource_format()); |
429 // Use 4-byte row alignment (OpenGL default) for upload performance. | 467 // Use 4-byte row alignment (OpenGL default) for upload performance. |
430 // Assuming that GL_UNPACK_ALIGNMENT has not changed from default. | 468 // Assuming that GL_UNPACK_ALIGNMENT has not changed from default. |
431 size_t upload_image_stride = | 469 size_t upload_image_stride = |
432 MathUtil::UncheckedRoundUp<size_t>(bytes_per_row, 4u); | 470 MathUtil::UncheckedRoundUp<size_t>(bytes_per_row, 4u); |
433 | 471 |
434 bool needs_conversion = false; | 472 bool needs_conversion = false; |
435 int shift = 0; | 473 int shift = 0; |
436 | 474 |
437 // LUMINANCE_F16 uses half-floats, so we always need a conversion step. | 475 // LUMINANCE_F16 uses half-floats, so we always need a conversion step. |
438 if (plane_resource.resource_format() == LUMINANCE_F16) { | 476 if (plane_resource.resource_format() == LUMINANCE_F16) { |
439 needs_conversion = true; | 477 needs_conversion = true; |
440 // Note that the current method of converting integers to half-floats | 478 // Note that the current method of converting integers to half-floats |
441 // stops working if you have more than 10 bits of data. | 479 // stops working if you have more than 10 bits of data. |
442 DCHECK_LE(bits_per_channel, 10); | 480 DCHECK(bits_per_channel <= 10 || !isYuvPlanar); |
| 481 } else if (input_frame_format == media::PIXEL_FORMAT_Y16) { |
| 482 if (plane_resource.resource_format() == RGBA_8888) |
| 483 needs_conversion = true; |
443 } else if (bits_per_channel > 8) { | 484 } else if (bits_per_channel > 8) { |
444 // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to | 485 // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to |
445 // shift the data down and create an 8-bit texture. | 486 // shift the data down and create an 8-bit texture. |
446 needs_conversion = true; | 487 needs_conversion = true; |
447 shift = bits_per_channel - 8; | 488 shift = bits_per_channel - 8; |
448 } | 489 } |
449 const uint8_t* pixels; | 490 const uint8_t* pixels; |
450 if (static_cast<int>(upload_image_stride) == video_stride_bytes && | 491 if (static_cast<int>(upload_image_stride) == video_stride_bytes && |
451 !needs_conversion) { | 492 !needs_conversion) { |
452 pixels = video_frame->data(i); | 493 pixels = video_frame->data(i); |
453 } else { | 494 } else { |
454 // Avoid malloc for each frame/plane if possible. | 495 // Avoid malloc for each frame/plane if possible. |
455 size_t needed_size = | 496 size_t needed_size = |
456 upload_image_stride * resource_size_pixels.height(); | 497 upload_image_stride * resource_size_pixels.height(); |
457 if (upload_pixels_.size() < needed_size) | 498 if (upload_pixels_.size() < needed_size) |
458 upload_pixels_.resize(needed_size); | 499 upload_pixels_.resize(needed_size); |
459 | 500 |
460 for (int row = 0; row < resource_size_pixels.height(); ++row) { | 501 for (int row = 0; row < resource_size_pixels.height(); ++row) { |
461 if (plane_resource.resource_format() == LUMINANCE_F16) { | 502 if (plane_resource.resource_format() == LUMINANCE_F16) { |
462 uint16_t* dst = reinterpret_cast<uint16_t*>( | 503 uint16_t* dst = reinterpret_cast<uint16_t*>( |
463 &upload_pixels_[upload_image_stride * row]); | 504 &upload_pixels_[upload_image_stride * row]); |
464 const uint16_t* src = reinterpret_cast<uint16_t*>( | 505 const uint16_t* src = reinterpret_cast<uint16_t*>( |
465 video_frame->data(i) + (video_stride_bytes * row)); | 506 video_frame->data(i) + (video_stride_bytes * row)); |
| 507 |
| 508 if (input_frame_format == media::PIXEL_FORMAT_Y16) { |
| 509 for (size_t i = 0; i < bytes_per_row / 2; i++) |
| 510 dst[i] = ushort_to_half_float(src[i]); |
| 511 continue; |
| 512 } |
466 // Micro-benchmarking indicates that the compiler does | 513 // Micro-benchmarking indicates that the compiler does |
467 // a good enough job of optimizing this loop that trying | 514 // a good enough job of optimizing this loop that trying |
468 // to manually operate on one uint64 at a time is not | 515 // to manually operate on one uint64 at a time is not |
469 // actually helpful. | 516 // actually helpful. |
470 // Note to future optimizers: Benchmark your optimizations! | 517 // Note to future optimizers: Benchmark your optimizations! |
471 for (size_t i = 0; i < bytes_per_row / 2; i++) | 518 for (size_t i = 0; i < bytes_per_row / 2; i++) |
472 dst[i] = src[i] | 0x3800; | 519 dst[i] = src[i] | 0x3800; |
473 } else if (shift != 0) { | 520 } else if (shift != 0) { |
474 // We have more-than-8-bit input which we need to shift | 521 // We have more-than-8-bit input which we need to shift |
475 // down to fit it into an 8-bit texture. | 522 // down to fit it into an 8-bit texture. |
476 uint8_t* dst = &upload_pixels_[upload_image_stride * row]; | 523 uint8_t* dst = &upload_pixels_[upload_image_stride * row]; |
477 const uint16_t* src = reinterpret_cast<uint16_t*>( | 524 const uint16_t* src = reinterpret_cast<uint16_t*>( |
478 video_frame->data(i) + (video_stride_bytes * row)); | 525 video_frame->data(i) + (video_stride_bytes * row)); |
479 for (size_t i = 0; i < bytes_per_row; i++) | 526 for (size_t i = 0; i < bytes_per_row; i++) |
480 dst[i] = src[i] >> shift; | 527 dst[i] = src[i] >> shift; |
| 528 } else if (input_frame_format == media::PIXEL_FORMAT_Y16 && |
| 529 plane_resource.resource_format() == RGBA_8888) { |
| 530 uint32_t* dst = reinterpret_cast<uint32_t*>( |
| 531 &upload_pixels_[upload_image_stride * row]); |
| 532 const uint16_t* src = reinterpret_cast<uint16_t*>( |
| 533 video_frame->data(i) + (video_stride_bytes * row)); |
| 534 for (size_t i = 0; i < bytes_per_row / 4; ++i) |
| 535 *dst++ = *src++; |
481 } else { | 536 } else { |
482 // Input and output are the same size and format, but | 537 // Input and output are the same size and format, but |
483 // differ in stride, copy one row at a time. | 538 // differ in stride, copy one row at a time. |
484 uint8_t* dst = &upload_pixels_[upload_image_stride * row]; | 539 uint8_t* dst = &upload_pixels_[upload_image_stride * row]; |
485 const uint8_t* src = | 540 const uint8_t* src = |
486 video_frame->data(i) + (video_stride_bytes * row); | 541 video_frame->data(i) + (video_stride_bytes * row); |
487 memcpy(dst, src, bytes_per_row); | 542 memcpy(dst, src, bytes_per_row); |
488 } | 543 } |
489 } | 544 } |
490 pixels = &upload_pixels_[0]; | 545 pixels = &upload_pixels_[0]; |
491 } | 546 } |
492 | 547 |
493 resource_provider_->CopyToResource(plane_resource.resource_id(), pixels, | 548 resource_provider_->CopyToResource(plane_resource.resource_id(), pixels, |
494 resource_size_pixels); | 549 resource_size_pixels); |
495 plane_resource.SetUniqueId(video_frame->unique_id(), i); | 550 plane_resource.SetUniqueId(video_frame->unique_id(), i); |
496 } | 551 } |
497 | 552 |
498 if (plane_resource.resource_format() == LUMINANCE_F16) { | 553 if (plane_resource.resource_format() == LUMINANCE_F16 && isYuvPlanar) { |
499 // By OR-ing with 0x3800, 10-bit numbers become half-floats in the | 554 // By OR-ing with 0x3800, 10-bit numbers become half-floats in the |
500 // range [0.5..1) and 9-bit numbers get the range [0.5..0.75). | 555 // range [0.5..1) and 9-bit numbers get the range [0.5..0.75). |
501 // | 556 // |
502 // Half-floats are evaluated as: | 557 // Half-floats are evaluated as: |
503 // float value = pow(2.0, exponent - 25) * (0x400 + fraction); | 558 // float value = pow(2.0, exponent - 25) * (0x400 + fraction); |
504 // | 559 // |
505 // In our case the exponent is 14 (since we or with 0x3800) and | 560 // In our case the exponent is 14 (since we or with 0x3800) and |
506 // pow(2.0, 14-25) * 0x400 evaluates to 0.5 (our offset) and | 561 // pow(2.0, 14-25) * 0x400 evaluates to 0.5 (our offset) and |
507 // pow(2.0, 14-25) * fraction is [0..0.49951171875] for 10-bit and | 562 // pow(2.0, 14-25) * fraction is [0..0.49951171875] for 10-bit and |
508 // [0..0.24951171875] for 9-bit. | 563 // [0..0.24951171875] for 9-bit. |
(...skipping 11 matching lines...) Expand all Loading... |
520 | 575 |
521 TextureMailbox mailbox(plane_resource.mailbox(), gpu::SyncToken(), | 576 TextureMailbox mailbox(plane_resource.mailbox(), gpu::SyncToken(), |
522 resource_provider_->GetResourceTextureTarget( | 577 resource_provider_->GetResourceTextureTarget( |
523 plane_resource.resource_id())); | 578 plane_resource.resource_id())); |
524 mailbox.set_color_space(video_frame->ColorSpace()); | 579 mailbox.set_color_space(video_frame->ColorSpace()); |
525 external_resources.mailboxes.push_back(mailbox); | 580 external_resources.mailboxes.push_back(mailbox); |
526 external_resources.release_callbacks.push_back(base::Bind( | 581 external_resources.release_callbacks.push_back(base::Bind( |
527 &RecycleResource, AsWeakPtr(), plane_resource.resource_id())); | 582 &RecycleResource, AsWeakPtr(), plane_resource.resource_id())); |
528 } | 583 } |
529 | 584 |
530 external_resources.type = VideoFrameExternalResources::YUV_RESOURCE; | 585 external_resources.type = |
| 586 (input_frame_format == media::PIXEL_FORMAT_Y16) |
| 587 ? VideoFrameExternalResources::Y_RESOURCE |
| 588 : (isYuvPlanar ? VideoFrameExternalResources::YUV_RESOURCE |
| 589 : VideoFrameExternalResources::RGB_RESOURCE); |
531 return external_resources; | 590 return external_resources; |
532 } | 591 } |
533 | 592 |
534 // static | 593 // static |
535 void VideoResourceUpdater::ReturnTexture( | 594 void VideoResourceUpdater::ReturnTexture( |
536 base::WeakPtr<VideoResourceUpdater> updater, | 595 base::WeakPtr<VideoResourceUpdater> updater, |
537 const scoped_refptr<media::VideoFrame>& video_frame, | 596 const scoped_refptr<media::VideoFrame>& video_frame, |
538 const gpu::SyncToken& sync_token, | 597 const gpu::SyncToken& sync_token, |
539 bool lost_resource, | 598 bool lost_resource, |
540 BlockingTaskRunner* main_thread_task_runner) { | 599 BlockingTaskRunner* main_thread_task_runner) { |
(...skipping 116 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
657 video_frame->coded_size(), | 716 video_frame->coded_size(), |
658 video_frame->metadata()->IsTrue( | 717 video_frame->metadata()->IsTrue( |
659 media::VideoFrameMetadata::ALLOW_OVERLAY), | 718 media::VideoFrameMetadata::ALLOW_OVERLAY), |
660 false); | 719 false); |
661 mailbox.set_color_space(video_frame->ColorSpace()); | 720 mailbox.set_color_space(video_frame->ColorSpace()); |
662 external_resources.mailboxes.push_back(mailbox); | 721 external_resources.mailboxes.push_back(mailbox); |
663 external_resources.release_callbacks.push_back( | 722 external_resources.release_callbacks.push_back( |
664 base::Bind(&ReturnTexture, AsWeakPtr(), video_frame)); | 723 base::Bind(&ReturnTexture, AsWeakPtr(), video_frame)); |
665 } | 724 } |
666 } | 725 } |
| 726 |
| 727 external_resources.bits_per_channel = |
| 728 (video_frame->format() == media::PIXEL_FORMAT_Y16) ? 16 : 8; |
667 return external_resources; | 729 return external_resources; |
668 } | 730 } |
669 | 731 |
670 // static | 732 // static |
671 void VideoResourceUpdater::RecycleResource( | 733 void VideoResourceUpdater::RecycleResource( |
672 base::WeakPtr<VideoResourceUpdater> updater, | 734 base::WeakPtr<VideoResourceUpdater> updater, |
673 ResourceId resource_id, | 735 ResourceId resource_id, |
674 const gpu::SyncToken& sync_token, | 736 const gpu::SyncToken& sync_token, |
675 bool lost_resource, | 737 bool lost_resource, |
676 BlockingTaskRunner* main_thread_task_runner) { | 738 BlockingTaskRunner* main_thread_task_runner) { |
(...skipping 19 matching lines...) Expand all Loading... |
696 if (lost_resource) { | 758 if (lost_resource) { |
697 resource_it->clear_refs(); | 759 resource_it->clear_refs(); |
698 updater->DeleteResource(resource_it); | 760 updater->DeleteResource(resource_it); |
699 return; | 761 return; |
700 } | 762 } |
701 | 763 |
702 resource_it->remove_ref(); | 764 resource_it->remove_ref(); |
703 } | 765 } |
704 | 766 |
705 } // namespace cc | 767 } // namespace cc |
OLD | NEW |