| OLD | NEW |
| 1 // Copyright 2013 The Chromium Authors. All rights reserved. | 1 // Copyright 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "cc/resources/video_resource_updater.h" | 5 #include "cc/resources/video_resource_updater.h" |
| 6 | 6 |
| 7 #include <stddef.h> | 7 #include <stddef.h> |
| 8 #include <stdint.h> | 8 #include <stdint.h> |
| 9 | 9 |
| 10 #include <algorithm> | 10 #include <algorithm> |
| (...skipping 49 matching lines...) | |
| 60 switch (video_frame->mailbox_holder(0).texture_target) { | 60 switch (video_frame->mailbox_holder(0).texture_target) { |
| 61 case GL_TEXTURE_EXTERNAL_OES: | 61 case GL_TEXTURE_EXTERNAL_OES: |
| 62 return VideoFrameExternalResources::YUV_RESOURCE; | 62 return VideoFrameExternalResources::YUV_RESOURCE; |
| 63 case GL_TEXTURE_RECTANGLE_ARB: | 63 case GL_TEXTURE_RECTANGLE_ARB: |
| 64 return VideoFrameExternalResources::RGB_RESOURCE; | 64 return VideoFrameExternalResources::RGB_RESOURCE; |
| 65 default: | 65 default: |
| 66 NOTREACHED(); | 66 NOTREACHED(); |
| 67 break; | 67 break; |
| 68 } | 68 } |
| 69 break; | 69 break; |
| 70 case media::PIXEL_FORMAT_Y8: |
| 71 case media::PIXEL_FORMAT_Y16: |
| 72 return VideoFrameExternalResources::Y_RESOURCE; |
| 73 break; |
| 70 case media::PIXEL_FORMAT_YV12: | 74 case media::PIXEL_FORMAT_YV12: |
| 71 case media::PIXEL_FORMAT_YV16: | 75 case media::PIXEL_FORMAT_YV16: |
| 72 case media::PIXEL_FORMAT_YV24: | 76 case media::PIXEL_FORMAT_YV24: |
| 73 case media::PIXEL_FORMAT_YV12A: | 77 case media::PIXEL_FORMAT_YV12A: |
| 74 case media::PIXEL_FORMAT_NV21: | 78 case media::PIXEL_FORMAT_NV21: |
| 75 case media::PIXEL_FORMAT_YUY2: | 79 case media::PIXEL_FORMAT_YUY2: |
| 76 case media::PIXEL_FORMAT_RGB24: | 80 case media::PIXEL_FORMAT_RGB24: |
| 77 case media::PIXEL_FORMAT_RGB32: | 81 case media::PIXEL_FORMAT_RGB32: |
| 78 case media::PIXEL_FORMAT_MJPEG: | 82 case media::PIXEL_FORMAT_MJPEG: |
| 79 case media::PIXEL_FORMAT_MT21: | 83 case media::PIXEL_FORMAT_MT21: |
| 80 case media::PIXEL_FORMAT_YUV420P9: | 84 case media::PIXEL_FORMAT_YUV420P9: |
| 81 case media::PIXEL_FORMAT_YUV422P9: | 85 case media::PIXEL_FORMAT_YUV422P9: |
| 82 case media::PIXEL_FORMAT_YUV444P9: | 86 case media::PIXEL_FORMAT_YUV444P9: |
| 83 case media::PIXEL_FORMAT_YUV420P10: | 87 case media::PIXEL_FORMAT_YUV420P10: |
| 84 case media::PIXEL_FORMAT_YUV422P10: | 88 case media::PIXEL_FORMAT_YUV422P10: |
| 85 case media::PIXEL_FORMAT_YUV444P10: | 89 case media::PIXEL_FORMAT_YUV444P10: |
| 86 case media::PIXEL_FORMAT_UNKNOWN: | 90 case media::PIXEL_FORMAT_UNKNOWN: |
| 87 break; | 91 break; |
| 88 } | 92 } |
| 89 return VideoFrameExternalResources::NONE; | 93 return VideoFrameExternalResources::NONE; |
| 90 } | 94 } |
| 91 | 95 |
| 96 static const uint8_t exp_lookup_table_lower[256] = { |
| 97 #define LT(n) n, n, n, n, n, n, n, n, n, n, n, n, n, n, n, n |
| 98 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, |
| 99 2, 2, 2, 2, 2, LT(3), LT(4), LT(4), LT(5), LT(5), LT(5), |
| 100 LT(5), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6), LT(6)}; |
| 101 |
| 102 static const uint8_t exp_lookup_table_upper[256] = { |
| 103 #define LT(n) n, n, n, n, n, n, n, n, n, n, n, n, n, n, n, n |
| 104 7, 7, 8, 8, 9, 9, 9, 9, |
| 105 10, 10, 10, 10, 10, 10, 10, 10, |
| 106 LT(11), LT(12), LT(12), LT(13), LT(13), LT(13), LT(13), LT(14), |
| 107 LT(14), LT(14), LT(14), LT(14), LT(14), LT(14), LT(14)}; |
| 108 |
| 109 uint16_t ushort_to_half_float(uint16_t value) { |
| 110 unsigned short upper = value >> 8; |
| 111 unsigned short exponent = |
| 112 upper ? exp_lookup_table_upper[upper] : exp_lookup_table_lower[value]; |
| 113 return (exponent << 10) | (((value << (15 - exponent)) >> 6) & 0x3FF); |
| 114 } |
| 115 |
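
The tables above give the biased half-float exponent for a 16-bit sample (indexed by its high or low byte), and `ushort_to_half_float` then packs that exponent together with the top mantissa bits, so an unsigned 16-bit value maps to roughly `value / 65536` in half-float form without any per-pixel floating-point work. Below is a minimal sketch of how that claim could be spot-checked; `half_to_double` and the `main` harness are illustrative assumptions, not part of this patch, and they assume the tables and helper above are compiled into the same translation unit.

```cpp
// Hypothetical cross-check (not from the patch): decode what
// ushort_to_half_float() produces and compare it with value / 65536.0.
#include <cmath>
#include <cstdint>
#include <cstdio>

double half_to_double(uint16_t h) {
  const int exponent = (h >> 10) & 0x1F;
  const int fraction = h & 0x3FF;
  if (exponent == 0)                   // subnormal half-float
    return std::ldexp(fraction, -24);  // fraction * 2^-24
  // Same formula as the comment further down in this file:
  // value = pow(2.0, exponent - 25) * (0x400 + fraction).
  return std::ldexp(0x400 + fraction, exponent - 25);
}

int main() {
  double max_error = 0.0;
  for (uint32_t v = 0; v <= 0xFFFF; ++v) {
    const double decoded =
        half_to_double(ushort_to_half_float(static_cast<uint16_t>(v)));
    max_error = std::fmax(max_error, std::fabs(decoded - v / 65536.0));
  }
  // Expected to stay below about 2^-11, since the 10-bit explicit mantissa
  // truncates the longest inputs.
  std::printf("max error: %g\n", max_error);
  return 0;
}
```

The table-driven form trades 512 bytes of static data for avoiding a per-sample branch or float conversion on this per-frame upload path.
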
| 92 class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient { | 116 class SyncTokenClientImpl : public media::VideoFrame::SyncTokenClient { |
| 93 public: | 117 public: |
| 94 SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl, | 118 SyncTokenClientImpl(gpu::gles2::GLES2Interface* gl, |
| 95 const gpu::SyncToken& sync_token) | 119 const gpu::SyncToken& sync_token) |
| 96 : gl_(gl), sync_token_(sync_token) {} | 120 : gl_(gl), sync_token_(sync_token) {} |
| 97 ~SyncTokenClientImpl() override {} | 121 ~SyncTokenClientImpl() override {} |
| 98 void GenerateSyncToken(gpu::SyncToken* sync_token) override { | 122 void GenerateSyncToken(gpu::SyncToken* sync_token) override { |
| 99 if (sync_token_.HasData()) { | 123 if (sync_token_.HasData()) { |
| 100 *sync_token = sync_token_; | 124 *sync_token = sync_token_; |
| 101 } else { | 125 } else { |
| (...skipping 139 matching lines...) | |
| 241 plane_index, input_frame->format(), coded_size.width()); | 265 plane_index, input_frame->format(), coded_size.width()); |
| 242 int plane_height = media::VideoFrame::Rows(plane_index, input_frame->format(), | 266 int plane_height = media::VideoFrame::Rows(plane_index, input_frame->format(), |
| 243 coded_size.height()); | 267 coded_size.height()); |
| 244 return gfx::Size(plane_width, plane_height); | 268 return gfx::Size(plane_width, plane_height); |
| 245 } | 269 } |
| 246 | 270 |
| 247 VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes( | 271 VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes( |
| 248 scoped_refptr<media::VideoFrame> video_frame) { | 272 scoped_refptr<media::VideoFrame> video_frame) { |
| 249 TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes"); | 273 TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes"); |
| 250 const media::VideoPixelFormat input_frame_format = video_frame->format(); | 274 const media::VideoPixelFormat input_frame_format = video_frame->format(); |
| 251 | |
| 252 // TODO(hubbe): Make this a video frame method. | 275 // TODO(hubbe): Make this a video frame method. |
| 253 int bits_per_channel = 0; | 276 int bits_per_channel = 0; |
| 254 switch (input_frame_format) { | 277 switch (input_frame_format) { |
| 255 case media::PIXEL_FORMAT_UNKNOWN: | 278 case media::PIXEL_FORMAT_UNKNOWN: |
| 256 NOTREACHED(); | 279 NOTREACHED(); |
| 257 // Fall through! | 280 // Fall through! |
| 258 case media::PIXEL_FORMAT_I420: | 281 case media::PIXEL_FORMAT_I420: |
| 259 case media::PIXEL_FORMAT_YV12: | 282 case media::PIXEL_FORMAT_YV12: |
| 260 case media::PIXEL_FORMAT_YV16: | 283 case media::PIXEL_FORMAT_YV16: |
| 261 case media::PIXEL_FORMAT_YV12A: | 284 case media::PIXEL_FORMAT_YV12A: |
| 262 case media::PIXEL_FORMAT_YV24: | 285 case media::PIXEL_FORMAT_YV24: |
| 263 case media::PIXEL_FORMAT_NV12: | 286 case media::PIXEL_FORMAT_NV12: |
| 264 case media::PIXEL_FORMAT_NV21: | 287 case media::PIXEL_FORMAT_NV21: |
| 265 case media::PIXEL_FORMAT_UYVY: | 288 case media::PIXEL_FORMAT_UYVY: |
| 266 case media::PIXEL_FORMAT_YUY2: | 289 case media::PIXEL_FORMAT_YUY2: |
| 267 case media::PIXEL_FORMAT_ARGB: | 290 case media::PIXEL_FORMAT_ARGB: |
| 268 case media::PIXEL_FORMAT_XRGB: | 291 case media::PIXEL_FORMAT_XRGB: |
| 269 case media::PIXEL_FORMAT_RGB24: | 292 case media::PIXEL_FORMAT_RGB24: |
| 270 case media::PIXEL_FORMAT_RGB32: | 293 case media::PIXEL_FORMAT_RGB32: |
| 271 case media::PIXEL_FORMAT_MJPEG: | 294 case media::PIXEL_FORMAT_MJPEG: |
| 272 case media::PIXEL_FORMAT_MT21: | 295 case media::PIXEL_FORMAT_MT21: |
| 296 case media::PIXEL_FORMAT_Y8: |
| 273 bits_per_channel = 8; | 297 bits_per_channel = 8; |
| 274 break; | 298 break; |
| 275 case media::PIXEL_FORMAT_YUV420P9: | 299 case media::PIXEL_FORMAT_YUV420P9: |
| 276 case media::PIXEL_FORMAT_YUV422P9: | 300 case media::PIXEL_FORMAT_YUV422P9: |
| 277 case media::PIXEL_FORMAT_YUV444P9: | 301 case media::PIXEL_FORMAT_YUV444P9: |
| 278 bits_per_channel = 9; | 302 bits_per_channel = 9; |
| 279 break; | 303 break; |
| 280 case media::PIXEL_FORMAT_YUV420P10: | 304 case media::PIXEL_FORMAT_YUV420P10: |
| 281 case media::PIXEL_FORMAT_YUV422P10: | 305 case media::PIXEL_FORMAT_YUV422P10: |
| 282 case media::PIXEL_FORMAT_YUV444P10: | 306 case media::PIXEL_FORMAT_YUV444P10: |
| 283 bits_per_channel = 10; | 307 bits_per_channel = 10; |
| 284 break; | 308 break; |
| 309 case media::PIXEL_FORMAT_Y16: |
| 310 bits_per_channel = 16; |
| 311 break; |
| 285 } | 312 } |
| 286 | 313 |
| 287 // Only YUV software video frames are supported. | 314 // Only YUV, Y8 and Y16 software video frames are supported. |
| 288 if (!media::IsYuvPlanar(input_frame_format)) { | 315 const bool is_yuv_planar = media::IsYuvPlanar(input_frame_format); |
| 316 if (!(is_yuv_planar || input_frame_format == media::PIXEL_FORMAT_Y16 || |
| 317 input_frame_format == media::PIXEL_FORMAT_Y8)) { |
| 289 NOTREACHED() << media::VideoPixelFormatToString(input_frame_format); | 318 NOTREACHED() << media::VideoPixelFormatToString(input_frame_format); |
| 290 return VideoFrameExternalResources(); | 319 return VideoFrameExternalResources(); |
| 291 } | 320 } |
| 292 | 321 |
| 293 const bool software_compositor = context_provider_ == NULL; | 322 const bool software_compositor = context_provider_ == NULL; |
| 294 | 323 |
| 324 if ((input_frame_format == media::PIXEL_FORMAT_Y8 || |
| 325 input_frame_format == media::PIXEL_FORMAT_Y16) && |
| 326 software_compositor) { |
| 327 // TODO(astojilj): Y8 and Y16 software compositor support. |
| 328 NOTREACHED() << "Software compositor doesn't support PIXEL_FORMAT_Y8/Y16"; |
| 329 return VideoFrameExternalResources(); |
| 330 } |
| 331 |
| 295 ResourceFormat output_resource_format = | 332 ResourceFormat output_resource_format = |
| 296 resource_provider_->YuvResourceFormat(bits_per_channel); | 333 (input_frame_format == media::PIXEL_FORMAT_Y16) |
| 334 ? resource_provider_->Y16ResourceFormat(bits_per_channel) |
| 335 : resource_provider_->YuvResourceFormat(bits_per_channel); |
| 297 | 336 |
| 298 size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format); | 337 size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format); |
| 299 | 338 |
| 300 // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB | 339 // TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB |
| 301 // conversion here. That involves an extra copy of each frame to a bitmap. | 340 // conversion here. That involves an extra copy of each frame to a bitmap. |
| 302 // Obviously, this is suboptimal and should be addressed once ubercompositor | 341 // Obviously, this is suboptimal and should be addressed once ubercompositor |
| 303 // starts shaping up. | 342 // starts shaping up. |
| 304 if (software_compositor) { | 343 if (software_compositor) { |
| 305 output_resource_format = kRGBResourceFormat; | 344 output_resource_format = kRGBResourceFormat; |
| 306 output_plane_count = 1; | 345 output_plane_count = 1; |
| (...skipping 121 matching lines...) | |
| 428 MathUtil::UncheckedRoundUp<size_t>(bytes_per_row, 4u); | 467 MathUtil::UncheckedRoundUp<size_t>(bytes_per_row, 4u); |
| 429 | 468 |
| 430 bool needs_conversion = false; | 469 bool needs_conversion = false; |
| 431 int shift = 0; | 470 int shift = 0; |
| 432 | 471 |
| 433 // LUMINANCE_F16 uses half-floats, so we always need a conversion step. | 472 // LUMINANCE_F16 uses half-floats, so we always need a conversion step. |
| 434 if (plane_resource.resource_format() == LUMINANCE_F16) { | 473 if (plane_resource.resource_format() == LUMINANCE_F16) { |
| 435 needs_conversion = true; | 474 needs_conversion = true; |
| 436 // Note that the current method of converting integers to half-floats | 475 // Note that the current method of converting integers to half-floats |
| 437 // stops working if you have more than 10 bits of data. | 476 // stops working if you have more than 10 bits of data. |
| 438 DCHECK_LE(bits_per_channel, 10); | 477 DCHECK(bits_per_channel <= 10 || !is_yuv_planar); |
| 439 } else if (bits_per_channel > 8) { | 478 } else if (bits_per_channel > 8 && |
| 479 plane_resource.resource_format() != RG_88) { |
| 440 // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to | 480 // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to |
| 441 // shift the data down and create an 8-bit texture. | 481 // shift the data down and create an 8-bit texture. |
| 442 needs_conversion = true; | 482 needs_conversion = true; |
| 443 shift = bits_per_channel - 8; | 483 shift = bits_per_channel - 8; |
| 444 } | 484 } |
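
As a side note on the fallback branch above: when more than 8 bits per channel arrive and neither LUMINANCE_F16 nor RG_88 is usable, the data is simply shifted down, which discards the low-order bits. A tiny standalone illustration of that quantization for 10-bit input (plain standard C++, not part of the patch):

```cpp
// 10-bit samples downshifted into an 8-bit texture: shift = 10 - 8 = 2.
#include <cstdint>
#include <cstdio>

int main() {
  const int shift = 10 - 8;  // bits_per_channel - 8, as computed above
  const uint16_t samples[] = {0, 3, 4, 512, 1023};
  for (uint16_t s : samples)
    std::printf("%4u -> %3u\n", static_cast<unsigned>(s),
                static_cast<unsigned>(s >> shift));
  // 0 -> 0, 3 -> 0, 4 -> 1, 512 -> 128, 1023 -> 255; the two low-order bits
  // are lost, which is the precision cost of this fallback.
  return 0;
}
```
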
| 445 const uint8_t* pixels; | 485 const uint8_t* pixels; |
| 446 if (static_cast<int>(upload_image_stride) == video_stride_bytes && | 486 if (static_cast<int>(upload_image_stride) == video_stride_bytes && |
| 447 !needs_conversion) { | 487 !needs_conversion) { |
| 448 pixels = video_frame->data(i); | 488 pixels = video_frame->data(i); |
| 449 } else { | 489 } else { |
| 450 // Avoid malloc for each frame/plane if possible. | 490 // Avoid malloc for each frame/plane if possible. |
| 451 size_t needed_size = | 491 size_t needed_size = |
| 452 upload_image_stride * resource_size_pixels.height(); | 492 upload_image_stride * resource_size_pixels.height(); |
| 453 if (upload_pixels_.size() < needed_size) | 493 if (upload_pixels_.size() < needed_size) |
| 454 upload_pixels_.resize(needed_size); | 494 upload_pixels_.resize(needed_size); |
| 455 | 495 |
| 456 for (int row = 0; row < resource_size_pixels.height(); ++row) { | 496 for (int row = 0; row < resource_size_pixels.height(); ++row) { |
| 457 if (plane_resource.resource_format() == LUMINANCE_F16) { | 497 if (plane_resource.resource_format() == LUMINANCE_F16) { |
| 458 uint16_t* dst = reinterpret_cast<uint16_t*>( | 498 uint16_t* dst = reinterpret_cast<uint16_t*>( |
| 459 &upload_pixels_[upload_image_stride * row]); | 499 &upload_pixels_[upload_image_stride * row]); |
| 460 const uint16_t* src = reinterpret_cast<uint16_t*>( | 500 const uint16_t* src = reinterpret_cast<uint16_t*>( |
| 461 video_frame->data(i) + (video_stride_bytes * row)); | 501 video_frame->data(i) + (video_stride_bytes * row)); |
| 502 |
| 503 if (input_frame_format == media::PIXEL_FORMAT_Y16) { |
| 504 for (size_t i = 0; i < bytes_per_row / 2; i++) |
| 505 dst[i] = ushort_to_half_float(src[i]); |
| 506 continue; |
| 507 } |
| 462 // Micro-benchmarking indicates that the compiler does | 508 // Micro-benchmarking indicates that the compiler does |
| 463 // a good enough job of optimizing this loop that trying | 509 // a good enough job of optimizing this loop that trying |
| 464 // to manually operate on one uint64 at a time is not | 510 // to manually operate on one uint64 at a time is not |
| 465 // actually helpful. | 511 // actually helpful. |
| 466 // Note to future optimizers: Benchmark your optimizations! | 512 // Note to future optimizers: Benchmark your optimizations! |
| 467 for (size_t i = 0; i < bytes_per_row / 2; i++) | 513 for (size_t i = 0; i < bytes_per_row / 2; i++) |
| 468 dst[i] = src[i] | 0x3800; | 514 dst[i] = src[i] | 0x3800; |
| 469 } else if (shift != 0) { | 515 } else if (shift != 0) { |
| 470 // We have more-than-8-bit input which we need to shift | 516 // We have more-than-8-bit input which we need to shift |
| 471 // down to fit it into an 8-bit texture. | 517 // down to fit it into an 8-bit texture. |
| (...skipping 12 matching lines...) | |
| 484 } | 530 } |
| 485 } | 531 } |
| 486 pixels = &upload_pixels_[0]; | 532 pixels = &upload_pixels_[0]; |
| 487 } | 533 } |
| 488 | 534 |
| 489 resource_provider_->CopyToResource(plane_resource.resource_id(), pixels, | 535 resource_provider_->CopyToResource(plane_resource.resource_id(), pixels, |
| 490 resource_size_pixels); | 536 resource_size_pixels); |
| 491 plane_resource.SetUniqueId(video_frame->unique_id(), i); | 537 plane_resource.SetUniqueId(video_frame->unique_id(), i); |
| 492 } | 538 } |
| 493 | 539 |
| 494 if (plane_resource.resource_format() == LUMINANCE_F16) { | 540 if (plane_resource.resource_format() == LUMINANCE_F16 && is_yuv_planar) { |
| 495 // By OR-ing with 0x3800, 10-bit numbers become half-floats in the | 541 // By OR-ing with 0x3800, 10-bit numbers become half-floats in the |
| 496 // range [0.5..1) and 9-bit numbers get the range [0.5..0.75). | 542 // range [0.5..1) and 9-bit numbers get the range [0.5..0.75). |
| 497 // | 543 // |
| 498 // Half-floats are evaluated as: | 544 // Half-floats are evaluated as: |
| 499 // float value = pow(2.0, exponent - 25) * (0x400 + fraction); | 545 // float value = pow(2.0, exponent - 25) * (0x400 + fraction); |
| 500 // | 546 // |
| 501 // In our case the exponent is 14 (since we or with 0x3800) and | 547 // In our case the exponent is 14 (since we or with 0x3800) and |
| 502 // pow(2.0, 14-25) * 0x400 evaluates to 0.5 (our offset) and | 548 // pow(2.0, 14-25) * 0x400 evaluates to 0.5 (our offset) and |
| 503 // pow(2.0, 14-25) * fraction is [0..0.49951171875] for 10-bit and | 549 // pow(2.0, 14-25) * fraction is [0..0.49951171875] for 10-bit and |
| 504 // [0..0.24951171875] for 9-bit. | 550 // [0..0.24951171875] for 9-bit. |
| (...skipping 10 matching lines...) | |
| 515 } | 561 } |
| 516 | 562 |
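
The comment above compresses a fair amount of bit-level reasoning. Here is a small standalone sketch (standard C++ only, not from the patch) that evaluates the quoted half-float formula for a few 10-bit samples and confirms they land in [0.5, 1), the range the elided offset/multiplier code presumably maps back to [0, 1]:

```cpp
// Evaluates the formula from the comment above for a few 10-bit samples:
// value = pow(2.0, exponent - 25) * (0x400 + fraction), with the exponent
// forced to 14 by OR-ing in 0x3800. Illustrative only, not part of the patch.
#include <cmath>
#include <cstdint>
#include <cstdio>

int main() {
  const uint16_t samples[] = {0, 1, 512, 1023};  // 10-bit inputs
  for (uint16_t sample : samples) {
    const uint16_t half_bits = sample | 0x3800;
    const int exponent = (half_bits >> 10) & 0x1F;  // always 14 here
    const int fraction = half_bits & 0x3FF;         // the original sample
    const double value = std::pow(2.0, exponent - 25) * (0x400 + fraction);
    std::printf("%4u -> 0x%04x -> %f\n", static_cast<unsigned>(sample),
                static_cast<unsigned>(half_bits), value);
  }
  // Prints 0.500000, 0.500488, 0.750000 and 0.999512: 10-bit data covers
  // [0.5, 1), and 9-bit data would cover [0.5, 0.75).
  return 0;
}
```
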
| 517 external_resources.mailboxes.push_back( | 563 external_resources.mailboxes.push_back( |
| 518 TextureMailbox(plane_resource.mailbox(), gpu::SyncToken(), | 564 TextureMailbox(plane_resource.mailbox(), gpu::SyncToken(), |
| 519 resource_provider_->GetResourceTextureTarget( | 565 resource_provider_->GetResourceTextureTarget( |
| 520 plane_resource.resource_id()))); | 566 plane_resource.resource_id()))); |
| 521 external_resources.release_callbacks.push_back(base::Bind( | 567 external_resources.release_callbacks.push_back(base::Bind( |
| 522 &RecycleResource, AsWeakPtr(), plane_resource.resource_id())); | 568 &RecycleResource, AsWeakPtr(), plane_resource.resource_id())); |
| 523 } | 569 } |
| 524 | 570 |
| 525 external_resources.type = VideoFrameExternalResources::YUV_RESOURCE; | 571 external_resources.type = |
| 572 (output_resource_format == RG_88) |
| 573 ? VideoFrameExternalResources::Y_RESOURCE |
| 574 : (is_yuv_planar ? VideoFrameExternalResources::YUV_RESOURCE |
| 575 : VideoFrameExternalResources::RGB_RESOURCE); |
| 526 return external_resources; | 576 return external_resources; |
| 527 } | 577 } |
| 528 | 578 |
| 529 // static | 579 // static |
| 530 void VideoResourceUpdater::ReturnTexture( | 580 void VideoResourceUpdater::ReturnTexture( |
| 531 base::WeakPtr<VideoResourceUpdater> updater, | 581 base::WeakPtr<VideoResourceUpdater> updater, |
| 532 const scoped_refptr<media::VideoFrame>& video_frame, | 582 const scoped_refptr<media::VideoFrame>& video_frame, |
| 533 const gpu::SyncToken& sync_token, | 583 const gpu::SyncToken& sync_token, |
| 534 bool lost_resource, | 584 bool lost_resource, |
| 535 BlockingTaskRunner* main_thread_task_runner) { | 585 BlockingTaskRunner* main_thread_task_runner) { |
| (...skipping 153 matching lines...) | |
| 689 if (lost_resource) { | 739 if (lost_resource) { |
| 690 resource_it->clear_refs(); | 740 resource_it->clear_refs(); |
| 691 updater->DeleteResource(resource_it); | 741 updater->DeleteResource(resource_it); |
| 692 return; | 742 return; |
| 693 } | 743 } |
| 694 | 744 |
| 695 resource_it->remove_ref(); | 745 resource_it->remove_ref(); |
| 696 } | 746 } |
| 697 | 747 |
| 698 } // namespace cc | 748 } // namespace cc |