| OLD | NEW |
| 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2012 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/base/video_frame.h" | 5 #include "media/base/video_frame.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 | 8 |
| 9 #include "base/bind.h" | 9 #include "base/bind.h" |
| 10 #include "base/callback_helpers.h" | 10 #include "base/callback_helpers.h" |
| (...skipping 35 matching lines...) |
| 46 switch (format) { | 46 switch (format) { |
| 47 case VideoFrame::YV24: | 47 case VideoFrame::YV24: |
| 48 return gfx::Size(1, 1); | 48 return gfx::Size(1, 1); |
| 49 | 49 |
| 50 case VideoFrame::YV16: | 50 case VideoFrame::YV16: |
| 51 return gfx::Size(2, 1); | 51 return gfx::Size(2, 1); |
| 52 | 52 |
| 53 case VideoFrame::YV12: | 53 case VideoFrame::YV12: |
| 54 case VideoFrame::I420: | 54 case VideoFrame::I420: |
| 55 case VideoFrame::YV12A: | 55 case VideoFrame::YV12A: |
| 56 #if defined(OS_MACOSX) || defined(OS_CHROMEOS) |
| 56 case VideoFrame::NV12: | 57 case VideoFrame::NV12: |
| 58 #endif |
| 57 return gfx::Size(2, 2); | 59 return gfx::Size(2, 2); |
| 58 | 60 |
| 59 case VideoFrame::UNKNOWN: | 61 case VideoFrame::UNKNOWN: |
| 60 #if defined(VIDEO_HOLE) | |
| 61 case VideoFrame::HOLE: | |
| 62 #endif // defined(VIDEO_HOLE) | |
| 63 case VideoFrame::NATIVE_TEXTURE: | |
| 64 case VideoFrame::ARGB: | 62 case VideoFrame::ARGB: |
| 63 case VideoFrame::XRGB: |
| 65 break; | 64 break; |
| 66 } | 65 } |
| 67 } | 66 } |
| 68 NOTREACHED(); | 67 NOTREACHED(); |
| 69 return gfx::Size(); | 68 return gfx::Size(); |
| 70 } | 69 } |
| 71 | 70 |
| 72 // Return the alignment for the whole frame, calculated as the max of the | 71 // Return the alignment for the whole frame, calculated as the max of the |
| 73 // alignment for each individual plane. | 72 // alignment for each individual plane. |
| 74 static gfx::Size CommonAlignment(VideoFrame::Format format) { | 73 static gfx::Size CommonAlignment(VideoFrame::Format format) { |
| 75 int max_sample_width = 0; | 74 int max_sample_width = 0; |
| 76 int max_sample_height = 0; | 75 int max_sample_height = 0; |
| 77 for (size_t plane = 0; plane < VideoFrame::NumPlanes(format); ++plane) { | 76 for (size_t plane = 0; plane < VideoFrame::NumPlanes(format); ++plane) { |
| 78 const gfx::Size sample_size = SampleSize(format, plane); | 77 const gfx::Size sample_size = SampleSize(format, plane); |
| 79 max_sample_width = std::max(max_sample_width, sample_size.width()); | 78 max_sample_width = std::max(max_sample_width, sample_size.width()); |
| 80 max_sample_height = std::max(max_sample_height, sample_size.height()); | 79 max_sample_height = std::max(max_sample_height, sample_size.height()); |
| 81 } | 80 } |
| 82 return gfx::Size(max_sample_width, max_sample_height); | 81 return gfx::Size(max_sample_width, max_sample_height); |
| 83 } | 82 } |
| 84 | 83 |
| 85 // Returns the number of bytes per element for given |plane| and |format|. E.g. | 84 // Returns the number of bytes per element for given |plane| and |format|. |
| 86 // 2 for the UV plane in NV12. | |
| 87 static int BytesPerElement(VideoFrame::Format format, size_t plane) { | 85 static int BytesPerElement(VideoFrame::Format format, size_t plane) { |
| 88 DCHECK(VideoFrame::IsValidPlane(plane, format)); | 86 DCHECK(VideoFrame::IsValidPlane(plane, format)); |
| 89 if (format == VideoFrame::ARGB) | 87 if (format == VideoFrame::ARGB || format == VideoFrame::XRGB) |
| 90 return 4; | 88 return 4; |
| 91 | |
| 92 if (format == VideoFrame::NV12 && plane == VideoFrame::kUVPlane) | |
| 93 return 2; | |
| 94 | |
| 95 return 1; | 89 return 1; |
| 96 } | 90 } |
| 97 | 91 |
| 98 // Rounds up |coded_size| if necessary for |format|. | 92 // Rounds up |coded_size| if necessary for |format|. |
| 99 static gfx::Size AdjustCodedSize(VideoFrame::Format format, | 93 static gfx::Size AdjustCodedSize(VideoFrame::Format format, |
| 100 const gfx::Size& coded_size) { | 94 const gfx::Size& coded_size) { |
| 101 const gfx::Size alignment = CommonAlignment(format); | 95 const gfx::Size alignment = CommonAlignment(format); |
| 102 return gfx::Size(RoundUp(coded_size.width(), alignment.width()), | 96 return gfx::Size(RoundUp(coded_size.width(), alignment.width()), |
| 103 RoundUp(coded_size.height(), alignment.height())); | 97 RoundUp(coded_size.height(), alignment.height())); |
| 104 } | 98 } |
| 105 | 99 |
| 106 // static | 100 // Release data allocated by AllocateYUV(). |
| 107 scoped_refptr<VideoFrame> VideoFrame::CreateFrame( | 101 static void ReleaseData(uint8* data) { |
| 108 VideoFrame::Format format, | 102 DCHECK(data); |
| 109 const gfx::Size& coded_size, | 103 base::AlignedFree(data); |
| 110 const gfx::Rect& visible_rect, | 104 } |
| 111 const gfx::Size& natural_size, | 105 |
| 112 base::TimeDelta timestamp) { | 106 // static |
| 107 bool VideoFrame::IsYuvPlanar(Format format) { |
| 113 switch (format) { | 108 switch (format) { |
| 114 case VideoFrame::YV12: | 109 case YV12: |
| 115 case VideoFrame::YV16: | 110 case I420: |
| 116 case VideoFrame::I420: | 111 case YV16: |
| 117 case VideoFrame::YV12A: | 112 case YV12A: |
| 118 case VideoFrame::YV24: | 113 case YV24: |
| 119 break; | 114 return true; |
| 115 #if defined(OS_MACOSX) || defined(OS_CHROMEOS) |
| 116 case NV12: |
| 117 #endif |
| 118 case UNKNOWN: |
| 119 case ARGB: |
| 120 case XRGB: |
| 121 return false; |
| 122 } |
| 123 return false; |
| 124 } |
| 120 | 125 |
| 121 case VideoFrame::UNKNOWN: | 126 // static |
| 122 case VideoFrame::NV12: | 127 bool VideoFrame::IsMappable(StorageType storage_type) { |
| 123 case VideoFrame::NATIVE_TEXTURE: | 128 return storage_type == STORAGE_SHMEM || |
| 124 #if defined(VIDEO_HOLE) | 129 storage_type == STORAGE_OWNED_MEMORY || |
| 125 case VideoFrame::HOLE: | 130 storage_type == STORAGE_UNOWNED_MEMORY; |
| 126 #endif // defined(VIDEO_HOLE) | |
| 127 case VideoFrame::ARGB: | |
| 128 NOTIMPLEMENTED(); | |
| 129 return nullptr; | |
| 130 } | |
| 131 | |
| 132 // Since we're creating a new YUV frame (and allocating memory for it | |
| 133 // ourselves), we can pad the requested |coded_size| if necessary if the | |
| 134 // request does not line up on sample boundaries. | |
| 135 const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size); | |
| 136 DCHECK(IsValidConfig(format, new_coded_size, visible_rect, natural_size)); | |
| 137 | |
| 138 gpu::MailboxHolder mailboxes[kMaxPlanes]; | |
| 139 scoped_refptr<VideoFrame> frame( | |
| 140 new VideoFrame(format, new_coded_size, visible_rect, natural_size, | |
| 141 mailboxes, TEXTURE_RGBA, timestamp, false)); | |
| 142 frame->AllocateYUV(); | |
| 143 return frame; | |
| 144 } | 131 } |
| 145 | 132 |
| 146 // static | 133 // static |
| 147 std::string VideoFrame::FormatToString(VideoFrame::Format format) { | 134 std::string VideoFrame::FormatToString(Format format) { |
| 148 switch (format) { | 135 switch (format) { |
| 149 case VideoFrame::UNKNOWN: | 136 case UNKNOWN: |
| 150 return "UNKNOWN"; | 137 return "UNKNOWN"; |
| 151 case VideoFrame::YV12: | 138 case YV12: |
| 152 return "YV12"; | 139 return "YV12"; |
| 153 case VideoFrame::YV16: | 140 case YV16: |
| 154 return "YV16"; | 141 return "YV16"; |
| 155 case VideoFrame::I420: | 142 case I420: |
| 156 return "I420"; | 143 return "I420"; |
| 157 case VideoFrame::NATIVE_TEXTURE: | 144 case YV12A: |
| 158 return "NATIVE_TEXTURE"; | |
| 159 #if defined(VIDEO_HOLE) | |
| 160 case VideoFrame::HOLE: | |
| 161 return "HOLE"; | |
| 162 #endif // defined(VIDEO_HOLE) | |
| 163 case VideoFrame::YV12A: | |
| 164 return "YV12A"; | 145 return "YV12A"; |
| 165 case VideoFrame::NV12: | 146 case YV24: |
| 147 return "YV24"; |
| 148 case ARGB: |
| 149 return "ARGB"; |
| 150 case XRGB: |
| 151 return "XRGB"; |
| 152 #if defined(OS_MACOSX) || defined(OS_CHROMEOS) |
| 153 case NV12: |
| 166 return "NV12"; | 154 return "NV12"; |
| 167 case VideoFrame::YV24: | 155 #endif |
| 168 return "YV24"; | |
| 169 case VideoFrame::ARGB: | |
| 170 return "ARGB"; | |
| 171 } | 156 } |
| 172 NOTREACHED() << "Invalid videoframe format provided: " << format; | 157 NOTREACHED() << "Invalid VideoFrame format provided: " << format; |
| 173 return ""; | 158 return ""; |
| 174 } | 159 } |
| 175 | 160 |
| 176 // static | 161 // static |
| 177 bool VideoFrame::IsValidConfig(VideoFrame::Format format, | 162 bool VideoFrame::IsValidConfig(Format format, |
| 163 StorageType storage_type, |
| 178 const gfx::Size& coded_size, | 164 const gfx::Size& coded_size, |
| 179 const gfx::Rect& visible_rect, | 165 const gfx::Rect& visible_rect, |
| 180 const gfx::Size& natural_size) { | 166 const gfx::Size& natural_size) { |
| 181 // Check maximum limits for all formats. | 167 // Check maximum limits for all formats. |
| 182 if (coded_size.GetArea() > limits::kMaxCanvas || | 168 if (coded_size.GetArea() > limits::kMaxCanvas || |
| 183 coded_size.width() > limits::kMaxDimension || | 169 coded_size.width() > limits::kMaxDimension || |
| 184 coded_size.height() > limits::kMaxDimension || | 170 coded_size.height() > limits::kMaxDimension || |
| 185 visible_rect.x() < 0 || visible_rect.y() < 0 || | 171 visible_rect.x() < 0 || visible_rect.y() < 0 || |
| 186 visible_rect.right() > coded_size.width() || | 172 visible_rect.right() > coded_size.width() || |
| 187 visible_rect.bottom() > coded_size.height() || | 173 visible_rect.bottom() > coded_size.height() || |
| 188 natural_size.GetArea() > limits::kMaxCanvas || | 174 natural_size.GetArea() > limits::kMaxCanvas || |
| 189 natural_size.width() > limits::kMaxDimension || | 175 natural_size.width() > limits::kMaxDimension || |
| 190 natural_size.height() > limits::kMaxDimension) | 176 natural_size.height() > limits::kMaxDimension) |
| 191 return false; | 177 return false; |
| 192 | 178 |
| 179 // TODO(mcasas): Remove parameter |storage_type| when STORAGE_HOLE and |
| 180 // STORAGE_TEXTURE comply with the checks below. Right now we skip them. |
| 181 #if defined(VIDEO_HOLE) |
| 182 if (storage_type == STORAGE_HOLE) |
| 183 return true; |
| 184 #endif |
| 185 if (storage_type == STORAGE_TEXTURE) |
| 186 return true; |
| 187 |
| 193 // Check format-specific width/height requirements. | 188 // Check format-specific width/height requirements. |
| 194 switch (format) { | 189 switch (format) { |
| 195 case VideoFrame::UNKNOWN: | 190 case UNKNOWN: |
| 196 return (coded_size.IsEmpty() && visible_rect.IsEmpty() && | 191 return (coded_size.IsEmpty() && visible_rect.IsEmpty() && |
| 197 natural_size.IsEmpty()); | 192 natural_size.IsEmpty()); |
| 198 | 193 case YV24: |
| 199 // NATIVE_TEXTURE and HOLE have no software-allocated buffers and are | 194 case YV12: |
| 200 // allowed to skip the below check. | 195 case I420: |
| 201 case VideoFrame::NATIVE_TEXTURE: | 196 case YV12A: |
| 202 #if defined(VIDEO_HOLE) | 197 case YV16: |
| 203 case VideoFrame::HOLE: | 198 case ARGB: |
| 204 #endif // defined(VIDEO_HOLE) | 199 case XRGB: |
| 205 return true; | 200 #if defined(OS_MACOSX) || defined(OS_CHROMEOS) |
| 206 | 201 case NV12: |
| 207 case VideoFrame::YV24: | 202 #endif |
| 208 case VideoFrame::YV12: | |
| 209 case VideoFrame::I420: | |
| 210 case VideoFrame::YV12A: | |
| 211 case VideoFrame::NV12: | |
| 212 case VideoFrame::YV16: | |
| 213 case VideoFrame::ARGB: | |
| 214 // Check that software-allocated buffer formats are aligned correctly and | 203 // Check that software-allocated buffer formats are aligned correctly and |
| 215 // not empty. | 204 // not empty. |
| 216 const gfx::Size alignment = CommonAlignment(format); | 205 const gfx::Size alignment = CommonAlignment(format); |
| 217 return RoundUp(visible_rect.right(), alignment.width()) <= | 206 return RoundUp(visible_rect.right(), alignment.width()) <= |
| 218 static_cast<size_t>(coded_size.width()) && | 207 static_cast<size_t>(coded_size.width()) && |
| 219 RoundUp(visible_rect.bottom(), alignment.height()) <= | 208 RoundUp(visible_rect.bottom(), alignment.height()) <= |
| 220 static_cast<size_t>(coded_size.height()) && | 209 static_cast<size_t>(coded_size.height()) && |
| 221 !coded_size.IsEmpty() && !visible_rect.IsEmpty() && | 210 !coded_size.IsEmpty() && !visible_rect.IsEmpty() && |
| 222 !natural_size.IsEmpty(); | 211 !natural_size.IsEmpty(); |
| 223 } | 212 } |
| 224 | 213 |
| 214 // TODO(mcasas): Check that storage type and underlying mailboxes/dataptr are |
| 215 // matching. |
| 225 NOTREACHED(); | 216 NOTREACHED(); |
| 226 return false; | 217 return false; |
| 227 } | 218 } |
| 228 | 219 |
| 229 // static | 220 // static |
| 221 scoped_refptr<VideoFrame> VideoFrame::CreateFrame( |
| 222 Format format, |
| 223 const gfx::Size& coded_size, |
| 224 const gfx::Rect& visible_rect, |
| 225 const gfx::Size& natural_size, |
| 226 base::TimeDelta timestamp) { |
| 227 if (!IsYuvPlanar(format)) { |
| 228 NOTIMPLEMENTED(); |
| 229 return nullptr; |
| 230 } |
| 231 |
| 232 // Since we're creating a new YUV frame (and allocating memory for it |
| 233 // ourselves), we can pad the requested |coded_size| if the |
| 234 // request does not line up on sample boundaries. |
| 235 const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size); |
| 236 DCHECK(IsValidConfig(format, STORAGE_OWNED_MEMORY, new_coded_size, |
| 237 visible_rect, natural_size)); |
| 238 |
| 239 scoped_refptr<VideoFrame> frame( |
| 240 new VideoFrame(format, STORAGE_OWNED_MEMORY, new_coded_size, visible_rect, |
| 241 natural_size, timestamp, false)); |
| 242 frame->AllocateYUV(); |
| 243 return frame; |
| 244 } |
| 245 |
| 246 // static |
| 230 scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture( | 247 scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture( |
| 231 const gpu::MailboxHolder& mailbox_holder, | 248 const gpu::MailboxHolder& mailbox_holder, |
| 232 const ReleaseMailboxCB& mailbox_holder_release_cb, | 249 const ReleaseMailboxCB& mailbox_holder_release_cb, |
| 233 const gfx::Size& coded_size, | 250 const gfx::Size& coded_size, |
| 234 const gfx::Rect& visible_rect, | 251 const gfx::Rect& visible_rect, |
| 235 const gfx::Size& natural_size, | 252 const gfx::Size& natural_size, |
| 236 base::TimeDelta timestamp, | 253 base::TimeDelta timestamp, |
| 237 bool allow_overlay, | 254 bool allow_overlay, |
| 238 bool has_alpha) { | 255 bool has_alpha) { |
| 239 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; | 256 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; |
| 240 mailbox_holders[kARGBPlane] = mailbox_holder; | 257 mailbox_holders[kARGBPlane] = mailbox_holder; |
| 241 TextureFormat texture_format = has_alpha ? TEXTURE_RGBA : TEXTURE_RGB; | 258 Format texture_format = has_alpha ? ARGB : XRGB; |
| 242 scoped_refptr<VideoFrame> frame( | 259 scoped_refptr<VideoFrame> frame( |
| 243 new VideoFrame(NATIVE_TEXTURE, coded_size, visible_rect, natural_size, | 260 new VideoFrame(texture_format, STORAGE_TEXTURE, coded_size, visible_rect, |
| 244 mailbox_holders, texture_format, timestamp, false)); | 261 natural_size, mailbox_holders, timestamp, false)); |
| 245 frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb; | 262 frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb; |
| 246 frame->allow_overlay_ = allow_overlay; | 263 frame->allow_overlay_ = allow_overlay; |
| 247 return frame; | 264 return frame; |
| 248 } | 265 } |
| 249 | 266 |
| 250 // static | 267 // static |
| 251 scoped_refptr<VideoFrame> VideoFrame::WrapYUV420NativeTextures( | 268 scoped_refptr<VideoFrame> VideoFrame::WrapYUV420NativeTextures( |
| 252 const gpu::MailboxHolder& y_mailbox_holder, | 269 const gpu::MailboxHolder& y_mailbox_holder, |
| 253 const gpu::MailboxHolder& u_mailbox_holder, | 270 const gpu::MailboxHolder& u_mailbox_holder, |
| 254 const gpu::MailboxHolder& v_mailbox_holder, | 271 const gpu::MailboxHolder& v_mailbox_holder, |
| 255 const ReleaseMailboxCB& mailbox_holder_release_cb, | 272 const ReleaseMailboxCB& mailbox_holder_release_cb, |
| 256 const gfx::Size& coded_size, | 273 const gfx::Size& coded_size, |
| 257 const gfx::Rect& visible_rect, | 274 const gfx::Rect& visible_rect, |
| 258 const gfx::Size& natural_size, | 275 const gfx::Size& natural_size, |
| 259 base::TimeDelta timestamp, | 276 base::TimeDelta timestamp, |
| 260 bool allow_overlay) { | 277 bool allow_overlay) { |
| 261 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; | 278 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; |
| 262 mailbox_holders[kYPlane] = y_mailbox_holder; | 279 mailbox_holders[kYPlane] = y_mailbox_holder; |
| 263 mailbox_holders[kUPlane] = u_mailbox_holder; | 280 mailbox_holders[kUPlane] = u_mailbox_holder; |
| 264 mailbox_holders[kVPlane] = v_mailbox_holder; | 281 mailbox_holders[kVPlane] = v_mailbox_holder; |
| 265 scoped_refptr<VideoFrame> frame( | 282 scoped_refptr<VideoFrame> frame( |
| 266 new VideoFrame(NATIVE_TEXTURE, coded_size, visible_rect, natural_size, | 283 new VideoFrame(I420, STORAGE_TEXTURE, coded_size, visible_rect, |
| 267 mailbox_holders, TEXTURE_YUV_420, timestamp, false)); | 284 natural_size, mailbox_holders, timestamp, false)); |
| 268 frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb; | 285 frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb; |
| 269 frame->allow_overlay_ = allow_overlay; | 286 frame->allow_overlay_ = allow_overlay; |
| 270 return frame; | 287 return frame; |
| 271 } | 288 } |
| 272 | 289 |
| 273 // static | 290 // static |
| 274 scoped_refptr<VideoFrame> VideoFrame::WrapExternalPackedMemory( | 291 scoped_refptr<VideoFrame> VideoFrame::WrapExternalData( |
| 275 Format format, | 292 Format format, |
| 276 const gfx::Size& coded_size, | 293 const gfx::Size& coded_size, |
| 277 const gfx::Rect& visible_rect, | 294 const gfx::Rect& visible_rect, |
| 295 const gfx::Size& natural_size, |
| 296 uint8* data, |
| 297 size_t data_size, |
| 298 base::TimeDelta timestamp) { |
| 299 return WrapExternalStorage(format, STORAGE_UNOWNED_MEMORY, coded_size, |
| 300 visible_rect, natural_size, data, data_size, |
| 301 timestamp, base::SharedMemory::NULLHandle(), 0); |
| 302 } |
| 303 |
| 304 // static |
| 305 scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory( |
| 306 Format format, |
| 307 const gfx::Size& coded_size, |
| 308 const gfx::Rect& visible_rect, |
| 278 const gfx::Size& natural_size, | 309 const gfx::Size& natural_size, |
| 279 uint8* data, | 310 uint8* data, |
| 280 size_t data_size, | 311 size_t data_size, |
| 281 base::SharedMemoryHandle handle, | 312 base::SharedMemoryHandle handle, |
| 282 size_t data_offset, | 313 size_t data_offset, |
| 283 base::TimeDelta timestamp) { | 314 base::TimeDelta timestamp) { |
| 284 const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size); | 315 return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect, |
| 285 | 316 natural_size, data, data_size, timestamp, handle, |
| 286 if (!IsValidConfig(format, new_coded_size, visible_rect, natural_size)) | 317 data_offset); |
| 287 return NULL; | |
| 288 if (data_size < AllocationSize(format, new_coded_size)) | |
| 289 return NULL; | |
| 290 | |
| 291 switch (format) { | |
| 292 case VideoFrame::I420: { | |
| 293 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; | |
| 294 scoped_refptr<VideoFrame> frame( | |
| 295 new VideoFrame(format, new_coded_size, visible_rect, natural_size, | |
| 296 mailbox_holders, TEXTURE_RGBA, timestamp, false)); | |
| 297 frame->shared_memory_handle_ = handle; | |
| 298 frame->shared_memory_offset_ = data_offset; | |
| 299 frame->strides_[kYPlane] = new_coded_size.width(); | |
| 300 frame->strides_[kUPlane] = new_coded_size.width() / 2; | |
| 301 frame->strides_[kVPlane] = new_coded_size.width() / 2; | |
| 302 frame->data_[kYPlane] = data; | |
| 303 frame->data_[kUPlane] = data + new_coded_size.GetArea(); | |
| 304 frame->data_[kVPlane] = data + (new_coded_size.GetArea() * 5 / 4); | |
| 305 return frame; | |
| 306 } | |
| 307 default: | |
| 308 NOTIMPLEMENTED(); | |
| 309 return NULL; | |
| 310 } | |
| 311 } | 318 } |
| 312 | 319 |
| 313 // static | 320 // static |
| 314 scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData( | 321 scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData( |
| 315 Format format, | 322 Format format, |
| 316 const gfx::Size& coded_size, | 323 const gfx::Size& coded_size, |
| 317 const gfx::Rect& visible_rect, | 324 const gfx::Rect& visible_rect, |
| 318 const gfx::Size& natural_size, | 325 const gfx::Size& natural_size, |
| 319 int32 y_stride, | 326 int32 y_stride, |
| 320 int32 u_stride, | 327 int32 u_stride, |
| 321 int32 v_stride, | 328 int32 v_stride, |
| 322 uint8* y_data, | 329 uint8* y_data, |
| 323 uint8* u_data, | 330 uint8* u_data, |
| 324 uint8* v_data, | 331 uint8* v_data, |
| 325 base::TimeDelta timestamp) { | 332 base::TimeDelta timestamp) { |
| 326 const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size); | 333 const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size); |
| 327 CHECK(IsValidConfig(format, new_coded_size, visible_rect, natural_size)); | 334 CHECK(IsValidConfig(format, STORAGE_UNOWNED_MEMORY, new_coded_size, |
| 335 visible_rect, natural_size)); |
| 328 | 336 |
| 329 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; | |
| 330 scoped_refptr<VideoFrame> frame( | 337 scoped_refptr<VideoFrame> frame( |
| 331 new VideoFrame(format, new_coded_size, visible_rect, natural_size, | 338 new VideoFrame(format, STORAGE_UNOWNED_MEMORY, new_coded_size, |
| 332 mailbox_holders, TEXTURE_RGBA, timestamp, false)); | 339 visible_rect, natural_size, timestamp, false)); |
| 333 frame->strides_[kYPlane] = y_stride; | 340 frame->strides_[kYPlane] = y_stride; |
| 334 frame->strides_[kUPlane] = u_stride; | 341 frame->strides_[kUPlane] = u_stride; |
| 335 frame->strides_[kVPlane] = v_stride; | 342 frame->strides_[kVPlane] = v_stride; |
| 336 frame->data_[kYPlane] = y_data; | 343 frame->data_[kYPlane] = y_data; |
| 337 frame->data_[kUPlane] = u_data; | 344 frame->data_[kUPlane] = u_data; |
| 338 frame->data_[kVPlane] = v_data; | 345 frame->data_[kVPlane] = v_data; |
| 339 return frame; | 346 return frame; |
| 340 } | 347 } |
| 341 | 348 |
| 342 #if defined(OS_POSIX) | 349 #if defined(OS_LINUX) |
| 343 // static | 350 // static |
| 344 scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs( | 351 scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs( |
| 345 Format format, | 352 Format format, |
| 346 const gfx::Size& coded_size, | 353 const gfx::Size& coded_size, |
| 347 const gfx::Rect& visible_rect, | 354 const gfx::Rect& visible_rect, |
| 348 const gfx::Size& natural_size, | 355 const gfx::Size& natural_size, |
| 349 const std::vector<int> dmabuf_fds, | 356 const std::vector<int> dmabuf_fds, |
| 350 base::TimeDelta timestamp) { | 357 base::TimeDelta timestamp) { |
| 351 if (!IsValidConfig(format, coded_size, visible_rect, natural_size)) | 358 if (!IsValidConfig(format, STORAGE_DMABUFS, coded_size, visible_rect, |
| 359 natural_size)) { |
| 352 return NULL; | 360 return NULL; |
| 361 } |
| 353 | 362 |
| 354 // TODO(posciak): This is not exactly correct, it's possible for one | 363 // TODO(posciak): This is not exactly correct, it's possible for one |
| 355 // buffer to contain more than one plane. | 364 // buffer to contain more than one plane. |
| 356 if (dmabuf_fds.size() != NumPlanes(format)) { | 365 if (dmabuf_fds.size() != NumPlanes(format)) { |
| 357 LOG(FATAL) << "Not enough dmabuf fds provided!"; | 366 LOG(FATAL) << "Not enough dmabuf fds provided!"; |
| 358 return NULL; | 367 return NULL; |
| 359 } | 368 } |
| 360 | 369 |
| 361 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; | 370 DCHECK_EQ(format, ARGB); |
| 362 scoped_refptr<VideoFrame> frame( | 371 scoped_refptr<VideoFrame> frame( |
| 363 new VideoFrame(format, coded_size, visible_rect, natural_size, | 372 new VideoFrame(format, STORAGE_DMABUFS, coded_size, visible_rect, |
| 364 mailbox_holders, TEXTURE_RGBA, timestamp, false)); | 373 natural_size, timestamp, false)); |
| 365 | 374 |
| 366 for (size_t i = 0; i < dmabuf_fds.size(); ++i) { | 375 for (size_t i = 0; i < dmabuf_fds.size(); ++i) { |
| 367 int duped_fd = HANDLE_EINTR(dup(dmabuf_fds[i])); | 376 int duped_fd = HANDLE_EINTR(dup(dmabuf_fds[i])); |
| 368 if (duped_fd == -1) { | 377 if (duped_fd == -1) { |
| 369 // The fds already duped in previous iterations will be closed when | 378 // The fds already duped in previous iterations will be closed when |
| 370 // the partially-created frame drops out of scope here. | 379 // the partially-created frame drops out of scope here. |
| 371 DLOG(ERROR) << "Failed duplicating a dmabuf fd"; | 380 DLOG(ERROR) << "Failed duplicating a dmabuf fd"; |
| 372 return NULL; | 381 return NULL; |
| 373 } | 382 } |
| 374 | 383 |
| (...skipping 12 matching lines...) |
| 387 scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer( | 396 scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer( |
| 388 CVPixelBufferRef cv_pixel_buffer, | 397 CVPixelBufferRef cv_pixel_buffer, |
| 389 base::TimeDelta timestamp) { | 398 base::TimeDelta timestamp) { |
| 390 DCHECK(cv_pixel_buffer); | 399 DCHECK(cv_pixel_buffer); |
| 391 DCHECK(CFGetTypeID(cv_pixel_buffer) == CVPixelBufferGetTypeID()); | 400 DCHECK(CFGetTypeID(cv_pixel_buffer) == CVPixelBufferGetTypeID()); |
| 392 | 401 |
| 393 const OSType cv_format = CVPixelBufferGetPixelFormatType(cv_pixel_buffer); | 402 const OSType cv_format = CVPixelBufferGetPixelFormatType(cv_pixel_buffer); |
| 394 Format format; | 403 Format format; |
| 395 // There are very few compatible CV pixel formats, so just check each. | 404 // There are very few compatible CV pixel formats, so just check each. |
| 396 if (cv_format == kCVPixelFormatType_420YpCbCr8Planar) { | 405 if (cv_format == kCVPixelFormatType_420YpCbCr8Planar) { |
| 397 format = Format::I420; | 406 format = I420; |
| 398 } else if (cv_format == kCVPixelFormatType_444YpCbCr8) { | 407 } else if (cv_format == kCVPixelFormatType_444YpCbCr8) { |
| 399 format = Format::YV24; | 408 format = YV24; |
| 400 } else if (cv_format == '420v') { | 409 } else if (cv_format == '420v') { |
| 401 // TODO(jfroy): Use kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange when the | 410 // TODO(jfroy): Use kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange when the |
| 402 // minimum OS X and iOS SDKs permit it. | 411 // minimum OS X and iOS SDKs permit it. |
| 403 format = Format::NV12; | 412 format = NV12; |
| 404 } else { | 413 } else { |
| 405 DLOG(ERROR) << "CVPixelBuffer format not supported: " << cv_format; | 414 DLOG(ERROR) << "CVPixelBuffer format not supported: " << cv_format; |
| 406 return NULL; | 415 return NULL; |
| 407 } | 416 } |
| 408 | 417 |
| 409 const gfx::Size coded_size(CVImageBufferGetEncodedSize(cv_pixel_buffer)); | 418 const gfx::Size coded_size(CVImageBufferGetEncodedSize(cv_pixel_buffer)); |
| 410 const gfx::Rect visible_rect(CVImageBufferGetCleanRect(cv_pixel_buffer)); | 419 const gfx::Rect visible_rect(CVImageBufferGetCleanRect(cv_pixel_buffer)); |
| 411 const gfx::Size natural_size(CVImageBufferGetDisplaySize(cv_pixel_buffer)); | 420 const gfx::Size natural_size(CVImageBufferGetDisplaySize(cv_pixel_buffer)); |
| 412 | 421 |
| 413 if (!IsValidConfig(format, coded_size, visible_rect, natural_size)) | 422 if (!IsValidConfig(format, STORAGE_UNOWNED_MEMORY, coded_size, visible_rect, |
| 423 natural_size)) { |
| 414 return NULL; | 424 return NULL; |
| 425 } |
| 415 | 426 |
| 416 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; | |
| 417 scoped_refptr<VideoFrame> frame( | 427 scoped_refptr<VideoFrame> frame( |
| 418 new VideoFrame(format, coded_size, visible_rect, natural_size, | 428 new VideoFrame(format, STORAGE_UNOWNED_MEMORY, coded_size, visible_rect, |
| 419 mailbox_holders, TEXTURE_RGBA, timestamp, false)); | 429 natural_size, timestamp, false)); |
| 420 | 430 |
| 421 frame->cv_pixel_buffer_.reset(cv_pixel_buffer, base::scoped_policy::RETAIN); | 431 frame->cv_pixel_buffer_.reset(cv_pixel_buffer, base::scoped_policy::RETAIN); |
| 422 return frame; | 432 return frame; |
| 423 } | 433 } |
| 424 #endif | 434 #endif |
| 425 | 435 |
| 426 // static | 436 // static |
| 427 scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame( | 437 scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame( |
| 428 const scoped_refptr<VideoFrame>& frame, | 438 const scoped_refptr<VideoFrame>& frame, |
| 429 const gfx::Rect& visible_rect, | 439 const gfx::Rect& visible_rect, |
| 430 const gfx::Size& natural_size) { | 440 const gfx::Size& natural_size) { |
| 431 // NATIVE_TEXTURE frames need mailbox info propagated, and there's no support | 441 // STORAGE_TEXTURE frames need mailbox info propagated, and there's no support |
| 432 // for that here yet, see http://crbug/362521. | 442 // for that here yet, see http://crbug/362521. |
| 433 CHECK_NE(frame->format(), NATIVE_TEXTURE); | 443 CHECK_NE(frame->storage_type(), STORAGE_TEXTURE); |
| 434 | 444 |
| 435 DCHECK(frame->visible_rect().Contains(visible_rect)); | 445 DCHECK(frame->visible_rect().Contains(visible_rect)); |
| 436 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; | |
| 437 scoped_refptr<VideoFrame> wrapped_frame( | 446 scoped_refptr<VideoFrame> wrapped_frame( |
| 438 new VideoFrame(frame->format(), frame->coded_size(), visible_rect, | 447 new VideoFrame(frame->format(), frame->storage_type(), |
| 439 natural_size, mailbox_holders, TEXTURE_RGBA, | 448 frame->coded_size(), visible_rect, natural_size, |
| 440 frame->timestamp(), frame->end_of_stream())); | 449 frame->timestamp(), frame->end_of_stream())); |
| 441 | 450 |
| 442 for (size_t i = 0; i < NumPlanes(frame->format()); ++i) { | 451 for (size_t i = 0; i < NumPlanes(frame->format()); ++i) { |
| 443 wrapped_frame->strides_[i] = frame->stride(i); | 452 wrapped_frame->strides_[i] = frame->stride(i); |
| 444 wrapped_frame->data_[i] = frame->data(i); | 453 wrapped_frame->data_[i] = frame->data(i); |
| 445 } | 454 } |
| 446 | 455 |
| 447 return wrapped_frame; | 456 return wrapped_frame; |
| 448 } | 457 } |
| 449 | 458 |
| 450 // static | 459 // static |
| 451 scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() { | 460 scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() { |
| 452 gpu::MailboxHolder mailbox_holders[kMaxPlanes]; | 461 return new VideoFrame(UNKNOWN, STORAGE_UNKNOWN, gfx::Size(), gfx::Rect(), |
| 453 return new VideoFrame(VideoFrame::UNKNOWN, gfx::Size(), gfx::Rect(), | 462 gfx::Size(), kNoTimestamp(), true); |
| 454 gfx::Size(), mailbox_holders, TEXTURE_RGBA, | |
| 455 kNoTimestamp(), true); | |
| 456 } | 463 } |
| 457 | 464 |
| 458 // static | 465 // static |
| 459 scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame( | 466 scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame( |
| 460 const gfx::Size& size, | 467 const gfx::Size& size, |
| 461 uint8 y, uint8 u, uint8 v, | 468 uint8 y, uint8 u, uint8 v, |
| 462 base::TimeDelta timestamp) { | 469 base::TimeDelta timestamp) { |
| 463 scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame( | 470 scoped_refptr<VideoFrame> frame = |
| 464 VideoFrame::YV12, size, gfx::Rect(size), size, timestamp); | 471 CreateFrame(YV12, size, gfx::Rect(size), size, timestamp); |
| 465 FillYUV(frame.get(), y, u, v); | 472 FillYUV(frame.get(), y, u, v); |
| 466 return frame; | 473 return frame; |
| 467 } | 474 } |
| 468 | 475 |
| 469 // static | 476 // static |
| 470 scoped_refptr<VideoFrame> VideoFrame::CreateBlackFrame(const gfx::Size& size) { | 477 scoped_refptr<VideoFrame> VideoFrame::CreateBlackFrame(const gfx::Size& size) { |
| 471 const uint8 kBlackY = 0x00; | 478 const uint8 kBlackY = 0x00; |
| 472 const uint8 kBlackUV = 0x80; | 479 const uint8 kBlackUV = 0x80; |
| 473 const base::TimeDelta kZero; | 480 const base::TimeDelta kZero; |
| 474 return CreateColorFrame(size, kBlackY, kBlackUV, kBlackUV, kZero); | 481 return CreateColorFrame(size, kBlackY, kBlackUV, kBlackUV, kZero); |
| 475 } | 482 } |
| 476 | 483 |
| 477 // static | 484 // static |
| 478 scoped_refptr<VideoFrame> VideoFrame::CreateTransparentFrame( | 485 scoped_refptr<VideoFrame> VideoFrame::CreateTransparentFrame( |
| 479 const gfx::Size& size) { | 486 const gfx::Size& size) { |
| 480 const uint8 kBlackY = 0x00; | 487 const uint8 kBlackY = 0x00; |
| 481 const uint8 kBlackUV = 0x00; | 488 const uint8 kBlackUV = 0x00; |
| 482 const uint8 kTransparentA = 0x00; | 489 const uint8 kTransparentA = 0x00; |
| 483 const base::TimeDelta kZero; | 490 const base::TimeDelta kZero; |
| 484 scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame( | 491 scoped_refptr<VideoFrame> frame = |
| 485 VideoFrame::YV12A, size, gfx::Rect(size), size, kZero); | 492 CreateFrame(YV12A, size, gfx::Rect(size), size, kZero); |
| 486 FillYUVA(frame.get(), kBlackY, kBlackUV, kBlackUV, kTransparentA); | 493 FillYUVA(frame.get(), kBlackY, kBlackUV, kBlackUV, kTransparentA); |
| 487 return frame; | 494 return frame; |
| 488 } | 495 } |
| 489 | 496 |
| 490 #if defined(VIDEO_HOLE) | 497 #if defined(VIDEO_HOLE) |
| 491 // This block and other blocks wrapped around #if defined(VIDEO_HOLE) are not | 498 // This block and other blocks wrapped around #if defined(VIDEO_HOLE) are not |
| 492 // maintained by the general compositor team. Please contact the following | 499 // maintained by the general compositor team. Please contact |
| 493 // people instead: | 500 // wonsik@chromium.org. |
| 494 // | |
| 495 // wonsik@chromium.org | |
| 496 // ycheo@chromium.org | |
| 497 | 501 |
| 498 // static | 502 // static |
| 499 scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame( | 503 scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame( |
| 500 const gfx::Size& size) { | 504 const gfx::Size& size) { |
| 501 DCHECK(IsValidConfig(VideoFrame::HOLE, size, gfx::Rect(size), size)); | 505 DCHECK(IsValidConfig(UNKNOWN, STORAGE_HOLE, size, gfx::Rect(size), size)); |
| 502 gpu::MailboxHolder mailboxes[kMaxPlanes]; | |
| 503 scoped_refptr<VideoFrame> frame( | 506 scoped_refptr<VideoFrame> frame( |
| 504 new VideoFrame(VideoFrame::HOLE, size, gfx::Rect(size), size, mailboxes, | 507 new VideoFrame(UNKNOWN, STORAGE_HOLE, size, gfx::Rect(size), size, |
| 505 TEXTURE_RGBA, base::TimeDelta(), false)); | 508 base::TimeDelta(), false)); |
| 506 return frame; | 509 return frame; |
| 507 } | 510 } |
| 508 #endif // defined(VIDEO_HOLE) | 511 #endif // defined(VIDEO_HOLE) |
| 509 | 512 |
| 510 // static | 513 // static |
| 511 size_t VideoFrame::NumPlanes(Format format) { | 514 size_t VideoFrame::NumPlanes(Format format) { |
| 512 switch (format) { | 515 switch (format) { |
| 513 case VideoFrame::NATIVE_TEXTURE: | 516 case ARGB: |
| 514 #if defined(VIDEO_HOLE) | 517 case XRGB: |
| 515 case VideoFrame::HOLE: | |
| 516 #endif // defined(VIDEO_HOLE) | |
| 517 return 0; | |
| 518 case VideoFrame::ARGB: | |
| 519 return 1; | 518 return 1; |
| 520 case VideoFrame::NV12: | 519 #if defined(OS_MACOSX) || defined(OS_CHROMEOS) |
| 520 case NV12: |
| 521 return 2; | 521 return 2; |
| 522 case VideoFrame::YV12: | 522 #endif |
| 523 case VideoFrame::YV16: | 523 case YV12: |
| 524 case VideoFrame::I420: | 524 case YV16: |
| 525 case VideoFrame::YV24: | 525 case I420: |
| 526 case YV24: |
| 526 return 3; | 527 return 3; |
| 527 case VideoFrame::YV12A: | 528 case YV12A: |
| 528 return 4; | 529 return 4; |
| 529 case VideoFrame::UNKNOWN: | 530 case UNKNOWN: |
| 530 break; | 531 break; |
| 531 } | 532 } |
| 532 NOTREACHED() << "Unsupported video frame format: " << format; | 533 NOTREACHED() << "Unsupported video frame format: " << format; |
| 533 return 0; | 534 return 0; |
| 534 } | 535 } |
| 535 | 536 |
| 536 // static | 537 // static |
| 537 size_t VideoFrame::NumTextures(TextureFormat texture_format) { | |
| 538 switch (texture_format) { | |
| 539 case TEXTURE_RGBA: | |
| 540 case TEXTURE_RGB: | |
| 541 return 1; | |
| 542 case TEXTURE_YUV_420: | |
| 543 return 3; | |
| 544 } | |
| 545 | |
| 546 NOTREACHED(); | |
| 547 return 0; | |
| 548 } | |
| 549 | |
| 550 // static | |
| 551 size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) { | 538 size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) { |
| 552 size_t total = 0; | 539 size_t total = 0; |
| 553 for (size_t i = 0; i < NumPlanes(format); ++i) | 540 for (size_t i = 0; i < NumPlanes(format); ++i) |
| 554 total += PlaneAllocationSize(format, i, coded_size); | 541 total += PlaneAllocationSize(format, i, coded_size); |
| 555 return total; | 542 return total; |
| 556 } | 543 } |
| 557 | 544 |
| 558 // static | 545 // static |
| 559 gfx::Size VideoFrame::PlaneSize(Format format, | 546 gfx::Size VideoFrame::PlaneSize(Format format, |
| 560 size_t plane, | 547 size_t plane, |
| 561 const gfx::Size& coded_size) { | 548 const gfx::Size& coded_size) { |
| 562 DCHECK(IsValidPlane(plane, format)); | 549 DCHECK(IsValidPlane(plane, format)); |
| 563 | 550 |
| 564 int width = coded_size.width(); | 551 int width = coded_size.width(); |
| 565 int height = coded_size.height(); | 552 int height = coded_size.height(); |
| 566 if (format != VideoFrame::ARGB) { | 553 if (format != ARGB) { |
| 567 // Align to multiple-of-two size overall. This ensures that non-subsampled | 554 // Align to multiple-of-two size overall. This ensures that non-subsampled |
| 568 // planes can be addressed by pixel with the same scaling as the subsampled | 555 // planes can be addressed by pixel with the same scaling as the subsampled |
| 569 // planes. | 556 // planes. |
| 570 width = RoundUp(width, 2); | 557 width = RoundUp(width, 2); |
| 571 height = RoundUp(height, 2); | 558 height = RoundUp(height, 2); |
| 572 } | 559 } |
| 573 | 560 |
| 574 const gfx::Size subsample = SampleSize(format, plane); | 561 const gfx::Size subsample = SampleSize(format, plane); |
| 575 DCHECK(width % subsample.width() == 0); | 562 DCHECK(width % subsample.width() == 0); |
| 576 DCHECK(height % subsample.height() == 0); | 563 DCHECK(height % subsample.height() == 0); |
| (...skipping 16 matching lines...) |
| 593 return bits_per_element / horiz_pixels_per_element; | 580 return bits_per_element / horiz_pixels_per_element; |
| 594 } | 581 } |
| 595 | 582 |
| 596 // static | 583 // static |
| 597 int VideoFrame::PlaneBitsPerPixel(Format format, size_t plane) { | 584 int VideoFrame::PlaneBitsPerPixel(Format format, size_t plane) { |
| 598 DCHECK(IsValidPlane(plane, format)); | 585 DCHECK(IsValidPlane(plane, format)); |
| 599 return PlaneHorizontalBitsPerPixel(format, plane) / | 586 return PlaneHorizontalBitsPerPixel(format, plane) / |
| 600 SampleSize(format, plane).height(); | 587 SampleSize(format, plane).height(); |
| 601 } | 588 } |
| 602 | 589 |
| 603 // Release data allocated by AllocateYUV(). | |
| 604 static void ReleaseData(uint8* data) { | |
| 605 DCHECK(data); | |
| 606 base::AlignedFree(data); | |
| 607 } | |
| 608 | |
| 609 void VideoFrame::AllocateYUV() { | 590 void VideoFrame::AllocateYUV() { |
| 610 DCHECK(format_ == YV12 || format_ == YV16 || format_ == YV12A || | 591 DCHECK_EQ(storage_type_, STORAGE_OWNED_MEMORY); |
| 611 format_ == I420 || format_ == YV24); | |
| 612 static_assert(0 == kYPlane, "y plane data must be index 0"); | 592 static_assert(0 == kYPlane, "y plane data must be index 0"); |
| 613 | 593 |
| 614 size_t data_size = 0; | 594 size_t data_size = 0; |
| 615 size_t offset[kMaxPlanes]; | 595 size_t offset[kMaxPlanes]; |
| 616 for (size_t plane = 0; plane < VideoFrame::NumPlanes(format_); ++plane) { | 596 for (size_t plane = 0; plane < NumPlanes(format_); ++plane) { |
| 617 // The *2 in alignment for height is because some formats (e.g. h264) allow | 597 // The *2 in alignment for height is because some formats (e.g. h264) allow |
| 618 // interlaced coding, and then the size needs to be a multiple of two | 598 // interlaced coding, and then the size needs to be a multiple of two |
| 619 // macroblocks (vertically). See | 599 // macroblocks (vertically). See |
| 620 // libavcodec/utils.c:avcodec_align_dimensions2(). | 600 // libavcodec/utils.c:avcodec_align_dimensions2(). |
| 621 const size_t height = RoundUp(rows(plane), kFrameSizeAlignment * 2); | 601 const size_t height = RoundUp(rows(plane), kFrameSizeAlignment * 2); |
| 622 strides_[plane] = RoundUp(row_bytes(plane), kFrameSizeAlignment); | 602 strides_[plane] = RoundUp(row_bytes(plane), kFrameSizeAlignment); |
| 623 offset[plane] = data_size; | 603 offset[plane] = data_size; |
| 624 data_size += height * strides_[plane]; | 604 data_size += height * strides_[plane]; |
| 625 } | 605 } |
| 626 | 606 |
| 627 // The extra line of UV being allocated is because h264 chroma MC | 607 // The extra line of UV being allocated is because h264 chroma MC |
| 628 // overreads by one line in some cases, see libavcodec/utils.c: | 608 // overreads by one line in some cases, see libavcodec/utils.c: |
| 629 // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm: | 609 // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm: |
| 630 // put_h264_chroma_mc4_ssse3(). | 610 // put_h264_chroma_mc4_ssse3(). |
| 631 DCHECK(IsValidPlane(kUPlane, format_)); | 611 DCHECK(IsValidPlane(kUPlane, format_)); |
| 632 data_size += strides_[kUPlane] + kFrameSizePadding; | 612 data_size += strides_[kUPlane] + kFrameSizePadding; |
| 633 | 613 |
| 634 // FFmpeg expects the initial allocation to be zero-initialized. Failure | 614 // FFmpeg expects the initial allocation to be zero-initialized. Failure |
| 635 // to do so can lead to uninitialized value usage. See http://crbug.com/390941 | 615 // to do so can lead to uninitialized value usage. See http://crbug.com/390941 |
| 636 uint8* data = reinterpret_cast<uint8*>( | 616 uint8* data = reinterpret_cast<uint8*>( |
| 637 base::AlignedAlloc(data_size, kFrameAddressAlignment)); | 617 base::AlignedAlloc(data_size, kFrameAddressAlignment)); |
| 638 memset(data, 0, data_size); | 618 memset(data, 0, data_size); |
| 639 | 619 |
| 640 for (size_t plane = 0; plane < VideoFrame::NumPlanes(format_); ++plane) | 620 for (size_t plane = 0; plane < NumPlanes(format_); ++plane) |
| 641 data_[plane] = data + offset[plane]; | 621 data_[plane] = data + offset[plane]; |
| 642 | 622 |
| 643 AddDestructionObserver(base::Bind(&ReleaseData, data)); | 623 AddDestructionObserver(base::Bind(&ReleaseData, data)); |
| 644 } | 624 } |
| 645 | 625 |
| 646 VideoFrame::VideoFrame(VideoFrame::Format format, | 626 VideoFrame::VideoFrame(Format format, |
| 627 StorageType storage_type, |
| 647 const gfx::Size& coded_size, | 628 const gfx::Size& coded_size, |
| 648 const gfx::Rect& visible_rect, | 629 const gfx::Rect& visible_rect, |
| 649 const gfx::Size& natural_size, | 630 const gfx::Size& natural_size, |
| 650 const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes], | |
| 651 VideoFrame::TextureFormat texture_format, | |
| 652 base::TimeDelta timestamp, | 631 base::TimeDelta timestamp, |
| 653 bool end_of_stream) | 632 bool end_of_stream) |
| 654 : format_(format), | 633 : format_(format), |
| 655 texture_format_(texture_format), | 634 storage_type_(storage_type), |
| 656 coded_size_(coded_size), | 635 coded_size_(coded_size), |
| 657 visible_rect_(visible_rect), | 636 visible_rect_(visible_rect), |
| 658 natural_size_(natural_size), | 637 natural_size_(natural_size), |
| 659 shared_memory_handle_(base::SharedMemory::NULLHandle()), | 638 shared_memory_handle_(base::SharedMemory::NULLHandle()), |
| 660 shared_memory_offset_(0), | 639 shared_memory_offset_(0), |
| 661 timestamp_(timestamp), | 640 timestamp_(timestamp), |
| 662 release_sync_point_(0), | 641 release_sync_point_(0), |
| 663 end_of_stream_(end_of_stream), | 642 end_of_stream_(end_of_stream), |
| 664 allow_overlay_(false) { | 643 allow_overlay_(false) { |
| 665 DCHECK(IsValidConfig(format_, coded_size_, visible_rect_, natural_size_)); | 644 DCHECK(IsValidConfig(format_, storage_type, coded_size_, visible_rect_, |
| 666 memcpy(&mailbox_holders_, mailbox_holders, sizeof(mailbox_holders_)); | 645 natural_size_)); |
| 646 memset(&mailbox_holders_, 0, sizeof(mailbox_holders_)); |
| 667 memset(&strides_, 0, sizeof(strides_)); | 647 memset(&strides_, 0, sizeof(strides_)); |
| 668 memset(&data_, 0, sizeof(data_)); | 648 memset(&data_, 0, sizeof(data_)); |
| 669 } | 649 } |
| 670 | 650 |
| 651 VideoFrame::VideoFrame(Format format, |
| 652 StorageType storage_type, |
| 653 const gfx::Size& coded_size, |
| 654 const gfx::Rect& visible_rect, |
| 655 const gfx::Size& natural_size, |
| 656 base::TimeDelta timestamp, |
| 657 bool end_of_stream, |
| 658 base::SharedMemoryHandle handle, |
| 659 size_t shared_memory_offset) |
| 660 : VideoFrame(format, storage_type, coded_size, visible_rect, natural_size, |
| 661 timestamp, end_of_stream) { |
| 662 shared_memory_handle_ = handle; |
| 663 shared_memory_offset_ = shared_memory_offset; |
| 664 } |
| 665 |
| 666 VideoFrame::VideoFrame(Format format, |
| 667 StorageType storage_type, |
| 668 const gfx::Size& coded_size, |
| 669 const gfx::Rect& visible_rect, |
| 670 const gfx::Size& natural_size, |
| 671 const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes], |
| 672 base::TimeDelta timestamp, |
| 673 bool end_of_stream) |
| 674 : VideoFrame(format, storage_type, coded_size, visible_rect, natural_size, |
| 675 timestamp, end_of_stream) { |
| 676 memcpy(&mailbox_holders_, mailbox_holders, sizeof(mailbox_holders_)); |
| 677 } |
| 678 |
| 671 VideoFrame::~VideoFrame() { | 679 VideoFrame::~VideoFrame() { |
| 672 if (!mailbox_holders_release_cb_.is_null()) { | 680 if (!mailbox_holders_release_cb_.is_null()) { |
| 673 uint32 release_sync_point; | 681 uint32 release_sync_point; |
| 674 { | 682 { |
| 675 // To ensure that changes to |release_sync_point_| are visible on this | 683 // To ensure that changes to |release_sync_point_| are visible on this |
| 676 // thread (imply a memory barrier). | 684 // thread (imply a memory barrier). |
| 677 base::AutoLock locker(release_sync_point_lock_); | 685 base::AutoLock locker(release_sync_point_lock_); |
| 678 release_sync_point = release_sync_point_; | 686 release_sync_point = release_sync_point_; |
| 679 } | 687 } |
| 680 base::ResetAndReturn(&mailbox_holders_release_cb_).Run(release_sync_point); | 688 base::ResetAndReturn(&mailbox_holders_release_cb_).Run(release_sync_point); |
| 681 } | 689 } |
| 682 | 690 |
| 683 for (auto& callback : done_callbacks_) | 691 for (auto& callback : done_callbacks_) |
| 684 base::ResetAndReturn(&callback).Run(); | 692 base::ResetAndReturn(&callback).Run(); |
| 685 } | 693 } |
| 686 | 694 |
| 687 // static | 695 // static |
| 688 bool VideoFrame::IsValidPlane(size_t plane, VideoFrame::Format format) { | 696 scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage( |
| 697 Format format, |
| 698 StorageType storage_type, |
| 699 const gfx::Size& coded_size, |
| 700 const gfx::Rect& visible_rect, |
| 701 const gfx::Size& natural_size, |
| 702 uint8* data, |
| 703 size_t data_size, |
| 704 base::TimeDelta timestamp, |
| 705 base::SharedMemoryHandle handle, |
| 706 size_t data_offset) { |
| 707 const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size); |
| 708 |
| 709 if (!IsValidConfig(format, storage_type, new_coded_size, visible_rect, |
| 710 natural_size) || |
| 711 data_size < AllocationSize(format, new_coded_size)) { |
| 712 return NULL; |
| 713 } |
| 714 DLOG_IF(ERROR, format != I420) << "Only I420 format supported: " |
| 715 << FormatToString(format); |
| 716 if (format != I420) |
| 717 return NULL; |
| 718 |
| 719 scoped_refptr<VideoFrame> frame; |
| 720 if (storage_type == STORAGE_SHMEM) { |
| 721 frame = new VideoFrame(format, storage_type, new_coded_size, visible_rect, |
| 722 natural_size, timestamp, false, handle, data_offset); |
| 723 } else { |
| 724 frame = new VideoFrame(format, storage_type, new_coded_size, visible_rect, |
| 725 natural_size, timestamp, false); |
| 726 } |
| 727 frame->strides_[kYPlane] = new_coded_size.width(); |
| 728 frame->strides_[kUPlane] = new_coded_size.width() / 2; |
| 729 frame->strides_[kVPlane] = new_coded_size.width() / 2; |
| 730 frame->data_[kYPlane] = data; |
| 731 frame->data_[kUPlane] = data + new_coded_size.GetArea(); |
| 732 frame->data_[kVPlane] = data + (new_coded_size.GetArea() * 5 / 4); |
| 733 return frame; |
| 734 } |
| 735 |
| 736 // static |
| 737 bool VideoFrame::IsValidPlane(size_t plane, Format format) { |
| 689 return (plane < NumPlanes(format)); | 738 return (plane < NumPlanes(format)); |
| 690 } | 739 } |
| 691 | 740 |
| 692 int VideoFrame::stride(size_t plane) const { | 741 int VideoFrame::stride(size_t plane) const { |
| 693 DCHECK(IsValidPlane(plane, format_)); | 742 DCHECK(IsValidPlane(plane, format_)); |
| 694 return strides_[plane]; | 743 return strides_[plane]; |
| 695 } | 744 } |
| 696 | 745 |
| 697 // static | 746 // static |
| 698 size_t VideoFrame::RowBytes(size_t plane, | 747 size_t VideoFrame::RowBytes(size_t plane, Format format, int width) { |
| 699 VideoFrame::Format format, | |
| 700 int width) { | |
| 701 DCHECK(IsValidPlane(plane, format)); | 748 DCHECK(IsValidPlane(plane, format)); |
| 702 return BytesPerElement(format, plane) * Columns(plane, format, width); | 749 return BytesPerElement(format, plane) * Columns(plane, format, width); |
| 703 } | 750 } |
| 704 | 751 |
| 705 int VideoFrame::row_bytes(size_t plane) const { | 752 int VideoFrame::row_bytes(size_t plane) const { |
| 706 return RowBytes(plane, format_, coded_size_.width()); | 753 return RowBytes(plane, format_, coded_size_.width()); |
| 707 } | 754 } |
| 708 | 755 |
| 709 // static | 756 // static |
| 710 size_t VideoFrame::Rows(size_t plane, VideoFrame::Format format, int height) { | 757 size_t VideoFrame::Rows(size_t plane, Format format, int height) { |
| 711 DCHECK(IsValidPlane(plane, format)); | 758 DCHECK(IsValidPlane(plane, format)); |
| 712 const int sample_height = SampleSize(format, plane).height(); | 759 const int sample_height = SampleSize(format, plane).height(); |
| 713 return RoundUp(height, sample_height) / sample_height; | 760 return RoundUp(height, sample_height) / sample_height; |
| 714 } | 761 } |
| 715 | 762 |
| 716 // static | 763 // static |
| 717 size_t VideoFrame::Columns(size_t plane, Format format, int width) { | 764 size_t VideoFrame::Columns(size_t plane, Format format, int width) { |
| 718 DCHECK(IsValidPlane(plane, format)); | 765 DCHECK(IsValidPlane(plane, format)); |
| 719 const int sample_width = SampleSize(format, plane).width(); | 766 const int sample_width = SampleSize(format, plane).width(); |
| 720 return RoundUp(width, sample_width) / sample_width; | 767 return RoundUp(width, sample_width) / sample_width; |
| 721 } | 768 } |
| 722 | 769 |
| 723 int VideoFrame::rows(size_t plane) const { | 770 int VideoFrame::rows(size_t plane) const { |
| 724 return Rows(plane, format_, coded_size_.height()); | 771 return Rows(plane, format_, coded_size_.height()); |
| 725 } | 772 } |
| 726 | 773 |
| 727 const uint8* VideoFrame::data(size_t plane) const { | 774 const uint8* VideoFrame::data(size_t plane) const { |
| 728 DCHECK(IsValidPlane(plane, format_)); | 775 DCHECK(IsValidPlane(plane, format_)); |
| 776 DCHECK(IsMappable(storage_type_)); |
| 729 return data_[plane]; | 777 return data_[plane]; |
| 730 } | 778 } |
| 731 | 779 |
| 732 uint8* VideoFrame::data(size_t plane) { | 780 uint8* VideoFrame::data(size_t plane) { |
| 733 DCHECK(IsValidPlane(plane, format_)); | 781 DCHECK(IsValidPlane(plane, format_)); |
| 782 DCHECK(IsMappable(storage_type_)); |
| 734 return data_[plane]; | 783 return data_[plane]; |
| 735 } | 784 } |
| 736 | 785 |
| 737 const uint8* VideoFrame::visible_data(size_t plane) const { | 786 const uint8* VideoFrame::visible_data(size_t plane) const { |
| 738 DCHECK(IsValidPlane(plane, format_)); | 787 DCHECK(IsValidPlane(plane, format_)); |
| 788 DCHECK(IsMappable(storage_type_)); |
| 739 | 789 |
| 740 // Calculate an offset that is properly aligned for all planes. | 790 // Calculate an offset that is properly aligned for all planes. |
| 741 const gfx::Size alignment = CommonAlignment(format_); | 791 const gfx::Size alignment = CommonAlignment(format_); |
| 742 const gfx::Point offset(RoundDown(visible_rect_.x(), alignment.width()), | 792 const gfx::Point offset(RoundDown(visible_rect_.x(), alignment.width()), |
| 743 RoundDown(visible_rect_.y(), alignment.height())); | 793 RoundDown(visible_rect_.y(), alignment.height())); |
| 744 | 794 |
| 745 const gfx::Size subsample = SampleSize(format_, plane); | 795 const gfx::Size subsample = SampleSize(format_, plane); |
| 746 DCHECK(offset.x() % subsample.width() == 0); | 796 DCHECK(offset.x() % subsample.width() == 0); |
| 747 DCHECK(offset.y() % subsample.height() == 0); | 797 DCHECK(offset.y() % subsample.height() == 0); |
| 748 return data(plane) + | 798 return data(plane) + |
| 749 stride(plane) * (offset.y() / subsample.height()) + // Row offset. | 799 stride(plane) * (offset.y() / subsample.height()) + // Row offset. |
| 750 BytesPerElement(format_, plane) * // Column offset. | 800 BytesPerElement(format_, plane) * // Column offset. |
| 751 (offset.x() / subsample.width()); | 801 (offset.x() / subsample.width()); |
| 752 } | 802 } |
| 753 | 803 |
| 754 uint8* VideoFrame::visible_data(size_t plane) { | 804 uint8* VideoFrame::visible_data(size_t plane) { |
| 755 return const_cast<uint8*>( | 805 return const_cast<uint8*>( |
| 756 static_cast<const VideoFrame*>(this)->visible_data(plane)); | 806 static_cast<const VideoFrame*>(this)->visible_data(plane)); |
| 757 } | 807 } |
| 758 | 808 |
| 759 const gpu::MailboxHolder& VideoFrame::mailbox_holder(size_t texture) const { | 809 const gpu::MailboxHolder& |
| 760 DCHECK_EQ(format_, NATIVE_TEXTURE); | 810 VideoFrame::mailbox_holder(size_t texture_index) const { |
| 761 DCHECK_LT(texture, NumTextures(texture_format_)); | 811 #if defined(OS_LINUX) |
| 762 return mailbox_holders_[texture]; | 812 DCHECK(storage_type_ == STORAGE_TEXTURE || storage_type_ == STORAGE_DMABUFS); |
| 813 #else |
| 814 DCHECK(storage_type_ == STORAGE_TEXTURE); |
| 815 #endif |
| 816 DCHECK_LT(texture_index, NumPlanes(format_)); |
| 817 return mailbox_holders_[texture_index]; |
| 763 } | 818 } |
| 764 | 819 |
| 765 base::SharedMemoryHandle VideoFrame::shared_memory_handle() const { | 820 base::SharedMemoryHandle VideoFrame::shared_memory_handle() const { |
| 821 DCHECK_EQ(storage_type_, STORAGE_SHMEM); |
| 822 DCHECK(shared_memory_handle_ != base::SharedMemory::NULLHandle()); |
| 766 return shared_memory_handle_; | 823 return shared_memory_handle_; |
| 767 } | 824 } |
| 768 | 825 |
| 769 size_t VideoFrame::shared_memory_offset() const { | 826 size_t VideoFrame::shared_memory_offset() const { |
| 827 DCHECK_EQ(storage_type_, STORAGE_SHMEM); |
| 828 DCHECK(shared_memory_handle_ != base::SharedMemory::NULLHandle()); |
| 770 return shared_memory_offset_; | 829 return shared_memory_offset_; |
| 771 } | 830 } |
| 772 | 831 |
| 773 void VideoFrame::AddDestructionObserver(const base::Closure& callback) { | 832 void VideoFrame::AddDestructionObserver(const base::Closure& callback) { |
| 774 DCHECK(!callback.is_null()); | 833 DCHECK(!callback.is_null()); |
| 775 done_callbacks_.push_back(callback); | 834 done_callbacks_.push_back(callback); |
| 776 } | 835 } |
| 777 | 836 |
| 778 void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) { | 837 void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) { |
| 779 DCHECK_EQ(format_, NATIVE_TEXTURE); | 838 #if defined(OS_LINUX) |
| 839 DCHECK(storage_type_ == STORAGE_TEXTURE || storage_type_ == STORAGE_DMABUFS); |
| 840 #else |
| 841 DCHECK(storage_type_ == STORAGE_TEXTURE); |
| 842 #endif |
| 780 base::AutoLock locker(release_sync_point_lock_); | 843 base::AutoLock locker(release_sync_point_lock_); |
| 781 // Must wait on the previous sync point before inserting a new sync point so | 844 // Must wait on the previous sync point before inserting a new sync point so |
| 782 // that |mailbox_holders_release_cb_| guarantees the previous sync point | 845 // that |mailbox_holders_release_cb_| guarantees the previous sync point |
| 783 // occurred when it waits on |release_sync_point_|. | 846 // occurred when it waits on |release_sync_point_|. |
| 784 if (release_sync_point_) | 847 if (release_sync_point_) |
| 785 client->WaitSyncPoint(release_sync_point_); | 848 client->WaitSyncPoint(release_sync_point_); |
| 786 release_sync_point_ = client->InsertSyncPoint(); | 849 release_sync_point_ = client->InsertSyncPoint(); |
| 787 } | 850 } |
| 788 | 851 |
| 789 #if defined(OS_POSIX) | 852 #if defined(OS_LINUX) |
| 790 int VideoFrame::dmabuf_fd(size_t plane) const { | 853 int VideoFrame::dmabuf_fd(size_t plane) const { |
| 854 DCHECK_EQ(storage_type_, STORAGE_DMABUFS); |
| 791 return dmabuf_fds_[plane].get(); | 855 return dmabuf_fds_[plane].get(); |
| 792 } | 856 } |
| 793 #endif | 857 #endif |
| 794 | 858 |
| 795 #if defined(OS_MACOSX) | 859 #if defined(OS_MACOSX) |
| 796 CVPixelBufferRef VideoFrame::cv_pixel_buffer() const { | 860 CVPixelBufferRef VideoFrame::cv_pixel_buffer() const { |
| 797 return cv_pixel_buffer_.get(); | 861 return cv_pixel_buffer_.get(); |
| 798 } | 862 } |
| 799 #endif | 863 #endif |
| 800 | 864 |
| 801 void VideoFrame::HashFrameForTesting(base::MD5Context* context) { | 865 void VideoFrame::HashFrameForTesting(base::MD5Context* context) { |
| 802 for (size_t plane = 0; plane < NumPlanes(format_); ++plane) { | 866 for (size_t plane = 0; plane < NumPlanes(format_); ++plane) { |
| 803 for (int row = 0; row < rows(plane); ++row) { | 867 for (int row = 0; row < rows(plane); ++row) { |
| 804 base::MD5Update(context, base::StringPiece( | 868 base::MD5Update(context, base::StringPiece( |
| 805 reinterpret_cast<char*>(data(plane) + stride(plane) * row), | 869 reinterpret_cast<char*>(data(plane) + stride(plane) * row), |
| 806 row_bytes(plane))); | 870 row_bytes(plane))); |
| 807 } | 871 } |
| 808 } | 872 } |
| 809 } | 873 } |
| 810 | 874 |
| 811 } // namespace media | 875 } // namespace media |
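For reference while reviewing, below is a minimal usage sketch of the two memory-backed creation paths after this change: CreateFrame() now only accepts YUV planar formats and allocates STORAGE_OWNED_MEMORY, while WrapExternalSharedMemory() replaces the old WrapExternalPackedMemory() for STORAGE_SHMEM-backed frames. The sketch is not part of the CL; the MakeFrames() helper and the base::SharedMemory plumbing are illustrative assumptions based only on the signatures visible in this diff.

// Usage sketch only (not part of this CL). Assumes |shm| is a valid, mapped
// base::SharedMemory region large enough to hold an I420 frame of |size|.
#include "base/logging.h"
#include "base/memory/shared_memory.h"
#include "media/base/video_frame.h"

namespace media {

scoped_refptr<VideoFrame> MakeFrames(const gfx::Size& size,
                                     base::SharedMemory* shm) {
  // Owned-memory path: CreateFrame() only accepts YUV planar formats
  // (IsYuvPlanar()) and produces a STORAGE_OWNED_MEMORY frame.
  scoped_refptr<VideoFrame> owned = VideoFrame::CreateFrame(
      VideoFrame::I420, size, gfx::Rect(size), size, base::TimeDelta());
  DCHECK(VideoFrame::IsMappable(owned->storage_type()));

  // Shared-memory path: WrapExternalSharedMemory() tags the frame as
  // STORAGE_SHMEM and records the shared memory handle and offset.
  scoped_refptr<VideoFrame> shared = VideoFrame::WrapExternalSharedMemory(
      VideoFrame::I420, size, gfx::Rect(size), size,
      static_cast<uint8*>(shm->memory()),
      VideoFrame::AllocationSize(VideoFrame::I420, size),
      shm->handle(), 0 /* data_offset */, base::TimeDelta());
  return shared;
}

}  // namespace media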