| OLD | NEW |
| 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. | 1 // Copyright (c) 2013 The Chromium Authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "media/base/android/sdk_media_codec_bridge.h" | 5 #include "media/base/android/media_codec_bridge_impl.h" |
| 6 | 6 |
| 7 #include <algorithm> | 7 #include <algorithm> |
| 8 #include <limits> | 8 #include <limits> |
| 9 #include <memory> | 9 #include <memory> |
| 10 #include <utility> | 10 #include <utility> |
| 11 | 11 |
| 12 #include "base/android/build_info.h" | 12 #include "base/android/build_info.h" |
| 13 #include "base/android/jni_android.h" | 13 #include "base/android/jni_android.h" |
| 14 #include "base/android/jni_array.h" | 14 #include "base/android/jni_array.h" |
| 15 #include "base/android/jni_string.h" | 15 #include "base/android/jni_string.h" |
| (...skipping 13 matching lines...) |
| 29 | 29 |
| 30 #define RETURN_ON_ERROR(condition) \ | 30 #define RETURN_ON_ERROR(condition) \ |
| 31 do { \ | 31 do { \ |
| 32 if (!(condition)) { \ | 32 if (!(condition)) { \ |
| 33 LOG(ERROR) << "Unable to parse AAC header: " #condition; \ | 33 LOG(ERROR) << "Unable to parse AAC header: " #condition; \ |
| 34 return false; \ | 34 return false; \ |
| 35 } \ | 35 } \ |
| 36 } while (0) | 36 } while (0) |
| 37 | 37 |
| 38 namespace media { | 38 namespace media { |
| 39 namespace { |
| 39 | 40 |
| 40 enum { | 41 enum { |
| 41 kBufferFlagSyncFrame = 1, // BUFFER_FLAG_SYNC_FRAME | 42 kBufferFlagSyncFrame = 1, // BUFFER_FLAG_SYNC_FRAME |
| 42 kBufferFlagEndOfStream = 4, // BUFFER_FLAG_END_OF_STREAM | 43 kBufferFlagEndOfStream = 4, // BUFFER_FLAG_END_OF_STREAM |
| 43 kConfigureFlagEncode = 1, // CONFIGURE_FLAG_ENCODE | 44 kConfigureFlagEncode = 1, // CONFIGURE_FLAG_ENCODE |
| 44 }; | 45 }; |
| 45 | 46 |
| 46 static ScopedJavaLocalRef<jintArray> | 47 const std::string AudioCodecToAndroidMimeType(const AudioCodec& codec) { |
| 47 ToJavaIntArray(JNIEnv* env, std::unique_ptr<jint[]> native_array, int size) { | |
| 48 ScopedJavaLocalRef<jintArray> j_array(env, env->NewIntArray(size)); | |
| 49 env->SetIntArrayRegion(j_array.obj(), 0, size, native_array.get()); | |
| 50 return j_array; | |
| 51 } | |
| 52 | |
| 53 static const std::string AudioCodecToAndroidMimeType(const AudioCodec& codec) { | |
| 54 switch (codec) { | 48 switch (codec) { |
| 55 case kCodecMP3: | 49 case kCodecMP3: |
| 56 return "audio/mpeg"; | 50 return "audio/mpeg"; |
| 57 case kCodecVorbis: | 51 case kCodecVorbis: |
| 58 return "audio/vorbis"; | 52 return "audio/vorbis"; |
| 59 case kCodecOpus: | 53 case kCodecOpus: |
| 60 return "audio/opus"; | 54 return "audio/opus"; |
| 61 case kCodecAAC: | 55 case kCodecAAC: |
| 62 return "audio/mp4a-latm"; | 56 return "audio/mp4a-latm"; |
| 63 case kCodecAC3: | 57 case kCodecAC3: |
| 64 return "audio/ac3"; | 58 return "audio/ac3"; |
| 65 case kCodecEAC3: | 59 case kCodecEAC3: |
| 66 return "audio/eac3"; | 60 return "audio/eac3"; |
| 67 default: | 61 default: |
| 68 return std::string(); | 62 return std::string(); |
| 69 } | 63 } |
| 70 } | 64 } |
| 71 | 65 |
| 72 static const std::string VideoCodecToAndroidMimeType(const VideoCodec& codec) { | 66 const std::string VideoCodecToAndroidMimeType(const VideoCodec& codec) { |
| 73 switch (codec) { | 67 switch (codec) { |
| 74 case kCodecH264: | 68 case kCodecH264: |
| 75 return "video/avc"; | 69 return "video/avc"; |
| 76 case kCodecHEVC: | 70 case kCodecHEVC: |
| 77 return "video/hevc"; | 71 return "video/hevc"; |
| 78 case kCodecVP8: | 72 case kCodecVP8: |
| 79 return "video/x-vnd.on2.vp8"; | 73 return "video/x-vnd.on2.vp8"; |
| 80 case kCodecVP9: | 74 case kCodecVP9: |
| 81 return "video/x-vnd.on2.vp9"; | 75 return "video/x-vnd.on2.vp9"; |
| 82 default: | 76 default: |
| 83 return std::string(); | 77 return std::string(); |
| 84 } | 78 } |
| 85 } | 79 } |
| 86 | 80 |
| 87 SdkMediaCodecBridge::SdkMediaCodecBridge(const std::string& mime, | 81 static ScopedJavaLocalRef<jintArray> |
| 88 bool is_secure, | 82 ToJavaIntArray(JNIEnv* env, std::unique_ptr<jint[]> native_array, int size) { |
| 89 MediaCodecDirection direction, | 83 ScopedJavaLocalRef<jintArray> j_array(env, env->NewIntArray(size)); |
| 90 bool require_software_codec) { | 84 env->SetIntArrayRegion(j_array.obj(), 0, size, native_array.get()); |
| 85 return j_array; |
| 86 } |
| 87 |
| 88 } // namespace |
| 89 |
| 90 MediaCodecBridgeImpl::MediaCodecBridgeImpl(const std::string& mime, |
| 91 bool is_secure, |
| 92 MediaCodecDirection direction, |
| 93 bool require_software_codec) { |
| 91 JNIEnv* env = AttachCurrentThread(); | 94 JNIEnv* env = AttachCurrentThread(); |
| 92 DCHECK(!mime.empty()); | 95 DCHECK(!mime.empty()); |
| 93 ScopedJavaLocalRef<jstring> j_mime = ConvertUTF8ToJavaString(env, mime); | 96 ScopedJavaLocalRef<jstring> j_mime = ConvertUTF8ToJavaString(env, mime); |
| 94 j_media_codec_.Reset(Java_MediaCodecBridge_create( | 97 j_media_codec_.Reset(Java_MediaCodecBridge_create( |
| 95 env, j_mime, is_secure, direction, require_software_codec)); | 98 env, j_mime, is_secure, direction, require_software_codec)); |
| 96 } | 99 } |
| 97 | 100 |
| 98 SdkMediaCodecBridge::~SdkMediaCodecBridge() { | 101 MediaCodecBridgeImpl::~MediaCodecBridgeImpl() { |
| 99 JNIEnv* env = AttachCurrentThread(); | 102 JNIEnv* env = AttachCurrentThread(); |
| 100 if (j_media_codec_.obj()) | 103 if (j_media_codec_.obj()) |
| 101 Java_MediaCodecBridge_release(env, j_media_codec_); | 104 Java_MediaCodecBridge_release(env, j_media_codec_); |
| 102 } | 105 } |
| 103 | 106 |
| 104 bool SdkMediaCodecBridge::Start() { | 107 bool MediaCodecBridgeImpl::Start() { |
| 105 JNIEnv* env = AttachCurrentThread(); | 108 JNIEnv* env = AttachCurrentThread(); |
| 106 return Java_MediaCodecBridge_start(env, j_media_codec_); | 109 return Java_MediaCodecBridge_start(env, j_media_codec_); |
| 107 } | 110 } |
| 108 | 111 |
| 109 void SdkMediaCodecBridge::Stop() { | 112 void MediaCodecBridgeImpl::Stop() { |
| 110 JNIEnv* env = AttachCurrentThread(); | 113 JNIEnv* env = AttachCurrentThread(); |
| 111 Java_MediaCodecBridge_stop(env, j_media_codec_); | 114 Java_MediaCodecBridge_stop(env, j_media_codec_); |
| 112 } | 115 } |
| 113 | 116 |
| 114 MediaCodecStatus SdkMediaCodecBridge::Flush() { | 117 MediaCodecStatus MediaCodecBridgeImpl::Flush() { |
| 115 JNIEnv* env = AttachCurrentThread(); | 118 JNIEnv* env = AttachCurrentThread(); |
| 116 return static_cast<MediaCodecStatus>( | 119 return static_cast<MediaCodecStatus>( |
| 117 Java_MediaCodecBridge_flush(env, j_media_codec_)); | 120 Java_MediaCodecBridge_flush(env, j_media_codec_)); |
| 118 } | 121 } |
| 119 | 122 |
| 120 MediaCodecStatus SdkMediaCodecBridge::GetOutputSize(gfx::Size* size) { | 123 MediaCodecStatus MediaCodecBridgeImpl::GetOutputSize(gfx::Size* size) { |
| 121 JNIEnv* env = AttachCurrentThread(); | 124 JNIEnv* env = AttachCurrentThread(); |
| 122 ScopedJavaLocalRef<jobject> result = | 125 ScopedJavaLocalRef<jobject> result = |
| 123 Java_MediaCodecBridge_getOutputFormat(env, j_media_codec_); | 126 Java_MediaCodecBridge_getOutputFormat(env, j_media_codec_); |
| 124 MediaCodecStatus status = static_cast<MediaCodecStatus>( | 127 MediaCodecStatus status = static_cast<MediaCodecStatus>( |
| 125 Java_GetOutputFormatResult_status(env, result)); | 128 Java_GetOutputFormatResult_status(env, result)); |
| 126 if (status == MEDIA_CODEC_OK) { | 129 if (status == MEDIA_CODEC_OK) { |
| 127 size->SetSize(Java_GetOutputFormatResult_width(env, result), | 130 size->SetSize(Java_GetOutputFormatResult_width(env, result), |
| 128 Java_GetOutputFormatResult_height(env, result)); | 131 Java_GetOutputFormatResult_height(env, result)); |
| 129 } | 132 } |
| 130 return status; | 133 return status; |
| 131 } | 134 } |
| 132 | 135 |
| 133 MediaCodecStatus SdkMediaCodecBridge::GetOutputSamplingRate( | 136 MediaCodecStatus MediaCodecBridgeImpl::GetOutputSamplingRate( |
| 134 int* sampling_rate) { | 137 int* sampling_rate) { |
| 135 JNIEnv* env = AttachCurrentThread(); | 138 JNIEnv* env = AttachCurrentThread(); |
| 136 ScopedJavaLocalRef<jobject> result = | 139 ScopedJavaLocalRef<jobject> result = |
| 137 Java_MediaCodecBridge_getOutputFormat(env, j_media_codec_); | 140 Java_MediaCodecBridge_getOutputFormat(env, j_media_codec_); |
| 138 MediaCodecStatus status = static_cast<MediaCodecStatus>( | 141 MediaCodecStatus status = static_cast<MediaCodecStatus>( |
| 139 Java_GetOutputFormatResult_status(env, result)); | 142 Java_GetOutputFormatResult_status(env, result)); |
| 140 if (status == MEDIA_CODEC_OK) | 143 if (status == MEDIA_CODEC_OK) |
| 141 *sampling_rate = Java_GetOutputFormatResult_sampleRate(env, result); | 144 *sampling_rate = Java_GetOutputFormatResult_sampleRate(env, result); |
| 142 return status; | 145 return status; |
| 143 } | 146 } |
| 144 | 147 |
| 145 MediaCodecStatus SdkMediaCodecBridge::GetOutputChannelCount( | 148 MediaCodecStatus MediaCodecBridgeImpl::GetOutputChannelCount( |
| 146 int* channel_count) { | 149 int* channel_count) { |
| 147 JNIEnv* env = AttachCurrentThread(); | 150 JNIEnv* env = AttachCurrentThread(); |
| 148 ScopedJavaLocalRef<jobject> result = | 151 ScopedJavaLocalRef<jobject> result = |
| 149 Java_MediaCodecBridge_getOutputFormat(env, j_media_codec_); | 152 Java_MediaCodecBridge_getOutputFormat(env, j_media_codec_); |
| 150 MediaCodecStatus status = static_cast<MediaCodecStatus>( | 153 MediaCodecStatus status = static_cast<MediaCodecStatus>( |
| 151 Java_GetOutputFormatResult_status(env, result)); | 154 Java_GetOutputFormatResult_status(env, result)); |
| 152 if (status == MEDIA_CODEC_OK) | 155 if (status == MEDIA_CODEC_OK) |
| 153 *channel_count = Java_GetOutputFormatResult_channelCount(env, result); | 156 *channel_count = Java_GetOutputFormatResult_channelCount(env, result); |
| 154 return status; | 157 return status; |
| 155 } | 158 } |
| 156 | 159 |
| 157 MediaCodecStatus SdkMediaCodecBridge::QueueInputBuffer( | 160 MediaCodecStatus MediaCodecBridgeImpl::QueueInputBuffer( |
| 158 int index, | 161 int index, |
| 159 const uint8_t* data, | 162 const uint8_t* data, |
| 160 size_t data_size, | 163 size_t data_size, |
| 161 base::TimeDelta presentation_time) { | 164 base::TimeDelta presentation_time) { |
| 162 DVLOG(3) << __func__ << index << ": " << data_size; | 165 DVLOG(3) << __func__ << " " << index << ": " << data_size; |
| 163 if (data_size > | 166 if (data_size > |
| 164 base::checked_cast<size_t>(std::numeric_limits<int32_t>::max())) { | 167 base::checked_cast<size_t>(std::numeric_limits<int32_t>::max())) { |
| 165 return MEDIA_CODEC_ERROR; | 168 return MEDIA_CODEC_ERROR; |
| 166 } | 169 } |
| 167 if (data && !FillInputBuffer(index, data, data_size)) | 170 if (data && !FillInputBuffer(index, data, data_size)) |
| 168 return MEDIA_CODEC_ERROR; | 171 return MEDIA_CODEC_ERROR; |
| 169 JNIEnv* env = AttachCurrentThread(); | 172 JNIEnv* env = AttachCurrentThread(); |
| 170 return static_cast<MediaCodecStatus>(Java_MediaCodecBridge_queueInputBuffer( | 173 return static_cast<MediaCodecStatus>(Java_MediaCodecBridge_queueInputBuffer( |
| 171 env, j_media_codec_, index, 0, data_size, | 174 env, j_media_codec_, index, 0, data_size, |
| 172 presentation_time.InMicroseconds(), 0)); | 175 presentation_time.InMicroseconds(), 0)); |
| 173 } | 176 } |
| 174 | 177 |
| 175 // TODO(timav): Combine this and above methods together keeping only the first | 178 MediaCodecStatus MediaCodecBridgeImpl::QueueSecureInputBuffer( |
| 176 // interface after we switch to Spitzer pipeline. | |
| 177 MediaCodecStatus SdkMediaCodecBridge::QueueSecureInputBuffer( | |
| 178 int index, | 179 int index, |
| 179 const uint8_t* data, | 180 const uint8_t* data, |
| 180 size_t data_size, | 181 size_t data_size, |
| 181 const std::vector<char>& key_id, | 182 const std::string& key_id, |
| 182 const std::vector<char>& iv, | 183 const std::string& iv, |
| 183 const SubsampleEntry* subsamples, | 184 const std::vector<SubsampleEntry>& subsamples, |
| 184 int subsamples_size, | |
| 185 const EncryptionScheme& encryption_scheme, | 185 const EncryptionScheme& encryption_scheme, |
| 186 base::TimeDelta presentation_time) { | 186 base::TimeDelta presentation_time) { |
| 187 DVLOG(3) << __func__ << index << ": " << data_size; | 187 DVLOG(3) << __func__ << " " << index << ": " << data_size; |
| 188 if (data_size > | 188 if (data_size > |
| 189 base::checked_cast<size_t>(std::numeric_limits<int32_t>::max())) { | 189 base::checked_cast<size_t>(std::numeric_limits<int32_t>::max())) { |
| 190 return MEDIA_CODEC_ERROR; | 190 return MEDIA_CODEC_ERROR; |
| 191 } | 191 } |
| 192 if (data && !FillInputBuffer(index, data, data_size)) | 192 if (data && !FillInputBuffer(index, data, data_size)) |
| 193 return MEDIA_CODEC_ERROR; | 193 return MEDIA_CODEC_ERROR; |
| 194 | 194 |
| 195 JNIEnv* env = AttachCurrentThread(); | 195 JNIEnv* env = AttachCurrentThread(); |
| 196 ScopedJavaLocalRef<jbyteArray> j_key_id = base::android::ToJavaByteArray( | 196 ScopedJavaLocalRef<jbyteArray> j_key_id = base::android::ToJavaByteArray( |
| 197 env, reinterpret_cast<const uint8_t*>(key_id.data()), key_id.size()); | 197 env, reinterpret_cast<const uint8_t*>(key_id.data()), key_id.size()); |
| 198 ScopedJavaLocalRef<jbyteArray> j_iv = base::android::ToJavaByteArray( | 198 ScopedJavaLocalRef<jbyteArray> j_iv = base::android::ToJavaByteArray( |
| 199 env, reinterpret_cast<const uint8_t*>(iv.data()), iv.size()); | 199 env, reinterpret_cast<const uint8_t*>(iv.data()), iv.size()); |
| 200 | 200 |
| 201 // MediaCodec.CryptoInfo documentations says passing NULL for |clear_array| | 201 // The MediaCodec.CryptoInfo documentation says to pass NULL for |clear_array| |
| 202 // to indicate that all data is encrypted. But it doesn't specify what | 202 // to indicate that all data is encrypted. But it doesn't specify what |
| 203 // |cypher_array| and |subsamples_size| should be in that case. Passing | 203 // |cypher_array| and |subsamples_size| should be in that case. We pass |
| 204 // one subsample here just to be on the safe side. | 204 // one subsample here just to be on the safe side. |
| 205 int new_subsamples_size = subsamples_size == 0 ? 1 : subsamples_size; | 205 int num_subsamples = std::max(static_cast<size_t>(1), subsamples.size()); |
| 206 | 206 |
| 207 std::unique_ptr<jint[]> native_clear_array(new jint[new_subsamples_size]); | 207 std::unique_ptr<jint[]> native_clear_array(new jint[num_subsamples]); |
| 208 std::unique_ptr<jint[]> native_cypher_array(new jint[new_subsamples_size]); | 208 std::unique_ptr<jint[]> native_cypher_array(new jint[num_subsamples]); |
| 209 | 209 |
| 210 if (subsamples_size == 0) { | 210 if (subsamples.empty()) { |
| 211 DCHECK(!subsamples); | |
| 212 native_clear_array[0] = 0; | 211 native_clear_array[0] = 0; |
| 213 native_cypher_array[0] = data_size; | 212 native_cypher_array[0] = data_size; |
| 214 } else { | 213 } else { |
| 215 DCHECK_GT(subsamples_size, 0); | 214 for (size_t i = 0; i < subsamples.size(); ++i) { |
| 216 DCHECK(subsamples); | |
| 217 for (int i = 0; i < subsamples_size; ++i) { | |
| 218 DCHECK(subsamples[i].clear_bytes <= std::numeric_limits<uint16_t>::max()); | 215 DCHECK(subsamples[i].clear_bytes <= std::numeric_limits<uint16_t>::max()); |
| 219 if (subsamples[i].cypher_bytes > | 216 if (subsamples[i].cypher_bytes > |
| 220 static_cast<uint32_t>(std::numeric_limits<jint>::max())) { | 217 static_cast<uint32_t>(std::numeric_limits<jint>::max())) { |
| 221 return MEDIA_CODEC_ERROR; | 218 return MEDIA_CODEC_ERROR; |
| 222 } | 219 } |
| 223 | 220 |
| 224 native_clear_array[i] = subsamples[i].clear_bytes; | 221 native_clear_array[i] = subsamples[i].clear_bytes; |
| 225 native_cypher_array[i] = subsamples[i].cypher_bytes; | 222 native_cypher_array[i] = subsamples[i].cypher_bytes; |
| 226 } | 223 } |
| 227 } | 224 } |
| 228 | 225 |
| 229 ScopedJavaLocalRef<jintArray> clear_array = | 226 ScopedJavaLocalRef<jintArray> clear_array = |
| 230 ToJavaIntArray(env, std::move(native_clear_array), new_subsamples_size); | 227 ToJavaIntArray(env, std::move(native_clear_array), num_subsamples); |
| 231 ScopedJavaLocalRef<jintArray> cypher_array = | 228 ScopedJavaLocalRef<jintArray> cypher_array = |
| 232 ToJavaIntArray(env, std::move(native_cypher_array), new_subsamples_size); | 229 ToJavaIntArray(env, std::move(native_cypher_array), num_subsamples); |
| 233 | 230 |
| 234 return static_cast<MediaCodecStatus>( | 231 return static_cast<MediaCodecStatus>( |
| 235 Java_MediaCodecBridge_queueSecureInputBuffer( | 232 Java_MediaCodecBridge_queueSecureInputBuffer( |
| 236 env, j_media_codec_.obj(), index, 0, j_iv.obj(), j_key_id.obj(), | 233 env, j_media_codec_.obj(), index, 0, j_iv.obj(), j_key_id.obj(), |
| 237 clear_array, cypher_array, new_subsamples_size, | 234 clear_array, cypher_array, num_subsamples, |
| 238 static_cast<int>(encryption_scheme.mode()), | 235 static_cast<int>(encryption_scheme.mode()), |
| 239 static_cast<int>(encryption_scheme.pattern().encrypt_blocks()), | 236 static_cast<int>(encryption_scheme.pattern().encrypt_blocks()), |
| 240 static_cast<int>(encryption_scheme.pattern().skip_blocks()), | 237 static_cast<int>(encryption_scheme.pattern().skip_blocks()), |
| 241 presentation_time.InMicroseconds())); | 238 presentation_time.InMicroseconds())); |
| 242 } | 239 } |
| 243 | 240 |
| 244 void SdkMediaCodecBridge::QueueEOS(int input_buffer_index) { | 241 void MediaCodecBridgeImpl::QueueEOS(int input_buffer_index) { |
| 245 DVLOG(3) << __func__ << ": " << input_buffer_index; | 242 DVLOG(3) << __func__ << ": " << input_buffer_index; |
| 246 JNIEnv* env = AttachCurrentThread(); | 243 JNIEnv* env = AttachCurrentThread(); |
| 247 Java_MediaCodecBridge_queueInputBuffer( | 244 Java_MediaCodecBridge_queueInputBuffer( |
| 248 env, j_media_codec_, input_buffer_index, 0, 0, 0, kBufferFlagEndOfStream); | 245 env, j_media_codec_, input_buffer_index, 0, 0, 0, kBufferFlagEndOfStream); |
| 249 } | 246 } |
| 250 | 247 |
| 251 MediaCodecStatus SdkMediaCodecBridge::DequeueInputBuffer( | 248 MediaCodecStatus MediaCodecBridgeImpl::DequeueInputBuffer( |
| 252 base::TimeDelta timeout, | 249 base::TimeDelta timeout, |
| 253 int* index) { | 250 int* index) { |
| 254 JNIEnv* env = AttachCurrentThread(); | 251 JNIEnv* env = AttachCurrentThread(); |
| 255 ScopedJavaLocalRef<jobject> result = Java_MediaCodecBridge_dequeueInputBuffer( | 252 ScopedJavaLocalRef<jobject> result = Java_MediaCodecBridge_dequeueInputBuffer( |
| 256 env, j_media_codec_, timeout.InMicroseconds()); | 253 env, j_media_codec_, timeout.InMicroseconds()); |
| 257 *index = Java_DequeueInputResult_index(env, result); | 254 *index = Java_DequeueInputResult_index(env, result); |
| 258 MediaCodecStatus status = static_cast<MediaCodecStatus>( | 255 MediaCodecStatus status = static_cast<MediaCodecStatus>( |
| 259 Java_DequeueInputResult_status(env, result)); | 256 Java_DequeueInputResult_status(env, result)); |
| 260 DVLOG(3) << __func__ << ": status: " << status << ", index: " << *index; | 257 DVLOG(3) << __func__ << ": status: " << status << ", index: " << *index; |
| 261 return status; | 258 return status; |
| 262 } | 259 } |
| 263 | 260 |
| 264 MediaCodecStatus SdkMediaCodecBridge::DequeueOutputBuffer( | 261 MediaCodecStatus MediaCodecBridgeImpl::DequeueOutputBuffer( |
| 265 base::TimeDelta timeout, | 262 base::TimeDelta timeout, |
| 266 int* index, | 263 int* index, |
| 267 size_t* offset, | 264 size_t* offset, |
| 268 size_t* size, | 265 size_t* size, |
| 269 base::TimeDelta* presentation_time, | 266 base::TimeDelta* presentation_time, |
| 270 bool* end_of_stream, | 267 bool* end_of_stream, |
| 271 bool* key_frame) { | 268 bool* key_frame) { |
| 272 JNIEnv* env = AttachCurrentThread(); | 269 JNIEnv* env = AttachCurrentThread(); |
| 273 ScopedJavaLocalRef<jobject> result = | 270 ScopedJavaLocalRef<jobject> result = |
| 274 Java_MediaCodecBridge_dequeueOutputBuffer(env, j_media_codec_, | 271 Java_MediaCodecBridge_dequeueOutputBuffer(env, j_media_codec_, |
| (...skipping 13 matching lines...) |
| 288 if (key_frame) | 285 if (key_frame) |
| 289 *key_frame = flags & kBufferFlagSyncFrame; | 286 *key_frame = flags & kBufferFlagSyncFrame; |
| 290 MediaCodecStatus status = static_cast<MediaCodecStatus>( | 287 MediaCodecStatus status = static_cast<MediaCodecStatus>( |
| 291 Java_DequeueOutputResult_status(env, result)); | 288 Java_DequeueOutputResult_status(env, result)); |
| 292 DVLOG(3) << __func__ << ": status: " << status << ", index: " << *index | 289 DVLOG(3) << __func__ << ": status: " << status << ", index: " << *index |
| 293 << ", offset: " << *offset << ", size: " << *size | 290 << ", offset: " << *offset << ", size: " << *size |
| 294 << ", flags: " << flags; | 291 << ", flags: " << flags; |
| 295 return status; | 292 return status; |
| 296 } | 293 } |
| 297 | 294 |
| 298 void SdkMediaCodecBridge::ReleaseOutputBuffer(int index, bool render) { | 295 void MediaCodecBridgeImpl::ReleaseOutputBuffer(int index, bool render) { |
| 299 DVLOG(3) << __func__ << ": " << index; | 296 DVLOG(3) << __func__ << ": " << index; |
| 300 JNIEnv* env = AttachCurrentThread(); | 297 JNIEnv* env = AttachCurrentThread(); |
| 301 Java_MediaCodecBridge_releaseOutputBuffer(env, j_media_codec_, index, render); | 298 Java_MediaCodecBridge_releaseOutputBuffer(env, j_media_codec_, index, render); |
| 302 } | 299 } |
| 303 | 300 |
| 304 MediaCodecStatus SdkMediaCodecBridge::GetInputBuffer(int input_buffer_index, | 301 MediaCodecStatus MediaCodecBridgeImpl::GetInputBuffer(int input_buffer_index, |
| 305 uint8_t** data, | 302 uint8_t** data, |
| 306 size_t* capacity) { | 303 size_t* capacity) { |
| 307 JNIEnv* env = AttachCurrentThread(); | 304 JNIEnv* env = AttachCurrentThread(); |
| 308 ScopedJavaLocalRef<jobject> j_buffer(Java_MediaCodecBridge_getInputBuffer( | 305 ScopedJavaLocalRef<jobject> j_buffer(Java_MediaCodecBridge_getInputBuffer( |
| 309 env, j_media_codec_, input_buffer_index)); | 306 env, j_media_codec_, input_buffer_index)); |
| 310 if (j_buffer.is_null()) | 307 if (j_buffer.is_null()) |
| 311 return MEDIA_CODEC_ERROR; | 308 return MEDIA_CODEC_ERROR; |
| 312 | 309 |
| 313 *data = static_cast<uint8_t*>(env->GetDirectBufferAddress(j_buffer.obj())); | 310 *data = static_cast<uint8_t*>(env->GetDirectBufferAddress(j_buffer.obj())); |
| 314 *capacity = | 311 *capacity = |
| 315 base::checked_cast<size_t>(env->GetDirectBufferCapacity(j_buffer.obj())); | 312 base::checked_cast<size_t>(env->GetDirectBufferCapacity(j_buffer.obj())); |
| 316 return MEDIA_CODEC_OK; | 313 return MEDIA_CODEC_OK; |
| 317 } | 314 } |
| 318 | 315 |
| 319 MediaCodecStatus SdkMediaCodecBridge::GetOutputBufferAddress( | 316 MediaCodecStatus MediaCodecBridgeImpl::CopyFromOutputBuffer(int index, |
| 317 size_t offset, |
| 318 void* dst, |
| 319 size_t num) { |
| 320 const uint8_t* src_data = nullptr; |
| 321 size_t src_capacity = 0; |
| 322 MediaCodecStatus status = |
| 323 GetOutputBufferAddress(index, offset, &src_data, &src_capacity); |
| 324 if (status == MEDIA_CODEC_OK) { |
| 325 CHECK_GE(src_capacity, num); |
| 326 memcpy(dst, src_data, num); |
| 327 } |
| 328 return status; |
| 329 } |
| 330 |
| 331 MediaCodecStatus MediaCodecBridgeImpl::GetOutputBufferAddress( |
| 320 int index, | 332 int index, |
| 321 size_t offset, | 333 size_t offset, |
| 322 const uint8_t** addr, | 334 const uint8_t** addr, |
| 323 size_t* capacity) { | 335 size_t* capacity) { |
| 324 JNIEnv* env = AttachCurrentThread(); | 336 JNIEnv* env = AttachCurrentThread(); |
| 325 ScopedJavaLocalRef<jobject> j_buffer( | 337 ScopedJavaLocalRef<jobject> j_buffer( |
| 326 Java_MediaCodecBridge_getOutputBuffer(env, j_media_codec_, index)); | 338 Java_MediaCodecBridge_getOutputBuffer(env, j_media_codec_, index)); |
| 327 if (j_buffer.is_null()) | 339 if (j_buffer.is_null()) |
| 328 return MEDIA_CODEC_ERROR; | 340 return MEDIA_CODEC_ERROR; |
| 329 const size_t total_capacity = env->GetDirectBufferCapacity(j_buffer.obj()); | 341 const size_t total_capacity = env->GetDirectBufferCapacity(j_buffer.obj()); |
| 330 CHECK_GE(total_capacity, offset); | 342 CHECK_GE(total_capacity, offset); |
| 331 *addr = reinterpret_cast<const uint8_t*>( | 343 *addr = reinterpret_cast<const uint8_t*>( |
| 332 env->GetDirectBufferAddress(j_buffer.obj())) + | 344 env->GetDirectBufferAddress(j_buffer.obj())) + |
| 333 offset; | 345 offset; |
| 334 *capacity = total_capacity - offset; | 346 *capacity = total_capacity - offset; |
| 335 return MEDIA_CODEC_OK; | 347 return MEDIA_CODEC_OK; |
| 336 } | 348 } |
| 337 | 349 |
| 338 std::string SdkMediaCodecBridge::GetName() { | 350 std::string MediaCodecBridgeImpl::GetName() { |
| 339 if (base::android::BuildInfo::GetInstance()->sdk_int() < 18) | 351 if (base::android::BuildInfo::GetInstance()->sdk_int() < 18) |
| 340 return ""; | 352 return ""; |
| 341 JNIEnv* env = AttachCurrentThread(); | 353 JNIEnv* env = AttachCurrentThread(); |
| 342 ScopedJavaLocalRef<jstring> j_name = | 354 ScopedJavaLocalRef<jstring> j_name = |
| 343 Java_MediaCodecBridge_getName(env, j_media_codec_); | 355 Java_MediaCodecBridge_getName(env, j_media_codec_); |
| 344 return ConvertJavaStringToUTF8(env, j_name); | 356 return ConvertJavaStringToUTF8(env, j_name); |
| 345 } | 357 } |
| 346 | 358 |
| 359 bool MediaCodecBridgeImpl::FillInputBuffer(int index, |
| 360 const uint8_t* data, |
| 361 size_t size) { |
| 362 uint8_t* dst = nullptr; |
| 363 size_t capacity = 0; |
| 364 if (GetInputBuffer(index, &dst, &capacity) != MEDIA_CODEC_OK) { |
| 365 LOG(ERROR) << "GetInputBuffer failed"; |
| 366 return false; |
| 367 } |
| 368 CHECK(dst); |
| 369 |
| 370 if (size > capacity) { |
| 371 LOG(ERROR) << "Input buffer size " << size |
| 372 << " exceeds MediaCodec input buffer capacity: " << capacity; |
| 373 return false; |
| 374 } |
| 375 |
| 376 memcpy(dst, data, size); |
| 377 return true; |
| 378 } |
| 379 |
| 347 // static | 380 // static |
| 348 AudioCodecBridge* AudioCodecBridge::Create(const AudioCodec& codec) { | 381 AudioCodecBridge* AudioCodecBridge::Create(const AudioCodec& codec) { |
| 349 if (!MediaCodecUtil::IsMediaCodecAvailable()) | 382 if (!MediaCodecUtil::IsMediaCodecAvailable()) |
| 350 return nullptr; | 383 return nullptr; |
| 351 | 384 |
| 352 const std::string mime = AudioCodecToAndroidMimeType(codec); | 385 const std::string mime = AudioCodecToAndroidMimeType(codec); |
| 353 if (mime.empty()) | 386 if (mime.empty()) |
| 354 return nullptr; | 387 return nullptr; |
| 355 | 388 |
| 356 std::unique_ptr<AudioCodecBridge> bridge(new AudioCodecBridge(mime)); | 389 std::unique_ptr<AudioCodecBridge> bridge(new AudioCodecBridge(mime)); |
| 357 if (!bridge->media_codec()) | 390 if (!bridge->media_codec()) |
| 358 return nullptr; | 391 return nullptr; |
| 359 | 392 |
| 360 return bridge.release(); | 393 return bridge.release(); |
| 361 } | 394 } |
| 362 | 395 |
| 363 // static | 396 // static |
| 364 bool AudioCodecBridge::IsKnownUnaccelerated(const AudioCodec& codec) { | 397 bool AudioCodecBridge::IsKnownUnaccelerated(const AudioCodec& codec) { |
| 365 return MediaCodecUtil::IsKnownUnaccelerated( | 398 return MediaCodecUtil::IsKnownUnaccelerated( |
| 366 AudioCodecToAndroidMimeType(codec), MEDIA_CODEC_DECODER); | 399 AudioCodecToAndroidMimeType(codec), MEDIA_CODEC_DECODER); |
| 367 } | 400 } |
| 368 | 401 |
| 369 AudioCodecBridge::AudioCodecBridge(const std::string& mime) | 402 AudioCodecBridge::AudioCodecBridge(const std::string& mime) |
| 370 // Audio codec doesn't care about security level and there is no need for | 403 // Audio codec doesn't care about security level and there is no need for |
| 371 // audio encoding yet. | 404 // audio encoding yet. |
| 372 : SdkMediaCodecBridge(mime, false, MEDIA_CODEC_DECODER, false) {} | 405 : MediaCodecBridgeImpl(mime, false, MEDIA_CODEC_DECODER, false) {} |
| 373 | 406 |
| 374 bool AudioCodecBridge::ConfigureAndStart(const AudioDecoderConfig& config, | 407 bool AudioCodecBridge::ConfigureAndStart(const AudioDecoderConfig& config, |
| 375 jobject media_crypto) { | 408 jobject media_crypto) { |
| 376 const int channel_count = | 409 const int channel_count = |
| 377 ChannelLayoutToChannelCount(config.channel_layout()); | 410 ChannelLayoutToChannelCount(config.channel_layout()); |
| 378 const int64_t codec_delay_ns = base::Time::kNanosecondsPerSecond * | 411 const int64_t codec_delay_ns = base::Time::kNanosecondsPerSecond * |
| 379 config.codec_delay() / | 412 config.codec_delay() / |
| 380 config.samples_per_second(); | 413 config.samples_per_second(); |
| 381 const int64_t seek_preroll_ns = | 414 const int64_t seek_preroll_ns = |
| 382 1000LL * config.seek_preroll().InMicroseconds(); | 415 1000LL * config.seek_preroll().InMicroseconds(); |
| (...skipping 267 matching lines...) |
| 650 return nullptr; | 683 return nullptr; |
| 651 } | 684 } |
| 652 | 685 |
| 653 return bridge->Start() ? bridge.release() : nullptr; | 686 return bridge->Start() ? bridge.release() : nullptr; |
| 654 } | 687 } |
| 655 | 688 |
| 656 VideoCodecBridge::VideoCodecBridge(const std::string& mime, | 689 VideoCodecBridge::VideoCodecBridge(const std::string& mime, |
| 657 bool is_secure, | 690 bool is_secure, |
| 658 MediaCodecDirection direction, | 691 MediaCodecDirection direction, |
| 659 bool require_software_codec) | 692 bool require_software_codec) |
| 660 : SdkMediaCodecBridge(mime, is_secure, direction, require_software_codec), | 693 : MediaCodecBridgeImpl(mime, is_secure, direction, require_software_codec), |
| 661 adaptive_playback_supported_for_testing_(-1) {} | 694 adaptive_playback_supported_for_testing_(-1) {} |
| 662 | 695 |
| 663 bool VideoCodecBridge::SetSurface(jobject surface) { | 696 bool VideoCodecBridge::SetSurface(jobject surface) { |
| 664 DCHECK_GE(base::android::BuildInfo::GetInstance()->sdk_int(), 23); | 697 DCHECK_GE(base::android::BuildInfo::GetInstance()->sdk_int(), 23); |
| 665 JNIEnv* env = AttachCurrentThread(); | 698 JNIEnv* env = AttachCurrentThread(); |
| 666 return Java_MediaCodecBridge_setSurface(env, media_codec(), surface); | 699 return Java_MediaCodecBridge_setSurface(env, media_codec(), surface); |
| 667 } | 700 } |
| 668 | 701 |
| 669 void VideoCodecBridge::SetVideoBitrate(int bps, int frame_rate) { | 702 void VideoCodecBridge::SetVideoBitrate(int bps, int frame_rate) { |
| 670 JNIEnv* env = AttachCurrentThread(); | 703 JNIEnv* env = AttachCurrentThread(); |
| 671 Java_MediaCodecBridge_setVideoBitrate(env, media_codec(), bps, frame_rate); | 704 Java_MediaCodecBridge_setVideoBitrate(env, media_codec(), bps, frame_rate); |
| 672 } | 705 } |
| 673 | 706 |
| 674 void VideoCodecBridge::RequestKeyFrameSoon() { | 707 void VideoCodecBridge::RequestKeyFrameSoon() { |
| 675 JNIEnv* env = AttachCurrentThread(); | 708 JNIEnv* env = AttachCurrentThread(); |
| 676 Java_MediaCodecBridge_requestKeyFrameSoon(env, media_codec()); | 709 Java_MediaCodecBridge_requestKeyFrameSoon(env, media_codec()); |
| 677 } | 710 } |
| 678 | 711 |
| 679 bool VideoCodecBridge::IsAdaptivePlaybackSupported(int width, int height) { | 712 bool VideoCodecBridge::IsAdaptivePlaybackSupported(int width, int height) { |
| 680 if (adaptive_playback_supported_for_testing_ == 0) | 713 if (adaptive_playback_supported_for_testing_ == 0) |
| 681 return false; | 714 return false; |
| 682 else if (adaptive_playback_supported_for_testing_ > 0) | 715 else if (adaptive_playback_supported_for_testing_ > 0) |
| 683 return true; | 716 return true; |
| 684 JNIEnv* env = AttachCurrentThread(); | 717 JNIEnv* env = AttachCurrentThread(); |
| 685 return Java_MediaCodecBridge_isAdaptivePlaybackSupported(env, media_codec(), | 718 return Java_MediaCodecBridge_isAdaptivePlaybackSupported(env, media_codec(), |
| 686 width, height); | 719 width, height); |
| 687 } | 720 } |
| 688 | 721 |
| 689 } // namespace media | 722 } // namespace media |
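
For orientation only (not part of the change under review): a minimal sketch of how the MediaCodecBridgeImpl interface shown above is typically driven for a single access unit. Only the method signatures come from the diff; DecodeOneAccessUnit, the 10 ms timeouts, and the render-on-success choice are assumptions made up for this example.

// Illustrative sketch, not part of this CL. DecodeOneAccessUnit is a
// hypothetical helper; the method signatures match the new
// MediaCodecBridgeImpl interface in the diff above.
#include <vector>

#include "base/time/time.h"
#include "media/base/android/media_codec_bridge_impl.h"

namespace media {

MediaCodecStatus DecodeOneAccessUnit(MediaCodecBridgeImpl* codec,
                                     const std::vector<uint8_t>& access_unit,
                                     base::TimeDelta pts) {
  // Obtain an input buffer, copy the access unit into it, and queue it.
  int input_index = -1;
  MediaCodecStatus status = codec->DequeueInputBuffer(
      base::TimeDelta::FromMilliseconds(10), &input_index);
  if (status != MEDIA_CODEC_OK)
    return status;
  status = codec->QueueInputBuffer(input_index, access_unit.data(),
                                   access_unit.size(), pts);
  if (status != MEDIA_CODEC_OK)
    return status;

  // Poll for decoded output and hand the buffer back for rendering.
  int output_index = -1;
  size_t offset = 0;
  size_t size = 0;
  base::TimeDelta output_pts;
  bool end_of_stream = false;
  status = codec->DequeueOutputBuffer(base::TimeDelta::FromMilliseconds(10),
                                      &output_index, &offset, &size,
                                      &output_pts, &end_of_stream,
                                      nullptr /* key_frame */);
  if (status == MEDIA_CODEC_OK)
    codec->ReleaseOutputBuffer(output_index, true /* render */);
  return status;
}

}  // namespace media

A production caller would loop on these calls and handle the other status codes the bridge forwards from MediaCodec (try-again, output format changes, end of stream) rather than returning on the first result.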