Index: media/base/android/webaudio_media_codec_bridge.cc
diff --git a/media/base/android/webaudio_media_codec_bridge.cc b/media/base/android/webaudio_media_codec_bridge.cc
new file mode 100644
index 0000000000000000000000000000000000000000..c1242890e77210ad8436897a7509685e8c865ea4
--- /dev/null
+++ b/media/base/android/webaudio_media_codec_bridge.cc
@@ -0,0 +1,131 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/android/webaudio_media_codec_bridge.h"
+
+#include <unistd.h>
+#include <vector>
+
+#include "base/android/jni_android.h"
+#include "base/android/jni_array.h"
+#include "base/android/jni_string.h"
+#include "base/basictypes.h"
+#include "base/logging.h"
+#include "jni/MediaCodec_jni.h"
bulach 2013/03/28 13:39:25
it seems this is unused..
| +#include "jni/WebAudioMediaCodecBridge_jni.h" |
| + |
| + |
| +using base::android::AttachCurrentThread; |
| +using base::android::DetachFromVM; |
| + |
| +namespace media { |
| + |
| +void WebAudioMediaCodecBridge::RunWebAudioMediaCodec( |
| + base::SharedMemoryHandle input_fd, |
| + base::FileDescriptor output_fd) { |
| + DVLOG(0) << "RunWebAudioMediaCodec"; |
| + |
| + WebAudioMediaCodecBridge bridge(input_fd, output_fd); |
| + bool result = bridge.DecodeInMemoryAudioFile(); |
| + DVLOG(0) << "RunWebAudioMediaCodec returned " << result; |
| +} |
| + |
| +WebAudioMediaCodecBridge::WebAudioMediaCodecBridge( |
| + base::SharedMemoryHandle input_fd, |
| + base::FileDescriptor output_fd) |
| + : input_fd_(input_fd.fd), |
| + output_fd_(output_fd.fd) { |
| + |
| + DVLOG(0) << "WebAudioMediaCodecBridge start **********************"; |
| + DVLOG(0) << "input fd = " << input_fd_ |
| + << " output fd = " << output_fd.fd; |
| + env_ = AttachCurrentThread(); |
bulach 2013/03/28 13:39:25
nit: as above, avoid caching env and context.
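A minimal sketch of what that might look like (an assumed rewrite, not code from the patch): obtain the JNIEnv and application context at the point of the Java call instead of caching them as members, since AttachCurrentThread() and GetApplicationContext() can be called wherever they are needed.

  // Sketch only: fetch env and context where the Java call is made,
  // rather than caching env_ and j_context_ in the constructor.
  bool WebAudioMediaCodecBridge::DecodeInMemoryAudioFile() {
    JNIEnv* env = AttachCurrentThread();
    CHECK(env);
    jobject context = base::android::GetApplicationContext();
    jboolean decoded = Java_WebAudioMediaCodecBridge_decodeAudioFile(
        env, context, reinterpret_cast<intptr_t>(this), input_fd_);
    return decoded;
  }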
+  CHECK(env_);
+
+  j_context_ = base::android::GetApplicationContext();
+
+}
+
+WebAudioMediaCodecBridge::~WebAudioMediaCodecBridge() {
+  DVLOG(0) << "closing output fd " << output_fd_;
+  int rc = close(output_fd_);
+  if (rc != 0) {
bulach 2013/03/28 13:39:25
nit:
if (rc)
  VLOG(0) << "Couldn't close output fd: rc = " << rc;
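For reference, the destructor with the suggested nit applied might look like this (a sketch, not code from the patch):

  // Sketch only: rely on close() returning non-zero on failure.
  WebAudioMediaCodecBridge::~WebAudioMediaCodecBridge() {
    if (close(output_fd_))
      VLOG(0) << "Couldn't close output fd " << output_fd_;
    if (close(input_fd_))
      VLOG(0) << "Couldn't close shared mem fd " << input_fd_;
  }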
| + VLOG(0) << "Couldn't close output fd: rc = " << rc; |
| + } |
| + DVLOG(0) << "Closing shared memory fd " << input_fd_; |
| + rc = close(input_fd_); |
| + if (rc != 0) { |
bulach 2013/03/28 13:39:25
as above..
| + VLOG(0) << "Couldn't close shared mem fd: rc = " << rc; |
| + } |
| +} |
| + |
| +bool WebAudioMediaCodecBridge::DecodeInMemoryAudioFile() { |
| + // Process the encoded data that is in shared memory given by the |
| + // file descriptor encodedDataFD_. |
| + |
| + jboolean decoded = Java_WebAudioMediaCodecBridge_decodeAudioFile( |
| + env_, |
| + j_context_, |
| + reinterpret_cast<intptr_t>(this), |
| + input_fd_); |
| + |
| + DVLOG(0) << "decoded = " << static_cast<bool>(decoded); |
| + return decoded; |
| +} |
| + |
| +void WebAudioMediaCodecBridge::InitializeDestination( |
| + JNIEnv* env, |
| + jobject /*java object*/, |
| + jint number_of_channels, |
| + jint sample_rate, |
| + jlong duration_us, |
| + jboolean is_vorbis) { |
| + |
| + long info[4]; |
| + info[0] = number_of_channels; |
| + info[1] = sample_rate; |
| + info[2] = 0.5 + (duration_us * 0.000001 * sample_rate); |
bulach 2013/03/28 13:39:25
since this is in integer space, how about:
const l
Raymond Toy (Google) 2013/03/28 17:38:31
I think it's clearer to do a floating-point conversion
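For context, the two alternatives being discussed for the frame count look roughly like this (a sketch of both variants; the constant name kMicrosecondsPerSecond is assumed, and the integer version is a guess at the truncated suggestion):

  // Floating-point conversion with rounding, as in the patch:
  info[2] = static_cast<long>(0.5 + duration_us * 0.000001 * sample_rate);

  // Roughly equivalent computation kept in integer space:
  const int64_t kMicrosecondsPerSecond = 1000000;
  info[2] = (duration_us * sample_rate + kMicrosecondsPerSecond / 2) /
            kMicrosecondsPerSecond;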
+  info[3] = is_vorbis ? 1 : 0;
+
+  DVLOG(0) << "InitializeDestination:";
+  DVLOG(0) << "  number of channels = " << number_of_channels;
+  DVLOG(0) << "  rate = " << sample_rate;
+  DVLOG(0) << "  duration = " << duration_us << " us";
+  DVLOG(0) << "  vorbis = " << (is_vorbis ? "yes" : "no");
+
+  write(output_fd_, info, sizeof(info));
+}
+
+void WebAudioMediaCodecBridge::OnChunkDecoded(
+    JNIEnv* env,
+    jobject /*java object*/,
+    jobject buf,
+    jint buf_size) {
+
+  signed short* decoded_buffer =
+      static_cast<signed short*>(env->GetDirectBufferAddress(buf));
+  DCHECK((buf_size % sizeof(decoded_buffer[0])) == 0);
+
+  int bytes_left = buf_size;
+  signed short* buffer = decoded_buffer;
+
+  // Write out the data to the pipe atomically, in small chunks if
+  // necessary.
+  while (bytes_left > 0) {
+    int nwrite = (bytes_left >= PIPE_BUF) ? PIPE_BUF : bytes_left;
+    write(output_fd_, buffer, nwrite);
bulach 2013/03/28 13:39:25
question: perhaps check the return value was actua
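A sketch of the kind of check the reviewer appears to be asking for (assumed; write() may fail or perform a short write):

  // Sketch only: verify how many bytes write() actually accepted.
  while (bytes_left > 0) {
    int to_write = (bytes_left >= PIPE_BUF) ? PIPE_BUF : bytes_left;
    ssize_t written = write(output_fd_, buffer, to_write);
    if (written <= 0) {
      VLOG(0) << "write to pipe failed: " << written;
      break;
    }
    bytes_left -= written;
    buffer += written / sizeof(decoded_buffer[0]);
  }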
+    bytes_left -= nwrite;
+    buffer += nwrite / sizeof(decoded_buffer[0]);
+  }
+}
+
+bool WebAudioMediaCodecBridge::RegisterWebAudioMediaCodecBridge(JNIEnv* env) {
+  bool ret = RegisterNativesImpl(env);
+  DCHECK(g_WebAudioMediaCodecBridge_clazz);
+  if (ret)
+    ret = JNI_MediaCodec::RegisterNativesImpl(env);
bulach 2013/03/28 13:39:25
as above, this seems unused, also there's no need
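If the MediaCodec bindings really are unused in this file, both the jni/MediaCodec_jni.h include above and this extra registration could presumably go, leaving something like this (sketch only, assuming nothing else here needs JNI_MediaCodec):

  bool WebAudioMediaCodecBridge::RegisterWebAudioMediaCodecBridge(JNIEnv* env) {
    return RegisterNativesImpl(env);
  }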
+  return ret;
+}
+
+} // namespace