Index: media/base/android/webaudio_media_codec_bridge.cc |
diff --git a/media/base/android/webaudio_media_codec_bridge.cc b/media/base/android/webaudio_media_codec_bridge.cc |
new file mode 100644 |
index 0000000000000000000000000000000000000000..ef7838d1c97466f91d40f392903191a7cc5d18b0 |
--- /dev/null |
+++ b/media/base/android/webaudio_media_codec_bridge.cc |
@@ -0,0 +1,128 @@ |
+// Copyright (c) 2012 The Chromium Authors. All rights reserved. |
+// Use of this source code is governed by a BSD-style license that can be |
+// found in the LICENSE file. |
+ |
#include "media/base/android/webaudio_media_codec_bridge.h"

#include <errno.h>
#include <limits.h>
#include <stdint.h>
#include <string.h>
#include <unistd.h>

#include <vector>

#include "base/android/jni_android.h"
#include "base/android/jni_array.h"
#include "base/android/jni_string.h"
#include "base/basictypes.h"
#include "base/logging.h"
#include "jni/WebAudioMediaCodecBridge_jni.h"
+ |
+ |
+using base::android::AttachCurrentThread; |
+ |
+namespace media { |
+ |
+void WebAudioMediaCodecBridge::RunWebAudioMediaCodec( |
+ base::SharedMemoryHandle input_fd, |
+ base::FileDescriptor output_fd) { |
+ DVLOG(0) << "RunWebAudioMediaCodec"; |
+ |
+ WebAudioMediaCodecBridge bridge(input_fd, output_fd); |
+ bool result = bridge.DecodeInMemoryAudioFile(); |
+ DVLOG(0) << "RunWebAudioMediaCodec returned " << result; |
+} |
+ |
+WebAudioMediaCodecBridge::WebAudioMediaCodecBridge( |
+ base::SharedMemoryHandle input_fd, |
+ base::FileDescriptor output_fd) |
+ : input_fd_(input_fd.fd), |
+ output_fd_(output_fd.fd) { |
+ DVLOG(0) << "WebAudioMediaCodecBridge start **********************"; |
palmer
2013/04/02 18:01:08
Combine these two DVLOG statements into one.
|
+ DVLOG(0) << "input fd = " << input_fd_ |
+ << " output fd = " << output_fd.fd; |
+} |
+ |
+WebAudioMediaCodecBridge::~WebAudioMediaCodecBridge() { |
+ int result = close(output_fd_); |
+ if (result) |
+ VLOG(0) << "Couldn't close output fd " << output_fd_ |
palmer
2013/04/02 18:01:08
In both of these VLOGs, would it help to use strer
|
+ << ": result = " << result; |
+ |
+ result = close(input_fd_); |
+ if (result) |
+ VLOG(0) << "Couldn't close shared mem fd " << input_fd_ |
+ << ": result = " << result; |
+} |
+ |
+bool WebAudioMediaCodecBridge::DecodeInMemoryAudioFile() { |
+ // Process the encoded data that is in shared memory given by the |
+ // file descriptor encodedDataFD_. |
+ |
+ JNIEnv* env = AttachCurrentThread(); |
+ CHECK(env); |
+ jboolean decoded = Java_WebAudioMediaCodecBridge_decodeAudioFile( |
+ env, |
+ base::android::GetApplicationContext(), |
+ reinterpret_cast<intptr_t>(this), |
+ input_fd_); |
+ |
+ DVLOG(0) << "decoded = " << static_cast<bool>(decoded); |
+ return decoded; |
+} |
+ |
+void WebAudioMediaCodecBridge::InitializeDestination( |
+ JNIEnv* env, |
+ jobject /*java object*/, |
+ jint number_of_channels, |
+ jint sample_rate, |
+ jlong duration_us, |
+ jboolean is_vorbis) { |
+ long info[4]; |
palmer
2013/04/02 18:01:08
NIT: I think you can combine the declaration with
Raymond Toy (Google)
2013/04/02 21:52:21
It's long so we can handle very long audio files,
|
+ |
+ // Send information about this audio file: number of channels, |
+ // sample rate (Hz), number of frames, a flag indicating whether |
+ // this file is an ogg/vorbis file. Information is sent as a set of |
palmer
2013/04/02 18:01:08
Use the same MIME type string as elsewhere; I thin
|
+ // 4 longs. This must be coordinated with DecodeAudioFileData! |
+ info[0] = number_of_channels; |
+ info[1] = sample_rate; |
+ // The number of frames is the length of the file (in us) times the |
palmer
2013/04/02 18:01:08
NIT: "in microseconds"
|
+ // sample rate. |
+ info[2] = 0.5 + (duration_us * 0.000001 * sample_rate); |
palmer
2013/04/02 18:01:08
NIT: Only one space after *.
|
+ info[3] = is_vorbis ? 1 : 0; |
+ |
+ DVLOG(0) << "InitializeDestination:"; |
palmer
2013/04/02 18:01:08
Combine these into one statement.
|
+ DVLOG(0) << " number of channels = " << number_of_channels; |
+ DVLOG(0) << " rate = " << sample_rate; |
+ DVLOG(0) << " duration = " << duration_us << " us"; |
+ DVLOG(0) << " vorbis = " << (is_vorbis ? "yes" : "no"); |
+ |
+ write(output_fd_, info, sizeof(info)); |
+} |
+ |
+void WebAudioMediaCodecBridge::OnChunkDecoded( |
+ JNIEnv* env, |
+ jobject /*java object*/, |
+ jobject buf, |
+ jint buf_size) { |
+ signed short* decoded_buffer = |
palmer
2013/04/02 18:01:08
Do you need the signed keyword?
Also, might it be
|
+ static_cast<signed short*>(env->GetDirectBufferAddress(buf)); |
+ DCHECK((buf_size % sizeof(decoded_buffer[0])) == 0); |
+ |
+ int bytes_left = buf_size; |
palmer
2013/04/02 18:01:08
Not sure you need this; just use buf_size.
|
+ signed short* buffer = decoded_buffer; |
palmer
2013/04/02 18:01:08
You don't really need to have both |buffer| and |d
|
+ |
+ // Write out the data to the pipe in small chunks if necessary. |
+ while (bytes_left > 0) { |
+ int bytes_to_write = (bytes_left >= PIPE_BUF) ? PIPE_BUF : bytes_left; |
+ int bytes_written = write(output_fd_, buffer, bytes_to_write); |
palmer
2013/04/02 18:01:08
write(2) returns ssize_t, not int.
|
+ if (bytes_written != bytes_to_write) |
palmer
2013/04/02 18:01:08
You detect short writes, but not errors (return of
|
+ VLOG(0) << "Only wrote " << bytes_written |
+ << " bytes but expected " << bytes_to_write; |
+ bytes_left -= bytes_written; |
+ buffer += bytes_written / sizeof(decoded_buffer[0]); |
palmer
2013/04/02 18:01:08
You could just use sizeof(buffer[0]) or sizeof(int
|
+ } |
+} |
+ |
+bool WebAudioMediaCodecBridge::RegisterWebAudioMediaCodecBridge(JNIEnv* env) { |
+ bool ret = RegisterNativesImpl(env); |
+ return ret; |
+} |
+ |
+} // namespace |