| OLD | NEW |
| (Empty) |
| 1 // Copyright (c) 2011 The Chromium Authors. All rights reserved. | |
| 2 // Use of this source code is governed by a BSD-style license that can be | |
| 3 // found in the LICENSE file. | |
| 4 | |
| 5 // A test program that drives an OpenMAX video decoder module. This program | |
| 6 // reads a video elementary stream from a file and feeds it into the decoder. | |
| 7 // | |
| 8 // Run the following command to see usage: | |
| 9 // ./omx_test | |
| 10 | |
| 11 #include "base/at_exit.h" | |
| 12 #include "base/callback.h" | |
| 13 #include "base/command_line.h" | |
| 14 #include "base/memory/scoped_ptr.h" | |
| 15 #include "base/message_loop.h" | |
| 16 #include "base/string_number_conversions.h" | |
| 17 #include "base/time.h" | |
| 18 #include "base/utf_string_conversions.h" | |
| 19 #include "media/base/data_buffer.h" | |
| 20 #include "media/base/media.h" | |
| 21 #include "media/base/video_frame.h" | |
| 22 #include "media/ffmpeg/ffmpeg_common.h" | |
| 23 #include "media/ffmpeg/file_protocol.h" | |
| 24 #include "media/filters/bitstream_converter.h" | |
| 25 #include "media/tools/omx_test/color_space_util.h" | |
| 26 #include "media/tools/omx_test/file_reader_util.h" | |
| 27 #include "media/tools/omx_test/file_sink.h" | |
| 28 #include "media/video/omx_video_decode_engine.h" | |
| 29 | |
| 30 using media::BlockFileReader; | |
| 31 using media::Buffer; | |
| 32 using media::DataBuffer; | |
| 33 using media::FFmpegFileReader; | |
| 34 using media::FileReader; | |
| 35 using media::FileSink; | |
| 36 using media::H264FileReader; | |
| 37 using media::OmxConfigurator; | |
| 38 using media::OmxDecoderConfigurator; | |
| 39 using media::OmxEncoderConfigurator; | |
| 40 using media::OmxVideoDecodeEngine; | |
| 41 using media::PipelineStatistics; | |
| 42 using media::VideoFrame; | |
| 43 using media::YuvFileReader; | |
| 44 | |
| 45 // This is the driver object to feed the decoder with data from a file. | |
| 46 // It also implements the callbacks through which it receives events | |
| 47 // from the decoder. | |
| 48 // TODO(wjia): AVStream should be replaced with a new structure which is | |
| 49 // neutral to any video decoder. Also change media.gyp correspondingly. | |
| 50 class TestApp : public base::RefCountedThreadSafe<TestApp>, | |
| 51 public media::VideoDecodeEngine::EventHandler { | |
| 52 public: | |
| 53 TestApp(AVStream* av_stream, | |
| 54 FileSink* file_sink, | |
| 55 FileReader* file_reader) | |
| 56 : av_stream_(av_stream), | |
| 57 file_reader_(file_reader), | |
| 58 file_sink_(file_sink), | |
| 59 stopped_(false), | |
| 60 error_(false) { | |
| 61 } | |
| 62 | |
| 63 bool Initialize() { | |
| 64 if (!file_reader_->Initialize()) { | |
| 65 file_reader_.reset(); | |
| 66 LOG(ERROR) << "can't initialize file reader"; | |
| 67 return false; | |
| 68 } | |
| 69 | |
| 70 if (!file_sink_->Initialize()) { | |
| 71 LOG(ERROR) << "can't initialize output writer"; | |
| 72 return false; | |
| 73 } | |
| 74 return true; | |
| 75 } | |
| 76 | |
| 77 virtual void OnInitializeComplete(const media::VideoCodecInfo& info) {} | |
| 78 | |
| 79 virtual void OnUninitializeComplete() { | |
| 80 // If this callback is received, mark the |stopped_| flag so that we don't | |
| 81 // feed more buffers into the decoder. | |
| 82 // We also need to exit the current message loop because there is no | |
| 83 // more work left to do on it. This is done by calling | |
| 84 // message_loop_.Quit(). | |
| 85 stopped_ = true; | |
| 86 message_loop_.Quit(); | |
| 87 } | |
| 88 | |
| 89 virtual void OnError() { | |
| 90 // This method is called when an error occurs. Mark the error flag and | |
| 91 // exit the message loop because there is no more work to do. | |
| 92 LOG(ERROR) << "Error callback received!"; | |
| 93 error_ = true; | |
| 94 message_loop_.Quit(); | |
| 95 } | |
| 96 | |
| 97 virtual void OnFlushComplete() { | |
| 98 NOTIMPLEMENTED(); | |
| 99 } | |
| 100 | |
| 101 virtual void OnSeekComplete() { | |
| 102 NOTIMPLEMENTED(); | |
| 103 } | |
| 104 | |
| 105 virtual void OnFormatChange(media::VideoStreamInfo stream_info) { | |
| 106 NOTIMPLEMENTED(); | |
| 107 } | |
| 108 | |
| 109 void FormatCallback( | |
| 110 const OmxConfigurator::MediaFormat& input_format, | |
| 111 const OmxConfigurator::MediaFormat& output_format) { | |
| 112 // This callback is called when port reconfiguration is done. The input | |
| 113 // and output formats given here are the ones the codec will use. | |
| 114 | |
| 115 DCHECK_EQ(input_format.video_header.width, | |
| 116 output_format.video_header.width); | |
| 117 DCHECK_EQ(input_format.video_header.height, | |
| 118 output_format.video_header.height); | |
| 119 | |
| 120 file_sink_->UpdateSize(input_format.video_header.width, | |
| 121 input_format.video_header.height); | |
| 122 } | |
| 123 | |
| 124 virtual void ProduceVideoSample(scoped_refptr<Buffer> buffer) { | |
| 125 // We receive this callback when the decoder has consumed an input buffer. | |
| 126 // In this case, delete the previous buffer and enqueue a new one. | |
| 127 // There are some conditions under which we don't enqueue, for example | |
| 128 // when the last buffer is an end-of-stream buffer, when we have stopped, | |
| 129 // or when we have received an error. | |
| 130 bool eos = buffer.get() && buffer->IsEndOfStream(); | |
| 131 if (!eos && !stopped_ && !error_) | |
| 132 FeedInputBuffer(); | |
| 133 } | |
| 134 | |
| 135 virtual void ConsumeVideoFrame(scoped_refptr<VideoFrame> frame, | |
| 136 const PipelineStatistics& statistics) { | |
| 137 // This callback is received when the decoder has completed a decoding | |
| 138 // task and given us some output data. The frame is owned by the decoder. | |
| 139 if (stopped_ || error_) | |
| 140 return; | |
| 141 | |
| 142 if (!frame_count_) | |
| 143 first_sample_delivered_time_ = base::TimeTicks::HighResNow(); | |
| 144 | |
| 145 // If we have reached the end of the stream, then stop. | |
| 146 if (frame->IsEndOfStream()) { | |
| 147 engine_->Uninitialize(); | |
| 148 return; | |
| 149 } | |
| 150 | |
| 151 if (file_sink_.get()) { | |
| 152 for (size_t i = 0; i < frame->planes(); i++) { | |
| 153 int plane_size = frame->width() * frame->height(); | |
| 154 if (i > 0) plane_size >>= 2; | |
| 155 file_sink_->BufferReady(plane_size, frame->data(i)); | |
| 156 } | |
| 157 } | |
| 158 | |
| 159 // Could OMX IL return a partial sample from the decoder? | |
| 160 frame_count_++; | |
| 161 } | |
| 162 | |
| 163 void FeedInputBuffer() { | |
| 164 uint8* data; | |
| 165 int read; | |
| 166 file_reader_->Read(&data, &read); | |
| 167 engine_->ConsumeVideoSample(new DataBuffer(data, read)); | |
| 168 } | |
| 169 | |
| 170 void Run() { | |
| 171 StartProfiler(); | |
| 172 | |
| 173 media::VideoDecoderConfig config( | |
| 174 media::CodecIDToVideoCodec(av_stream_->codec->codec_id), | |
| 175 av_stream_->codec->coded_width, | |
| 176 av_stream_->codec->coded_height, | |
| 177 av_stream_->r_frame_rate.num, | |
| 178 av_stream_->r_frame_rate.den, | |
| 179 av_stream_->codec->extradata, | |
| 180 av_stream_->codec->extradata_size); | |
| 181 | |
| 182 engine_.reset(new OmxVideoDecodeEngine()); | |
| 183 engine_->Initialize(&message_loop_, this, NULL, config); | |
| 184 | |
| 185 // Execute the message loop so that we can run tasks on it. This call | |
| 186 // will return when we call message_loop_.Quit(). | |
| 187 message_loop_.Run(); | |
| 188 | |
| 189 StopProfiler(); | |
| 190 } | |
| 191 | |
| 192 void StartProfiler() { | |
| 193 start_time_ = base::TimeTicks::HighResNow(); | |
| 194 frame_count_ = 0; | |
| 195 } | |
| 196 | |
| 197 void StopProfiler() { | |
| 198 base::TimeDelta duration = base::TimeTicks::HighResNow() - start_time_; | |
| 199 int64 duration_ms = duration.InMilliseconds(); | |
| 200 int64 fps = 0; | |
| 201 if (duration_ms) { | |
| 202 fps = (static_cast<int64>(frame_count_) * | |
| 203 base::Time::kMillisecondsPerSecond) / duration_ms; | |
| 204 } | |
| 205 base::TimeDelta delay = first_sample_delivered_time_ - start_time_; | |
| 206 printf("\n<<< frames delivered : %d >>>", frame_count_); | |
| 207 printf("\n<<< time used(ms) : %d >>>", static_cast<int>(duration_ms)); | |
| 208 printf("\n<<< fps : %d >>>", static_cast<int>(fps)); | |
| 209 printf("\n<<< initial delay used(us): %d >>>", | |
| 210 static_cast<int>(delay.InMicroseconds())); | |
| 211 printf("\n"); | |
| 212 } | |
| 213 | |
| 214 scoped_ptr<OmxVideoDecodeEngine> engine_; | |
| 215 MessageLoop message_loop_; | |
| 216 scoped_ptr<AVStream> av_stream_; | |
| 217 scoped_ptr<FileReader> file_reader_; | |
| 218 scoped_ptr<FileSink> file_sink_; | |
| 219 | |
| 220 // Internal states for execution. | |
| 221 bool stopped_; | |
| 222 bool error_; | |
| 223 | |
| 224 // Counters for performance. | |
| 225 base::TimeTicks start_time_; | |
| 226 base::TimeTicks first_sample_delivered_time_; | |
| 227 int frame_count_; | |
| 228 }; | |
| 229 | |
| 230 static std::string GetStringSwitch(const char* name) { | |
| 231 return CommandLine::ForCurrentProcess()->GetSwitchValueASCII(name); | |
| 232 } | |
| 233 | |
| 234 static bool HasSwitch(const char* name) { | |
| 235 return CommandLine::ForCurrentProcess()->HasSwitch(name); | |
| 236 } | |
| 237 | |
| 238 static int GetIntSwitch(const char* name) { | |
| 239 if (HasSwitch(name)) { | |
| 240 int val; | |
| 241 base::StringToInt(GetStringSwitch(name), &val); | |
| 242 return val; | |
| 243 } | |
| 244 return 0; | |
| 245 } | |
| 246 | |
| 247 static bool PrepareDecodeFormats(AVStream *av_stream) { | |
| 248 std::string codec = GetStringSwitch("codec"); | |
| 249 av_stream->codec->codec_id = CODEC_ID_NONE; | |
| 250 if (codec == "h264") { | |
| 251 av_stream->codec->codec_id = CODEC_ID_H264; | |
| 252 } else if (codec == "mpeg4") { | |
| 253 av_stream->codec->codec_id = CODEC_ID_MPEG4; | |
| 254 } else if (codec == "h263") { | |
| 255 av_stream->codec->codec_id = CODEC_ID_H263; | |
| 256 } else if (codec == "vc1") { | |
| 257 av_stream->codec->codec_id = CODEC_ID_VC1; | |
| 258 } else { | |
| 259 LOG(ERROR) << "Unknown codec."; | |
| 260 return false; | |
| 261 } | |
| 262 return true; | |
| 263 } | |
| 264 | |
| 265 static bool PrepareEncodeFormats(AVStream *av_stream) { | |
| 266 av_stream->codec->width = GetIntSwitch("width"); | |
| 267 av_stream->codec->height = GetIntSwitch("height"); | |
| 268 av_stream->avg_frame_rate.num = GetIntSwitch("framerate"); | |
| 269 av_stream->avg_frame_rate.den = 1; | |
| 270 | |
| 271 std::string codec = GetStringSwitch("codec"); | |
| 272 av_stream->codec->codec_id = CODEC_ID_NONE; | |
| 273 if (codec == "h264") { | |
| 274 av_stream->codec->codec_id = CODEC_ID_H264; | |
| 275 } else if (codec == "mpeg4") { | |
| 276 av_stream->codec->codec_id = CODEC_ID_MPEG4; | |
| 277 } else if (codec == "h263") { | |
| 278 av_stream->codec->codec_id = CODEC_ID_H263; | |
| 279 } else if (codec == "vc1") { | |
| 280 av_stream->codec->codec_id = CODEC_ID_VC1; | |
| 281 } else { | |
| 282 LOG(ERROR) << "Unknown codec."; | |
| 283 return false; | |
| 284 } | |
| 285 // TODO(jiesun): assume constant bitrate now. | |
| 286 av_stream->codec->bit_rate = GetIntSwitch("bitrate"); | |
| 287 | |
| 288 // TODO(wjia): add more configurations needed by encoder | |
| 289 return true; | |
| 290 } | |
| 291 | |
| 292 static bool InitFFmpeg() { | |
| 293 if (!media::InitializeMediaLibrary(FilePath())) | |
| 294 return false; | |
| 295 avcodec_init(); | |
| 296 av_register_all(); | |
| 297 av_register_protocol2(&kFFmpegFileProtocol, sizeof(kFFmpegFileProtocol)); | |
| 298 return true; | |
| 299 } | |
| 300 | |
| 301 static void PrintHelp() { | |
| 302 printf("Usage for decoding...\n"); | |
| 303 printf("\n"); | |
| 304 printf("Usage: omx_test --input-file=FILE --codec=CODEC" | |
| 305 " [--output-file=FILE] [--enable-csc]" | |
| 306 " [--copy] [--use-ffmpeg]\n"); | |
| 307 printf(" CODEC: h264/mpeg4/h263/vc1\n"); | |
| 308 printf("\n"); | |
| 309 printf("Optional Arguments\n"); | |
| 310 printf(" --output-file Dump raw OMX output to file.\n"); | |
| 311 printf(" --enable-csc Dump the CSCed output to file.\n"); | |
| 312 printf(" --copy Simulate a memcpy from the output.\n"); | |
| 313 printf(" --use-ffmpeg Use the FFmpeg demuxer.\n"); | |
| 314 printf("\n"); | |
| 315 printf("Usage for encoding...\n"); | |
| 316 printf("\n"); | |
| 317 printf("Usage: omx_test --encoder --input-file=FILE --codec=CODEC" | |
| 318 " --width=PIXEL_WIDTH --height=PIXEL_HEIGHT" | |
| 319 " --bitrate=BIT_PER_SECOND --framerate=FRAME_PER_SECOND" | |
| 320 " [--output-file=FILE] [--enable-csc]" | |
| 321 " [--copy]\n"); | |
| 322 printf(" CODEC: h264/mpeg4/h263/vc1\n"); | |
| 323 printf("\n"); | |
| 324 printf("Optional Arguments\n"); | |
| 325 printf(" --output-file Dump raw OMX output to file.\n"); | |
| 326 printf(" --enable-csc Dump the CSCed input from file.\n"); | |
| 327 printf(" --copy Simulate a memcpy from the output.\n"); | |
| 328 printf(" --loop=COUNT Loop the input stream COUNT times.\n"); | |
| 329 } | |
| 330 | |
| 331 int main(int argc, char** argv) { | |
| 332 base::AtExitManager at_exit_manager; | |
| 333 CommandLine::Init(argc, argv); | |
| 334 | |
| 335 // Print help if there are not enough arguments. | |
| 336 if (argc == 1) { | |
| 337 PrintHelp(); | |
| 338 return -1; | |
| 339 } | |
| 340 | |
| 341 const CommandLine& cmd_line = *CommandLine::ForCurrentProcess(); | |
| 342 // Read a bunch of parameters. | |
| 343 FilePath input_path = cmd_line.GetSwitchValuePath("input-file"); | |
| 344 FilePath output_path = cmd_line.GetSwitchValuePath("output-file"); | |
| 345 bool encoder = HasSwitch("encoder"); | |
| 346 bool copy = HasSwitch("copy"); | |
| 347 bool enable_csc = HasSwitch("enable-csc"); | |
| 348 bool use_ffmpeg = HasSwitch("use-ffmpeg"); | |
| 349 int loop_count = GetIntSwitch("loop"); | |
| 350 if (loop_count == 0) | |
| 351 loop_count = 1; | |
| 352 DCHECK_GE(loop_count, 1); | |
| 353 | |
| 354 // Initialize OpenMAX. | |
| 355 if (!media::InitializeOpenMaxLibrary(FilePath())) { | |
| 356 LOG(ERROR) << "Unable to initialize OpenMAX library."; | |
| 357 return -1; | |
| 358 } | |
| 359 | |
| 360 // If FFmpeg should be used for demuxing, load the library here and do | |
| 361 // the initialization. | |
| 362 if (use_ffmpeg && !InitFFmpeg()) { | |
| 363 LOG(ERROR) << "Unable to initialize the media library."; | |
| 364 return -1; | |
| 365 } | |
| 366 | |
| 367 // Create AVStream | |
| 368 AVStream *av_stream = new AVStream; | |
| 369 AVCodecContext *av_codec_context = new AVCodecContext; | |
| 370 memset(av_stream, 0, sizeof(AVStream)); | |
| 371 memset(av_codec_context, 0, sizeof(AVCodecContext)); | |
| 372 scoped_ptr<AVCodecContext> av_codec_context_deleter(av_codec_context); | |
| 373 av_stream->codec = av_codec_context; | |
| 374 av_codec_context->width = 320; | |
| 375 av_codec_context->height = 240; | |
| 376 if (encoder) | |
| 377 PrepareEncodeFormats(av_stream); | |
| 378 else | |
| 379 PrepareDecodeFormats(av_stream); | |
| 380 | |
| 381 // Create the FileReader to read the input file. | |
| 382 FileReader* file_reader; | |
| 383 if (encoder) { | |
| 384 file_reader = new YuvFileReader( | |
| 385 input_path, av_stream->codec->width, | |
| 386 av_stream->codec->height, loop_count, enable_csc); | |
| 387 } else if (use_ffmpeg) { | |
| 388 // Use FFmpeg for reading. | |
| 389 file_reader = new FFmpegFileReader(input_path); | |
| 390 } else if (input_path.Extension() == FILE_PATH_LITERAL(".264")) { | |
| 391 file_reader = new H264FileReader(input_path); | |
| 392 } else { | |
| 393 // Creates a reader that reads in blocks of 32KB. | |
| 394 const int kReadSize = 32768; | |
| 395 file_reader = new BlockFileReader(input_path, kReadSize); | |
| 396 } | |
| 397 | |
| 398 // Create a file sink. | |
| 399 FileSink* file_sink = new FileSink(output_path, copy, enable_csc); | |
| 400 | |
| 401 // Create a test app object and initialize it. | |
| 402 scoped_refptr<TestApp> test = new TestApp(av_stream, file_sink, file_reader); | |
| 403 if (!test->Initialize()) { | |
| 404 LOG(ERROR) << "can't initialize this application"; | |
| 405 return -1; | |
| 406 } | |
| 407 | |
| 408 // This will run the decoder until EOS is reached or an error | |
| 409 // is encountered. | |
| 410 test->Run(); | |
| 411 return 0; | |
| 412 } | |