MediaCodec 解码器在解码 H264 文件时总是超时

问题描述 投票:3回答:2

我一直在尝试用 Android 的 MediaCodec 解码一个经 H264 编码的视频文件，并把解码器的输出渲染到 Surface 上；但运行应用时界面显示为黑屏，而且在 DDMS logcat 中可以看到解码器超时。

我已经把文件解析成了一个个有效的帧（frame）：先读取 4 个字节表示即将到来的帧的长度，再读取该长度的字节作为帧数据，然后再读 4 个字节作为下一帧的长度，如此循环，并把这些帧依次传给解码器。配置解码器时，我把从编码文件中取出的 SPS 和 PPS 值硬编码后放进了 MediaFormat。我没有设置 presentationTimeUs，直接使用了 0。现在解码器的 dequeueInputBuffer() 返回值 >= 0，但 dequeueOutputBuffer() 始终只返回 MediaCodec.INFO_TRY_AGAIN_LATER，这最终意味着解码器超时了。请查看我的代码并帮忙解答。

提前感谢。

这里是文件网址-https://drive.google.com/file/d/0B39qOyEnXlR8Z3FSb2lzTWlORUU/edit?usp=sharing

这是我的代码-

import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; import java.util.ArrayList; import android.app.Activity; import android.media.MediaCodec; import android.media.MediaCodec.BufferInfo; import android.media.MediaFormat; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.util.Log; import android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.widget.Toast; public class MainActivity extends Activity implements SurfaceHolder.Callback { private static final String filePath = Environment.getExternalStorageDirectory()+ "/H264Data1.264"; // + "/video_encoded.263";//"/video_encoded.264"; private PlayerThread mPlayer = null; Handler handler = null; public static byte[] SPS = null; public static byte[] PPS = null; public static ArrayList<Frame> frames = null; public static int frameID = 0; public static boolean incompleteLastFrame = false; File encodedFile = new File(filePath); InputStream is; private static class Frame { public int id; public byte[] frameData; public Frame(int id) { this.id = id; } } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); try { is = new FileInputStream(encodedFile); byte[] data = new byte[(int)encodedFile.length()]; System.out.println("Total file size : " + encodedFile.length()); frameID = 0; frames = new ArrayList<Frame>(); try { if ((is.read(data, 0, (int)encodedFile.length())) != -1) { getFramesFromData(data); Toast.makeText(getApplicationContext(), "frames processing finished. 
number of frames : " + frames.size(), Toast.LENGTH_SHORT).show(); SurfaceView sv = new SurfaceView(this); handler = new Handler(); sv.getHolder().addCallback(this); setContentView(sv); } } catch (IOException e) { e.printStackTrace(); } } catch (FileNotFoundException e) { e.printStackTrace(); } } public static void getFramesFromData(byte[] data) { int dataLength = data.length; int frameLength = 0; frameID = 0; if(data.length <= 0) return; // each iteration in this loop indicates generation of a new frame for(int i = 0; ; ) { if(i+3 >= dataLength) return; frameLength = ((data[i] & 0xff) << 24) + ((data[i + 1] & 0xff) << 16) + ((data[i + 2] & 0xff) << 8) + (data[i + 3] & 0xff); i += 4; if(frameLength > 0) { if(i+frameLength-1 >= dataLength) return; Frame frame = new Frame(frameID); frame.frameData = new byte[frameLength]; System.arraycopy(data, i, frame.frameData, 0, frameLength); frames.add(frame); frameID++; i += frameLength; } } } @Override public void surfaceCreated(SurfaceHolder holder) { Log.d("DecodeActivity", "in surfaceCreated"); } @Override public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { Log.d("DecodeActivity", "in surfaceChanged"); if (mPlayer == null) { Toast.makeText(getApplicationContext(), "in surfaceChanged. 
creating playerthread", Toast.LENGTH_SHORT).show(); mPlayer = new PlayerThread(holder.getSurface()); mPlayer.start(); } } @Override public void surfaceDestroyed(SurfaceHolder holder) { if (mPlayer != null) { mPlayer.interrupt(); } } private class PlayerThread extends Thread { //private MediaExtractor extractor; private MediaCodec decoder; private Surface surface; public PlayerThread(Surface surface) { this.surface = surface; } @Override public void run() { handler.post(new Runnable() { @Override public void run() { decoder = MediaCodec.createDecoderByType("video/avc"); MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240); byte[] header_sps = { 0x00, 0x00, 0x00, 0x01, 0x67, 0x42, (byte)0x80, 0x0C, (byte)0xE4, 0x40, (byte)0xA0, (byte)0xFD, 0x00, (byte)0xDA, 0x14, 0x26, (byte)0xA0 }; byte[] header_pps = {0x00, 0x00, 0x00, 0x01, 0x68, (byte)0xCE, 0x38, (byte)0x80 }; mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps)); mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps)); decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */); if (decoder == null) { Log.e("DecodeActivity", "Can't find video info!"); return; } decoder.start(); Log.d("DecodeActivity", "decoder.start() called"); ByteBuffer[] inputBuffers = decoder.getInputBuffers(); ByteBuffer[] outputBuffers = decoder.getOutputBuffers(); long startMs = System.currentTimeMillis(); int i = 0; while(!Thread.interrupted()) { if(i >= frames.size()) break; byte[] data = new byte[frames.get(i).frameData.length]; System.arraycopy(frames.get(i).frameData, 0, data, 0, frames.get(i).frameData.length); Log.d("DecodeActivity", "i = " + i + " dataLength = " + frames.get(i).frameData.length); int inIndex = 0; while ((inIndex = decoder.dequeueInputBuffer(1)) < 0) ; if (inIndex >= 0) { ByteBuffer buffer = inputBuffers[inIndex]; buffer.clear(); int sampleSize = data.length; if (sampleSize < 0) { Log.d("DecodeActivity", "InputBuffer 
BUFFER_FLAG_END_OF_STREAM"); decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM); break; } else { Log.d("DecodeActivity", "sample size: " + sampleSize); buffer = ByteBuffer.allocate(data.length); buffer.put(data); decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0); } BufferInfo info = new BufferInfo(); int outIndex = decoder.dequeueOutputBuffer(info, 100000); switch (outIndex) { case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED"); outputBuffers = decoder.getOutputBuffers(); break; case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: Log.d("DecodeActivity", "New format " + decoder.getOutputFormat()); break; case MediaCodec.INFO_TRY_AGAIN_LATER: Log.d("DecodeActivity", "dequeueOutputBuffer timed out!"); try { sleep(100); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } break; default: ByteBuffer outbuffer = outputBuffers[outIndex]; Log.d("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outbuffer); /*while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) { try { sleep(10); } catch (InterruptedException e) { e.printStackTrace(); break; } }*/ decoder.releaseOutputBuffer(outIndex, true); break; } i++; // All decoded frames have been rendered, we can stop playing now /*if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM"); break; }*/ } } decoder.stop(); decoder.release(); } }); } } }

我一直在尝试解码视频文件,该视频文件使用Android的MediaCodec通过H264编码进行编码,并试图将解码器的输出放到表面上,但是当我运行该应用程序时,它显示为黑色...

android surfaceview decode h.264 mediacodec
2个回答
2
投票
ByteBuffer buffer = inputBuffers[inIndex]; [...] buffer = ByteBuffer.allocate(data.length); buffer.put(data); decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);

问题就出在这里：`buffer` 先指向了解码器自己的输入缓冲区 `inputBuffers[inIndex]`，随后却被重新赋值为 `ByteBuffer.allocate(...)` 新建的缓冲区，帧数据被写进了这个新缓冲区，而真正提交给解码器的输入缓冲区始终是空的，所以解码器一直拿不到数据、一直超时。正确做法是直接对 `inputBuffers[inIndex]` 调用 `clear()` 和 `put(data)`，不要重新赋值。

0
投票
© www.soinside.com 2019 - 2024. All rights reserved.