decode h264 raw stream using mediacodec

寵の児 提交于 2020-01-06 07:00:33

问题


I recieve h264 data from server, I want to decode this stream using mediacodec and texture view on android.I got the data from the server , parssing it to get the SPS , the PPS and the video frame data, then I passed this data to the mediacodec , but the function dequeueOutputBuffer(info, 100000) always returns -1 and I get dequeueOutputBuffer timed out.

Any help please, I'am stucked at this issues from three weeks.

this is the code used to decode the video frame.

public class H264PlayerActivity extends AppCompatActivity implements TextureView.SurfaceTextureListener {
private TextureView m_surface;// View that contains the Surface Texture

    private H264Provider provider;// Object that connects to our server and gets H264 frames

    private MediaCodec m_codec;// Media decoder

   // private DecodeFramesTask m_frameTask;// AsyncTask that takes H264 frames and uses the decoder to update the Surface Texture
    // the channel used to receive the partner's video
    private ZMQ.Socket subscriber = null;
    private ZMQ.Context context;
    // thread handling the video reception
   // byte[] byte_SPSPPS = null;
    //byte[] byte_Frame = null;
    public static String stringSubscribe=null;
    public static String myIpAcquisition=null;
    public static byte[] byte_SPSPPS = null;
    public static byte[] byte_Frame = null;
    boolean isIframe = false;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.h264player_activity);
        Bundle extras = getIntent().getExtras();
        if(extras!=null)
        {
            stringSubscribe=extras.getString("stringSubscribe");
            myIpAcquisition=(extras.getString("myIpAcquisition"));
        }
        // Get a referance to the TextureView in the UI
        m_surface = (TextureView) findViewById(R.id.textureView);

        // Add this class as a call back so we can catch the events from the Surface Texture
        m_surface.setSurfaceTextureListener(this);
    }

    @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
    @Override
    // Invoked when a TextureView's SurfaceTexture is ready for use
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
        // when the surface is ready, we make a H264 provider Object. When its constructor runs it starts an AsyncTask to log into our server and start getting frames
        provider = new H264Provider(stringSubscribe, myIpAcquisition,byte_SPSPPS,byte_Frame);

    }

    @Override
    // Invoked when the SurfaceTexture's buffers size changed
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
    }

    @Override
    // Invoked when the specified SurfaceTexture is about to be destroyed
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
        return false;
    }

    @Override
    // Invoked when the specified SurfaceTexture is updated through updateTexImage()
    public void onSurfaceTextureUpdated(SurfaceTexture surface) {
    }

    private class H264Provider {
        String stringSubscribe = "";
        String myIpAcquisition = "";
        byte[] byte_SPSPPS = null;
        byte[] byte_PPS = null;
        byte[] byte_Frame = null;

        H264Provider(String stringSubscribe, String myIpAcquisition, byte[] byte_SPS, byte[] byte_Frame) {
            this.stringSubscribe = stringSubscribe;
            this.myIpAcquisition = myIpAcquisition;
            this.byte_SPSPPS = byte_SPS;
            this.byte_PPS = byte_PPS;
            this.byte_Frame = byte_Frame;
            System.out.println(" subscriber client started");
            //SetUpConnection setup=new SetUpConnection();
           // setup.execute();
            PlayerThread mPlayer = new PlayerThread();
            mPlayer.start();
        }
        void release(){
            // close ØMQ socket
            subscriber.close();
            //terminate 0MQ context
            context.term();
        }
         byte[] getCSD( ) {
            return byte_SPSPPS;
        }
         byte[] nextFrame(  ) {

            return byte_Frame;
        }
        private class PlayerThread extends Thread
        {

            public PlayerThread()
            {
                System.out.println(" subscriber client started");
            }

            @TargetApi(Build.VERSION_CODES.LOLLIPOP)
            @RequiresApi(api = Build.VERSION_CODES.JELLY_BEAN)
            @Override
            public void run() {
                /******************************************ZMQ****************************/
                // Prepare our context and subscriber
                ZMQ.Context context = ZMQ.context(1);
                //create 0MQ socket
                ZMQ.Socket subscriber = context.socket(ZMQ.SUB);
                //create outgoing connection from socket
                String address = "tcp://" + myIpAcquisition + ":xxxx";
                Boolean bbbb = subscriber.connect(address);
                subscriber.setHWM(20);//  the number of messages to queue.
                Log.e("zmq_tag", "connect connect " + bbbb);
                //boolean bbbb = subscriber.setSocketOpt(zmq.ZMQ.ZMQ_SNDHWM, 1);
                subscriber.subscribe(stringSubscribe.getBytes(ZMQ.CHARSET));
                Log.e("zmq_tag", " zmq stringSubscribe " + stringSubscribe);
                boolean bRun = true;

                while (bRun) {
                    ZMsg msg = ZMsg.recvMsg(subscriber);
                    String string_SPS = null;
                    String string_PPS = null;
                    String SPSPPS = null;
                    String string_Frame = null;
                    if (msg != null) {

                        // create a video message out of the zmq message
                        VideoMessage oVideoMsg = VideoMessage.fromZMsg(msg);
                        // wait until get Iframe
                        String szInfoPublisher = new String(oVideoMsg.szInfoPublisher);
                        Log.e("zmq_tag", "szInfoPublisher   " + szInfoPublisher);
                        if (szInfoPublisher.contains("0000000167")) {
                            isIframe = true;
                            String[] split_IFrame = szInfoPublisher.split("0000000165");
                            String SPS__PPS =  split_IFrame[0];
                            String [] split_SPSPPS=SPS__PPS.split("0000000167");
                            SPSPPS="0000000167" + split_SPSPPS[1];
                            Log.e("zmq_tag", "SPS+PPS  " + SPSPPS);
                            String iFrame = "0000000165" + split_IFrame[1];
                            Log.e("zmq_tag", "IFrame  " + iFrame);
                            string_Frame = iFrame;
                        } else {
                            if ((szInfoPublisher.contains("0000000161")||szInfoPublisher.contains("0000000141")) && isIframe) {
                                if (szInfoPublisher.contains("0000000161"))
                                {
                                    String[] split_IFrame = szInfoPublisher.split("0000000161");
                                    String newMSG = "0000000161" + split_IFrame[1];
                                    Log.e("zmq_tag", " P Frame  " + newMSG);
                                    string_Frame = newMSG;
                                } else
                                if (szInfoPublisher.contains("0000000141"))
                                {
                                    String[] split_IFrame = szInfoPublisher.split("0000000141");
                                    String newMSG = "0000000141" + split_IFrame[1];
                                    Log.e("zmq_tag", " P Frame  " + newMSG);
                                    string_Frame = newMSG;
                                }

                            } else {
                                isIframe = false;
                            }

                        }


                    }
                    if (SPSPPS != null) {
                        byte_SPSPPS = SPSPPS.getBytes();
                        Log.e("zmq_tag", " byte_SPSPPS  " + new String(byte_SPSPPS));
                    }
                    if (string_Frame != null) {
                        byte_Frame = string_Frame.getBytes();
                        Log.e("zmq_tag", " byte_Frame  " + new String(byte_Frame));
                    }

                    if(SPSPPS != null) {

                        // Create the format settinsg for the MediaCodec
                        MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080);// MIMETYPE: a two-part identifier for file formats and format contents

                        // Set the PPS and SPS frame
                        format.setByteBuffer("csd-0", ByteBuffer.wrap(byte_SPSPPS));
                        // Set the buffer size
                        format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 100000);

                        try {
                            // Get an instance of MediaCodec and give it its Mime type
                            m_codec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
                            // Configure the Codec
                            m_codec.configure(format, new Surface(m_surface.getSurfaceTexture()), null, 0);
                            // Start the codec
                            m_codec.start();
                            // Create the AsyncTask to get the frames and decode them using the Codec
                            while (!Thread.interrupted()) {
                                // Get the next frame
                                byte[] frame = byte_Frame;
                                Log.e("zmq_tag", " frame  " + new String(frame));
                                // Now we need to give it to the Codec to decode into the surface
                                // Get the input buffer from the decoder
                                int inputIndex = m_codec.dequeueInputBuffer(1);// Pass in -1 here as in this example we don't have a playback time reference
                                Log.e("zmq_tag", "inputIndex  " + inputIndex);
                                // If  the buffer number is valid use the buffer with that index
                                if (inputIndex >= 0) {
                                    ByteBuffer buffer =m_codec.getInputBuffer(inputIndex);
                                    buffer.put(frame);
                                    // tell the decoder to process the frame
                                    m_codec.queueInputBuffer(inputIndex, 0, frame.length, 0, 0);
                                }

                                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                                int outputIndex = m_codec.dequeueOutputBuffer(info, 100000);
                                Log.e("zmq_tag", "outputIndex  " + outputIndex);
                                if (outputIndex >= 0) {
                                    m_codec.releaseOutputBuffer(outputIndex, true);
                                }
                                // wait for the next frame to be ready, our server makes a frame every 250ms
                                try {
                                    Thread.sleep(250);
                                } catch (Exception e) {
                                    e.printStackTrace();
                                }
                            }
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                    }
                }
                // close ØMQ socket
                subscriber.close();
                //terminate 0MQ context
                context.term();
            }

        }

回答1:


sorry I can't comment, but I see some probable mistakes in your code :

  • Your KEY_MAX_INPUT_SIZE is wrong, it must be at least your Height * Width, in your case the Height * Width = 1920 * 1080 = 2073600 > 100000, you feed your decoder input buffer with data that can be > 100000, so since the decoder wants NALUs it probably wouldn't like it.
  • You do not clear input buffer before pushing data (realy needed?)


来源:https://stackoverflow.com/questions/49444446/decode-h264-raw-stream-using-mediacodec

易学教程内所有资源均来自网络或用户发布的内容,如有违反法律规定的内容欢迎反馈
该文章没有解决你所遇到的问题?点击提问,说说你的问题,让更多的人一起探讨吧!