Incorrect decoding of an H264 video stream on Nexus devices

Problem description:

I am trying to use the Android MediaCodec class to decode an h264 video stream coming from a remote camera. My code is:

public class RemoteCamera { 

public interface OnCameraListener { 
    void onConnected(); 
    void onFailureConnection(); 
    void onDisconnected(); 
    void onReady(); 
} 

private static final int MAX_NAL_LEN = 1024 * 1024; 
private static final String TAG = "RemoteCamera"; 

private OutputThread mOutputThread; 

private WebSocketManager mWebSocketManager; 
private OnCameraListener mOnCameraListener; 

private int mSearchState = 0; 
private byte[] mNalData; 
private int mNalDataPos; 

private MediaCodec mDecoder; 
private MediaFormat mFormat; 
private SurfaceView mSurfaceView; 
private MediaCodec.BufferInfo mInfo = new MediaCodec.BufferInfo(); 
private boolean mIsWaitingForSPS = true; 

public RemoteCamera(final SurfaceView surfaceView, final String wss) { 
    mSurfaceView = surfaceView; 
    mWebSocketManager = new WebSocketManager(wss); 
    mWebSocketManager.setWSListener(new WebSocketManager.OnWSListener() { 
     @Override 
     public void onOpen() { 
      if (mOnCameraListener != null) { 
       mOnCameraListener.onConnected(); 
      } 
     } 

     @Override 
     public void onClosed() { 
      if (mOnCameraListener != null) { 
       mOnCameraListener.onDisconnected(); 
      } 
     } 

     @Override 
     public void onFailure() { 
      if (mOnCameraListener != null) { 
       mOnCameraListener.onFailureConnection(); 
      } 
     } 

     @Override 
     public synchronized void onMessage(final ByteString bytes) { 
      final ByteBuffer bb = ByteBuffer.wrap(bytes.toByteArray()); 

      if (mIsWaitingForSPS) { 
       if (isSPSUnit(bb)) { 
        mIsWaitingForSPS = false; 
        if (mOnCameraListener != null) { 
         mOnCameraListener.onReady(); 
        } 
       } else { 
        return; 
       } 
      } 

      parseDatagram(bb.array(), bytes.size()); 
     } 
    }); 

    mNalData = new byte[MAX_NAL_LEN]; 
    mNalDataPos = 0; 
    try { 
     mDecoder = MediaCodec.createDecoderByType("video/avc"); 
    } catch (Exception e) { 
     Log.d(TAG, e.toString()); 
     return; 
    } 

    mFormat = MediaFormat.createVideoFormat("video/avc", 320, 240); 
} 

public void setOnCameraListener(final OnCameraListener cameraListener) { 
    mOnCameraListener = cameraListener; 
} 

public void startStreaming() { 
    mSurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() { 
     @Override 
     public void surfaceCreated(SurfaceHolder surfaceHolder) { 
      try { 
       mDecoder.configure(mFormat, mSurfaceView.getHolder().getSurface(), null, 0); 
      } catch (Exception e) { 
       Log.d(TAG, e.toString()); 
       return; 
      } 
      mWebSocketManager.wsRegister(); 
      mDecoder.start(); 
      mOutputThread = new OutputThread(); 
      mOutputThread.start(); 
     } 

     @Override 
     public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) { 

     } 

     @Override 
     public void surfaceDestroyed(SurfaceHolder surfaceHolder) { 

     } 
    }); 
} 


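// Busy-waits until the decoder hands out an input buffer, then copies one
// buffered NAL unit into it and queues it for decoding.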
private void feedDecoder(byte[] n, int len) { 
    for (; ;) { 
     try { 
      int inputBufferIndex = mDecoder.dequeueInputBuffer(0); 
      if (inputBufferIndex >= 0) { 
       final ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex); 
       inputBuffer.put(n, 0, len); 
       mDecoder.queueInputBuffer(inputBufferIndex, 0, len, System.currentTimeMillis(), 0); 
       break; 
      } 
     } catch (Exception e) { 
      Log.d(TAG, e.toString()); 
     } 
    } 
} 

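// Scans the incoming bytes for 0x00 0x00 0x00 0x01 start codes. Each time a new
// start code is found, the bytes buffered so far (which already begin with a
// start code) are handed to the decoder as one NAL unit, and the buffer is reset
// so that the next unit again starts with a 4-byte start code.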
private void parseDatagram(byte[] p, int plen) { 
    try { 
     for (int i = 0; i < plen; ++i) { 
      mNalData[mNalDataPos++] = p[i]; 
      if (mNalDataPos == MAX_NAL_LEN - 1) { 
       mNalDataPos = 0; 
      } 
      switch (mSearchState) { 
       case 0: 
       case 1: 
       case 2: 
        if (p[i] == 0) 
         mSearchState++; 
        else 
         mSearchState = 0; 
        break; 
       case 3: 
        if (p[i] == 1) { 
         mNalData[0] = 0; 
         mNalData[1] = 0; 
         mNalData[2] = 0; 
         mNalData[3] = 1; 
         feedDecoder(mNalData, mNalDataPos - 4); 
         mNalDataPos = 4; 
        } 
        mSearchState = 0; 
        break; 
       default: 
        break; 
      } 
     } 
    } catch (Exception e) { 
     Log.d(TAG, e.toString()); 
    } 
} 

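// Treats a unit as SPS when the NAL header byte right after the 4-byte start code is 0x67.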
private boolean isSPSUnit(final ByteBuffer unit) { 
    return unit.get(4) == 0x67; 
} 


private class OutputThread extends Thread { 

    @Override 
    public void run() { 
     while (true) { 
      try { 
       int outputBufferIndex = mDecoder.dequeueOutputBuffer(mInfo, 10); 
       if (outputBufferIndex >= 0) { 
        mDecoder.releaseOutputBuffer(outputBufferIndex, true); 
       } 
      } catch (Exception e) { 
       Log.d(TAG, e.toString()); 
      } 
     } 
    } 
}
}

I tested the code on a Sony Xperia Z5 Compact and a Yota Phone 2, and it works correctly on both of those devices. The picture I get from the Sony is really good. Then I tried streaming on Nexus 9 and Nexus 7 devices, and the picture comes out garbled, as if the frame were drawn in broken rows from top to bottom. There is no correct output on the Nexus devices: nexus results

I know this depends on the device's native media codecs, but what can I do to fix the problem so that the video is displayed correctly on all devices?

Decoders can be very lenient. My guess is that the Sony and the Yota are more forgiving than the Nexuses. Could you save the stream to a file and share it? –

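In case it helps, here is a minimal sketch of how the raw payloads could be dumped to a file for inspection; the StreamDumper class, the file path handling, and the hook into onMessage are all made up for illustration:

import android.util.Log;

import java.io.FileOutputStream;
import java.io.IOException;

// Hypothetical helper that appends every received WebSocket payload to a raw
// .h264 dump so the stream can be examined offline (hex editor, ffprobe, etc.).
public class StreamDumper {

    private static final String TAG = "StreamDumper";

    private FileOutputStream mOut;

    public StreamDumper(final String path) {
        try {
            mOut = new FileOutputStream(path, true); // append mode
        } catch (IOException e) {
            Log.d(TAG, e.toString());
        }
    }

    // Call from onMessage() with bytes.toByteArray(), before parseDatagram().
    public synchronized void write(final byte[] payload) {
        if (mOut == null) {
            return;
        }
        try {
            mOut.write(payload);
        } catch (IOException e) {
            Log.d(TAG, e.toString());
        }
    }

    public synchronized void close() {
        if (mOut == null) {
            return;
        }
        try {
            mOut.close();
        } catch (IOException e) {
            Log.d(TAG, e.toString());
        }
    }
}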
Thank you for the answer. I will provide the file as soon as I can –

Do not pass the 0x00 0x00 0x00 0x01 NALU start codes to the decoder.

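As an illustration of that suggestion, stripping the prefix would roughly mean offsetting the copy in feedDecoder, something like the sketch below; the method name is hypothetical, it reuses mDecoder from the question's class, and it assumes every buffer passed in starts with a 4-byte start code:

// Sketch only: skip the leading 0x00 0x00 0x00 0x01 before queueing the NAL unit.
// Assumes 'n' begins with a 4-byte start code and 'len' includes that prefix.
private void feedDecoderWithoutStartCode(byte[] n, int len) {
    final int startCodeLen = 4;
    if (len <= startCodeLen) {
        return; // nothing besides the prefix
    }
    final int inputBufferIndex = mDecoder.dequeueInputBuffer(10000); // timeout in microseconds
    if (inputBufferIndex >= 0) {
        final ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex);
        inputBuffer.put(n, startCodeLen, len - startCodeLen);
        mDecoder.queueInputBuffer(inputBufferIndex, 0, len - startCodeLen,
                System.currentTimeMillis(), 0);
    }
}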
Thanks, but that does not work for me. Nothing changed. What is more, when I remove the start codes from all NALUs, the result is bad on every device. So I have to keep 0x00 0x00 0x00 0x01 for the SPS and PPS units. That brings back a good picture on the Sony and the Yota, but it still does not work on the Nexus 9 and Nexus 7. –
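
For clarity, the variant described in this comment (keeping the start code only for SPS and PPS) would look roughly like the sketch below. The method name is hypothetical and, as in the previous sketch, it assumes each buffered unit begins with a 4-byte start code, so the NAL header byte sits at index 4; 0x68 identifies PPS in the same way 0x67 identifies SPS in isSPSUnit above.

// Sketch: keep the 4-byte start code for SPS (0x67) and PPS (0x68) units,
// strip it for all other NAL units before queueing them.
private void feedDecoderSelective(byte[] n, int len) {
    final byte nalHeader = n[4];
    final boolean isParameterSet = (nalHeader == 0x67) || (nalHeader == 0x68);
    final int offset = isParameterSet ? 0 : 4;

    final int inputBufferIndex = mDecoder.dequeueInputBuffer(10000); // timeout in microseconds
    if (inputBufferIndex >= 0) {
        final ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex);
        inputBuffer.put(n, offset, len - offset);
        mDecoder.queueInputBuffer(inputBufferIndex, 0, len - offset,
                System.currentTimeMillis(), 0);
    }
}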