MediaCodec + YUV Recording


Input Data

Set the ImageReader data format to YUV_420_888. The actual layout is decided by the HAL layer and is usually NV21; in testing, treating the data as NV12 produced inverted colors.

Note: NV12 and NV21 are both YUV420SP (semi-planar) formats. The data may also arrive as YUV420P (planar), which covers I420 and YV12.
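For reference, a minimal sketch of creating such an ImageReader and pulling frames from it; the resolution, handler, and listener wiring here are assumptions, not part of the original post (getBytesFromImageReader and queue are shown below):

// Assumed setup; width/height must match the encoder resolution configured later.
ImageReader imageReader = ImageReader.newInstance(
        1280, 720, ImageFormat.YUV_420_888, /*maxImages*/ 2);

imageReader.setOnImageAvailableListener(reader -> {
    byte[] yuv = getBytesFromImageReader(reader); // helper shown below
    if (yuv != null) {
        queue.offer(new RawData(yuv, System.nanoTime()));
    }
}, backgroundHandler);

// imageReader.getSurface() is then added as a target of the camera capture session.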

Extracting a byte[] from the ImageReader:

/**
 * Pulls one frame from the ImageReader and returns it as a byte[].
 * Note: this assumes rowStride == width (no row padding); see the pitfalls section at the end.
 */
public static byte[] getBytesFromImageReader(ImageReader imageReader) {
    try (Image image = imageReader.acquireNextImage()) {
        final Image.Plane[] planes = image.getPlanes();
        ByteBuffer b0 = planes[0].getBuffer(); // Y plane, one byte per pixel
        ByteBuffer b1 = planes[1].getBuffer(); // U plane (interleaved chroma on semi-planar devices)
        ByteBuffer b2 = planes[2].getBuffer(); // V plane
        int y = b0.remaining(), u = y >> 2, v = u;
        byte[] bytes = new byte[y + u + v];
        if (b1.remaining() > u) { // YUV420SP: plane 1 holds the interleaved UV data
            b0.get(bytes, 0, b0.remaining());
            b1.get(bytes, y, b1.remaining()); // UV
        } else { // YUV420P: separate U and V planes
            b0.get(bytes, 0, b0.remaining());
            b1.get(bytes, y, b1.remaining()); // U
            b2.get(bytes, y + u, b2.remaining()); // V
        }
        return bytes;
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}

Push the byte[] into a LinkedBlockingQueue:

private LinkedBlockingQueue<RawData> queue = new LinkedBlockingQueue<>();

queue.offer(new RawData(yuvData, System.nanoTime()));

static class RawData {
    byte[] buf;
    long timeStamp;

    RawData(byte[] buf, long timeStamp) {
        this.buf = buf;
        this.timeStamp = timeStamp;
    }
}
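A worker thread drains this queue and drives the encoder. The original post does not show the H264EncodeConsumer run loop, so the following is only a sketch of how the pieces are assumed to fit together; startCodec() refers to the video encoder setup sketched in the next section, and drainOutput() is a hypothetical name for the output loop shown after feedMediaCodecData:

// Assumed video encoder thread loop; not taken from the original source.
@Override
public void run() {
    startCodec(); // configure and start mVideoEncodec (see the sketch below)
    while (!isExit) {
        RawData data = queue.poll();
        if (data != null) {
            feedMediaCodecData(data.buf, data.timeStamp); // queue one raw frame
        }
        drainOutput(); // dequeue encoded buffers and hand them to the muxer
    }
    stopCodec();
}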

Recording

Set the encoder parameters before recording. The resolution must match the ImageReader's width and height:

private EncoderParams setEncodeParams(int width, int height) {
    EncoderParams params = new EncoderParams();
    params.setVideoPath(mSavePath);    // output file path
    params.setFrameWidth(width);       // resolution
    params.setFrameHeight(height);
    params.setBitRate(600000);         // video bit rate
    params.setFrameRate(30);           // video frame rate
    params.setAudioBitrate(44100);     // audio bit rate
    params.setAudioSampleRate(AACEncodeConsumer.DEFAULT_SAMPLE_RATE);  // audio sample rate
    params.setAudioChannelConfig(AACEncodeConsumer.CHANNEL_IN_MONO);   // mono channel config
    params.setAudioChannelCount(AACEncodeConsumer.CHANNEL_COUNT_MONO); // mono channel count
    params.setAudioFormat(AACEncodeConsumer.ENCODING_PCM_16BIT);       // 16-bit samples
    return params;
}
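EncoderParams itself is not shown in the original post; it is assumed to be a plain value holder along these lines:

// Hypothetical parameter holder; only a few fields shown, the rest follow the same pattern.
public class EncoderParams {
    private String videoPath;
    private int frameWidth, frameHeight, bitRate, frameRate;
    private int audioBitrate, audioSampleRate, audioChannelConfig, audioChannelCount, audioFormat;

    public void setFrameWidth(int w) { frameWidth = w; }
    public int getFrameWidth() { return frameWidth; }
    public void setBitRate(int b) { bitRate = b; }
    public int getBitRate() { return bitRate; }
    // ... setters/getters for the remaining fields
}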

Video Stream Encoding

Audio and video encoding are both time-consuming operations and should run on worker threads.
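The video encoder setup is not included in the original post. Below is a minimal sketch of how mVideoEncodec might be configured from EncoderParams; COLOR_FormatYUV420SemiPlanar is an assumption that matches NV21/NV12-style input, and the supported input formats vary by device:

// Sketch of the video encoder setup (assumed, not from the original code).
private void startCodec() {
    try {
        MediaFormat format = MediaFormat.createVideoFormat(
                "video/avc", mParams.getFrameWidth(), mParams.getFrameHeight());
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mParams.getBitRate());
        format.setInteger(MediaFormat.KEY_FRAME_RATE, mParams.getFrameRate());
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1); // one key frame per second

        mVideoEncodec = MediaCodec.createEncoderByType("video/avc");
        mVideoEncodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mVideoEncodec.start();
    } catch (IOException e) {
        e.printStackTrace();
    }
}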

Feeding input to MediaCodec:

private void feedMediaCodecData(byte[] data, long timeStamp) {
    int inputBufferIndex = mVideoEncodec.dequeueInputBuffer(TIMES_OUT);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = mVideoEncodec.getInputBuffer(inputBufferIndex);
        if (inputBuffer != null) {
            inputBuffer.clear();
            inputBuffer.put(data);
        }
        Log.e(TAG, "video set pts......." + timeStamp / 1000 / 1000);
        // Use the capture timestamp (converted to microseconds) as the PTS, with no flags:
        // BUFFER_FLAG_KEY_FRAME describes encoded output and should not be set on raw input.
        mVideoEncodec.queueInputBuffer(inputBufferIndex, 0, data.length, timeStamp / 1000, 0);
    }
}
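One detail worth noting: the presentation timestamp passed to queueInputBuffer() is in microseconds and should increase monotonically. A common refinement (an assumption here, not part of the original code) is to make it relative to the first captured frame:

// Hypothetical helper: PTS in microseconds, relative to the first frame.
private long mStartNanoTime = -1;

private long toPresentationTimeUs(long captureNanoTime) {
    if (mStartNanoTime < 0) {
        mStartNanoTime = captureNanoTime;
    }
    return (captureNanoTime - mStartNanoTime) / 1000L;
}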

Draining the MediaCodec output:



// Drain the video encoder output
MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex;
do {
    outputBufferIndex = mVideoEncodec.dequeueOutputBuffer(mBufferInfo, TIMES_OUT);
    if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        Log.i(TAG, "INFO_TRY_AGAIN_LATER");
    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        synchronized (H264EncodeConsumer.this) {
            newFormat = mVideoEncodec.getOutputFormat();
            if (mMuxerRef != null) {
                MediaMuxerUtil muxer = mMuxerRef.get();
                if (muxer != null) {
                    muxer.addTrack(newFormat, true);
                }
            }
        }
        Log.i(TAG, "Encoder output format changed; adding the video track to the muxer");
    } else {
        ByteBuffer outputBuffer = mVideoEncodec.getOutputBuffer(outputBufferIndex);
        // NAL unit type, assuming a 4-byte Annex-B start code (00 00 00 01)
        int type = outputBuffer.get(4) & 0x1F;

        Log.d(TAG, "------got output data, NAL type---->" + type);
        if (type == 7 || type == 8) {
            // SPS/PPS (configuration, not picture data); the muxer already receives these
            // via the format from INFO_OUTPUT_FORMAT_CHANGED, so skip them here
            Log.d(TAG, "------SPS/PPS NAL unit, ignored-------");
            mBufferInfo.size = 0;
        } else if (type == 5) { // IDR (key) frame
            if (mMuxerRef != null) {
                MediaMuxerUtil muxer = mMuxerRef.get();
                if (muxer != null) {
                    Log.i(TAG, "------muxing video key frame-----" + mBufferInfo.presentationTimeUs / 1000);
                    muxer.pumpStream(outputBuffer, mBufferInfo, true);
                }
                isAddKeyFrame = true;
            }
        } else {
            // Only write non-key frames once at least one key frame has been muxed
            if (isAddKeyFrame && mMuxerRef != null) {
                MediaMuxerUtil muxer = mMuxerRef.get();
                if (muxer != null) {
                    Log.i(TAG, "------muxing video frame-----" + mBufferInfo.presentationTimeUs / 1000);
                    muxer.pumpStream(outputBuffer, mBufferInfo, true);
                }
            }
        }
        mVideoEncodec.releaseOutputBuffer(outputBufferIndex, false);
    }
} while (outputBufferIndex >= 0);

Audio Stream Encoding

Similar to the video path:

/**
 * Encodes PCM audio to AAC.
 */
public class AACEncodeConsumer extends Thread {
    private static final String TAG = "EncodeAudio";
    private static final String MIME_TYPE = "audio/mp4a-latm";
    private static final int TIMES_OUT = 10000;
    private static final int ACC_PROFILE = MediaCodecInfo.CodecProfileLevel.AACObjectLC;
    private static final int BUFFER_SIZE = 3584; //1600;
    private static final int AUDIO_BUFFER_SIZE = 1024;

    /** Default sample rate */
    public static final int DEFAULT_SAMPLE_RATE = 44100;

    /** One channel */
    public static final int CHANNEL_COUNT_MONO = 1;
    /** Two channels */
    public static final int CHANNEL_COUNT_STEREO = 2;
    /** Mono channel configuration */
    public static final int CHANNEL_IN_MONO = AudioFormat.CHANNEL_IN_MONO;
    /** Stereo channel configuration */
    public static final int CHANNEL_IN_STEREO = AudioFormat.CHANNEL_IN_STEREO;
    /** 16-bit sample precision */
    public static final int ENCODING_PCM_16BIT = AudioFormat.ENCODING_PCM_16BIT;
    /** 8-bit sample precision */
    public static final int ENCODING_PCM_8BIT = AudioFormat.ENCODING_PCM_8BIT;
    /** Audio source: microphone */
    public static final int SOURCE_MIC = MediaRecorder.AudioSource.MIC;



    // Encoder state
    private boolean isExit = false;
    private boolean isEncoderStarted = false;
    private WeakReference<MediaMuxerUtil> mMuxerRef;
    private EncoderParams mParams;
    private MediaCodec mAudioEncoder;
    private MediaFormat newFormat;
    private long prevPresentationTimes = 0;
    private long nanoTime = 0; //System.nanoTime();

    synchronized void setTmpuMuxer(MediaMuxerUtil mMuxer, EncoderParams mParams) {
        this.mMuxerRef = new WeakReference<>(mMuxer);
        this.mParams = mParams;

        MediaMuxerUtil muxer = mMuxerRef.get();
        if (muxer != null && newFormat != null) {
            muxer.addTrack(newFormat, false);
        }
    }



    static class RawData {
        byte[] buf;
        int readBytes;
        long timeStamp;

        RawData() {
            buf = new byte[BUFFER_SIZE];
        }

        void merge(ByteBuffer byteBuffer, int length) {
            System.arraycopy(byteBuffer.array(), byteBuffer.arrayOffset(), buf, readBytes, length);
            readBytes += length;
            timeStamp = System.nanoTime();
        }

        boolean canMerge(int length) {
            return readBytes + length < buf.length;
        }
    }

    private LinkedBlockingQueue<RawData> queue = new LinkedBlockingQueue<>();

    private RawData bigShip;

    // While the queue is still draining, accumulate incoming PCM chunks into bigShip so the
    // encoder consumes larger batches and can keep up.
    public void addData(ByteBuffer byteBuffer, int length) {
        if (bigShip == null) {
            bigShip = new RawData();
            bigShip.merge(byteBuffer, length);
            if (queue.isEmpty()) {
                queue.offer(bigShip);
                bigShip = null;
            }
        } else {
            if (bigShip.canMerge(length)) {
                bigShip.merge(byteBuffer, length);
            } else {
                // bigShip is full: hand it to the queue and start a new one with the incoming chunk
                queue.offer(bigShip);
                bigShip = new RawData();
                bigShip.merge(byteBuffer, length);
            }
        }
    }

    private RawData removeData() {
        return queue.poll();
    }



    @Override
    public void run() {
        startCodec();
        while (!isExit) {
            try {
                RawData data = removeData();
                if (data != null) {
                    Log.d("encode", "onWebRtcAudioRecording take data");
                    encoderBytes(data.buf, data.readBytes, data.timeStamp);
                }
                MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
                int outputBufferIndex;
                do {
                    outputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mBufferInfo, TIMES_OUT);
                    if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
//                        Log.i(TAG, "INFO_TRY_AGAIN_LATER");
                    } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        Log.i(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
                        synchronized (AACEncodeConsumer.this) {
                            newFormat = mAudioEncoder.getOutputFormat();
                            if (mMuxerRef != null) {
                                MediaMuxerUtil muxer = mMuxerRef.get();
                                if (muxer != null) {
                                    muxer.addTrack(newFormat, false);
                                }
                            }
                        }
                    } else {
                        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            mBufferInfo.size = 0;
                        }
                        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            Log.i(TAG, "End of stream, leaving the drain loop");
                            break;
                        }
                        ByteBuffer outputBuffer = mAudioEncoder.getOutputBuffer(outputBufferIndex);
                        if (mBufferInfo.size != 0) {
                            if (outputBuffer == null) {
                                throw new RuntimeException("encoderOutputBuffer " + outputBufferIndex + " was null");
                            }
                            if (mMuxerRef != null) {
                                MediaMuxerUtil muxer = mMuxerRef.get();
                                if (muxer != null) {
                                    Log.i(TAG, "------muxing audio data------------" + mBufferInfo.presentationTimeUs / 1000);
                                    muxer.pumpStream(outputBuffer, mBufferInfo, false);
                                }
                            }
                        }
                        mAudioEncoder.releaseOutputBuffer(outputBufferIndex, false);
                    }
                } while (outputBufferIndex >= 0);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        stopCodec();
    }



    @SuppressLint("NewApi")
    public void encoderBytes(byte[] audioBuf, int readBytes, long timeStamp) {
        int inputBufferIndex = mAudioEncoder.dequeueInputBuffer(TIMES_OUT);
        if (inputBufferIndex >= 0) {
            ByteBuffer inputBuffer = mAudioEncoder.getInputBuffer(inputBufferIndex);

            if (audioBuf == null || readBytes <= 0) {
                // No data: signal end of stream
                mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, 0, System.nanoTime() / 1000, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            } else {
                if (inputBuffer != null) {
                    inputBuffer.clear();
                    inputBuffer.put(audioBuf);
                }
                Log.e(TAG, "audio set pts-------" + timeStamp / 1000 / 1000);
                mAudioEncoder.queueInputBuffer(inputBufferIndex, 0, readBytes, System.nanoTime() / 1000, 0);
            }
        }
    }



    private void startCodec() {
        MediaCodecInfo mCodecInfo = selectSupportCodec(MIME_TYPE);
        if (mCodecInfo == null) {
            return;
        }
        try {
            mAudioEncoder = MediaCodec.createByCodecName(mCodecInfo.getName());
            MediaFormat mediaFormat = new MediaFormat();
            mediaFormat.setString(MediaFormat.KEY_MIME, MIME_TYPE);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mParams.getAudioBitrate());
            mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, mParams.getAudioSampleRate());
            mediaFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, ACC_PROFILE);
            mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, mParams.getAudioChannelCount());
            mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, BUFFER_SIZE);
            if (mAudioEncoder != null) {
                mAudioEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                mAudioEncoder.start();
                isEncoderStarted = true;
            }
        } catch (Exception e) {
            Log.e(TAG, "startCodec" + e.getMessage());
            e.printStackTrace();
        }
    }



    private void stopCodec() {
        try {
            if (mAudioEncoder != null) {
                mAudioEncoder.stop();
                mAudioEncoder.release();
                mAudioEncoder = null;
            }
        } catch (Exception e) {
            // Swallow exceptions thrown by stop()/release()
        }
        isEncoderStarted = false;
    }

    public void exit() {
        isExit = true;
    }



    /**
     * Iterates over all codecs and returns the first encoder matching the given MIME type,
     * i.e. checks whether an encoder for that MIME type is available.
     */
    private MediaCodecInfo selectSupportCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }
}
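The class above only consumes PCM through addData(); the capture side is not shown in the original post. A minimal sketch of feeding it from AudioRecord (the isRecording flag, buffer size, and aacEncodeConsumer reference are assumptions):

// Hypothetical PCM capture loop feeding AACEncodeConsumer.addData().
int minBuf = AudioRecord.getMinBufferSize(
        AACEncodeConsumer.DEFAULT_SAMPLE_RATE,
        AACEncodeConsumer.CHANNEL_IN_MONO,
        AACEncodeConsumer.ENCODING_PCM_16BIT);

AudioRecord record = new AudioRecord(
        AACEncodeConsumer.SOURCE_MIC,
        AACEncodeConsumer.DEFAULT_SAMPLE_RATE,
        AACEncodeConsumer.CHANNEL_IN_MONO,
        AACEncodeConsumer.ENCODING_PCM_16BIT,
        minBuf);

record.startRecording();
ByteBuffer pcm = ByteBuffer.allocate(1024); // array-backed buffer, as addData() expects
while (isRecording) {
    pcm.clear();
    int read = record.read(pcm.array(), 0, pcm.capacity());
    if (read > 0) {
        aacEncodeConsumer.addData(pcm, read);
    }
}
record.stop();
record.release();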

MediaMuxer Muxing

MediaMuxer wraps the encoded video and audio streams into an MP4 container. Video is typically encoded as H.264 (AVC) and audio as AAC; MediaFormat lists the various MIME types. This example uses:

private static final String MIME_TYPE = "video/avc";

private static final String MIME_TYPE = "audio/mp4a-latm";

Specify the output path and container format:

mMuxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
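MediaMuxerUtil (the addTrack / pumpStream helper used above) is not included in the original post. The sketch below shows the pattern it is assumed to wrap: start the muxer only after both tracks have been added, then forward encoded buffers with writeSampleData():

// Hypothetical wrapper around MediaMuxer, matching the addTrack/pumpStream calls above.
public class MediaMuxerUtil {
    private final MediaMuxer mMuxer;
    private int mVideoTrack = -1, mAudioTrack = -1;
    private boolean mStarted;

    public MediaMuxerUtil(String path) throws IOException {
        mMuxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    }

    public synchronized void addTrack(MediaFormat format, boolean isVideo) {
        if (mStarted) return;
        if (isVideo) {
            mVideoTrack = mMuxer.addTrack(format);
        } else {
            mAudioTrack = mMuxer.addTrack(format);
        }
        if (mVideoTrack >= 0 && mAudioTrack >= 0) { // both tracks ready
            mMuxer.start();
            mStarted = true;
        }
    }

    public synchronized void pumpStream(ByteBuffer buffer, MediaCodec.BufferInfo info, boolean isVideo) {
        if (!mStarted || info.size == 0) return;
        buffer.position(info.offset);
        buffer.limit(info.offset + info.size);
        mMuxer.writeSampleData(isVideo ? mVideoTrack : mAudioTrack, buffer, info);
    }

    public synchronized void release() {
        if (mStarted) {
            mMuxer.stop();
        }
        mMuxer.release();
    }
}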


Pitfalls encountered:

1. 1440x1080 frames fail to encode

Byte alignment: copy the data into a new buffer with the stride padding removed (a sketch follows this list).

2. Green edge at 1440x1080

The UV data comes up one byte short; the last byte has to be filled in manually.

3. Blurry picture

Set the bit rate to match the system camera's motion photos: 20000000.
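A minimal sketch of stripping the row padding from pitfall 1, assuming a semi-planar layout as in the helper above (not taken from the original code):

// Copies Y and interleaved UV row by row, dropping rowStride padding.
private static byte[] stripRowPadding(Image image, int width, int height) {
    Image.Plane yPlane = image.getPlanes()[0];
    Image.Plane uvPlane = image.getPlanes()[1];
    byte[] out = new byte[width * height * 3 / 2];
    int pos = 0;

    ByteBuffer yBuf = yPlane.getBuffer();
    int yStride = yPlane.getRowStride();
    for (int row = 0; row < height; row++) {
        yBuf.position(row * yStride);
        yBuf.get(out, pos, width);
        pos += width;
    }

    ByteBuffer uvBuf = uvPlane.getBuffer();
    int uvStride = uvPlane.getRowStride();
    for (int row = 0; row < height / 2; row++) {
        uvBuf.position(row * uvStride);
        int len = Math.min(width, uvBuf.remaining()); // last row may be one byte short (pitfall 2)
        uvBuf.get(out, pos, len);
        pos += width;
    }
    return out;
}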

To be continued...