通过camerax给视频添加水印

2,699 阅读9分钟

实现效果:

image.png

通过camerax实现是因为camerax中已经实现了很多的功能,可以不用写视频编码,音视频融合和视频文件存储。网上实现的方法各种各样,很难找到合适并且好用的。

camerax提供的功能有:

1、imageAnalysis能够获取到每帧的数据

2、videoCapture能够进行视频录制

实现的思路:

1、通过imageAnalysis获取每帧的argb数据,argb数据提供给bitmap,通过bitmap的canvas添加水印,最后把bitmap中的argb数据转换为yuv数据。

2、因为camerax的videocapture有视频编码,音视频融合和视频存储的功能,所以我们只需对videocapture进行简单的修改来实现视频录制工作,将其中数据的输入源从源码的surface改为每帧的yuv数据。

优化方向:

通过每帧修改bitmap的方式添加水印,可能性能上没那么好,但当前视频质量要求不高,所以没有太大的问题。 如果水印只是固定的几个字体,可以先将字体转换为yuv,添加到每帧的yuv上。

可以考虑的方式是通过surfaceView,在surfaceView上添加字体,将surfaceView的数据作为视频源。

代码实现:

1、引用Camerax

// CameraX artifacts — all use-case libraries must share the same version.
def camerax_version = '1.2.0-alpha01'
implementation "androidx.camera:camera-core:$camerax_version"
implementation "androidx.camera:camera-camera2:$camerax_version"
implementation "androidx.camera:camera-lifecycle:$camerax_version"
implementation "androidx.camera:camera-view:$camerax_version"
implementation "androidx.camera:camera-extensions:$camerax_version"

// Provides CallbackToFutureAdapter, used by VideoRecorder's recording future.
implementation "androidx.concurrent:concurrent-futures:1.1.0"

2、添加布局activity_main

<?xml version="1.0" encoding="utf-8"?>
<!-- Main screen: full-screen camera preview with a start/stop record button at the bottom. -->
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".MainActivity">

    <!-- NOTE(review): leftover template view, fully covered by the PreviewView below —
         consider removing it. -->
    <TextView
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="Hello World!"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintLeft_toLeftOf="parent"
        app:layout_constraintRight_toRightOf="parent"
        app:layout_constraintTop_toTopOf="parent" />

    <!-- CameraX preview surface; fillCenter crops to fill while keeping aspect ratio. -->
    <androidx.camera.view.PreviewView
        android:id="@+id/previewView"
        android:layout_width="0dp"
        android:layout_height="0dp"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintStart_toStartOf="parent"
        app:layout_constraintTop_toTopOf="parent"
        app:scaleType="fillCenter" />

    <!-- Start/stop toggle; its label is switched in MainActivity's click listener. -->
    <TextView
        android:id="@+id/beginTextView"
        android:layout_width="70dp"
        android:layout_height="28dp"
        android:layout_marginBottom="20dp"
        android:gravity="center"
        android:text="开始"
        android:textColor="@color/white"
        android:textSize="12sp"
        android:textStyle="bold"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintStart_toStartOf="parent" />

</androidx.constraintlayout.widget.ConstraintLayout>

3、MainActivity

package com.cloudly.test;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.ContentValues;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Rect;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.provider.MediaStore;
import android.util.Size;
import android.view.Window;
import android.view.WindowManager;

import androidx.activity.result.ActivityResultLauncher;
import androidx.activity.result.contract.ActivityResultContracts;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.core.content.ContextCompat;

import com.cloudly.test.Utils.Utils;
import com.cloudly.test.camera.VideoRecorder;
import com.cloudly.test.databinding.ActivityMainBinding;
import com.google.common.util.concurrent.ListenableFuture;

import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

/**
 * Demo activity: shows a CameraX preview, analyzes each frame, draws a timestamp
 * watermark onto it, converts the pixels to YUV and feeds them to {@link VideoRecorder}.
 */
public class MainActivity extends AppCompatActivity {

    private ActivityMainBinding binding;
    private ListenableFuture<ProcessCameraProvider> cameraProviderFuture;
    private ExecutorService cameraExecutor;
    private Preview preview;

    private ImageAnalysis imageAnalysis;
    private VideoRecorder videoRecorder;

    // Reused for every analyzed frame; the watermark is drawn onto this bitmap.
    private Bitmap videoBitmap;

    private ProcessCameraProvider cameraProvider;
    private CameraSelector cameraSelector;

    // Output video size; must match the format configured inside VideoRecorder.
    protected int videoWidth = 640;
    protected int videoHeight = 480;

    private boolean isVideoRecording = false;


    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
        supportRequestWindowFeature(Window.FEATURE_NO_TITLE);
        binding = ActivityMainBinding.inflate(getLayoutInflater());
        setContentView(binding.getRoot());

        // Create the recorder exactly once. (The original code created a second instance
        // in initCamera(), leaking the first one's handler threads and MediaCodecs.)
        videoRecorder = new VideoRecorder();
        videoRecorder.onCreate();

        initPermissions();
        initCamera();

        binding.beginTextView.setOnClickListener(view -> {
            if (isVideoRecording) {
                stopRecording();
                binding.beginTextView.setText("开始");
            } else {
                startRecording();
                binding.beginTextView.setText("停止");
            }
        });
    }

    /**
     * Requests the camera and microphone runtime permissions.
     * Denials are only logged; the preview/recording will simply not work without them.
     */
    public void initPermissions() {
        ActivityResultLauncher<String[]> permissionLauncher = registerForActivityResult(
                new ActivityResultContracts.RequestMultiplePermissions(),
                result -> {
                    Object cameraResult = result.get(Manifest.permission.CAMERA);
                    if (cameraResult == null || !cameraResult.equals(true)) {
                        Utils.log("Camera permission denied; preview and recording unavailable.");
                    }

                    Object audioResult = result.get(Manifest.permission.RECORD_AUDIO);
                    if (audioResult == null || !audioResult.equals(true)) {
                        Utils.log("Audio permission denied; recordings will have no sound.");
                    }
                });

        permissionLauncher.launch(new String[]{Manifest.permission.RECORD_AUDIO, Manifest.permission.CAMERA});
    }


    /**
     * Initializes the camera: obtains the process camera provider, then binds the preview
     * and image-analysis use cases to this activity's lifecycle on the main executor.
     */
    private void initCamera() {
        cameraProviderFuture = ProcessCameraProvider.getInstance(this);
        cameraExecutor = Executors.newSingleThreadExecutor();
        cameraProviderFuture.addListener(() -> {
            try {
                cameraProvider = cameraProviderFuture.get();
                int rotation = getWindowManager().getDefaultDisplay().getRotation();
                preview = new Preview.Builder().setTargetRotation(rotation).build();
                cameraSelector = new CameraSelector.Builder()
                        .requireLensFacing(CameraSelector.LENS_FACING_BACK)
                        .build();
                preview.setSurfaceProvider(binding.previewView.getSurfaceProvider());
                initImageAnalysis();
                cameraProvider.unbindAll();
                cameraProvider.bindToLifecycle(this, cameraSelector, preview, imageAnalysis);
            } catch (ExecutionException e) {
                e.printStackTrace();
            } catch (InterruptedException e) {
                // Restore the interrupt flag so other code can observe the interruption.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
        }, ContextCompat.getMainExecutor(this));
    }


    /**
     * Sets up the ImageAnalysis use case. While a recording is active, each RGBA frame is
     * copied into a reusable bitmap, the current-time watermark is drawn onto it, and the
     * pixels are converted to YUV and pushed into the recorder via its data callback.
     */
    private void initImageAnalysis() {

        videoBitmap = Bitmap.createBitmap(videoWidth, videoHeight, Bitmap.Config.ARGB_8888);

        // Reusable buffers to avoid per-frame allocations.
        int[] argb = new int[videoWidth * videoHeight];
        byte[] yuv = new byte[videoWidth * videoHeight * 3 / 2];

        String waterMark = Utils.getCurrentTimeString();
        Canvas canvas = new Canvas(videoBitmap);
        Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
        paint.setColor(getColor(R.color.white));
        paint.setAntiAlias(true);
        paint.setTextSize(18);
        Rect bounds = new Rect();
        paint.getTextBounds(waterMark, 0, waterMark.length(), bounds);
        // Watermark position in pixels from the bitmap's top-left corner.
        int x = 10;
        int y = 30;

        imageAnalysis =
                new ImageAnalysis.Builder()
                        .setOutputImageFormat(ImageAnalysis.OUTPUT_IMAGE_FORMAT_RGBA_8888)
                        .setTargetRotation(getWindowManager().getDefaultDisplay().getRotation())
                        .setOutputImageRotationEnabled(true)
                        .setTargetResolution(new Size(videoWidth, videoHeight))
                        .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                        .build();

        imageAnalysis.setAnalyzer(cameraExecutor, imageProxy -> {
            if (isVideoRecording) {
                // Load the RGBA pixels into the bitmap.
                videoBitmap.copyPixelsFromBuffer(imageProxy.getPlanes()[0].getBuffer());
                // Draw the timestamp watermark on top.
                canvas.drawText(Utils.getCurrentTimeString(), x, y, paint);
                // Extract ARGB ints, convert to YUV, and hand the frame to the encoder.
                videoBitmap.getPixels(argb, 0, videoWidth, 0, 0, videoWidth, videoHeight);
                Utils.convertArgb2Yuv(yuv, argb, videoWidth, videoHeight);
                videoRecorder.getCameraDataCallback().onData(yuv);
            }
            imageProxy.close();
        });
    }


    /**
     * Starts a recording session whose result is saved into MediaStore.
     * The actual start is delayed slightly so the frame analyzer produces data first.
     */
    protected void startRecording() {
        isVideoRecording = true;
        String name = System.currentTimeMillis() + ".mp4";
        ContentValues contentValues = new ContentValues();
        contentValues.put(MediaStore.Video.Media.DISPLAY_NAME, name);
        VideoRecorder.OutputFileOptions mediaStoreOutput = new VideoRecorder.OutputFileOptions.Builder(getContentResolver(),
                MediaStore.Video.Media.EXTERNAL_CONTENT_URI, contentValues)
                .build();

        // Delay so the analyzer has produced at least one frame before the encoder starts.
        // Posting via a view runs on the main thread without the deprecated parameterless
        // Handler constructor.
        binding.getRoot().postDelayed(() -> videoRecorder.startRecording(mediaStoreOutput, cameraExecutor, new VideoRecorder.OnVideoSavedCallback() {
            @Override
            public void onVideoSaved(@NonNull VideoRecorder.OutputFileResults outputFileResults) {
                isVideoRecording = false;
                Utils.log("录屏成功了");
                runOnUiThread(() -> {
                    Uri uri = outputFileResults.getSavedUri();
                    if (uri != null) {
                        Utils.log("成功存储视频 " + uri.getPath());
                    }
                });
            }

            @Override
            public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
                isVideoRecording = false;
                Utils.log("录屏失败了 " + message);
            }
        }), 500);
    }

    /** Stops the in-progress recording. */
    protected void stopRecording() {
        videoRecorder.stopRecording();
    }

    @Override
    @SuppressLint("RestrictedApi")
    protected void onDestroy() {
        super.onDestroy();
        // Tear down on the camera executor so in-flight analyzer work completes first.
        cameraExecutor.execute(() -> {
            videoRecorder.onDestroy();
            cameraProvider.shutdown();
            cameraExecutor.shutdown();
        });
    }
}

4、根据VideoCapture修改的VideoRecorder

package com.cloudly.test.camera;

import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
import static android.media.MediaFormat.MIMETYPE_VIDEO_AVC;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.location.Location;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaRecorder.AudioSource;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.ParcelFileDescriptor;

import androidx.annotation.DoNotInline;
import androidx.annotation.IntDef;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RequiresPermission;
import androidx.annotation.RestrictTo;
import androidx.annotation.RestrictTo.Scope;
import androidx.camera.core.CameraXThreads;
import androidx.camera.core.Logger;
import androidx.camera.core.impl.utils.executor.CameraXExecutors;
import androidx.camera.core.internal.utils.VideoUtil;
import androidx.concurrent.futures.CallbackToFutureAdapter;
import androidx.concurrent.futures.CallbackToFutureAdapter.Completer;
import androidx.core.util.Preconditions;

import com.cloudly.test.Utils.Utils;
import com.google.common.util.concurrent.ListenableFuture;

import java.io.File;
import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.Executor;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

@SuppressLint("RestrictedApi")
@RestrictTo(Scope.LIBRARY_GROUP)
public final class VideoRecorder {

    // Error codes reported through OnVideoSavedCallback#onError.
    public static final int ERROR_UNKNOWN = 0;
    public static final int ERROR_ENCODER = 1;
    public static final int ERROR_MUXER = 2;
    public static final int ERROR_RECORDING_IN_PROGRESS = 3;
    public static final int ERROR_FILE_IO = 4;
    public static final int ERROR_INVALID_CAMERA = 5;
    public static final int ERROR_RECORDING_TOO_SHORT = 6;

    private static final String TAG = "VideoRecorder";

    // MIME types: H.264 video track and AAC (mp4a-latm) audio track.
    private static final String VIDEO_MIME_TYPE = "video/avc";
    private static final String AUDIO_MIME_TYPE = "audio/mp4a-latm";


    private final BufferInfo mVideoBufferInfo = new BufferInfo();
    // Guards all MediaMuxer access (addTrack/start/writeSampleData/stop/release).
    private final Object mMuxerLock = new Object();
    // End-of-stream handshakes between the control thread and the audio/video loops.
    // All start "true", meaning "no recording in progress".
    private final AtomicBoolean mEndOfVideoStreamSignal = new AtomicBoolean(true);
    private final AtomicBoolean mEndOfAudioStreamSignal = new AtomicBoolean(true);
    private final AtomicBoolean mEndOfAudioVideoSignal = new AtomicBoolean(true);
    private final BufferInfo mAudioBufferInfo = new BufferInfo();

    // Whether the first video key frame / first audio sample has been muxed yet.
    public final AtomicBoolean mIsFirstVideoKeyFrameWrite = new AtomicBoolean(false);
    public final AtomicBoolean mIsFirstAudioSampleWrite = new AtomicBoolean(false);

    // Dedicated threads running the video and audio encoding loops.
    private HandlerThread mVideoHandlerThread;
    private Handler mVideoHandler;

    private HandlerThread mAudioHandlerThread;
    private Handler mAudioHandler;

    MediaCodec mVideoEncoder;

    private MediaCodec mAudioEncoder;

    // Completes when the current recording session has fully shut down.
    private ListenableFuture<Void> mRecordingFuture = null;


    private MediaMuxer mMuxer;
    private final AtomicBoolean mMuxerStarted = new AtomicBoolean(false);

    // Track indices returned by MediaMuxer#addTrack; -1 until tracks are added.
    private int mVideoTrackIndex;
    private int mAudioTrackIndex;

    private volatile AudioRecord mAudioRecorder;
    private volatile int mAudioBufferSize;
    private volatile boolean mIsRecording = false;
    // Mono audio sampled at 8 kHz.
    private final int mAudioChannelCount = 1;
    private final int mAudioSampleRate = 8000;

    private static final int frameRate = 15;

    volatile Uri mSavedVideoUri;
    private volatile ParcelFileDescriptor mParcelFileDescriptor;
    // Cleared when the AudioRecord cannot be initialized or fails to start.
    private final AtomicBoolean mIsAudioEnabled = new AtomicBoolean(true);

    private VideoEncoderInitStatus mVideoEncoderInitStatus = VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_UNINITIALIZED;

    // Last exception thrown while configuring the video encoder; reported on startRecording.
    private Throwable mVideoEncoderErrorMessage;


    /**
     * Builds the MediaFormat used to configure the H.264 video encoder:
     * 640x480 frames, flexible YUV420 input, 600 kbit/s, 15 fps, one key frame per second.
     */
    private static MediaFormat createVideoMediaFormat() {
        final int width = 640;
        final int height = 480;
        final int bitRate = 600 * 1024;
        MediaFormat format = MediaFormat.createVideoFormat(MIMETYPE_VIDEO_AVC, width, height);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);
        format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        return format;
    }

    /**
     * One-time initialization: starts the video/audio encoding threads, creates both
     * MediaCodec encoders, then configures them via {@link #setupEncoder()}.
     *
     * @throws IllegalStateException if either MediaCodec cannot be created
     */
    public void onCreate() {
        mVideoHandlerThread = new HandlerThread(CameraXThreads.TAG + "video encoding thread");
        mAudioHandlerThread = new HandlerThread(CameraXThreads.TAG + "audio encoding thread");

        mVideoHandlerThread.start();
        mVideoHandler = new Handler(mVideoHandlerThread.getLooper());

        mAudioHandlerThread.start();
        mAudioHandler = new Handler(mAudioHandlerThread.getLooper());

        try {
            mVideoEncoder = MediaCodec.createEncoderByType(VIDEO_MIME_TYPE);
            mAudioEncoder = MediaCodec.createEncoderByType(AUDIO_MIME_TYPE);
        } catch (IOException e) {
            // Chain the original exception so its stack trace is not discarded.
            throw new IllegalStateException("Unable to create MediaCodec due to: " + e.getCause(), e);
        }
        setupEncoder();
    }

    /**
     * Starts a recording session: validates the encoder state, starts the AudioRecord and
     * both MediaCodec encoders, opens the MediaMuxer for the requested output, then launches
     * the audio and video encoding loops on their dedicated handler threads.
     */

    // Wall-clock base timestamp in microseconds, captured when the video encoder starts.
    private long mPresentationTimeUs;

    public void startRecording(OutputFileOptions outputFileOptions, Executor executor, OnVideoSavedCallback callback) {
        // Always run on the main thread; re-post if called from elsewhere.
        if (Looper.getMainLooper() != Looper.myLooper()) {
            CameraXExecutors.mainThreadExecutor().execute(() -> startRecording(outputFileOptions, executor, callback));
            return;
        }
        Utils.log("startRecording");
        mIsFirstVideoKeyFrameWrite.set(false);
        mIsFirstAudioSampleWrite.set(false);

        // Wrap the callback so it is always delivered on the caller-supplied executor.
        OnVideoSavedCallback postListener = new VideoSavedListenerWrapper(executor, callback);

        // Abort early if the video encoder failed to initialize.
        if (mVideoEncoderInitStatus
                == VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INSUFFICIENT_RESOURCE
                || mVideoEncoderInitStatus
                == VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INITIALIZED_FAILED
                || mVideoEncoderInitStatus
                == VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_RESOURCE_RECLAIMED) {
            postListener.onError(ERROR_ENCODER, "Video encoder initialization failed before start"
                    + " recording ", mVideoEncoderErrorMessage);
            return;
        }

        // Refuse to start while a previous recording is still winding down.
        if (!mEndOfAudioVideoSignal.get()) {
            postListener.onError(ERROR_RECORDING_IN_PROGRESS, "It is still in video recording!", null);
            return;
        }

        // Start audio capture; on failure fall back to video-only recording.
        if (mIsAudioEnabled.get()) {
            try {
                if (mAudioRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
                    mAudioRecorder.startRecording();
                }
            } catch (IllegalStateException e) {
                Utils.log("AudioRecorder cannot start recording, disable audio." + e.getMessage());
                mIsAudioEnabled.set(false);
                releaseAudioInputResource();
            }

            // Re-check the enabled flag: the catch block above may have disabled audio and
            // released mAudioRecorder (setting it to null), which would otherwise NPE here.
            if (mIsAudioEnabled.get()
                    && mAudioRecorder.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
                Utils.log("AudioRecorder startRecording failed - incorrect state: " + mAudioRecorder.getRecordingState());
                mIsAudioEnabled.set(false);
                releaseAudioInputResource();
            }
        }

        AtomicReference<Completer<Void>> recordingCompleterRef = new AtomicReference<>();
        mRecordingFuture = CallbackToFutureAdapter.getFuture(
                completer -> {
                    recordingCompleterRef.set(completer);
                    return "startRecording";
                });
        Completer<Void> recordingCompleter =
                Preconditions.checkNotNull(recordingCompleterRef.get());

        // When this session fully completes, re-arm the encoders for the next one.
        mRecordingFuture.addListener(() -> {
            mRecordingFuture = null;
            setupEncoder();
        }, CameraXExecutors.mainThreadExecutor());

        try {
            Utils.log("videoEncoder start");
            mPresentationTimeUs = System.currentTimeMillis() * 1000;
            mVideoEncoder.start();

            if (mIsAudioEnabled.get()) {
                Utils.log("audioEncoder start");
                mAudioEncoder.start();
            }
        } catch (IllegalStateException e) {
            recordingCompleter.set(null);
            postListener.onError(ERROR_ENCODER, "Audio/Video encoder start fail", e);
            return;
        }

        try {
            synchronized (mMuxerLock) {
                mMuxer = initMediaMuxer(outputFileOptions);
                Preconditions.checkNotNull(mMuxer);
                mMuxer.setOrientationHint(0);

                Metadata metadata = outputFileOptions.getMetadata();
                if (metadata != null && metadata.location != null) {
                    mMuxer.setLocation(
                            (float) metadata.location.getLatitude(),
                            (float) metadata.location.getLongitude());
                }
            }
        } catch (IOException e) {
            recordingCompleter.set(null);
            postListener.onError(ERROR_MUXER, "MediaMuxer creation failed!", e);
            return;
        }

        mEndOfVideoStreamSignal.set(false);
        mEndOfAudioStreamSignal.set(false);
        mEndOfAudioVideoSignal.set(false);
        mIsRecording = true;

        // Launch the encoding loops on their dedicated handler threads.
        if (mIsAudioEnabled.get()) {
            mAudioHandler.post(() -> audioEncode(postListener));
        }

        mVideoHandler.post(() -> {
            boolean errorOccurred = videoEncode(postListener, outputFileOptions);
            if (!errorOccurred) {
                postListener.onVideoSaved(new OutputFileResults(mSavedVideoUri));
                mSavedVideoUri = null;
            }
            recordingCompleter.set(null);
        });
    }

    /** Callback used by the frame analyzer to push watermarked YUV frames into the encoder. */
    public interface CameraDataCallback {
        void onData(byte[] bytes);
    }

    // Latest YUV frame delivered by the analyzer. Written on the analyzer thread and read
    // on the video-encoding thread, so it must be volatile for cross-thread visibility
    // (the original non-volatile field could let the encoder loop see a stale/null frame).
    private volatile byte[] bytes;

    private final CameraDataCallback cameraDataCallback = new CameraDataCallback() {
        @Override
        public void onData(byte[] b) {
            bytes = b;
        }
    };

    /** Returns the sink the analyzer should feed converted YUV frames into. */
    public CameraDataCallback getCameraDataCallback() {
        return cameraDataCallback;
    }

    /**
     * Maps a frame index to a presentation timestamp in microseconds at the fixed
     * {@code frameRate} (the 132 offset mirrors the classic EncodeDecodeTest convention).
     */
    private long computePresentationTime(long frameIndex) {
        return 132 + frameIndex * 1000000 / frameRate;
    }


    /**
     * Video encoding loop, run on the video handler thread. Repeatedly feeds the latest
     * watermarked YUV frame into the encoder and drains encoded output into the muxer;
     * on exit it stops the encoder, stops/releases the muxer, closes the output descriptor
     * and signals that the whole recording session has ended.
     *
     * @return true if an error occurred and was reported via the callback
     */
    boolean videoEncode(OnVideoSavedCallback videoSavedCallback, OutputFileOptions outputFileOptions) {
        boolean errorOccurred = false;
        boolean videoEos = false;

        long generateIndex = 0;
        long pts = 0;

        while (!mEndOfVideoStreamSignal.get()) {

            // Feed the most recent frame delivered by the analyzer, if any.
            int inputBufferId = mVideoEncoder.dequeueInputBuffer(10000);

            if (inputBufferId >= 0) {
                ByteBuffer inputBuffer = mVideoEncoder.getInputBuffer(inputBufferId);
                // NOTE(review): when bytes is still null the dequeued input buffer is never
                // queued back to the codec — confirm this cannot starve the encoder.
                if(bytes != null){
                    inputBuffer.clear();
                    inputBuffer.put(bytes);
                    // Synthetic timestamps spaced at the fixed frame rate.
                    pts = computePresentationTime(generateIndex);
                    mVideoEncoder.queueInputBuffer(inputBufferId, 0, bytes.length, pts, 0);
                    generateIndex++;
                }
            }

            int outputBufferId = mVideoEncoder.dequeueOutputBuffer(mVideoBufferInfo, 10000);

            switch (outputBufferId) {
                case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                    // The format change carries the real track format; add it to the muxer
                    // and start muxing once every enabled track has been registered.
                    if (mMuxerStarted.get()) {
                        videoSavedCallback.onError(ERROR_ENCODER, "Unexpected change in video encoding format.", null);
                        errorOccurred = true;
                    }
                    synchronized (mMuxerLock) {
                        mVideoTrackIndex = mMuxer.addTrack(mVideoEncoder.getOutputFormat());
                        if ((mIsAudioEnabled.get() && mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0) ||
                                (!mIsAudioEnabled.get() && mVideoTrackIndex >= 0)) {
                            Utils.log("MediaMuxer started on video encode thread and audio enabled: " + mIsAudioEnabled);
                            mMuxer.start();
                            mMuxerStarted.set(true);
                        }
                    }
                    break;
                case MediaCodec.INFO_TRY_AGAIN_LATER:
                case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                    break;
                default:
                    videoEos = writeVideoEncodedBuffer(outputBufferId);
                    // Throttle the loop to roughly the target frame rate.
                    try {
                        Thread.sleep(1000/frameRate);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
            }
        }

        try {
            Utils.log("videoEncoder stop");
            mVideoEncoder.stop();
        } catch (IllegalStateException e) {
            videoSavedCallback.onError(ERROR_ENCODER, "Video encoder stop failed!", e);
            errorOccurred = true;
        }

        try {
            synchronized (mMuxerLock) {
                if (mMuxer != null) {
                    if (mMuxerStarted.get()) {
                        Utils.log("Muxer already started");
                        mMuxer.stop();
                    }
                    mMuxer.release();
                    mMuxer = null;
                }
            }

            // Discard the output file if no key frame was ever written (recording too short).
            boolean checkResult = removeRecordingResultIfNoVideoKeyFrameArrived(outputFileOptions);

            if (!checkResult) {
                videoSavedCallback.onError(ERROR_RECORDING_TOO_SHORT, "The file has no video key frame.", null);
                errorOccurred = true;
            }
        } catch (IllegalStateException e) {
            // Muxer.stop() throws when nothing valid was written; distinguish the two cases.
            Utils.log("muxer stop IllegalStateException: " + System.currentTimeMillis());
            Utils.log("muxer stop exception, mIsFirstVideoKeyFrameWrite: " + mIsFirstVideoKeyFrameWrite.get());
            if (mIsFirstVideoKeyFrameWrite.get()) {
                videoSavedCallback.onError(ERROR_MUXER, "Muxer stop failed!", e);
            } else {
                videoSavedCallback.onError(ERROR_RECORDING_TOO_SHORT, "The file has no video key frame.", null);
            }
            errorOccurred = true;
        }

        if (mParcelFileDescriptor != null) {
            try {
                mParcelFileDescriptor.close();
                mParcelFileDescriptor = null;
            } catch (IOException e) {
                videoSavedCallback.onError(ERROR_MUXER, "File descriptor close failed!", e);
                errorOccurred = true;
            }
        }

        mMuxerStarted.set(false);

        // Signal that the whole audio+video pipeline has finished.
        mEndOfAudioVideoSignal.set(true);
        mIsFirstVideoKeyFrameWrite.set(false);

        Utils.log("Video encode thread end.");
        return errorOccurred;
    }

    /**
     * Stops an in-progress recording. Always hops to the main thread first, mirroring
     * startRecording(). With audio enabled the audio loop is signalled first (it then
     * signals the video loop); otherwise the video loop is signalled directly.
     */
    public void stopRecording() {
        if (Looper.getMainLooper() != Looper.myLooper()) {
            CameraXExecutors.mainThreadExecutor().execute(this::stopRecording);
            return;
        }

        if (!mIsRecording) {
            return;
        }

        if (mIsAudioEnabled.get()) {
            mEndOfAudioStreamSignal.set(true);
        } else {
            mEndOfVideoStreamSignal.set(true);
        }
    }


    /**
     * Releases every resource owned by the recorder. If a recording session is still
     * winding down, release is deferred until its future completes on the main thread.
     */
    public void onDestroy() {
        stopRecording();
        ListenableFuture<Void> pending = mRecordingFuture;
        if (pending == null) {
            releaseResources();
        } else {
            pending.addListener(this::releaseResources, CameraXExecutors.mainThreadExecutor());
        }
    }

    // Shuts down the video encoding thread, then tears down all audio input resources.
    private void releaseResources() {
        mVideoHandlerThread.quitSafely();
        releaseAudioInputResource();
    }

    /**
     * Stops the audio encoding thread and releases the audio encoder and AudioRecord,
     * nulling the fields so later checks treat audio as unavailable.
     */
    private void releaseAudioInputResource() {
        mAudioHandlerThread.quitSafely();

        MediaCodec encoder = mAudioEncoder;
        if (encoder != null) {
            encoder.release();
            mAudioEncoder = null;
        }

        AudioRecord recorder = mAudioRecorder;
        if (recorder != null) {
            recorder.release();
            mAudioRecorder = null;
        }
    }


    /**
     * (Re)configures both encoders for a fresh recording session: resets and configures the
     * video encoder (recording init-status/error fields on failure), then the audio encoder
     * and a new AudioRecord, and finally resets the track indices and recording flag.
     * Called once from onCreate() and again after each recording completes.
     */
    @SuppressLint("MissingPermission")
    void setupEncoder() {
        Utils.log("setup encoder");
        mVideoEncoder.reset();
        mVideoEncoderInitStatus = VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_UNINITIALIZED;
        try {
            mVideoEncoder.configure(createVideoMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        } catch (MediaCodec.CodecException e) {
            // Classify the codec failure so startRecording can report a precise error.
            int errorCode;
            errorCode = Api23Impl.getCodecExceptionErrorCode(e);
            String diagnosticInfo = e.getDiagnosticInfo();
            if (errorCode == MediaCodec.CodecException.ERROR_INSUFFICIENT_RESOURCE) {
                Utils.log("CodecException: code: " + errorCode + " diagnostic: " + diagnosticInfo);
                mVideoEncoderInitStatus = VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INSUFFICIENT_RESOURCE;
            } else if (errorCode == MediaCodec.CodecException.ERROR_RECLAIMED) {
                Utils.log("CodecException: code: " + errorCode + " diagnostic: " + diagnosticInfo);
                mVideoEncoderInitStatus = VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_RESOURCE_RECLAIMED;
            }
            mVideoEncoderErrorMessage = e;
            return;
        } catch (IllegalArgumentException | IllegalStateException e) {
            mVideoEncoderInitStatus = VideoEncoderInitStatus.VIDEO_ENCODER_INIT_STATUS_INITIALIZED_FAILED;
            mVideoEncoderErrorMessage = e;
            return;
        }

        // Audio is re-enabled optimistically; disabled below if the AudioRecord fails.
        mIsAudioEnabled.set(true);
        mAudioEncoder.reset();
        mAudioEncoder.configure(createAudioMediaFormat(), null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

        if (mAudioRecorder != null) {
            mAudioRecorder.release();
        }
        mAudioRecorder = autoConfigAudioRecordSource();

        if (mAudioRecorder == null) {
            Utils.log("AudioRecord object cannot initialized correctly!");
            mIsAudioEnabled.set(false);
        }

        synchronized (mMuxerLock) {
            // -1 marks "track not yet added to the muxer".
            mVideoTrackIndex = -1;
            mAudioTrackIndex = -1;
        }
        mIsRecording = false;
    }

    /**
     * Drains one encoded video output buffer into the muxer (once started), stamping it with
     * wall-clock time, and releases the buffer back to the codec.
     *
     * @param bufferIndex index returned by dequeueOutputBuffer
     * @return true when the buffer carries BUFFER_FLAG_END_OF_STREAM
     */
    private boolean writeVideoEncodedBuffer(int bufferIndex) {

        if (bufferIndex < 0) {
            Utils.log("Output buffer should not have negative index: " + bufferIndex);
            return false;
        }

        ByteBuffer outputBuffer = mVideoEncoder.getOutputBuffer(bufferIndex);

        if (outputBuffer == null) {
            Utils.log("OutputBuffer was null.");
            return false;
        }

        if (mMuxerStarted.get()) {
            if (mVideoBufferInfo.size > 0) {
                outputBuffer.position(mVideoBufferInfo.offset);
                outputBuffer.limit(mVideoBufferInfo.offset + mVideoBufferInfo.size);
                // Replace the codec-supplied timestamp with wall-clock microseconds.
                mVideoBufferInfo.presentationTimeUs = (System.nanoTime() / 1000);

                synchronized (mMuxerLock) {
                    if (!mIsFirstVideoKeyFrameWrite.get()) {
                        // Until a key frame arrives, keep requesting an immediate sync frame.
                        // NOTE(review): the current sample is still written below even when it
                        // is not a key frame — confirm that is intended.
                        boolean isKeyFrame = (mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
                        if (isKeyFrame) {
                            Utils.log("First video key frame written.");
                            mIsFirstVideoKeyFrameWrite.set(true);
                        } else {
                            final Bundle syncFrame = new Bundle();
                            syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
                            mVideoEncoder.setParameters(syncFrame);
                        }
                    }
                    mMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, mVideoBufferInfo);
                }
            } else {
                Utils.log("mVideoBufferInfo.size <= 0, index " + bufferIndex);
            }
        }

        mVideoEncoder.releaseOutputBuffer(bufferIndex, false);

        return (mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
    }

    /**
     * Drains one encoded audio output buffer into the muxer (once started) and releases
     * the buffer back to the codec. Muxer write failures are logged, not propagated.
     *
     * @param bufferIndex index returned by dequeueOutputBuffer
     * @return true when the buffer carries BUFFER_FLAG_END_OF_STREAM
     */
    private boolean writeAudioEncodedBuffer(int bufferIndex) {
        ByteBuffer buffer = getOutputBuffer(mAudioEncoder, bufferIndex);
        buffer.position(mAudioBufferInfo.offset);
        if (mMuxerStarted.get()) {
            try {
                // Skip empty buffers and non-positive timestamps, which the muxer rejects.
                if (mAudioBufferInfo.size > 0 && mAudioBufferInfo.presentationTimeUs > 0) {
                    synchronized (mMuxerLock) {
                        if (!mIsFirstAudioSampleWrite.get()) {
                            Utils.log("First audio sample written.");
                            mIsFirstAudioSampleWrite.set(true);
                        }
                        mMuxer.writeSampleData(mAudioTrackIndex, buffer, mAudioBufferInfo);
                    }
                } else {
                    Utils.log("mAudioBufferInfo size: " + mAudioBufferInfo.size + " "
                            + "presentationTimeUs: " + mAudioBufferInfo.presentationTimeUs);
                }
            } catch (Exception e) {
                // Best-effort: a failed audio sample write does not abort the recording.
                Logger.e(
                        TAG,
                        "audio error:size="
                                + mAudioBufferInfo.size
                                + "/offset="
                                + mAudioBufferInfo.offset
                                + "/timeUs="
                                + mAudioBufferInfo.presentationTimeUs);
                e.printStackTrace();
            }
        }
        mAudioEncoder.releaseOutputBuffer(bufferIndex, false);
        return (mAudioBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
    }


    /**
     * Audio encoding loop, intended to run on a dedicated audio thread. Feeds PCM
     * read from {@code mAudioRecorder} into {@code mAudioEncoder} and drains the
     * encoder's output into the muxer until end of stream or the recording stops.
     *
     * <p>When the loop exits it stops the recorder and the encoder, then raises
     * {@code mEndOfVideoStreamSignal} so the video side can finish as well.
     *
     * @param videoSavedCallback notified with ERROR_ENCODER if stopping the
     *        recorder or the encoder fails
     * @return always {@code false}
     */
    boolean audioEncode(OnVideoSavedCallback videoSavedCallback) {
        // Audio encoding loop. Exits on end of stream.
        boolean audioEos = false;
        int outIndex;
        long lastAudioTimestamp = 0;
        while (!audioEos && mIsRecording) {
            // Check for end of stream from main thread
            if (mEndOfAudioStreamSignal.get()) {
                mEndOfAudioStreamSignal.set(false);
                mIsRecording = false;
            }

            // get audio deque input buffer
            if (mAudioEncoder != null && mAudioRecorder != null) {
                try {
                    // Blocks (timeout -1) until an input buffer is free.
                    int index = mAudioEncoder.dequeueInputBuffer(-1);
                    if (index >= 0) {
                        final ByteBuffer buffer = getInputBuffer(mAudioEncoder, index);
                        buffer.clear();
                        int length = mAudioRecorder.read(buffer, mAudioBufferSize);
                        if (length > 0) {
                            // Wall-clock timestamp in microseconds; the EOS flag is
                            // set once the recording has been asked to stop.
                            mAudioEncoder.queueInputBuffer(
                                    index,
                                    0,
                                    length,
                                    (System.nanoTime() / 1000),
                                    mIsRecording ? 0 : MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        }
                    }
                } catch (MediaCodec.CodecException e) {
                    Utils.log("audio dequeueInputBuffer CodecException " + e.getMessage());
                } catch (IllegalStateException e) {
                    Utils.log(
                            "audio dequeueInputBuffer IllegalStateException " + e.getMessage());
                }

                // start to dequeue audio output buffer
                do {
                    outIndex = mAudioEncoder.dequeueOutputBuffer(mAudioBufferInfo, 0);
                    switch (outIndex) {
                        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                            synchronized (mMuxerLock) {
                                mAudioTrackIndex = mMuxer.addTrack(mAudioEncoder.getOutputFormat());
                                // The muxer may only start once BOTH the audio and
                                // the video track have been added.
                                if (mAudioTrackIndex >= 0 && mVideoTrackIndex >= 0) {
                                    Utils.log("MediaMuxer start on audio encoder thread.");
                                    mMuxer.start();
                                    mMuxerStarted.set(true);
                                }
                            }
                            break;
                        case MediaCodec.INFO_TRY_AGAIN_LATER:
                            break;
                        default:
                            // Drops out of order audio frame if the frame's earlier than last
                            // frame.
                            if (mAudioBufferInfo.presentationTimeUs > lastAudioTimestamp) {
                                audioEos = writeAudioEncodedBuffer(outIndex);
                                lastAudioTimestamp = mAudioBufferInfo.presentationTimeUs;
                            } else {
                                Logger.w(TAG,
                                        "Drops frame, current frame's timestamp "
                                                + mAudioBufferInfo.presentationTimeUs
                                                + " is earlier that last frame "
                                                + lastAudioTimestamp);
                                // Releases this frame from output buffer
                                mAudioEncoder.releaseOutputBuffer(outIndex, false);
                            }
                    }
                } while (outIndex >= 0 && !audioEos); // end of dequeue output buffer
            }
        } // end of while loop

        // Audio Stop
        // NOTE(review): unlike inside the loop, mAudioRecorder/mAudioEncoder are
        // not null-checked here — confirm they are always non-null on this path.
        try {
            Utils.log("audioRecorder stop");
            mAudioRecorder.stop();
        } catch (IllegalStateException e) {
            videoSavedCallback.onError(
                    ERROR_ENCODER, "Audio recorder stop failed!", e);
        }

        try {
            mAudioEncoder.stop();
        } catch (IllegalStateException e) {
            videoSavedCallback.onError(ERROR_ENCODER,
                    "Audio encoder stop failed!", e);
        }

        Utils.log("Audio encode thread end");
        // Use AtomicBoolean to signal because MediaCodec.signalEndOfInputStream() is not thread
        // safe
        mEndOfVideoStreamSignal.set(true);

        return false;
    }

    /** Thin wrapper over {@link MediaCodec#getInputBuffer(int)}. */
    private ByteBuffer getInputBuffer(MediaCodec codec, int index) {
        return codec.getInputBuffer(index);
    }

    /** Thin wrapper over {@link MediaCodec#getOutputBuffer(int)}. */
    private ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
        return codec.getOutputBuffer(index);
    }

    /**
     * Creates a {@link MediaFormat} using parameters for audio from the configuration.
     *
     * @return an AAC-LC format with the configured sample rate and channel count
     *         at a fixed 64 kbps bit rate
     */
    private MediaFormat createAudioMediaFormat() {
        MediaFormat format =
                MediaFormat.createAudioFormat(AUDIO_MIME_TYPE, mAudioSampleRate,
                        mAudioChannelCount);
        format.setInteger(
                MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        // 64 kbps AAC-LC. (Renamed from "mAudioBitRate": the "m" member prefix on
        // a local variable was misleading.)
        final int audioBitRate = 64000;
        format.setInteger(MediaFormat.KEY_BIT_RATE, audioBitRate);

        return format;
    }

    /**
     * Creates an {@link AudioRecord} configured for raw 16-bit PCM capture from
     * the CAMCORDER source, or returns {@code null} if initialization fails.
     *
     * <p>On success the chosen buffer size is stored in {@code mAudioBufferSize}
     * for later use by the read loop.
     *
     * @return an initialized recorder, or {@code null} on failure
     */
    @RequiresPermission(Manifest.permission.RECORD_AUDIO)
    private AudioRecord autoConfigAudioRecordSource() {
        // Use channel count to determine stereo vs mono
        int channelConfig =
                mAudioChannelCount == 1
                        ? AudioFormat.CHANNEL_IN_MONO
                        : AudioFormat.CHANNEL_IN_STEREO;

        try {
            // Use only ENCODING_PCM_16BIT because it mandatory supported.
            int bufferSize =
                    AudioRecord.getMinBufferSize(mAudioSampleRate, channelConfig,
                            AudioFormat.ENCODING_PCM_16BIT);

            if (bufferSize <= 0) {
                // getMinBufferSize returns an error code (<= 0) for unsupported
                // parameters; fall back to a fixed default.
                bufferSize = 16000;
            }

            AudioRecord recorder =
                    new AudioRecord(
                            AudioSource.CAMCORDER,
                            mAudioSampleRate,
                            channelConfig,
                            AudioFormat.ENCODING_PCM_16BIT,
                            bufferSize * 2);

            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                mAudioBufferSize = bufferSize;
                Logger.i(
                        TAG,
                        "source: "
                                + AudioSource.CAMCORDER
                                + " audioSampleRate: "
                                + mAudioSampleRate
                                + " channelConfig: "
                                + channelConfig
                                + " audioFormat: "
                                + AudioFormat.ENCODING_PCM_16BIT
                                + " bufferSize: "
                                + bufferSize);
                return recorder;
            }

            // Fix: release the native resources of a recorder that failed to
            // initialize instead of leaking it.
            recorder.release();
        } catch (Exception e) {
            // Best-effort behavior kept, but the stack trace now goes through the
            // project logger instead of printStackTrace().
            Logger.e(TAG, "Exception, keep trying.", e);
        }
        return null;
    }


    /**
     * Checks whether at least one video key frame was written to the recording.
     * If none was (the result would be unplayable), the output file or MediaStore
     * entry is deleted; a plain file-descriptor target is left untouched because
     * the descriptor is owned by the caller.
     *
     * @param outputFileOptions the output target of the finished recording
     * @return {@code true} when the recording contains at least one key frame
     */
    private boolean removeRecordingResultIfNoVideoKeyFrameArrived(
            @NonNull OutputFileOptions outputFileOptions) {
        // 1. There should be one video key frame at least.
        boolean checkKeyFrame = mIsFirstVideoKeyFrameWrite.get();
        Utils.log(
                "check Recording Result First Video Key Frame Write: " + checkKeyFrame);
        if (!checkKeyFrame) {
            Utils.log("The recording result has no key frame.");
        }

        // 2. If no key frame, remove file except the target is a file descriptor case.
        if (!checkKeyFrame) {
            if (outputFileOptions.isSavingToFile()) {
                File outputFile = outputFileOptions.getFile();
                Utils.log("Delete file.");
                // Fix: surface a failed deletion instead of silently ignoring the
                // return value of File.delete().
                if (!outputFile.delete()) {
                    Utils.log("Failed to delete file: " + outputFile);
                }
            } else if (outputFileOptions.isSavingToMediaStore()) {
                Utils.log("Delete file.");
                if (mSavedVideoUri != null) {
                    ContentResolver contentResolver = outputFileOptions.getContentResolver();
                    contentResolver.delete(mSavedVideoUri, null, null);
                }
            }
        }

        return checkKeyFrame;
    }

    /**
     * Creates the {@link MediaMuxer} for the configured output target: a plain
     * {@link File}, a {@link FileDescriptor} (API 26+ only), or a MediaStore
     * entry. Where a {@link Uri} exists for the target it is recorded in
     * {@code mSavedVideoUri}.
     *
     * @throws IOException if the muxer cannot be created or the MediaStore insert
     *         fails
     * @throws IllegalArgumentException if no output target was configured, or a
     *         FileDescriptor target is used below API 26
     */
    @NonNull
    private MediaMuxer initMediaMuxer(@NonNull OutputFileOptions outputFileOptions)
            throws IOException {
        MediaMuxer mediaMuxer;

        if (outputFileOptions.isSavingToFile()) {
            File savedVideoFile = outputFileOptions.getFile();
            mSavedVideoUri = Uri.fromFile(outputFileOptions.getFile());

            mediaMuxer = new MediaMuxer(savedVideoFile.getAbsolutePath(),
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } else if (outputFileOptions.isSavingToFileDescriptor()) {
            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
                throw new IllegalArgumentException("Using a FileDescriptor to record a video is "
                        + "only supported for Android 8.0 or above.");
            }

            mediaMuxer = Api26Impl.createMediaMuxer(outputFileOptions.getFileDescriptor(),
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } else if (outputFileOptions.isSavingToMediaStore()) {
            ContentValues values = outputFileOptions.getContentValues() != null
                    ? new ContentValues(outputFileOptions.getContentValues())
                    : new ContentValues();

            mSavedVideoUri = outputFileOptions.getContentResolver().insert(
                    outputFileOptions.getSaveCollection(), values);

            if (mSavedVideoUri == null) {
                throw new IOException("Invalid Uri!");
            }

            // Since API 26, a media muxer can be initiated from a FileDescriptor;
            // below that we must resolve the Uri to an absolute file path.
            try {
                if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
                    String savedLocationPath = VideoUtil.getAbsolutePathFromUri(
                            outputFileOptions.getContentResolver(), mSavedVideoUri);

                    Utils.log("Saved Location Path: " + savedLocationPath);
                    mediaMuxer = new MediaMuxer(savedLocationPath,
                            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
                } else {
                    // NOTE(review): mParcelFileDescriptor is kept as a field and is
                    // presumably closed when recording finishes — confirm it is
                    // closed on all paths.
                    mParcelFileDescriptor =
                            outputFileOptions.getContentResolver().openFileDescriptor(
                                    mSavedVideoUri, "rw");
                    mediaMuxer = Api26Impl.createMediaMuxer(
                            mParcelFileDescriptor.getFileDescriptor(),
                            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
                }
            } catch (IOException e) {
                // Clear the Uri so callers do not report a partially-created entry.
                mSavedVideoUri = null;
                throw e;
            }
        } else {
            throw new IllegalArgumentException(
                    "The OutputFileOptions should assign before recording");
        }

        return mediaMuxer;
    }

    /** Error codes delivered via {@link OnVideoSavedCallback#onError}. */
    @IntDef({ERROR_UNKNOWN, ERROR_ENCODER, ERROR_MUXER, ERROR_RECORDING_IN_PROGRESS,
            ERROR_FILE_IO, ERROR_INVALID_CAMERA, ERROR_RECORDING_TOO_SHORT})
    public @interface VideoCaptureError {
    }

    /** Initialization states of the video encoder. */
    enum VideoEncoderInitStatus {
        VIDEO_ENCODER_INIT_STATUS_UNINITIALIZED,
        VIDEO_ENCODER_INIT_STATUS_INITIALIZED_FAILED,
        VIDEO_ENCODER_INIT_STATUS_INSUFFICIENT_RESOURCE,
        VIDEO_ENCODER_INIT_STATUS_RESOURCE_RECLAIMED,
    }


    /** Callback for the result of a video recording session. */
    public interface OnVideoSavedCallback {
        /** Called when the video has been written to its output target. */
        void onVideoSaved(@NonNull OutputFileResults outputFileResults);

        /** Called when recording fails; {@code cause} may be {@code null}. */
        void onError(@VideoCaptureError int videoCaptureError, @NonNull String message, @Nullable Throwable cause);
    }


    /** Data class holding optional metadata for a recording. */
    public static final class Metadata {
        // Optional location to associate with the recording; null when not set.
        @Nullable
        public Location location;
    }

    /**
     * Decorates an {@link OnVideoSavedCallback} so that every callback is
     * dispatched on the supplied {@link Executor}. A
     * {@link RejectedExecutionException} (e.g. executor already shut down) is
     * logged and the callback is dropped.
     */
    private static final class VideoSavedListenerWrapper implements OnVideoSavedCallback {

        // Both references are assigned once in the constructor and never change,
        // so they are declared private final.
        @NonNull
        private final Executor mExecutor;
        @NonNull
        private final OnVideoSavedCallback mOnVideoSavedCallback;

        VideoSavedListenerWrapper(@NonNull Executor executor,
                                  @NonNull OnVideoSavedCallback onVideoSavedCallback) {
            mExecutor = executor;
            mOnVideoSavedCallback = onVideoSavedCallback;
        }

        /** Forwards onVideoSaved to the wrapped callback on the executor. */
        @Override
        public void onVideoSaved(@NonNull OutputFileResults outputFileResults) {
            try {
                mExecutor.execute(() -> mOnVideoSavedCallback.onVideoSaved(outputFileResults));
            } catch (RejectedExecutionException e) {
                Utils.log("Unable to post to the supplied executor.");
            }
        }

        /** Forwards onError to the wrapped callback on the executor. */
        @Override
        public void onError(@VideoCaptureError int videoCaptureError, @NonNull String message,
                            @Nullable Throwable cause) {
            try {
                mExecutor.execute(
                        () -> mOnVideoSavedCallback.onError(videoCaptureError, message, cause));
            } catch (RejectedExecutionException e) {
                Utils.log("Unable to post to the supplied executor.");
            }
        }

    }

    /** Result handed to {@link OnVideoSavedCallback#onVideoSaved}. */
    public static class OutputFileResults {

        // Made final: assigned once in the constructor and never mutated.
        @Nullable
        private final Uri mSavedUri;

        OutputFileResults(@Nullable Uri savedUri) {
            mSavedUri = savedUri;
        }

        /**
         * Returns the {@link Uri} of the saved video, or {@code null} when no Uri
         * is known (e.g. a plain file-descriptor target).
         */
        @Nullable
        public Uri getSavedUri() {
            return mSavedUri;
        }
    }

    /**
     * Describes where a recording should be saved: a {@link File}, a
     * {@link FileDescriptor} (API 26+), or a MediaStore collection. Exactly one
     * target should be configured via the corresponding {@link Builder}
     * constructor.
     */
    public static final class OutputFileOptions {

        private static final Metadata EMPTY_METADATA = new Metadata();

        @Nullable
        private final File mFile;
        @Nullable
        private final FileDescriptor mFileDescriptor;
        @Nullable
        private final ContentResolver mContentResolver;
        @Nullable
        private final Uri mSaveCollection;
        @Nullable
        private final ContentValues mContentValues;
        // Never null: defaults to EMPTY_METADATA in the constructor.
        @NonNull
        private final Metadata mMetadata;

        OutputFileOptions(@Nullable File file,
                          @Nullable FileDescriptor fileDescriptor,
                          @Nullable ContentResolver contentResolver,
                          @Nullable Uri saveCollection,
                          @Nullable ContentValues contentValues,
                          @Nullable Metadata metadata) {
            mFile = file;
            mFileDescriptor = fileDescriptor;
            mContentResolver = contentResolver;
            mSaveCollection = saveCollection;
            mContentValues = contentValues;
            mMetadata = metadata == null ? EMPTY_METADATA : metadata;
        }

        @Nullable
        File getFile() {
            return mFile;
        }

        @Nullable
        FileDescriptor getFileDescriptor() {
            return mFileDescriptor;
        }

        @Nullable
        ContentResolver getContentResolver() {
            return mContentResolver;
        }

        @Nullable
        Uri getSaveCollection() {
            return mSaveCollection;
        }

        @Nullable
        ContentValues getContentValues() {
            return mContentValues;
        }

        /**
         * Returns the metadata for the recording. Fix: annotated {@code @NonNull}
         * — the constructor substitutes {@code EMPTY_METADATA} for null, so this
         * can never return null.
         */
        @NonNull
        Metadata getMetadata() {
            return mMetadata;
        }

        /** True when a MediaStore target (resolver + collection + values) is set. */
        boolean isSavingToMediaStore() {
            return getSaveCollection() != null && getContentResolver() != null
                    && getContentValues() != null;
        }

        /** True when a plain {@link File} target is set. */
        boolean isSavingToFile() {
            return getFile() != null;
        }

        /** True when a raw {@link FileDescriptor} target is set. */
        boolean isSavingToFileDescriptor() {
            return getFileDescriptor() != null;
        }

        /** Builder for {@link OutputFileOptions}; pick one target constructor. */
        public static final class Builder {
            @Nullable
            private File mFile;
            @Nullable
            private FileDescriptor mFileDescriptor;
            @Nullable
            private ContentResolver mContentResolver;
            @Nullable
            private Uri mSaveCollection;
            @Nullable
            private ContentValues mContentValues;
            @Nullable
            private Metadata mMetadata;

            /** Saves the recording to the given file. */
            public Builder(@NonNull File file) {
                mFile = file;
            }

            /** Saves the recording to the given descriptor (API 26+ only). */
            public Builder(@NonNull FileDescriptor fileDescriptor) {
                Preconditions.checkArgument(Build.VERSION.SDK_INT >= Build.VERSION_CODES.O,
                        "Using a FileDescriptor to record a video is only supported for Android 8"
                                + ".0 or above.");

                mFileDescriptor = fileDescriptor;
            }

            /** Saves the recording to a MediaStore collection. */
            public Builder(@NonNull ContentResolver contentResolver,
                           @NonNull Uri saveCollection,
                           @NonNull ContentValues contentValues) {
                mContentResolver = contentResolver;
                mSaveCollection = saveCollection;
                mContentValues = contentValues;
            }

            /** Attaches optional metadata to the recording. */
            @NonNull
            public Builder setMetadata(@NonNull Metadata metadata) {
                mMetadata = metadata;
                return this;
            }

            @NonNull
            public OutputFileOptions build() {
                return new OutputFileOptions(mFile, mFileDescriptor, mContentResolver,
                        mSaveCollection, mContentValues, mMetadata);
            }
        }
    }

    /** Nested class isolating calls to APIs introduced in Android 8.0 (API 26). */
    @RequiresApi(26)
    private static class Api26Impl {

        private Api26Impl() {
        }

        /** Creates a {@link MediaMuxer} from a {@link FileDescriptor} (API 26+ overload). */
        @DoNotInline
        @NonNull
        static MediaMuxer createMediaMuxer(@NonNull FileDescriptor fileDescriptor, int format)
                throws IOException {
            return new MediaMuxer(fileDescriptor, format);
        }
    }

    /** Nested class isolating calls to APIs introduced in Android 6.0 (API 23). */
    @RequiresApi(23)
    private static class Api23Impl {

        private Api23Impl() {
        }

        /** Returns {@link MediaCodec.CodecException#getErrorCode()} (API 23+). */
        @DoNotInline
        static int getCodecExceptionErrorCode(MediaCodec.CodecException e) {
            return e.getErrorCode();
        }
    }
}

3、工具类 Utils(包含日志、时间格式化以及 argb 数据转 yuv 的方法)

package com.cloudly.test.Utils;

import android.util.Log;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;


public class Utils {

    /** Writes a debug message to logcat under the shared tag. */
    public static void log(String message) {
        Log.d("测试", message);
    }

    /** Returns the current wall-clock time formatted as "yyyy-MM-dd HH:mm:ss". */
    public static String getCurrentTimeString() {
        return getDateToString(System.currentTimeMillis(), "yyyy-MM-dd HH:mm:ss");
    }

    /**
     * Formats an epoch-millisecond timestamp with the given pattern in the
     * default locale.
     *
     * @param milSecond epoch time in milliseconds
     * @param pattern   a {@link SimpleDateFormat} pattern
     * @return the formatted date string
     */
    public static String getDateToString(long milSecond, String pattern) {
        Date date = new Date(milSecond);
        SimpleDateFormat format = new SimpleDateFormat(pattern, Locale.getDefault());
        return format.format(date);
    }

    /**
     * Converts ARGB_8888 pixels to 4:2:0 YUV using BT.601 coefficients.
     *
     * <p>Layout note: despite the parameter name "i420" the output is
     * SEMI-planar — a full-resolution Y plane followed by interleaved U/V pairs
     * (NV12 ordering). The destination array must hold at least
     * {@code width * height + 2 * ((width + 1) / 2) * ((height + 1) / 2)} bytes.
     *
     * @param i420   destination buffer (Y plane, then interleaved U/V)
     * @param argb   source pixels, one packed int per pixel, row-major
     * @param width  frame width in pixels
     * @param height frame height in pixels
     */
    public static void convertArgb2Yuv(byte[] i420, int[] argb, int width, int height) {

        int frameSize = width * height;
        int yIndex = 0;
        int uvIndex = frameSize;
        int R, G, B, Y, U, V;
        int index = 0;
        for (int j = 0; j < height; j++) {
            for (int i = 0; i < width; i++) {
                R = (argb[index] & 0xff0000) >> 16;
                G = (argb[index] & 0xff00) >> 8;
                B = argb[index] & 0xff;

                // BT.601 integer approximation (studio swing: Y nominally 16-235).
                Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
                U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
                V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

                i420[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
                // 2x2 chroma subsampling: sample on even rows AND even columns.
                // Fix: the original tested "index % 2 == 0" (global pixel index),
                // which loses column parity on every other row when the width is
                // odd; "i % 2 == 0" is correct for any width and identical to the
                // old behavior for even widths.
                if (j % 2 == 0 && i % 2 == 0) {
                    i420[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
                    i420[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                }
                index++;
            }
        }
    }
}