使用H264或者H265实现手机投屏

3,039 阅读6分钟

下一篇把摄像头转成H265数据给接收方

一 概述

大概流程是下面

  1. 发送端:使用MediaProjection获取屏幕视频 ,然后通过 MediaCodec 编码成 H264 或者 H265 码流,最终通过 WebSocket 发送给接收端
  2. 接收端:通过WebSocket接收到 H264 或者 H265 码流,然后再通过 MediaCodec 解码成YUV视频显示在 SurfaceView上

二 发送端

2.1 MainActivity

MainActivity主要是用来请求录屏权限,并且开启前台服务去录屏幕,因为在Target 28 之后 录屏必须是前台服务

public class MainActivity extends AppCompatActivity {

    public static final int REQUEST_CODE = 1;

    /**
     * 录屏的 manger
     */
    private MediaProjectionManager mProjectionManager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);

        frameGif();
    }

    private void frameGif() {
        ImageView iv = (ImageView) findViewById(R.id.iv);
        // 把帧动画的资源文件指定为iv的背景
        iv.setBackgroundResource(R.drawable.bg);
        // 获取iv的背景
        AnimationDrawable ad = (AnimationDrawable) iv.getBackground();
        ad.start();
    }

    public void start(View view) {
        // 请求录屏权限
        Intent intent = mProjectionManager.createScreenCaptureIntent();
        startActivityForResult(intent, REQUEST_CODE);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == REQUEST_CODE && resultCode == Activity.RESULT_OK) {
            Intent service = new Intent(this, PushService.class);
            service.putExtra("code", resultCode);
            service.putExtra("data", data);
            startForegroundService(service);

        }else{
            Toast.makeText(this,"请打开录屏权限",Toast.LENGTH_SHORT).show();
        }
    }


}

2.2 Service

主要是用来录屏 ,把数据给 MediaCodec 进行 编码

public class PushService extends Service {

    private static final String CHANNEL_ID = "notification_id";
    private static final int NOTIFICATION_ID = 110;

    /**
     * System service used to turn the permission grant into a MediaProjection.
     */
    private MediaProjectionManager mProjectionManager;
    private PushSocket mSocket;

    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        // Started service only; binding is not supported.
        return null;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        mProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        // Promote to foreground first: post-O the system kills a service
        // started via startForegroundService() that never calls startForeground().
        createNotificationChannel();

        int code = intent != null ? intent.getIntExtra("code", 0) : 0;
        Intent data = intent != null ? (Intent) intent.getParcelableExtra("data") : null;
        if (data == null) {
            // No permission grant to work with (e.g. a sticky restart) — bail out
            // instead of crashing inside getMediaProjection().
            stopSelf();
            return START_NOT_STICKY;
        }

        MediaProjection mediaProjection = mProjectionManager.getMediaProjection(code, data);
        mSocket = new PushSocket();
        mSocket.start(mediaProjection);

        // Do not let the system restart us with a stale/absent grant Intent.
        return START_NOT_STICKY;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        if (mSocket != null) {
            mSocket.close();
        }
    }

    /**
     * Builds the foreground notification (creating the channel first on O+)
     * and promotes this service to the foreground.
     */
    private void createNotificationChannel() {
        Notification.Builder builder;
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            // O+ requires the channel to exist before the notification is posted;
            // use the channel-aware constructor instead of the deprecated one.
            NotificationManager notificationManager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
            NotificationChannel channel = new NotificationChannel(CHANNEL_ID, "notification_name", NotificationManager.IMPORTANCE_LOW);
            notificationManager.createNotificationChannel(channel);
            builder = new Notification.Builder(this.getApplicationContext(), CHANNEL_ID);
        } else {
            builder = new Notification.Builder(this.getApplicationContext());
        }

        // Activity opened when the user taps the notification.
        Intent nfIntent = new Intent(this, MainActivity.class);
        // API 31+ requires an explicit mutability flag on PendingIntents.
        int piFlags = Build.VERSION.SDK_INT >= Build.VERSION_CODES.M
                ? PendingIntent.FLAG_IMMUTABLE : 0;

        builder.setContentIntent(PendingIntent.getActivity(this, 0, nfIntent, piFlags))
                // Large icon shown in the expanded notification.
                .setLargeIcon(BitmapFactory.decodeResource(this.getResources(), R.mipmap.ic_launcher))
                // Small status-bar icon.
                .setSmallIcon(R.mipmap.ic_launcher)
                // Body text of the notification.
                .setContentText("投屏中。。。")
                // Timestamp shown on the notification.
                .setWhen(System.currentTimeMillis());

        Notification notification = builder.build();
        // Play the default notification sound.
        notification.defaults = Notification.DEFAULT_SOUND;
        startForeground(NOTIFICATION_ID, notification);
    }
}

2.3 Socket

主要用来发送 编码之后的 每一帧数据

public class PushSocket {
    private static final String TAG = "PushSocket";

    /**
     * The most recently connected client; null until a receiver connects.
     */
    private WebSocket mWebSocket;

    /**
     * Port the WebSocket server listens on.
     */
    private static final int PORT = 13001;

//    private CodecH265 mCodecH265;
    private CodecH264 mCodecH264;

    public PushSocket() {
    }

    /**
     * Starts the WebSocket server and the screen encoder.
     *
     * @param mediaProjection the screen-capture session obtained from the permission grant
     */
    public void start(MediaProjection mediaProjection) {
        webSocketServer.start();
//        mCodecH265 = new CodecH265(this, mediaProjection);
//        mCodecH265.startLive();
        mCodecH264 = new CodecH264(this, mediaProjection);
        mCodecH264.startLive();
    }

    private WebSocketServer webSocketServer = new WebSocketServer(new InetSocketAddress(PORT)) {
        @Override
        public void onOpen(WebSocket webSocket, ClientHandshake clientHandshake) {
            // Remember the client so encoded frames can be pushed to it.
            mWebSocket = webSocket;
        }

        @Override
        public void onClose(WebSocket conn, int code, String reason, boolean remote) {
            Log.i(TAG, "onClose: 关闭 socket ");
        }

        @Override
        public void onMessage(WebSocket webSocket, String message) {
            // The receiver never sends text messages; nothing to do.
        }

        @Override
        public void onError(WebSocket conn, Exception e) {
            Log.i(TAG, "onError:  " + e.toString());
        }

        @Override
        public void onStart() {
        }
    };

    /**
     * Sends one encoded frame to the connected receiver; dropped silently
     * when no client is connected.
     *
     * @param bytes one complete H264/H265 access unit (start code included)
     */
    public void sendData(byte[] bytes) {
        if (mWebSocket != null && mWebSocket.isOpen()) {
            mWebSocket.send(bytes);
        }
    }

    /**
     * Closes the client connection (if any) and stops the server.
     */
    public void close() {
        try {
            // mWebSocket is null when no receiver ever connected — guard it,
            // otherwise closing the service without a client would NPE.
            if (mWebSocket != null) {
                mWebSocket.close();
            }
            webSocketServer.stop();
        } catch (IOException | InterruptedException e) {
            e.printStackTrace();
        }
    }
}

2.4 CodecH264

用来编码每一帧数据,注意这里发送每个I帧之前要加入sps-pps帧的数据

public class CodecH264 extends Thread {

    private static final String TAG = "CodecLiveH264";
    private MediaCodec mMediaCodec;

    private static final int WIDTH = 720;
    private static final int HEIGHT = 1280;
    /**
     * Captures the screen into the encoder's input surface.
     */
    private final MediaProjection mMediaProjection;
    VirtualDisplay virtualDisplay;
    private final PushSocket mSocket;

    public CodecH264(PushSocket socketLive, MediaProjection mediaProjection) {
        this.mMediaProjection = mediaProjection;
        this.mSocket = socketLive;
    }

    /**
     * Configures the encoder in surface-input mode, wires the screen capture
     * to that surface, and starts the drain thread.
     */
    public void startLive() {
        try {
            /////////////////////////////////// For H265 use MIMETYPE_VIDEO_HEVC here //////////////////////////////////////////////
            MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, WIDTH, HEIGHT);
            // Surface input: the encoder reads frames directly from the Surface.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            // Bit rate: higher means sharper picture and larger frames.
            format.setInteger(KEY_BIT_RATE, WIDTH * HEIGHT);
            // Target frame rate (fps).
            format.setInteger(KEY_FRAME_RATE, 20);
            // Keyframe (I-frame) interval in seconds.
            format.setInteger(KEY_I_FRAME_INTERVAL, 1);
            /////////////////////////////////// For H265 use MIMETYPE_VIDEO_HEVC here //////////////////////////////////////////////
            mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            // Input surface the screen content is rendered into.
            Surface surface = mMediaCodec.createInputSurface();
            // Route the captured screen frames into the encoder's surface.
            virtualDisplay = mMediaProjection.createVirtualDisplay(
                    "-display",
                    WIDTH, HEIGHT, 1,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, surface, null, null);
        } catch (IOException e) {
            e.printStackTrace();
            // Encoder creation failed: do NOT start the thread, otherwise
            // run() would NPE on a null mMediaCodec.
            return;
        }

        start();
    }

    @Override
    public void run() {
        mMediaCodec.start();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (true) {
            try {
                int outputBufferId = mMediaCodec.dequeueOutputBuffer(bufferInfo, 10000);
                if (outputBufferId >= 0) {
                    ByteBuffer byteBuffer = mMediaCodec.getOutputBuffer(outputBufferId);
                    // For each frame: if it is an I frame, prepend the cached sps/pps.
                    dealFrame(byteBuffer, bufferInfo);
                    mMediaCodec.releaseOutputBuffer(outputBufferId, false);
                }
            } catch (Exception e) {
                e.printStackTrace();
                break;
            }
        }
        // Leaving the loop means we are done — free the codec and the display
        // instead of leaking them.
        release();
    }

    /**
     * Releases the encoder and the virtual display.
     */
    private void release() {
        if (mMediaCodec != null) {
            try {
                mMediaCodec.stop();
            } catch (Exception ignored) {
                // Codec may already be in an error state; nothing more to do.
            }
            mMediaCodec.release();
        }
        if (virtualDisplay != null) {
            virtualDisplay.release();
        }
    }

    /////////////////////////////////// For H265 the constants change //////////////////////////////////////////////
    /////////////////////////////////// H265: NAL_I=19 NAL_SPS=32 //////////////////////////////////////////////

    public static final int NAL_I = 5;
    public static final int NAL_SPS = 7;
    private byte[] sps_pps_buf;

    /**
     * Handles one encoded frame. The encoder emits sps/pps only once (as the
     * first output), so that buffer is cached and re-sent in front of every
     * I frame so a receiver that joins late can still decode.
     *
     * @param byteBuffer encoder output buffer positioned at the frame start
     * @param bufferInfo metadata (size/flags) for the frame
     */
    private void dealFrame(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
        // Skip the Annex-B start code: 00 00 01 (3 bytes) or 00 00 00 01 (4 bytes).
        int offset = 4;
        if (byteBuffer.get(2) == 0x01) {
            offset = 3;
        }

        int type = byteBuffer.get(offset) & 0x1f;
        /////////////////////////////////// For H265 the NAL type is extracted differently //////////////////////////////////////////////
//        int type = (byteBuffer.get(offset) & 0x7E) >> 1;
        if (type == NAL_SPS) {
            // Cache the sps/pps config buffer for later I frames.
            sps_pps_buf = new byte[bufferInfo.size];
            byteBuffer.get(sps_pps_buf);
        } else if (type == NAL_I && sps_pps_buf != null) {
            // I frame: prepend the cached sps/pps so it decodes stand-alone.
            // (If sps/pps has not been seen yet we fall through below and send
            // the frame as-is rather than NPE on a null cache.)
            final byte[] bytes = new byte[bufferInfo.size];
            byteBuffer.get(bytes);

            byte[] newBuf = new byte[sps_pps_buf.length + bytes.length];
            System.arraycopy(sps_pps_buf, 0, newBuf, 0, sps_pps_buf.length);
            System.arraycopy(bytes, 0, newBuf, sps_pps_buf.length, bytes.length);
            mSocket.sendData(newBuf);
            Log.v(TAG, "I帧 视频数据  " + Arrays.toString(bytes));
        } else {
            // P/B frames (and any other NAL type) are forwarded unchanged.
            final byte[] bytes = new byte[bufferInfo.size];
            byteBuffer.get(bytes);
            mSocket.sendData(bytes);
        }
    }

}

三 接收端

3.1 MainActivity

主要用SurfaceView 来接收 MediaCodec 解码之后的每一帧数据

public class MainActivity extends AppCompatActivity {

    private Surface mSurface;

    private Decode264 mDecode264;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        final SurfaceView preview = (SurfaceView) findViewById(R.id.surfaceView);
        preview.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(@NonNull SurfaceHolder holder) {
                // The surface is ready: build the decoder around it, then
                // connect to the sender so incoming frames can be rendered.
                mSurface = holder.getSurface();
                mDecode264 = new Decode264(mSurface);
                initSocket();
            }

            @Override
            public void surfaceChanged(@NonNull SurfaceHolder holder, int format, int width, int height) {
                // Nothing to do; the decoder keeps rendering into the same surface.
            }

            @Override
            public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
                // Intentionally empty.
            }
        });
    }

    /**
     * Opens the WebSocket client and routes received frames into the decoder.
     */
    private void initSocket() {
        ReceiveSocket screenLive = new ReceiveSocket(this, mDecode264);
        screenLive.start();
    }

}


3.2 ReceiveSocket

主要用来接收数据

public class ReceiveSocket {
    private static final String TAG = "ReceiveSocket";

    /** Sink that receives every binary frame pulled off the socket. */
    private final SocketCallback mSocketCallback;
    MyWebSocketClient myWebSocketClient;

    /** Must match the port the sender's WebSocket server listens on. */
    private static final int PORT = 13001;
    //todo fill in the sender's Wi-Fi IP address here
    private static final String IP = "";

    private final Context mContext;

    public ReceiveSocket(Context context, SocketCallback socketCallback) {
        mContext = context;
        mSocketCallback = socketCallback;
    }

    /**
     * Connects to the sender. Fails fast (with a toast) when the IP
     * constant above was left empty.
     */
    public void start() {
        if (IP.isEmpty()) {
            Toast.makeText(mContext, "请填写发送端的wifi的IP", Toast.LENGTH_SHORT).show();
            throw new RuntimeException("请填写发送端的wifi的IP");
        }
        try {
            URI url = new URI("ws://" + IP + ":" + PORT);
            myWebSocketClient = new MyWebSocketClient(url);
            myWebSocketClient.connect();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private class MyWebSocketClient extends WebSocketClient {

        public MyWebSocketClient(URI serverURI) {
            super(serverURI);
        }

        @Override
        public void onOpen(ServerHandshake serverHandshake) {
            Log.i(TAG, "打开 socket  onOpen: ");
        }

        @Override
        public void onMessage(String s) {
            // Text frames are not used by this protocol.
        }

        @Override
        public void onMessage(ByteBuffer bytes) {
            Log.i(TAG, "消息长度  : " + bytes.remaining());
            // Copy the payload out of the (reused) buffer before handing it on.
            byte[] frame = new byte[bytes.remaining()];
            bytes.get(frame);
            mSocketCallback.callBack(frame);
        }

        @Override
        public void onClose(int code, String reason, boolean remote) {
            Log.i(TAG, "onClose: " + code + "----" + reason + "----" + remote);
        }

        @Override
        public void onError(Exception e) {
            Log.i(TAG, "onError: " + e);
        }
    }

    public interface SocketCallback {
        /**
         * Delivers one received frame to the consumer (the decoder).
         *
         * @param data one complete encoded video frame
         */
        void callBack(byte[] data);
    }
}

3.3 Decode264

主要用把接收到的 H264 解码 到 SurfaceView上

public class Decode264 implements ReceiveSocket.SocketCallback {
    private static final String TAG = "Decode";
    private Surface mSurface;
    MediaCodec mMediaCodec;

    public Decode264(Surface surface) {
        mSurface = surface;
        initCodec();
    }

    /**
     * Creates an H264 decoder that renders directly onto the given Surface.
     */
    private void initCodec() {
        try {
            /////////////////////////////////// For H265 use MIMETYPE_VIDEO_HEVC here //////////////////////////////////////////////
            mMediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            /////////////////////////////////// For H265 use MIMETYPE_VIDEO_HEVC here //////////////////////////////////////////////
            final MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 720, 1280);
            format.setInteger(MediaFormat.KEY_BIT_RATE, 720 * 1280);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
            // Passing the Surface makes the decoder render output frames directly.
            mMediaCodec.configure(format,
                    mSurface,
                    null, 0);
            mMediaCodec.start();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Feeds one received frame into the decoder and releases every decoded
     * output buffer to the Surface.
     *
     * @param data one complete encoded H264 frame (start code included)
     */
    @Override
    public void callBack(byte[] data) {
        Log.i(TAG, Arrays.toString(data));
        int index = mMediaCodec.dequeueInputBuffer(100000);
        if (index >= 0) {
            ByteBuffer inputBuffer = mMediaCodec.getInputBuffer(index);
            inputBuffer.clear();
            inputBuffer.put(data, 0, data.length);
            // presentationTimeUs is in MICROseconds — the original passed
            // System.currentTimeMillis() (milliseconds), a 1000x error.
            mMediaCodec.queueInputBuffer(index,
                    0, data.length, System.nanoTime() / 1000, 0);
        }
        // Drain every available output buffer.
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 100000);

        // Buffer index 0 is valid, so the condition must be >= 0; the
        // original "> 0" silently dropped frames returned in buffer 0.
        while (outputBufferIndex >= 0) {
            // true = render this buffer onto the configured Surface.
            mMediaCodec.releaseOutputBuffer(outputBufferIndex, true);
            outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0);
        }
    }
}

还用到了 Java-WebSocket

记得在每个app的build.gradle中加入下面的依赖,同时别忘了在清单文件中加 INTERNET 权限

implementation "org.java-websocket:Java-WebSocket:1.4.0"

源码地址 github