// WebRTC 1-on-1 video call demo
// (title of the original article this file was pasted from: "WEBRTC 视频1对1 demo", ~2 min read)
import { useEffect, useRef, useState } from 'react';

import { Button, Input, Select, message } from 'antd';

const { Option } = Select;
/**
 * WebRTC 1-to-1 video demo.
 *
 * Both ends of the call live in this single page: `peerA` (callee) publishes
 * the local camera and `peerB` (caller) receives it, so SDP offers/answers
 * and ICE candidates are exchanged directly in memory instead of through a
 * signalling server. The local preview renders into #rtcA, the "remote"
 * stream received by peerB renders into #rtcb.
 */
const VideoRoom = () => {
  // Device lists from enumerateDevices (not yet wired into the UI).
  const [videodevice, setvideodevice] = useState<MediaDeviceInfo[]>([]);
  const [audiodevice, setaudiodevice] = useState<MediaDeviceInfo[]>([]);
  const [speakerdevice, setspeakerdevice] = useState<MediaDeviceInfo[]>([]);
  const [selectvideo, setselectvideo] = useState<number>(0);
  const [selectaudio, setselectaudio] = useState<number>(0);

  // BUG FIX: the original kept the peers/channels in plain `let` bindings in
  // the component body, so every re-render reset them to null and dropped the
  // connections. Refs survive re-renders without triggering them.
  const peerARef = useRef<RTCPeerConnection | null>(null);
  const peerBRef = useRef<RTCPeerConnection | null>(null);
  const channelARef = useRef<RTCDataChannel | null>(null);
  const channelBRef = useRef<RTCDataChannel | null>(null);
  const localStreamRef = useRef<MediaStream | null>(null);

  // BUG FIX: legacy numeric flags (offerToReceiveAudio: 1) — RTCOfferOptions
  // takes booleans.
  const offerOption: RTCOfferOptions = {
    offerToReceiveAudio: true,
    offerToReceiveVideo: true,
  };

  // Enumerate local media devices and store them in state.
  const getlocalDevice = async () => {
    const devicelist = await navigator.mediaDevices.enumerateDevices();
    // Cameras
    const videolist = devicelist.filter((item) => item.kind === 'videoinput');
    // Microphones
    const audiolist = devicelist.filter((item) => item.kind === 'audioinput');
    // Speakers — BUG FIX: the original filtered 'audioinput' a second time,
    // so the "speaker" list was actually the microphone list.
    const speakerlist = devicelist.filter((item) => item.kind === 'audiooutput');
    setvideodevice(videolist);
    setaudiodevice(audiolist);
    // BUG FIX: the speaker list was computed but never stored.
    setspeakerdevice(speakerlist);
  };

  /**
   * Create both peer connections and wire them to each other.
   *
   * @param stream local camera stream published from peerA.
   *   BUG FIX: the original read `videoStream` from state immediately after
   *   `setVideo(stream)`, but React state had not updated yet, so peerA
   *   published the initial '' instead of the real stream. The stream is now
   *   passed in directly.
   */
  function initPeer(stream: MediaStream) {
    // Callee side.
    const peerA = new RTCPeerConnection();
    peerARef.current = peerA;
    // BUG FIX: addTrack replaces the deprecated, non-standard addStream.
    stream.getTracks().forEach((track) => peerA.addTrack(track, stream));
    // Hand A's ICE candidates straight to B (no signalling server in this demo).
    peerA.onicecandidate = (event) => {
      if (event.candidate) {
        void peerBRef.current?.addIceCandidate(event.candidate);
      }
    };
    // B opens the data channel; A receives it here.
    peerA.ondatachannel = (event) => {
      const channelA = event.channel;
      channelARef.current = channelA;
      channelA.binaryType = 'arraybuffer';
      channelA.onopen = (e) => {
        console.log('channelA onopen', e);
      };
      channelA.onclose = (e) => {
        console.log('channelA onclose', e);
      };
      channelA.onmessage = (e) => {
        console.log('channelA onmessage', e.data);
      };
    };

    // Caller side.
    const peerB = new RTCPeerConnection();
    peerBRef.current = peerB;
    // BUG FIX: ontrack replaces the deprecated onaddstream; render the
    // incoming stream into the #rtcb element.
    peerB.ontrack = (event) => {
      const video = document.querySelector<HTMLVideoElement>('#rtcb');
      const [remoteStream] = event.streams;
      if (video && remoteStream) {
        video.srcObject = remoteStream;
        video.onloadedmetadata = () => {
          void video.play();
        };
      }
    };
    const channelB = peerB.createDataChannel('messagechannel');
    channelBRef.current = channelB;
    channelB.binaryType = 'arraybuffer';
    channelB.onopen = (event) => {
      // BUG FIX: removed `messageOpen = true` — the identifier was never
      // declared anywhere and threw a ReferenceError in strict mode.
      console.log('channelB onopen', event);
    };
    channelB.onclose = (event) => {
      console.log('channelB onclose', event);
    };
    // Hand B's ICE candidates straight to A.
    peerB.onicecandidate = (event) => {
      if (event.candidate) {
        void peerARef.current?.addIceCandidate(event.candidate);
      }
    };
  }

  // Capture the local camera, preview it in #rtcA, then initialise the peers.
  const getVideo = async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: false,
        video: { width: 500, height: 400 },
      });
      const video = document.querySelector<HTMLVideoElement>('#rtcA');
      if (video) {
        video.srcObject = stream;
        video.onloadedmetadata = () => {
          void video.play();
        };
        localStreamRef.current = stream;
        // Pass the stream directly instead of via not-yet-updated state.
        initPeer(stream);
      }
    } catch (error: unknown) {
      // BUG FIX: original had `error || error.namee` (typo, and an Error
      // object is not a valid antd message); show a readable string.
      message.error(error instanceof Error ? error.message : String(error));
    }
  };

  // Apply the answer SDP: local on the callee (A), remote on the caller (B).
  const onCreateAnswer = async (desc: RTCSessionDescriptionInit) => {
    try {
      await peerARef.current?.setLocalDescription(desc);
    } catch (e) {
      console.log('answer-setLocalDescription: ', e);
    }
    try {
      await peerBRef.current?.setRemoteDescription(desc);
    } catch (e) {
      console.log('answer-setRemoteDescription: ', e);
    }
  };

  // Apply the offer SDP on both ends, then create and apply the answer.
  const onCreateOffer = async (desc: RTCSessionDescriptionInit) => {
    try {
      await peerBRef.current?.setLocalDescription(desc);
    } catch (e) {
      console.log('Offer-setLocalDescription: ', e);
    }
    try {
      await peerARef.current?.setRemoteDescription(desc);
    } catch (e) {
      console.log('Offer-setRemoteDescription: ', e);
    }
    try {
      const answer = await peerARef.current?.createAnswer();
      if (answer) {
        await onCreateAnswer(answer);
      }
    } catch (e) {
      console.log('createAnswer: ', e);
    }
  };

  // Start the call: (re)create the peers if needed, then run offer/answer.
  const call = async () => {
    if (
      (!peerARef.current || !peerBRef.current) &&
      localStreamRef.current
    ) {
      initPeer(localStreamRef.current);
    }
    try {
      const offer = await peerBRef.current?.createOffer(offerOption);
      if (offer) {
        await onCreateOffer(offer);
      }
    } catch (e) {
      console.log('createOffer: ', e);
    }
  };

  // Capture the microphone into the page's <audio> element (unused by default).
  const getAudio = async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: true,
        video: false,
      });
      const audio = document.querySelector('audio');
      if (audio) {
        audio.srcObject = stream;
      }
    } catch (error: unknown) {
      // BUG FIX: same `error.namee` typo as in getVideo.
      message.error(error instanceof Error ? error.message : String(error));
    }
  };

  // Device-selection handlers (not yet wired into getUserMedia constraints).
  const chooseAudio = (value: number) => {
    setselectaudio(value);
  };
  const chooseVideo = (value: number) => {
    setselectvideo(value);
  };

  useEffect(() => {
    // getlocalDevice();
    getVideo();
    // getAudio();
  }, []);

  return (
    <div className="demo">
      <p>webRTC Demo</p>
      {/* Local preview — muted so browsers allow autoplay. */}
      <video id="rtcA" muted autoPlay playsInline />
      {/* Stream received by peerB. */}
      <video id="rtcb" autoPlay playsInline />
      <Button onClick={call}>call</Button>
    </div>
  );
};

export default VideoRoom;