我正在参加「掘金·启航计划」
一对一通话原理,对于WebRTC应用开发而言,主要关注RTCPeerConnection
类,主要包括信令设计,媒体协商,加入Stream/Track,网络协商
要做实时音视频,先通过几个例子了解音视频流的几个应用场景
注意以下例子,在chrome中运行都要用localhost访问
本文主要包括这几方面:
- 本地音视频例子
- 截屏例子
- 录音录像例子
- 共享屏幕例子
- 视频流捕获
- canvas流捕获
- coturn服务搭建
- socket.io起服务演示
本地音视频
原理:
- 通过 navigator.mediaDevices.getUserMedia({ audio: true, video: true}) 打开音视频获得stream流
- 把上述stream对象赋值给video的 srcObject 属性
html
<!-- 本地音视频 demo page: the start button triggers getUserMedia in main.js
     and the resulting stream is previewed in #local-video. -->
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>本地音视频</title>
    <style>
      /* Black backdrop so the video box is visible before any stream arrives. */
      video{
        background: #000;
      }
    </style>
  </head>
  <body>
    <!-- autoplay + playsinline: render the stream immediately, inline on
         mobile browsers instead of forcing fullscreen. -->
    <video id="local-video" autoplay playsinline></video>
    <button id="start-btn">start</button>
    <script src="./main.js"></script>
  </body>
</html>
js
// Open camera + microphone on click and preview the live stream
// in the <video id="local-video"> element.
document.getElementById("start-btn").addEventListener("click", (e) => {
  const constraints = { audio: true, video: { height: 360, width: 640 } };
  // Success: log the captured tracks, then hand the stream to the <video>.
  const onStream = (stream) => {
    console.log(`Using video device: `, stream.getTracks());
    document.getElementById("local-video").srcObject = stream;
  };
  // Failure (permission denied / no device): just log it.
  const onError = (error) => {
    console.log(error);
  };
  navigator.mediaDevices.getUserMedia(constraints).then(onStream, onError);
});
效果
实现截屏
原理:
canvas.getContext("2d").drawImage
第一个参数传video就行
html
<!-- 截屏 demo page: start opens the camera into #local-video;
     "take snapshot" copies the current video frame onto #local-canvas. -->
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>实现截屏</title>
    <style>
      /* Black backdrop so both boxes are visible before content arrives. */
      video,canvas {
        background: #000;
      }
    </style>
  </head>
  <body>
    <video id="local-video" autoplay playsinline></video>
    <!-- Sized by main.js to match the video resolution before each snapshot. -->
    <canvas id="local-canvas"></canvas>
    <button id="start-btn">start</button>
    <button id="snapshot-btn">take snapshot</button>
    <script src="./main.js"></script>
  </body>
</html>
js
// Open camera + mic and show the live preview in #local-video.
document.getElementById("start-btn").addEventListener("click", function (e) {
  const constraints = { audio: true, video: { height: 360, width: 640 } };
  navigator.mediaDevices.getUserMedia(constraints).then(
    function (stream) {
      console.log(`Using video device: `, stream.getTracks());
      const preview = document.getElementById("local-video");
      preview.srcObject = stream;
    },
    function (error) {
      // Permission denied / no device available.
      console.log(error);
    }
  );
});
// Collapse rapid repeated clicks into a single snapshot.
// NOTE(review): the original article called a global `debounce` that is
// never defined in any of the samples, so clicking the button threw a
// ReferenceError; defining it locally makes the snippet self-contained.
function debounce(fn, wait = 300) {
  let timer = null;
  return function (...args) {
    clearTimeout(timer);
    timer = setTimeout(() => fn.apply(this, args), wait);
  };
}
document.getElementById("snapshot-btn").addEventListener(
  "click",
  debounce(function (e) {
    const canvas = document.getElementById("local-canvas");
    const video = document.getElementById("local-video");
    // Match the canvas to the actual stream resolution so the frame is
    // copied 1:1 without scaling.
    canvas.height = video.videoHeight;
    canvas.width = video.videoWidth;
    canvas.getContext("2d").drawImage(video, 0, 0, canvas.width, canvas.height);
  })
);
效果
实现录音录像
原理:
- 使用 MediaRecorder 类创建录像对象,在 mediaRecorder.ondataavailable 回调中收集音视频数据
- 停止录像后,通过 new Blob(recordedBlobs, { type: "video/webm" }) 把收集的数据制作成二进制对象
- 用 window.URL.createObjectURL(blob) 获得地址后,赋给video标签即可播放,赋给a标签即可下载
html
<!-- 录音录像 demo page: #local-video shows the live camera preview,
     #record-video plays back the finished recording; the five buttons
     drive the handlers in main.js. -->
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Document</title>
    <link href="./style.css" rel="stylesheet" />
  </head>
  <body>
    <video id="local-video" autoplay playsinline></video>
    <video id="record-video" autoplay playsinline></video>
    <button id="start-btn">start</button>
    <button id="start-record-btn">start record</button>
    <button id="stop-record-btn">stop record</button>
    <button id="play-record-btn">play record</button>
    <button id="download-record-btn">download record</button>
    <script src="./main.js"></script>
  </body>
</html>
js
// Camera/mic stream shared with the recording handlers below.
let localStream = null;
// 本地视频: open the devices and keep the stream for recording.
document.getElementById("start-btn").addEventListener("click", () => {
  const constraints = { audio: true, video: { width: 640, height: 320 } };
  const attachStream = (stream) => {
    console.log(`Using video device: `, stream.getTracks());
    const videoEl = document.getElementById("local-video");
    videoEl.srcObject = stream;
    localStream = stream; // read by the start-record handler
  };
  navigator.mediaDevices
    .getUserMedia(constraints)
    .then(attachStream, (error) => {
      console.log(error);
    });
});
// Active recorder and the chunks it has produced so far.
let mediaRecorder = null;
let recordedBlobs = [];
// 开始记录: record the preview stream into recordedBlobs.
document
  .getElementById("start-record-btn")
  .addEventListener("click", function () {
    // Guard: clicking "start record" before "start" used to throw
    // `TypeError: Failed to construct 'MediaRecorder'` on a null stream.
    if (!localStream) {
      console.log("请先点击 start 打开摄像头");
      return;
    }
    recordedBlobs = [];
    mediaRecorder = new MediaRecorder(localStream);
    // Collect each non-empty chunk the recorder emits.
    mediaRecorder.ondataavailable = (event) => {
      if (event.data && event.data.size > 0) {
        recordedBlobs.push(event.data);
      }
    };
    mediaRecorder.start();
  });
// 停止记录: finish recording (final dataavailable fires on stop).
document
  .getElementById("stop-record-btn")
  .addEventListener("click", function () {
    // Guard: stopping before any recording started (null recorder) or
    // twice in a row (InvalidStateError on an inactive recorder) used
    // to throw.
    if (mediaRecorder && mediaRecorder.state !== "inactive") {
      mediaRecorder.stop();
    }
  });
// 播放记录: assemble the chunks into a webm blob and play it back.
let lastRecordUrl = null; // blob URL from the previous playback, if any
document
  .getElementById("play-record-btn")
  .addEventListener("click", function () {
    const blob = new Blob(recordedBlobs, { type: "video/webm" });
    // Revoke the previous object URL so repeated plays don't leak blobs.
    if (lastRecordUrl) {
      window.URL.revokeObjectURL(lastRecordUrl);
    }
    lastRecordUrl = window.URL.createObjectURL(blob);
    const recordedVideo = document.getElementById("record-video");
    recordedVideo.src = lastRecordUrl;
    recordedVideo.controls = true;
    // play() returns a promise; log rejections (e.g. autoplay policy)
    // instead of leaving an unhandled rejection warning.
    recordedVideo.play().catch((error) => console.log(error));
  });
// 下载记录: pack the chunks into a webm blob and trigger a download
// via a temporary, invisible anchor element.
document
  .getElementById("download-record-btn")
  .addEventListener("click", () => {
    const blob = new Blob(recordedBlobs, { type: "video/webm" });
    const url = window.URL.createObjectURL(blob);
    const anchor = document.createElement("a");
    anchor.style.display = "none";
    anchor.href = url;
    anchor.download = "test.webm";
    document.body.appendChild(anchor);
    anchor.click();
    // Clean up once the click has been dispatched.
    setTimeout(() => {
      document.body.removeChild(anchor);
      window.URL.revokeObjectURL(url);
    }, 100);
  });
效果
共享屏幕
原理:
- 区别于打开音视频的 getUserMedia ,改为使用 getDisplayMedia 方法
- 通过 navigator.mediaDevices.getDisplayMedia({ audio: true, video: true}) 打开共享屏幕获得stream流
- 把上述stream对象赋值给video的 srcObject 属性
html
<!-- 共享屏幕 demo page: start triggers getDisplayMedia in main.js and
     the captured screen stream is previewed in #local-video. -->
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>共享屏幕</title>
    <style>
      /* Black backdrop so the video box is visible before a stream arrives. */
      video{
        background: #000;
      }
    </style>
  </head>
  <body>
    <video id="local-video" autoplay playsinline></video>
    <button id="start-btn">start</button>
    <script src="./main.js"></script>
  </body>
</html>
js
// Prompt the user to pick a screen/window/tab and preview the captured
// stream in #local-video (getDisplayMedia instead of getUserMedia).
document.getElementById("start-btn").addEventListener("click", (e) => {
  const onCapture = (stream) => {
    console.log(`Using video device: `, stream.getTracks());
    document.getElementById("local-video").srcObject = stream;
  };
  // User cancelled the picker or capture is not permitted.
  const onFailure = (error) => {
    console.log(error);
  };
  navigator.mediaDevices
    .getDisplayMedia({ audio: true, video: { height: 360, width: 640 } })
    .then(onCapture, onFailure);
});
效果
视频流捕获
原理:
- 调用video元素的 captureStream() 方法获得stream流,赋给另一个video的 srcObject
html
<!-- 视频流捕获 demo page: #local-video plays a local webm file;
     main.js mirrors it into #remote-video via captureStream(). -->
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>视频流视频</title>
  </head>
  <body>
    <video id="local-video" controls>
      <source src="./chrome.webm" type="video/webm" />
    </video>
    <!-- muted so the mirrored copy can autoplay without a user gesture. -->
    <video id="remote-video" playsinline autoplay muted></video>
    <script src="./main.js"></script>
  </body>
</html>
js
// Mirror the playing <video> into a second <video> element by capturing
// its media stream once playback is possible.
const leftVideo = document.getElementById("local-video");
const rightVideo = document.getElementById("remote-video");
leftVideo.addEventListener("canplay", () => {
  const fps = 0;
  let stream = null;
  if (leftVideo.captureStream) {
    stream = leftVideo.captureStream(fps);
  } else if (leftVideo.mozCaptureStream) {
    // Firefox ships the API behind a moz- prefix.
    stream = leftVideo.mozCaptureStream(fps);
  } else {
    console.error("Stream capture is not supported");
  }
  rightVideo.srcObject = stream;
});
效果
canvas流捕获
原理:
- 使用canvas的captureStream
- 注意例子图片别用跨域的,跨域图片会污染canvas导致捕获失败
html
<!-- canvas流捕获 demo page: main.js draws an image onto #local-canvas
     and pipes the canvas into #local-video via captureStream(). -->
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta http-equiv="X-UA-Compatible" content="IE=edge" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>canvas流视频</title>
  </head>
  <body>
    <canvas id="local-canvas"></canvas>
    <!-- muted so the captured stream can autoplay without a user gesture. -->
    <video id="local-video" autoplay muted></video>
    <script src="./main.js"></script>
  </body>
</html>
js
// Pipe the canvas into a <video> element, then paint an image onto the
// canvas; every repaint becomes a frame of the captured stream.
const canvas = document.getElementById("local-canvas");
const video = document.getElementById("local-video");
const stream = canvas.captureStream();
video.srcObject = stream;
const ctx = canvas.getContext("2d");
const img = new Image();
img.onload = (e) => {
  // Size the canvas to the image first: the default 300x150 canvas
  // used to clip everything outside its top-left corner.
  canvas.width = img.width;
  canvas.height = img.height;
  ctx.drawImage(img, 0, 0, img.width, img.height);
};
// Must be same-origin: a cross-origin image taints the canvas and
// captureStream() then fails.
img.src = "./boy.png";
效果
搭建coturn服务器
docker方式安装coturn
使用官方镜像
docker run -d --network=host coturn/coturn
查看端口
netstat -anpl | grep turnserver
或者
lsof -i:3478
关闭防火墙
systemctl stop firewalld && systemctl disable firewalld
Trickle ICE 测试
打开地址 webrtc.github.io/samples/src…
分别测试 stun 和 turn
1、测试stun
STUN or TURN URI处写 stun:公网IP:3478
,默认3478端口,不需要账号密码,Add Server后,点下面 Gather candidates,立即就会返回Done
,表示成功,如果等待很久返回Done是失败,注意看是否防火墙没关闭或者云服务器端口没打开(UDP 3478)
2、测试turn,先生成用户名密码
拷贝下面直接执行得到用户名和密码
# Generate a time-limited TURN credential pair for coturn's
# --use-auth-secret (TURN REST API) mode:
#   username = current unix time + lifetime (doubles as the expiry stamp)
#   password = base64( HMAC-SHA1(username, shared secret) )
# NOTE(review): expiry=8400 is only ~2.3 hours — if a full day was
# intended this should probably be 86400; confirm against the coturn
# static-auth-secret configuration.
secret=mysecret && \
time=$(date +%s) && \
expiry=8400 && \
username=$(( $time + $expiry )) &&\
echo username:$username && \
echo password:$(echo -n $username | openssl dgst -binary -sha1 -hmac $secret | openssl base64)
STUN or TURN URI处写 turn:公网IP:3478
,账号密码分别用上面生成的,Add Server, 点下面 Gather candidates,立即就会返回Done
信令服务器正常聊天
html页面引入 socket.io.js
<body>
  <!-- socket.io client bundle served from the local node_modules copy. -->
  <script src="./node_modules/socket.io/client-dist/socket.io.min.js"></script>
  <!-- NOTE(review): apparently intended as the on-page log target, but the
       sample's log() writes to the dev console instead — confirm intent. -->
  <div id="console"></div>
  <script src="./client.js"></script>
</body>
客户端 client.js
连接服务器失败会自动重连
// Connect to the signalling server (socket.io reconnects automatically
// when the connection fails).
const socket = io.connect("http://192.168.1.11:3000");
// Account assigned by the server after connecting.
let myUser = null;
// Incoming chat message.
// NOTE(review): the key is spelled `form` (not `from`); the server emits
// `form` too, so renaming must happen on both ends together.
socket.on("message", (proto) => {
  log(`${proto.form}说:` + proto.msg);
});
// Account auto-generated and pushed by the server on connect.
socket.on("register", (user) => {
  myUser = user;
  log("【我的账号】" + myUser.username);
});
// Socket lifecycle logging.
socket.on("connect", () => {
  log("【client】ws connect.");
});
socket.on("connect_error", () => {
  log("【client】ws connect_error.");
});
socket.on("error", () => {
  log("【client】ws error.");
});
服务端开启socket服务
// Minimal socket.io signalling/chat server: assigns each connection an
// auto-generated username and broadcasts every message to all clients.
const http = require("http");
const server = http.createServer();
server.listen(3000, function () {
  console.log("开始监听3000");
});
// cors: true so browser pages served from another origin can connect.
const socket = require("socket.io")(server, { cors: true });
// All currently-connected sockets.
let connectionList = [];
socket.on("connection", (connection) => {
  connectionList.push(connection);
  // Derive a short pseudo-unique name from the millisecond timestamp.
  connection.user = {
    username: "user" + new Date().getTime().toString().substring(8),
  };
  // Auto-register: push the generated account to the client.
  connection.emit("register", connection.user);
  sendToAll(connection.user.username, "我进来了");
  // Plain chat messages are broadcast to everyone.
  connection.on("message", function (msg) {
    sendToAll(connection.user.username, msg);
  });
  // Client disconnected: drop it from the list and notify the room.
  connection.on("disconnect", function () {
    connectionList = connectionList.filter(
      (it) => it.user.username != connection.user.username
    );
    sendToAll(connection.user.username, "离开了");
  });
  // Broadcast helper.
  // NOTE(review): the payload key is spelled `form` — the client reads
  // `proto.form` too, so renaming must be done on both ends together.
  function sendToAll(from, msg) {
    connectionList.forEach((targetConnection) => {
      targetConnection.emit("message", {
        form: from,
        to: targetConnection.user.username,
        msg,
      });
    });
  }
});
// Append a chat/log line. The page markup provides a <div id="console">
// and each line is prefixed with a <br/> separator, so write the HTML
// there when the element exists; the original sent the markup to the
// dev console, where the <br/> tag is meaningless. Falls back to
// console.log when no #console element is available (e.g. under Node).
function log(msg) {
  const text = typeof msg == "object" ? JSON.stringify(msg) : msg;
  const line = "<br/> 【client】" + text;
  const out =
    typeof document !== "undefined" ? document.getElementById("console") : null;
  if (out) {
    out.insertAdjacentHTML("beforeend", line);
  } else {
    console.log(line);
  }
}