WebRTC: A Simple Video Chat Example
The previous chapters covered the important WebRTC topics: the network protocols involved, the Session Description Protocol, and how NAT traversal works. What remains is the WebRTC API itself.
There are quite a few APIs related to WebRTC communication. They mainly accomplish the following:
1. Signaling exchange
2. Exchange of candidate addresses
3. Audio/video capture
4. Audio/video sending and receiving
Because there are so many APIs, parts of this article use pseudocode to keep it from getting too long. The complete code is at the end of the article and is also available online; feel free to leave a comment if you have questions.
Signaling exchange is a key step in WebRTC communication. The information exchanged includes codecs, network protocols, candidate addresses, and so on. WebRTC does not prescribe how signaling is exchanged; that is left to the application, which can use WebSocket, for example.
Pseudocode for the sender:
const pc = new RTCPeerConnection(iceConfig);
const offer = await pc.createOffer();
await pc.setLocalDescription(offer);
sendToPeerViaSignalingServer(SIGNALING_OFFER, offer); // the sender sends the signaling message
Pseudocode for the receiver:
const pc = new RTCPeerConnection(iceConfig);
await pc.setRemoteDescription(offer);
const answer = await pc.createAnswer();
await pc.setLocalDescription(answer);
sendToPeerViaSignalingServer(SIGNALING_ANSWER, answer); // the receiver sends the signaling message
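Note that sendToPeerViaSignalingServer is not a WebRTC API; it stands for whatever signaling channel the application chooses. A minimal sketch of it over socket.io, assuming a connected socket and the { type, payload } message shape used in the full client code later in this article, could look like this:

// Hypothetical helper: relay a signaling message to the peer through the signaling server
function sendToPeerViaSignalingServer(type, payload) {
    socket.emit('CLIENT_RTC_EVENT', { type, payload });
}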
Once the local session description has been set and media tracks have been added, the ICE framework starts gathering candidate addresses. After both sides have gathered their candidates, they exchange them and determine a suitable candidate pair.
The candidates are exchanged over the same signaling service described above. Pseudocode:
// Set the local session description
const localPeer = new RTCPeerConnection(iceConfig);
const offer = await localPeer.createOffer();
await localPeer.setLocalDescription(offer);

// Capture local audio/video
const localVideo = document.getElementById('local-video');
const mediaStream = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
});
localVideo.srcObject = mediaStream;

// Add the audio/video tracks
mediaStream.getTracks().forEach(track => {
    localPeer.addTrack(track, mediaStream);
});

// Exchange candidate addresses
localPeer.onicecandidate = function (evt) {
    if (evt.candidate) {
        sendToPeerViaSignalingServer(SIGNALING_CANDIDATE, evt.candidate);
    }
};
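On the receiving side, each candidate that arrives over the signaling channel is applied with addIceCandidate (the full client code below does this in handleReceiveCandidate). A minimal sketch, assuming pc is the receiving side's RTCPeerConnection:

// Hypothetical handler, invoked when a SIGNALING_CANDIDATE message arrives from the peer
async function onRemoteCandidate(candidate) {
    await pc.addIceCandidate(candidate);
}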
Local audio and video can be captured with the getUserMedia API provided by the browser.
const localVideo = document.getElementById('local-video');
const mediaStream = await navigator.mediaDevices.getUserMedia({
    video: true,
    audio: true
});
localVideo.srcObject = mediaStream;
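getUserMedia returns a Promise that rejects when the user denies permission or no capture device is available, so real code should guard the call. A minimal sketch:

let mediaStream;
try {
    mediaStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
} catch (err) {
    // e.g. NotAllowedError (permission denied) or NotFoundError (no camera/microphone)
    console.error(`getUserMedia failed: ${err.name}`);
}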
The captured audio/video tracks are added to the connection with addTrack, which sends them to the remote peer.
mediaStream.getTracks().forEach(track => {
    localPeer.addTrack(track, mediaStream);
});
The remote peer listens for incoming audio/video with the ontrack callback and plays it.
pc.ontrack = function (evt) {
    const remoteVideo = document.getElementById('remote-video');
    remoteVideo.srcObject = evt.streams[0];
};
The full example consists of two parts: the client code and the server code.
1. Client code
const socket = io.connect('localhost:3000');

const CLIENT_RTC_EVENT = 'CLIENT_RTC_EVENT';
const SERVER_RTC_EVENT = 'SERVER_RTC_EVENT';
const CLIENT_USER_EVENT = 'CLIENT_USER_EVENT';
const SERVER_USER_EVENT = 'SERVER_USER_EVENT';
const CLIENT_USER_EVENT_LOGIN = 'CLIENT_USER_EVENT_LOGIN'; // login
const SERVER_USER_EVENT_UPDATE_USERS = 'SERVER_USER_EVENT_UPDATE_USERS';

const SIGNALING_OFFER = 'SIGNALING_OFFER';
const SIGNALING_ANSWER = 'SIGNALING_ANSWER';
const SIGNALING_CANDIDATE = 'SIGNALING_CANDIDATE';

let remoteUser = ''; // remote user
let localUser = ''; // locally logged-in user
function log(msg) {
console.log(`[client] ${msg}`);
}
socket.on('connect', function() {
    log('ws connect.');
});

socket.on('connect_error', function() {
    log('ws connect_error.');
});

socket.on('error', function(errorMessage) {
    log('ws error, ' + errorMessage);
});

socket.on(SERVER_USER_EVENT, function(msg) {
    const type = msg.type;
    const payload = msg.payload;
    switch (type) {
        case SERVER_USER_EVENT_UPDATE_USERS:
            updateUserList(payload);
            break;
    }
    log(`[${SERVER_USER_EVENT}] [${type}], ${JSON.stringify(msg)}`);
});

socket.on(SERVER_RTC_EVENT, function(msg) {
    const { type } = msg;
    switch (type) {
        case SIGNALING_OFFER:
            handleReceiveOffer(msg);
            break;
        case SIGNALING_ANSWER:
            handleReceiveAnswer(msg);
            break;
        case SIGNALING_CANDIDATE:
            handleReceiveCandidate(msg);
            break;
    }
});
async function handleReceiveOffer(msg) {
    log(`receive remote description from ${msg.payload.from}`);

    // Set the remote description
    const remoteDescription = new RTCSessionDescription(msg.payload.sdp);
    remoteUser = msg.payload.from;
    createPeerConnection();
    await pc.setRemoteDescription(remoteDescription); // TODO error handling

    // Capture local audio/video
    const localVideo = document.getElementById('local-video');
    const mediaStream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
    localVideo.srcObject = mediaStream;
    mediaStream.getTracks().forEach(track => {
        pc.addTrack(track, mediaStream);
        // pc.addTransceiver(track, {streams: [mediaStream]}); // this also works
    });
    // pc.addStream(mediaStream); // also works for now, but this API will be deprecated

    const answer = await pc.createAnswer(); // TODO error handling
    await pc.setLocalDescription(answer);

    sendRTCEvent({
        type: SIGNALING_ANSWER,
        payload: {
            sdp: answer,
            from: localUser,
            target: remoteUser
        }
    });
}
async function handleReceiveAnswer(msg) {
    log(`receive remote answer from ${msg.payload.from}`);
    const remoteDescription = new RTCSessionDescription(msg.payload.sdp);
    remoteUser = msg.payload.from;
    await pc.setRemoteDescription(remoteDescription); // TODO error handling
}

async function handleReceiveCandidate(msg) {
    log(`receive candidate from ${msg.payload.from}`);
    await pc.addIceCandidate(msg.payload.candidate); // TODO error handling
}
/**
 * Send a user-related message to the server
 * @param {Object} msg message shaped like { type: 'xx', payload: {} }
 */
function sendUserEvent(msg) {
    socket.emit(CLIENT_USER_EVENT, msg); // forward over the socket.io connection (assumed implementation)
}

/**
 * Send an RTC-related message to the server
 * @param {Object} msg message shaped like { type: 'xx', payload: {} }
 */
function sendRTCEvent(msg) {
    socket.emit(CLIENT_RTC_EVENT, msg); // forward over the socket.io connection (assumed implementation)
}
let pc = null;

/**
 * Invite a user to a video chat
 * 1. Start local video capture
 * 2. Exchange signaling
 */
async function startVideoTalk() {
    // Start the local video
    const localVideo = document.getElementById('local-video');
    const mediaStream = await navigator.mediaDevices.getUserMedia({
        video: true,
        audio: true
    });
    localVideo.srcObject = mediaStream;

    // Create the peerConnection
    createPeerConnection();

    // Add the media stream's tracks to WebRTC's audio/video transceivers
    mediaStream.getTracks().forEach(track => {
        pc.addTrack(track, mediaStream);
        // pc.addTransceiver(track, {streams: [mediaStream]});
    });
    // pc.addStream(mediaStream); // also works for now, but this API will be deprecated
}
function createPeerConnection() {
    const iceConfig = {"iceServers": [
        { urls: 'stun:stun.ekiga.net' },
        { urls: '', username: 'user', credential: 'pass' } // TURN server (address omitted here)
    ]};
    pc = new RTCPeerConnection(iceConfig);
    // Wire up the connection callbacks defined below
    pc.onnegotiationneeded = onnegotiationneeded;
    pc.onicecandidate = onicecandidate;
    pc.onicegatheringstatechange = onicegatheringstatechange;
    pc.oniceconnectionstatechange = oniceconnectionstatechange;
    pc.onsignalingstatechange = onsignalingstatechange;
    pc.ontrack = ontrack;
    return pc;
}
async function onnegotiationneeded() {
    log(`onnegotiationneeded.`);

    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer); // TODO error handling

    sendRTCEvent({
        type: SIGNALING_OFFER,
        payload: {
            from: localUser,
            target: remoteUser,
            sdp: pc.localDescription // TODO use offer directly?
        }
    });
}
function onicecandidate(evt) {
    if (evt.candidate) {
        log(`onicecandidate.`);

        sendRTCEvent({
            type: SIGNALING_CANDIDATE,
            payload: {
                from: localUser,
                target: remoteUser,
                candidate: evt.candidate
            }
        });
    }
}
function onicegatheringstatechange(evt) {
log(`onicegatheringstatechange, pc.iceGatheringState is ${pc.iceGatheringState}.`);
}
function oniceconnectionstatechange(evt) {
log(`oniceconnectionstatechange, pc.iceConnectionState is ${pc.iceConnectionState}.`);
}
function onsignalingstatechange(evt) {
    log(`onsignalingstatechange, pc.signalingState is ${pc.signalingState}.`);
}
// After calling pc.addTrack(track, mediaStream) for both tracks, the remote peer's ontrack fires twice,
// but both times evt.streams[0] points to the same MediaStream reference.
// This behavior looks a bit odd; see github.com/meetecho/janus-gateway/issues/1313
let stream;
function ontrack(evt) {
    // if (!stream) {
    //     stream = evt.streams[0];
    // } else {
    //     console.log(`${stream === evt.streams[0]}`); // true here
    // }
    log(`ontrack.`);
    const remoteVideo = document.getElementById('remote-video');
    remoteVideo.srcObject = evt.streams[0];
}
// Clicking a user in the user list
async function handleUserClick(evt) {
    const target = evt.target;
    const userName = target.getAttribute('data-name').trim();

    if (userName === localUser) {
        alert('You cannot start a video call with yourself.');
        return;
    }

    log(`online user selected: ${userName}`);

    remoteUser = userName;
    await startVideoTalk(remoteUser);
}
/**
 * Update the user list
 * @param {Array} users user list, e.g. [{ userName: '小明' }, { userName: '小强' }]
 */
function updateUserList(users) {
    const fragment = document.createDocumentFragment();
    const userList = document.getElementById('login-users');
    userList.innerHTML = '';

    users.forEach(user => {
        const li = document.createElement('li');
        li.innerHTML = user.userName;
        li.setAttribute('data-name', user.userName);
        li.addEventListener('click', handleUserClick);
        fragment.appendChild(li);
    });

    userList.appendChild(fragment);
}
/**
 * User login
 * @param {String} loginName user name
 */
function login(loginName) {
    localUser = loginName;
    sendUserEvent({
        type: CLIENT_USER_EVENT_LOGIN,
        payload: {
            loginName: loginName
        }
    });
}
// Handle login
function handleLogin(evt) {
    let loginName = document.getElementById('login-name').value.trim();
    if (loginName === '') {
        alert('Username is empty!');
        return;
    }
    login(loginName);
}
function init() {
    // page initialization, e.g. bind handleLogin to the login button
}
init();
2. Server code
// Add the WebSocket service (`server` is the Node HTTP server created earlier, not shown in this excerpt)
const io = require('socket.io')(server);

let connectionList = [];

const CLIENT_RTC_EVENT = 'CLIENT_RTC_EVENT';
const SERVER_RTC_EVENT = 'SERVER_RTC_EVENT';
const CLIENT_USER_EVENT = 'CLIENT_USER_EVENT';