How do I release the camera and microphone after a WebRTC call?
After ending a WebRTC call, I can't seem to get rid of the red icon on the browser tab that indicates the camera or microphone is in use.
I iterate over the tracks from videoElement.srcObject.getTracks() and call track.stop() on each one. Then I remove the videoElement from the DOM, but the red icon is still there.
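The cleanup I run when the call ends looks roughly like this (videoElement is the <video> element that was showing the stream):

// Roughly the cleanup described above.
const stream = videoElement.srcObject;
stream.getTracks().forEach(track => track.stop()); // stop each audio/video track
videoElement.srcObject = null;
videoElement.remove(); // remove the element from the DOM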
(BroadcastChannel does not work in Stack Overflow snippets, so the sample code is provided on CodePen.)
(Verified to work in Chrome and Firefox.)
Open the link in two tabs, click the Connect button on either side to establish the WebRTC connection (the button then switches to Close), and click the Close button to release the camera.
https://codepen.io/gtk2k/pen/NWxzgKo?editors=1111
// Open this page in two tabs; a BroadcastChannel between them acts as the signaling channel.
const signalingChannel = new BroadcastChannel('signalingChannel');
let pc = null;

signalingChannel.onmessage = async evt => {
  const msg = JSON.parse(evt.data);
  if (msg.close) {
    // The other side hung up: drop the connection and release the devices.
    if (pc) {
      pc.close();
      pc = null;
    }
    releaseStream();
    return;
  }
  // Lazily create the peer connection on the answering side.
  if (!pc)
    await setupPC();
  if (msg.sdp) {
    console.log(`Receive ${msg.type}`);
    await pc.setRemoteDescription(msg);
    if (msg.type === 'offer') {
      const answer = await pc.createAnswer();
      await pc.setLocalDescription(answer);
      sendSignaling(answer);
    }
  } else if (msg.candidate) {
    console.log(`Receive candidate`);
    await pc.addIceCandidate(msg);
  }
};
async function setupPC(isCaller) {
  pc = new RTCPeerConnection();
  pc.onconnectionstatechange = evt => {
    console.log(pc.connectionState);
    // Release the camera when the remote peer goes away.
    if (pc.connectionState === 'disconnected') {
      releaseStream();
    }
  };
  pc.onicecandidate = evt => {
    if (evt.candidate)
      sendSignaling(evt.candidate);
  };
  pc.ontrack = evt => {
    vidRemote.srcObject = evt.streams[0];
  };
  // Reuse the preview stream if one is already attached, so getUserMedia()
  // is only called once; the same stream feeds the local <video> element
  // and the RTCPeerConnection.
  const stream = vidLocal.srcObject ||
    await navigator.mediaDevices.getUserMedia({ video: true });
  stream.getTracks().forEach(track => pc.addTrack(track, stream));
  vidLocal.srcObject = stream;
  if (isCaller) {
    const offer = await pc.createOffer();
    await pc.setLocalDescription(offer);
    sendSignaling(offer);
  }
}
// Show a local preview as soon as the page loads; setupPC() reuses this stream.
(async _ => {
  const stream = await navigator.mediaDevices.getUserMedia({ video: true });
  vidLocal.srcObject = stream;
})();

btnConnect.onclick = evt => {
  if (btnConnect.textContent === 'Connect') {
    btnConnect.textContent = 'Close';
    setupPC(true);
  } else {
    // Hang up: close the connection, stop the local tracks and notify the other tab.
    btnConnect.textContent = 'Connect';
    pc.close();
    pc = null;
    releaseStream();
    sendSignaling({ close: true });
  }
};

function sendSignaling(data) {
  signalingChannel.postMessage(JSON.stringify(data));
}
function releaseStream() {
  // Stop every track on both streams; this is what actually turns the camera
  // off and clears the red "in use" indicator on the tab.
  [vidLocal, vidRemote].forEach(vid => {
    if (!vid.srcObject) return;
    let stream = vid.srcObject;
    vid.pause();
    vid.srcObject = null;
    stream.getTracks().forEach(track => track.stop());
    stream = null;
  });
}
video {
  width: 360px;
  height: 240px;
}
<button id="btnConnect">Connect</button>
<div>
  <video id="vidLocal" muted autoplay></video>
  <video id="vidRemote" muted autoplay></video>
</div>
In my case, the problem turned out to be a bug in my own code caused by a misunderstanding of WebRTC and getUserMedia(): I was actually calling getUserMedia() twice, once for the local <video> element and a second time for the stream added to the RTCPeerConnection.
The fix, of course, was to call getUserMedia() only once and use the returned stream in both places.
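The corrected pattern looks roughly like this (a simplified sketch; localVideo and pc are hypothetical stand-ins for my actual <video> element and RTCPeerConnection, and the constraints are just an example):

// Simplified sketch of the fix: one getUserMedia() call, shared everywhere.
async function startCall(pc, localVideo) {
  const stream = await navigator.mediaDevices.getUserMedia({ video: true, audio: true });
  localVideo.srcObject = stream;                                    // local preview
  stream.getTracks().forEach(track => pc.addTrack(track, stream));  // send to the peer
  return stream;
}

function endCall(pc, localVideo, stream) {
  pc.close();
  stream.getTracks().forEach(track => track.stop()); // releases the camera and microphone
  localVideo.srcObject = null;
}

With a single stream there is no second, forgotten set of live tracks, so stopping it is enough to make the red indicator disappear.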