H5播放webrtc视频
一、简介
WebRTC概念
WebRTC是由Google主导的,由一组标准、协议和JavaScript API组成,用于实现浏览器之间(端到端之间)的音频、视频及数据共享。WebRTC不需要安装任何插件,通过简单的JavaScript API就可以使得实时通信变成一种标准功能。
为什么使用webrtc
现在各大浏览器以及终端已经逐渐加大对WebRTC技术的支持。下图是webrtc官网给出的目前已提供支持的浏览器和平台。
二、H5播放webrtc
webrtc播放经过不断探索,基本上没有能直接播放一个webrtc url的现行库,很大一部分思路是通过websocket+webrtc来实现数据传输和播放,也有很多方案使用Emscripten将C编解码库编译成js代码来使用。
不过庆幸的是,我找了一个库是支持srs的webrtc流的,它就是开源的jswebrtc,使用代码如下:
通过html播放
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebRTCPlayer</title>
<style type="text/css">
html, body {
background-color: #111;
text-align: center;
}
</style>
</head>
<body>
<!-- Declarative usage: jswebrtc presumably scans for elements with the
     "jswebrtc" class and plays the SRS stream given in data-url
     (webrtc:// scheme) — confirm against the jswebrtc README. -->
<div class="jswebrtc" data-url="webrtc://192.168.12.187/live/1"></div>
<script type="text/javascript" src="jswebrtc.min.js"></script>
</body>
</html>
通过js播放
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebRTCPlayer</title>
</head>
<body>
<video id="video-webrtc" controls></video>
<script type="text/javascript" src="jswebrtc.min.js"></script>
<script type="text/javascript">
// Programmatic usage: bind the SRS WebRTC stream to the <video> element.
var video = document.getElementById('video-webrtc');
var url = 'webrtc://192.168.12.187/live/1';
// options: target video element, start immediately, playback-start callback.
var player = new JSWebrtc.Player(url, { video: video, autoplay: true, onPlay: (obj) => { console.log("start play") } });
</script>
</body>
</html>
播放效果如下:
这里要说明的是srs虽然提供了webrtc协议转换,但是webrtc是基于udp的,可能是udp丢包过于严重,srs并没有处理好,所以画面如果有动画,基本上是动画的地方会有花屏!!不过实时性确实不错,可以与rtmp实时协议相差无几。
三、扩展
webrtc是一种标准协议,不像只支持IE的OCX(ActiveX)插件或普遍浏览器都支持的flash插件(重点是google的chrome浏览器在2020年12月份之后不再支持flash插件),所以后续的无插件实时视频播放的重点就落在了webrtc上,所以从长远来讲它的特点(无插件、标准通用协议)使得webrtc被广泛和长久使用。
既然webrtc如此重要,在安防或互联网直播领域就少不了视频采集或视频发布功能,所以就避免不了采集本地摄像头的音视频功能,如下就是我要讲的如何通过webrtc协议采集本地音视频,H5代码如下:
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="description" content="WebRTC code samples">
<meta name="viewport"
content="width=device-width, user-scalable=yes, initial-scale=1, maximum-scale=1">
<meta itemprop="description" content="Client-side WebRTC code samples">
<meta itemprop="name" content="WebRTC code samples">
<meta name="mobile-web-app-capable" content="yes">
<meta id="theme-color" name="theme-color" content="#ffffff">
<base target="_blank">
<title>MediaStream Recording</title>
</head>
<body>
<div id="container">
<h1>
<span>WebRTC实例-媒体录制器</span>
</h1>
<video id="gum" playsinline autoplay muted></video>
<video id="recorded" playsinline loop></video>
<div>
<button id="start">打开摄像头</button>
<button id="record" disabled class="off">开始录像</button>
<button id="play" disabled>播放</button>
<button id="download" disabled>下载</button>
</div>
<div>
<h4>媒体流约束选项:</h4>
<p>
消除回声: <input type="checkbox" id="echoCancellation">
</p>
</div>
<div>
<span id="errorMsg"></span>
</div>
</div>
<script async>
'use strict';
// Shared state for the recording demo.
// NOTE(review): mediaSource is created and listened to but never attached
// to a media element on this page, so 'sourceopen' presumably never fires
// — confirm whether the MSE path is dead code.
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
let mediaRecorder;
let recordedBlobs;   // Blob chunks collected while recording
let sourceBuffer;
const errorMsgElement = document.querySelector('span#errorMsg');
const recordedVideo = document.querySelector('video#recorded');
const recordButton = document.querySelector('button#record');
// Toggle recording: the button's className ('off'/'on') tracks state.
recordButton.addEventListener('click', () => {
  if (recordButton.className !== 'off') {
    // Currently recording — stop and re-enable playback/download.
    stopRecording();
    recordButton.textContent = '开始录像';
    recordButton.className = 'off';
    playButton.disabled = false;
    downloadButton.disabled = false;
    return;
  }
  startRecording();
});
const playButton = document.querySelector('button#play');
// Remember the last blob URL so it can be revoked on the next play —
// the original leaked one object URL per click.
let lastPlaybackUrl = null;
playButton.addEventListener('click', () => {
  const superBuffer = new Blob(recordedBlobs, {type: 'video/webm'});
  // Bug fix: `recordedVideo.src = null` coerces to the string "null";
  // remove the attribute instead before switching away from srcObject.
  recordedVideo.removeAttribute('src');
  recordedVideo.srcObject = null;
  if (lastPlaybackUrl) {
    window.URL.revokeObjectURL(lastPlaybackUrl);
  }
  lastPlaybackUrl = window.URL.createObjectURL(superBuffer);
  recordedVideo.src = lastPlaybackUrl;
  recordedVideo.controls = true;
  recordedVideo.play();
});
const downloadButton = document.querySelector('button#download');
// Bundle all recorded chunks into one webm blob and trigger a download
// via a hidden, temporary anchor element.
downloadButton.addEventListener('click', () => {
  const blob = new Blob(recordedBlobs, {type: 'video/webm'});
  const objectUrl = window.URL.createObjectURL(blob);
  const anchor = document.createElement('a');
  anchor.style.display = 'none';
  anchor.href = objectUrl;
  anchor.download = 'test.webm';
  document.body.appendChild(anchor);
  anchor.click();
  // Give the browser a moment to start the download before cleanup.
  setTimeout(() => {
    document.body.removeChild(anchor);
    window.URL.revokeObjectURL(objectUrl);
  }, 100);
});
// Attaches a VP8 SourceBuffer when the MediaSource opens.
// NOTE(review): mediaSource is never attached to a media element in this
// page, so this handler presumably never runs — confirm before relying
// on sourceBuffer anywhere.
function handleSourceOpen(event) {
console.log('MediaSource opened');
sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
console.log('Source buffer: ', sourceBuffer);
}
// MediaRecorder dataavailable callback: stash each non-empty chunk.
function handleDataAvailable(event) {
  const chunk = event.data;
  if (!(chunk && chunk.size > 0)) {
    return; // ignore empty or missing chunks
  }
  recordedBlobs.push(chunk);
}
// Starts a MediaRecorder on the live camera stream (window.stream).
// Codec fallback: vp9 → vp8 → plain webm → browser default ('').
// The original triple-nested if-chain is flattened into one loop;
// each unsupported candidate is logged and surfaced, as before.
function startRecording() {
  recordedBlobs = [];
  const candidates = ['video/webm;codecs=vp9', 'video/webm;codecs=vp8', 'video/webm', ''];
  let options = {mimeType: candidates[0]};
  for (const mimeType of candidates) {
    options = {mimeType};
    // '' means "let the browser pick" — never reported as unsupported.
    if (mimeType === '' || MediaRecorder.isTypeSupported(mimeType)) {
      break;
    }
    console.error(`${mimeType} is not Supported`);
    errorMsgElement.innerHTML = `${mimeType} is not Supported`;
  }
  try {
    mediaRecorder = new MediaRecorder(window.stream, options);
  } catch (e) {
    console.error('Exception while creating MediaRecorder:', e);
    errorMsgElement.innerHTML = `Exception while creating MediaRecorder: ${JSON.stringify(e)}`;
    return;
  }
  console.log('Created MediaRecorder', mediaRecorder, 'with options', options);
  recordButton.textContent = '停止录像';
  recordButton.className = 'on';
  playButton.disabled = true;
  downloadButton.disabled = true;
  mediaRecorder.onstop = (event) => {
    console.log('Recorder stopped: ', event);
  };
  mediaRecorder.ondataavailable = handleDataAvailable;
  mediaRecorder.start(10); // collect 10ms of data
  console.log('MediaRecorder started', mediaRecorder);
}
// Stops the active recorder; any buffered data is delivered to
// handleDataAvailable before the recorder's onstop fires.
function stopRecording() {
mediaRecorder.stop();
console.log('Recorded Blobs: ', recordedBlobs);
}
// Wires a freshly-acquired camera stream into the live preview element
// and enables the record button.
function handleSuccess(stream) {
  recordButton.disabled = false;
  console.log('getUserMedia() got stream:', stream);
  // Expose globally: startRecording() reads window.stream.
  window.stream = stream;
  document.querySelector('video#gum').srcObject = stream;
}
// Requests camera/microphone access with the given constraints and hands
// the resulting stream to handleSuccess; failures are shown in the UI.
async function init(constraints) {
  try {
    handleSuccess(await navigator.mediaDevices.getUserMedia(constraints));
  } catch (e) {
    console.error('navigator.getUserMedia error:', e);
    errorMsgElement.innerHTML = `navigator.getUserMedia error:${e.toString()}`;
  }
}
// "打开摄像头" button: build media constraints from the UI (echo
// cancellation follows the checkbox) and open the camera.
document.querySelector('button#start').addEventListener('click', async () => {
  const echoBox = document.querySelector('#echoCancellation');
  const constraints = {
    audio: {
      echoCancellation: {exact: echoBox.checked},
    },
    video: {
      width: 1280,
      height: 720,
    },
  };
  console.log('Using media constraints:', constraints);
  await init(constraints);
});
</script>
<style>
.hidden {
display: none;
}
.highlight {
background-color: #eee;
font-size: 1.2em;
margin: 0 0 30px 0;
padding: 0.2em 1.5em;
}
.warning {
color: red;
font-weight: 400;
}
@media screen and (min-width: 1000px) {
/* hack! to detect non-touch devices */
div#links a {
line-height: 0.8em;
}
}
audio {
max-width: 100%;
}
body {
font-family: 'Roboto', sans-serif;
font-weight: 300;
margin: 0;
padding: 1em;
word-break: break-word;
}
button {
background-color: #d84a38;
border: none;
border-radius: 2px;
color: white;
font-family: 'Roboto', sans-serif;
font-size: 0.8em;
margin: 0 0 1em 0;
padding: 0.5em 0.7em 0.6em 0.7em;
}
button:active {
background-color: #2fcf5f;
}
button:hover {
background-color: #cf402f;
}
button[disabled] {
color: #ccc;
}
button[disabled]:hover {
background-color: #d84a38;
}
canvas {
background-color: #ccc;
max-width: 100%;
width: 100%;
}
code {
font-family: 'Roboto', sans-serif;
font-weight: 400;
}
div#container {
margin: 0 auto 0 auto;
max-width: 60em;
padding: 1em 1.5em 1.3em 1.5em;
}
div#links {
padding: 0.5em 0 0 0;
}
h1 {
border-bottom: 1px solid #ccc;
font-family: 'Roboto', sans-serif;
font-weight: 500;
margin: 0 0 0.8em 0;
padding: 0 0 0.2em 0;
}
h2 {
color: #444;
font-weight: 500;
}
h3 {
border-top: 1px solid #eee;
color: #666;
font-weight: 500;
margin: 10px 0 10px 0;
white-space: nowrap;
}
li {
margin: 0 0 0.4em 0;
}
html {
overflow-y: scroll;
}
img {
border: none;
max-width: 100%;
}
input[type=radio] {
position: relative;
top: -1px;
}
p#data {
border-top: 1px dotted #666;
font-family: Courier New, monospace;
line-height: 1.3em;
max-height: 1000px;
overflow-y: auto;
padding: 1em 0 0 0;
}
section p:last-of-type {
margin: 0;
}
section {
border-bottom: 1px solid #eee;
margin: 0 0 30px 0;
padding: 0 0 20px 0;
}
section:last-of-type {
border-bottom: none;
padding: 0 0 1em 0;
}
select {
margin: 0 1em 1em 0;
position: relative;
top: -1px;
}
video {
  background: #222;
  margin: 0 0 20px 0;
  /* Fixed: the custom property was garbled as "- -width", which is
     invalid CSS and broke the var()/calc() sizing below. */
  --width: 100%;
  width: var(--width);
  height: calc(var(--width) * 0.75);
}
@media screen and (max-width: 450px) {
h1 {
font-size: 20px;
}
}
button {
margin: 0 3px 10px 0;
padding-left: 2px;
padding-right: 2px;
width: 99px;
}
button:last-of-type {
margin: 0;
}
p.borderBelow {
margin: 0 0 20px 0;
padding: 0 0 20px 0;
}
video {
  vertical-align: top;
  /* Fixed: "- -width" → "--width"; the broken token split the custom
     property across two declarations and disabled the sizing math. */
  --width: 25vw;
  width: var(--width);
  height: calc(var(--width) * 0.5625);
}
video:last-of-type {
margin: 0 0 20px 0;
}
video#gumVideo {
margin: 0 20px 20px 0;
}
</style>
</body>
</html>
注意:HTTPS加密通信才能获取getUserMedia(),否则报错:
TypeError: Cannot read property 'getUserMedia' of undefined
我们可以将以上代码分离和简化成html和js文件,那么index.html的采集渲染代码如下:
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>学习webrtc</title>
</head>
<body>
<!-- main.js captures the local camera and renders it into this element. -->
<video autoplay></video>
<script src="main.js"></script>
</body>
</html>
main.js采集代码如下:
// Feature-detect getUserMedia: browsers expose it under the standard
// name or one of several vendor-prefixed names.
function hasUserMedia() {
  const impl = navigator.getUserMedia
      || navigator.webkitGetUserMedia
      || navigator.mozGetUserMedia
      || navigator.msGetUserMedia;
  return Boolean(impl);
}
if (hasUserMedia()) {
  // Normalize the vendor-prefixed implementations to one entry point.
  navigator.getUserMedia = navigator.getUserMedia
      || navigator.webkitGetUserMedia
      || navigator.mozGetUserMedia
      || navigator.msGetUserMedia;
  // Hand the captured stream to the <video> element.
  const onStream = function(stream) {
    var video = document.querySelector("video");
    try {
      video.srcObject = stream;
    } catch (error) {
      // Older browsers only accept a blob URL instead of srcObject.
      video.src = window.URL.createObjectURL(stream);
    }
  };
  const onError = function(err) {
    console.log("capturing", err)
  };
  // Video only; audio stays off to avoid feedback (echo) during local
  // preview — it can be enabled once two machines talk to each other.
  navigator.getUserMedia({
    video : true,
    audio : false
  }, onStream, onError);
} else {
  alert("浏览器暂不支持")
}
通过以上代码,我们就可以采集本地的摄像头播放本地的采集的视频和音频了!
源码获取、合作、技术交流请获取如下联系方式:
QQ交流群:961179337
微信账号:lixiang6153
公众号:IT技术快餐
电子邮箱:aaa@qq.com
上一篇: 音视频基础
下一篇: ffmpeg推流技术汇总
推荐阅读
-
MAC怎么更改iTunes中的视频类型以便顺利播放
-
Android如何让WebView中的HTML5页面实现视频全屏播放
-
javaweb中上传视频,并且播放,用上传视频信息为例
-
利用播放器PotPlayer进行教程视频录制
-
video.js 一个页面同时播放多个视频的实例代码
-
Win10预览版10547:Edge浏览器支持谷歌VP9视频流播放功能
-
AcDown下载播放视频时出现红叉白屏怎么办?
-
android播放视频时在立体声与单声道之间切换无变化原因分析及解决
-
Windows 2003 iis FLV 视频不能播放的原因与解决
-
AMD显卡机型使用任意播放器播放在线视频有声音没图像的解决方法介绍