vue2纯前端对接海康威视摄像头实现实时视频预览

vue2纯前端对接海康威视摄像头实现实时视频预览

vue2纯前端对接海康威视摄像头实现实时视频预览

实现实时对海康威视摄像头取流的大致思路:先为摄像头做端口映射(安装摄像头的师傅一般都会做)。完成映射后,即可通过"IP+端口"的形式在浏览器中实时浏览摄像头画面——这是海康威视自带的方式,但无法嵌入自研系统。要将视频流画面嵌入自研系统,需要在满足以上前提的基础上,使用webrtc-streamer进行推流,然后在vue2中接流,渲染到页面中。

一、环境准备

需要具备的前提条件,设备可在网页端进行浏览,且做以下设置

登录进行设置


在这里插入图片描述
设置视频编码格式


设置RTSP协议端口


至此摄像头设置已完成,接下来需要获取摄像头设备所在IP的rtsp链接,海康摄像头的rtsp链接获取见官方说明:海康威视摄像头取流说明
可以使用VLC取流软件验证rtsp链接是否连通,VLC官方下载地址见下。

VLC官网


打开网络串流


输入取流地址


在这里插入图片描述


至此准备工作就完成了,接下来就是敲代码进行集成阶段了

二、代码集成

1.1 准备webrtcstreamer.js,粘贴即用,不用做任何修改

var WebRtcStreamer = (function() {

  /**
   * Interface with the WebRTC-streamer HTTP API.
   * @constructor
   * @param {string|HTMLVideoElement} videoElement - id of the video element tag, or the element itself
   * @param {string} srvurl - url of webrtc-streamer (default is current location)
   */
  var WebRtcStreamer = function WebRtcStreamer(videoElement, srvurl) {
    if (typeof videoElement === "string") {
      this.videoElement = document.getElementById(videoElement);
    } else {
      this.videoElement = videoElement;
    }
    this.srvurl = srvurl || location.protocol + "//" + window.location.hostname + ":" + window.location.port;
    this.pc = null;
    this.mediaConstraints = { offerToReceiveAudio: true, offerToReceiveVideo: true };
    this.iceServers = null;
    // ICE candidates gathered before the remote description is set
    this.earlyCandidates = [];
  }

  // Turn a non-2xx fetch response into a thrown Error.
  WebRtcStreamer.prototype._handleHttpErrors = function(response) {
    if (!response.ok) {
      throw Error(response.statusText);
    }
    return response;
  }

  /**
   * Connect a WebRTC Stream to videoElement.
   * @param {string} videourl - id of WebRTC video stream
   * @param {string} audiourl - id of WebRTC audio stream
   * @param {string} options - options of WebRTC call
   * @param {MediaStream} localstream - local stream to send
   * @param {string} prefmime - preferred mime type (e.g. "video/H264")
   */
  WebRtcStreamer.prototype.connect = function(videourl, audiourl, options, localstream, prefmime) {
    this.disconnect();

    // fetch ICE servers from the streamer once, then reuse the cached list
    if (!this.iceServers) {
      console.log("Get IceServers");
      fetch(this.srvurl + "/api/getIceServers")
        .then(this._handleHttpErrors)
        .then((response) => response.json())
        .then((response) => this.onReceiveGetIceServers(response, videourl, audiourl, options, localstream, prefmime))
        .catch((error) => this.onError("getIceServers " + error));
    } else {
      this.onReceiveGetIceServers(this.iceServers, videourl, audiourl, options, localstream, prefmime);
    }
  }

  /**
   * Disconnect a WebRTC Stream and clear videoElement source.
   */
  WebRtcStreamer.prototype.disconnect = function() {
    if (this.videoElement?.srcObject) {
      this.videoElement.srcObject.getTracks().forEach(track => {
        track.stop();
        this.videoElement.srcObject.removeTrack(track);
      });
    }
    if (this.pc) {
      // tell the server to hang up this peer, then close the local connection
      fetch(this.srvurl + "/api/hangup?peerid=" + this.pc.peerid)
        .then(this._handleHttpErrors)
        .catch((error) => this.onError("hangup " + error));
      try {
        this.pc.close();
      } catch (e) {
        console.log("Failure close peer connection:" + e);
      }
      this.pc = null;
    }
  }

  /**
   * Rewrite an SDP so the m= section of the preferred kind only offers the
   * preferred codec's payload types.
   * @param {string} sdp - session description
   * @param {string} prefmime - preferred mime, e.g. "video/H264"
   * @returns {string} the filtered SDP
   */
  WebRtcStreamer.prototype.filterPreferredCodec = function(sdp, prefmime) {
    const lines = sdp.split('\n');
    const [prefkind, prefcodec] = prefmime.toLowerCase().split('/');
    let sdpSections = [];
    let currentSection = [];

    // Group lines into sections: the session header, then one section per m= line
    lines.forEach(line => {
      if (line.startsWith('m=')) {
        if (currentSection.length) {
          sdpSections.push(currentSection);
        }
        currentSection = [line];
      } else {
        currentSection.push(line);
      }
    });
    sdpSections.push(currentSection);

    // Process each section
    const processedSections = sdpSections.map(section => {
      const firstLine = section[0];
      if (!firstLine.startsWith('m=' + prefkind)) {
        return section.join('\n');
      }

      // Get payload types for the preferred codec
      const rtpLines = section.filter(line => line.startsWith('a=rtpmap:'));
      const preferredPayloads = rtpLines
        .filter(line => line.toLowerCase().includes(prefcodec))
        .map(line => line.split(':')[1].split(' ')[0]);
      if (preferredPayloads.length === 0) {
        // preferred codec not offered: leave the section untouched
        return section.join('\n');
      }

      // Modify the m= line to only include the preferred payloads
      const mLine = firstLine.split(' ');
      const newMLine = [...mLine.slice(0, 3), ...preferredPayloads].join(' ');

      // Filter related attributes.
      // NOTE(review): these are prefix matches, so payload "9" would also match
      // "a=rtpmap:96" — assumes the browser-generated payload ids are unambiguous.
      const filteredLines = section.filter(line => {
        if (line === firstLine) return false;
        if (line.startsWith('a=rtpmap:')) {
          return preferredPayloads.some(payload => line.startsWith(`a=rtpmap:${payload}`));
        }
        if (line.startsWith('a=fmtp:') || line.startsWith('a=rtcp-fb:')) {
          return preferredPayloads.some(payload => line.startsWith(`a=${line.split(':')[0].split('a=')[1]}:${payload}`));
        }
        return true;
      });

      return [newMLine, ...filteredLines].join('\n');
    });

    return processedSections.join('\n');
  }

  /*
   * GetIceServers callback: create the peer connection, build the /api/call
   * url, create and post the SDP offer.
   */
  WebRtcStreamer.prototype.onReceiveGetIceServers = function(iceServers, videourl, audiourl, options, stream, prefmime) {
    this.iceServers = iceServers;
    this.pcConfig = iceServers || { "iceServers": [] };
    try {
      this.createPeerConnection();

      let callurl = this.srvurl + "/api/call?peerid=" + this.pc.peerid + "&url=" + encodeURIComponent(videourl);
      if (audiourl) {
        callurl += "&audiourl=" + encodeURIComponent(audiourl);
      }
      if (options) {
        callurl += "&options=" + encodeURIComponent(options);
      }
      if (stream) {
        this.pc.addStream(stream);
      }

      // clear early candidates
      this.earlyCandidates.length = 0;

      // create Offer
      this.pc.createOffer(this.mediaConstraints).then((sessionDescription) => {
        console.log("Create offer:" + JSON.stringify(sessionDescription));
        console.log(`video codecs:${Array.from(new Set(RTCRtpReceiver.getCapabilities("video")?.codecs?.map(codec => codec.mimeType)))}`);
        console.log(`audio codecs:${Array.from(new Set(RTCRtpReceiver.getCapabilities("audio")?.codecs?.map(codec => codec.mimeType)))}`);

        if (prefmime != undefined) {
          // set preferred codec; default the kind to "video" when only a codec was given
          let [prefkind] = prefmime.split('/');
          if (prefkind != "video" && prefkind != "audio") {
            prefkind = "video";
            prefmime = prefkind + "/" + prefmime;
          }
          console.log("sdp:" + sessionDescription.sdp);
          sessionDescription.sdp = this.filterPreferredCodec(sessionDescription.sdp, prefmime);
          console.log("sdp:" + sessionDescription.sdp);
        }

        this.pc.setLocalDescription(sessionDescription).then(() => {
          fetch(callurl, { method: "POST", body: JSON.stringify(sessionDescription) })
            .then(this._handleHttpErrors)
            .then((response) => response.json())
            .catch((error) => this.onError("call " + error))
            .then((response) => this.onReceiveCall(response))
            .catch((error) => this.onError("call " + error));
        }, (error) => {
          console.log("setLocalDescription error:" + JSON.stringify(error));
        });
      }, (error) => {
        alert("Create offer error:" + JSON.stringify(error));
      });
    } catch (e) {
      this.disconnect();
      alert("connect error: " + e);
    }
  }

  // Poll the server for its ICE candidates for this peer.
  WebRtcStreamer.prototype.getIceCandidate = function() {
    fetch(this.srvurl + "/api/getIceCandidate?peerid=" + this.pc.peerid)
      .then(this._handleHttpErrors)
      .then((response) => response.json())
      .then((response) => this.onReceiveCandidate(response))
      .catch((error) => this.onError("getIceCandidate " + error));
  }

  /*
   * create RTCPeerConnection
   */
  WebRtcStreamer.prototype.createPeerConnection = function() {
    console.log("createPeerConnection config: " + JSON.stringify(this.pcConfig));
    this.pc = new RTCPeerConnection(this.pcConfig);
    let pc = this.pc;
    // random id used by the server to correlate call/candidate/hangup requests
    pc.peerid = Math.random();

    pc.onicecandidate = (evt) => this.onIceCandidate(evt);
    pc.onaddstream = (evt) => this.onAddStream(evt);
    pc.oniceconnectionstatechange = (evt) => {
      console.log("oniceconnectionstatechange state: " + pc.iceConnectionState);
      if (this.videoElement) {
        // dim the video element to reflect the connection state
        if (pc.iceConnectionState === "connected") {
          this.videoElement.style.opacity = "1.0";
        } else if (pc.iceConnectionState === "disconnected") {
          this.videoElement.style.opacity = "0.25";
        } else if ((pc.iceConnectionState === "failed") || (pc.iceConnectionState === "closed")) {
          this.videoElement.style.opacity = "0.5";
        } else if (pc.iceConnectionState === "new") {
          this.getIceCandidate();
        }
      }
    }
    pc.ondatachannel = function(evt) {
      console.log("remote datachannel created:" + JSON.stringify(evt));
      evt.channel.onopen = function() {
        console.log("remote datachannel open");
        this.send("remote channel openned");
      }
      evt.channel.onmessage = function(event) {
        console.log("remote datachannel recv:" + JSON.stringify(event.data));
      }
    }

    try {
      let dataChannel = pc.createDataChannel("ClientDataChannel");
      dataChannel.onopen = function() {
        console.log("local datachannel open");
        this.send("local channel openned");
      }
      dataChannel.onmessage = function(evt) {
        console.log("local datachannel recv:" + JSON.stringify(evt.data));
      }
    } catch (e) {
      console.log("Cannor create datachannel error: " + e);
    }

    console.log("Created RTCPeerConnnection with config: " + JSON.stringify(this.pcConfig));
    return pc;
  }

  /*
   * RTCPeerConnection IceCandidate callback: send the candidate now, or queue
   * it until the remote description has been set.
   */
  WebRtcStreamer.prototype.onIceCandidate = function(event) {
    if (event.candidate) {
      if (this.pc.currentRemoteDescription) {
        this.addIceCandidate(this.pc.peerid, event.candidate);
      } else {
        this.earlyCandidates.push(event.candidate);
      }
    } else {
      console.log("End of candidates.");
    }
  }

  // POST one local ICE candidate to the server.
  WebRtcStreamer.prototype.addIceCandidate = function(peerid, candidate) {
    fetch(this.srvurl + "/api/addIceCandidate?peerid=" + peerid, { method: "POST", body: JSON.stringify(candidate) })
      .then(this._handleHttpErrors)
      .then((response) => response.json())
      .then((response) => { console.log("addIceCandidate ok:" + response); })
      .catch((error) => this.onError("addIceCandidate " + error));
  }

  /*
   * RTCPeerConnection AddTrack callback: attach the remote stream to the
   * video element and start playback (fall back to showing controls when
   * autoplay is blocked).
   */
  WebRtcStreamer.prototype.onAddStream = function(event) {
    console.log("Remote track added:" + JSON.stringify(event));
    this.videoElement.srcObject = event.stream;
    let promise = this.videoElement.play();
    if (promise !== undefined) {
      promise.catch((error) => {
        console.warn("error:" + error);
        this.videoElement.setAttribute("controls", true);
      });
    }
  }

  /*
   * AJAX /call callback: apply the server's answer, flush queued candidates,
   * then start polling the server's candidates.
   */
  WebRtcStreamer.prototype.onReceiveCall = function(dataJson) {
    console.log("offer: " + JSON.stringify(dataJson));
    let descr = new RTCSessionDescription(dataJson);
    this.pc.setRemoteDescription(descr).then(() => {
      console.log("setRemoteDescription ok");
      while (this.earlyCandidates.length) {
        let candidate = this.earlyCandidates.shift();
        this.addIceCandidate(this.pc.peerid, candidate);
      }
      this.getIceCandidate();
    }, (error) => {
      console.log("setRemoteDescription error:" + JSON.stringify(error));
    });
  }

  /*
   * AJAX /getIceCandidate callback: add each remote candidate, then signal
   * end-of-candidates with an argument-less addIceCandidate().
   */
  WebRtcStreamer.prototype.onReceiveCandidate = function(dataJson) {
    console.log("candidate: " + JSON.stringify(dataJson));
    if (dataJson) {
      for (let i = 0; i < dataJson.length; i++) {
        let candidate = new RTCIceCandidate(dataJson[i]);
        console.log("Adding ICE candidate :" + JSON.stringify(candidate));
        this.pc.addIceCandidate(candidate).then(() => {
          console.log("addIceCandidate OK");
        }, (error) => {
          console.log("addIceCandidate error:" + JSON.stringify(error));
        });
      }
      this.pc.addIceCandidate();
    }
  }

  /*
   * AJAX callback for Error
   */
  WebRtcStreamer.prototype.onError = function(status) {
    console.log("onError:" + status);
  }

  return WebRtcStreamer;
})();

// Export for both browser globals and CommonJS.
if (typeof window !== 'undefined' && typeof window.document !== 'undefined') {
  window.WebRtcStreamer = WebRtcStreamer;
}
if (typeof module !== 'undefined' && typeof module.exports !== 'undefined') {
  module.exports = WebRtcStreamer;
}

1.2 封装视频组件,在需要视频的地方引入此封装的视频组件即可,也是粘贴即用,注意其中import的webrtcstreamer.js的地址替换为自己的

<template>
  <div class="rtsp_video_container">
    <!-- Single stream: one video filling the whole container -->
    <div v-if="videoUrls.length === 1" class="rtsp_video single-video">
      <video :id="'video_0'" controls autoPlay muted width="100%" height="100%" style="object-fit: fill"></video>
    </div>
    <!-- Multiple streams: two per row. The v-if is hoisted off the v-for
         element (combining them on one element is a Vue2 anti-pattern). -->
    <template v-if="videoUrls.length > 1">
      <div v-for="(videoUrl, index) in videoUrls" :key="index" class="rtsp_video">
        <video :id="'video_' + index" controls autoPlay muted width="100%" height="100%" style="object-fit: fill"></video>
      </div>
    </template>
  </div>
</template>

<script>
// NOTE: replace this with the path where your webrtcstreamer.js lives
import WebRtcStreamer from '../untils/webrtcstreamer';

export default {
  name: 'RtspVideo',
  props: {
    // rtsp urls to display; one <video> element is rendered per entry
    videoUrls: {
      type: Array,
      required: true,
    },
  },
  data() {
    return {
      // Host:port of the local webrtc-streamer process (see section 1.3).
      // Keep this pointing at localhost: it is the address the browser uses
      // to talk to the plugin running on the viewer's PC — do not change it
      // to the camera's IP.
      cameraIp: 'localhost:8000',
      // one WebRtcStreamer instance per video element
      webRtcServers: [],
    };
  },
  mounted() {
    this.initializeStreams();
  },
  watch: {
    // re-initialize the streams when videoUrls or cameraIp changes
    videoUrls: {
      handler(newUrls, oldUrls) {
        if (newUrls.length !== oldUrls.length || !this.isSameArray(newUrls, oldUrls)) {
          this.resetStreams();
          this.initializeStreams();
        }
      },
      deep: true,
    },
    cameraIp(newIp, oldIp) {
      if (newIp !== oldIp) {
        this.resetStreams();
        this.initializeStreams();
      }
    },
  },
  methods: {
    // Create one WebRtcStreamer per url and start pulling the stream over TCP.
    initializeStreams() {
      if (this.webRtcServers.length === 0) {
        this.videoUrls.forEach((videoUrl, index) => {
          const videoElement = document.getElementById(`video_${index}`);
          const webRtcServer = new WebRtcStreamer(videoElement, `http://${this.cameraIp}`);
          this.webRtcServers.push(webRtcServer);
          webRtcServer.connect(videoUrl, null, 'rtptransport=tcp', null);
        });
      }
    },
    // Shallow element-wise equality of two url arrays.
    isSameArray(arr1, arr2) {
      return arr1.length === arr2.length && arr1.every((value, index) => value === arr2[index]);
    },
    // Disconnect every stream and drop the WebRtcStreamer instances.
    resetStreams() {
      this.webRtcServers.forEach((webRtcServer) => {
        if (webRtcServer) {
          webRtcServer.disconnect();
        }
      });
      this.webRtcServers = [];
    },
  },
  beforeDestroy() {
    // clean up peer connections on destroy to avoid leaks
    this.resetStreams();
  },
};
</script>

<style lang="less" scoped>
.rtsp_video_container {
  display: flex;
  flex-wrap: wrap;
  gap: 10px;
  justify-content: space-between;
}
/* two videos per row (48% + gap) */
.rtsp_video {
  flex: 1 1 48%;
  height: 225px;
  max-width: 48%;
  background: #000;
  border-radius: 8px;
  overflow: hidden;
}
.single-video {
  flex: 1 1 100%;
  height: 100%;
  max-width: 100%;
  background: #000;
}
video {
  width: 100%;
  height: 100%;
  object-fit: cover;
}
</style>

父组件中进行此视频组件的引用示例:

<template>
  <div style="margin-top: 10px;width: 100%;height: 100%;">
    <!-- :key forces a re-mount of the component when the selected urls change -->
    <rtsp-video :videoUrls="selectedUrls" :key="selectedUrls.join(',')"></rtsp-video>
  </div>
</template>

<script>
import RtspVideo from "../views/video";

export default {
  components: { RtspVideo },
  data() {
    return {
      // rtsp urls of the cameras — replace user/password/ip/port with your own
      selectedUrls: [
        'rtsp://user:[email protected]:xxxx/Streaming/Channels/101',
        'rtsp://user:[email protected]:xxxx/Streaming/Channels/201',
      ],
    };
  },
};
</script>

1.3 以上完成之后,需要观看视频的本地PC设备启动webrtc-streamer插件

webrtc-streamer插件下载webrtc-streamer

下载图中的版本,标题1.1中对应的js版本就是此版本


下载解压完成之后,其中的exe和js是配套的,插件脚本在webrtc-streamer-v0.8.13-dirty-Windows-AMD64-Release\bin目录下,对应的webrtcstreamer.js在webrtc-streamer-v0.8.13-dirty-Windows-AMD64-Release\share\webrtc-streamer\html目录下,只需要webrtc-streamer.exe和webrtcstreamer.js即可,也可以直接用博主在上面提供的,切记一定要配套,不然可能画面取不出。

实现效果图见下:

在这里插入图片描述

至此海康威视实时视频预览功能已完成,写作不易,如果对您有帮助,恳请保留一个赞。

补充:
如果启动webrtc-streamer.exe导致客户端卡顿 或者 需要更改webrtc-streamer.exe的端口号,可参考下图

在这里插入图片描述
在这里插入图片描述


视频监控观看插件.bat:
@echo off
REM Launch the local webrtc-streamer plugin, listening on all interfaces, port 8124.
REM "cd /d" switches both drive and directory; a bare "cd C:" does NOT change to C:\
REM when the script is started from another drive. Adjust the path below to the
REM folder where webrtc-streamer.exe was unzipped.
cd /d C:\
start webrtc-streamer.exe -o -H 0.0.0.0:8124
exit

Read more

机器人室内导航新纪元:SLAM与‘室内GPS’融合终结定位‘鬼打墙’

机器人室内导航新纪元:SLAM与‘室内GPS’融合终结定位‘鬼打墙’

如果你观察过仓储机器人的运行,可能会发现一个有趣现象:刚充满电出发的AGV矫健精准,但工作几小时后,它经过货架时总会莫名多“蹭”一下边——这不是程序设定的仪式感,而是SLAM算法累积误差在作祟。 漂移宿命:SLAM的“记忆模糊症” 激光SLAM的本质,是让机器人通过对比连续时刻的环境特征,推算出自己“相对刚才的位置”移动了多少。这种相对定位方式就像蒙眼走路——每一步的微小误差都会叠加,最终导致轨迹偏离。 学术界将这一问题称为“累积漂移”。研究数据显示,即便是配置16线激光雷达的高端方案,在长直走廊或结构重复的仓库中运行10分钟后,定位误差也可能突破10厘米阈值。更棘手的是,当环境发生动态变化——比如货架被移动、有新障碍物出现——激光SLAM的地图匹配可能彻底失效,导致机器人瞬间“失忆”。 工程师们尝试用多传感器融合弥补这一缺陷:激光+IMU+编码器+视觉的组合成为主流,紧耦合算法、因子图优化等技术不断迭代。这些方案确实提升了短期精度,但本质仍是“相对+相对”的堆叠——就像让蒙眼者戴上更灵敏的耳塞,却始终无法真正睁开眼睛。 融合破局:给激光雷达装上“北斗卫星”

在 NVIDIA DGX Spark部署 Stable Diffusion 3.5 并使用ComfyUI

在 NVIDIA DGX Spark部署 Stable Diffusion 3.5 并使用ComfyUI

📖 前言 随着 NVIDIA Blackwell 架构的问世,DGX Spark (Personal AI Supercomputer) 将桌面级 AI 算力推向了新的巅峰。这台怪兽级设备搭载了 GB200/GB10 级别的 GPU 和 NVIDIA Grace CPU (ARM64),并运行在最新的 CUDA 13 环境下。 然而,“最强硬件"往往伴随着"最难环境”。由于 Grace CPU 采用 ARM (aarch64) 架构,且 CUDA 13 过于前沿,传统的 PyTorch 安装方法极易失败。 本文将手把手教你如何在这台超级计算机上部署 Stable Diffusion

宇树机器人g1二次开发:建图,定位,导航手把手教程(二)建图部分:开始一直到打开rviz教程

注意: 本教程为ros1,需要ubuntu20.04,使用算法为fase_lio 本教程为遵循的网上开源项目:https://github.com/deepglint/FAST_LIO_LOCALIZATION_HUMANOID.git 一、系统环境准备 1.1. 安装必要的依赖库 # 安装C++标准库 sudo apt install libc++-dev libc++abi-dev # 安装Eigen3线性代数库 sudo apt-get install libeigen3-dev 库说明: * libc++-dev:C++标准库开发文件 * libeigen3-dev:线性代数库,用于矩阵运算和几何变换 * 这些是编译FAST-LIO和Open3D必需的数学和系统库 二、创建工作空间和准备 2.1. 创建定位工作空间 mkdir

程序员的自我修养:用 AR 眼镜管理健康

程序员的自我修养:用 AR 眼镜管理健康

欢迎文末添加好友交流,共同进步! “ 俺はモンキー・D・ルフィ。海贼王になる男だ!” * 一、从一次体检说起 * 二、为什么是 AR 眼镜? * 三、技术选型:CXR-M SDK vs 灵珠平台 * 四、项目架构设计 * 五、从配置开始:Gradle 和权限 * 5.1 添加 SDK 依赖 * 5.2 权限配置 * 六、数据层实现 * 6.1 数据模型 * 6.2 数据仓库 * 七、SDK 封装层 * 7.1 发送提醒到眼镜 * 7.2 TTS 语音播报