1. 安装 tracking.js:
npm install tracking
2. 使用三个 canvas:一个显示完整摄像头画面,一个在画面上绘制人脸矩形框,一个截取人脸。(提醒:用 localhost 运行预览不会报错;但用 IP 地址预览会报错,因为浏览器在非 https 协议下不允许打开摄像头。)
<template>
<div>
<div>
<p>脸部放入红色矩形框</p>
<video id="video" style="transform: rotateY(180deg)" autoplay preload loop muted></video>
<canvas id="overlayCanvas" width="200" height="200" style="position: absolute; top: 20; left: 0; transform: rotateY(180deg);"></canvas>
</div>
<div>
<p>检测人脸结果</p>
<canvas id="fullCanvas" width="200" height="200" style="transform: rotateY(180deg)"></canvas>
<canvas id="faceCanvas" width="200" height="200" style="transform: rotateY(180deg); display: none;"></canvas>
</div>
<div>结果:{{ img }}</div>
</div>
</template>
<script>
//import { userMedia } from '../../utils/utils';
import 'tracking/build/tracking.js';
import 'tracking/build/data/face-min.js';
export default {
  data() {
    return {
      // Data URL of the most recently captured face crop (shown in the template).
      img: '',
      // The <video> element carrying the live camera stream.
      videoObj: null,
      // TrackerTask returned by tracking.track(); kept so we can stop it later.
      trackerTask: null,
      // 2D context of the overlay canvas used to draw the face rectangle.
      overlayContext: null
    }
  },
  mounted() {
    this.getCompetence()
  },
  methods: {
    // Start face tracking on the running video stream and wire up the three
    // canvases: full frame, face crop, and the rectangle overlay.
    openCamera() {
      this.$nextTick(() => {
        const fullCanvas = document.getElementById('fullCanvas')
        const fullContext = fullCanvas.getContext('2d')
        const faceCanvas = document.getElementById('faceCanvas')
        const faceContext = faceCanvas.getContext('2d')
        const overlayCanvas = document.getElementById('overlayCanvas')
        this.videoObj = document.getElementById('video')
        this.overlayContext = overlayCanvas.getContext('2d')

        const tracker = new window.tracking.ObjectTracker('face')
        tracker.setInitialScale(4)
        tracker.setStepSize(2)
        tracker.setEdgesDensity(0.1)
        this.trackerTask = window.tracking.track('#video', tracker, { camera: true })

        tracker.on('track', (event) => {
          if (event.data.length === 0) return
          // Redraw the full frame and clear the overlay once per track event,
          // not once per detected face, so multiple rectangles can coexist.
          fullContext.clearRect(0, 0, fullCanvas.width, fullCanvas.height)
          fullContext.drawImage(this.videoObj, 0, 0, fullCanvas.width, fullCanvas.height)
          this.overlayContext.clearRect(0, 0, overlayCanvas.width, overlayCanvas.height)
          event.data.forEach((rect) => {
            // Crop the detected face region into the hidden face canvas.
            faceCanvas.width = rect.width
            faceCanvas.height = rect.height
            faceContext.clearRect(0, 0, faceCanvas.width, faceCanvas.height)
            faceContext.drawImage(this.videoObj, rect.x, rect.y, rect.width, rect.height, 0, 0, faceCanvas.width, faceCanvas.height)
            // Draw the detection rectangle on the overlay canvas.
            this.overlayContext.strokeStyle = 'red'
            this.overlayContext.lineWidth = 2
            this.overlayContext.strokeRect(rect.x, rect.y, rect.width, rect.height)
            // Expose the cropped face as a data URL for display / upload.
            this.img = faceCanvas.toDataURL()
          })
        })
      })
    },
    // Request camera access (with legacy prefixed-API fallbacks), attach the
    // stream to the <video> element, then start tracking.
    getCompetence() {
      this.videoObj = document.getElementById('video')
      if (navigator.mediaDevices === undefined) {
        navigator.mediaDevices = {}
      }
      if (navigator.mediaDevices.getUserMedia === undefined) {
        // Polyfill for older browsers that only expose prefixed callback APIs.
        navigator.mediaDevices.getUserMedia = (constraints) => {
          const legacyGetUserMedia =
            navigator.webkitGetUserMedia ||
            navigator.mozGetUserMedia ||
            navigator.getUserMedia
          if (!legacyGetUserMedia) {
            return Promise.reject(
              new Error('getUserMedia is not implemented in this browser')
            )
          }
          return new Promise((resolve, reject) => {
            legacyGetUserMedia.call(navigator, constraints, resolve, reject)
          })
        }
      }
      // NOTE: CSS transforms are not valid MediaTrackConstraints; mirroring is
      // done in the template via `transform: rotateY(180deg)`.
      const constraints = {
        video: { width: 200, height: 200 },
        audio: false
      }
      navigator.mediaDevices
        .getUserMedia(constraints)
        .then((stream) => {
          if ('srcObject' in this.videoObj) {
            this.videoObj.srcObject = stream
          } else {
            // Legacy fallback for browsers without srcObject support.
            this.videoObj.src = window.URL.createObjectURL(stream)
          }
          this.videoObj.onloadedmetadata = () => {
            this.videoObj.play()
          }
          this.openCamera()
        })
        .catch((err) => {
          console.log(err)
        })
    },
    // Stop the camera and the tracker. Kept as an alias of handleCancel for
    // backward compatibility with existing callers.
    de() {
      this.handleCancel()
    },
    // Release every camera track and stop the tracking task. Safe to call
    // even if the camera was never successfully opened.
    handleCancel() {
      const stream = this.videoObj && this.videoObj.srcObject
      if (stream) {
        // Stop every track, not only the first one.
        stream.getTracks().forEach((track) => track.stop())
      }
      if (this.trackerTask) {
        this.trackerTask.stop()
      }
    },
    // getUserMedia success callback: attach the stream and start playback.
    success(stream) {
      this.videoObj.srcObject = stream
      this.videoObj.play()
    },
    // getUserMedia error callback: log the failure.
    error(error) {
      console.log(`访问用户媒体设备失败${error.name}, ${error.message}`)
    }
  },
  beforeDestroy() {
    // Ensure the camera and tracker are released when the component unmounts.
    this.handleCancel()
  }
}
</script>