/*
黑色是因为 video 没有自动播放导致的。
而且 video 必须设置 muted（静音）属性，否则视频都无法播放；
如果不设置 muted，也可以用设置 x5-video-player-type="h5"
替代（意为兼容 QQ 浏览器，解决在小程序中黑色的问题）。
(Note: the black screen is caused by the video not autoplaying; `muted` is
required for autoplay, or use x5-video-player-type="h5" for the QQ/WeChat
browser as an alternative.)
*/
// Hidden <video> element (id="coverVideo" in the page) used as the pixel
// source for the cover-video texture rendered on the tracked marker.
const coverVideoTarget = document.getElementById('coverVideo');
/**
 * Initialize the three.js scene graph and renderer, then attach the
 * cover-video plane.
 *
 * Relies on globals declared elsewhere in the file: scene, camera, root,
 * renderer, canvas, webglWidth, webglHeight.
 *
 * @param {string} [videoUrl] - URL of the cover video. Defaults to the
 *   previously hard-coded clip so existing zero-argument callers are
 *   unaffected.
 */
function initThree(videoUrl = './video/18912265449355602.mp4'){
  scene = new THREE.Scene();
  camera = new THREE.PerspectiveCamera(75, window.innerWidth / window.innerHeight, 0.1, 10000);
  // The projection matrix is overwritten later with the one produced by the
  // NFT worker, so automatic matrix updates are disabled.
  camera.matrixAutoUpdate = false;
  scene.add(camera);
  scene.add(new THREE.AmbientLight(0xffffff, 1.5));
  root = new THREE.Object3D();
  // Pose is written directly into root.matrix from tracker results.
  root.matrixAutoUpdate = false;
  scene.add(root);
  renderer = new THREE.WebGLRenderer({ canvas: canvas, alpha: true, antialias: true });
  renderer.setPixelRatio(window.devicePixelRatio);
  renderer.setSize(webglWidth, webglHeight);
  setVideo(videoUrl);
  // setModel();
}
/**
 * Point the hidden <video> element at videoUrl and add a video-textured
 * plane to the scene root.
 *
 * Uses globals: coverVideoTarget, coverVideo, root, THREE.
 *
 * @param {string} videoUrl - URL assigned to the cover video element.
 */
function setVideo(videoUrl){
  console.log('setVideo')
  coverVideoTarget.src = videoUrl;
  const videoTexture = new THREE.VideoTexture(coverVideoTarget);
  const planeMaterial = new THREE.MeshBasicMaterial({ map: videoTexture, transparent: true });
  const planeGeometry = new THREE.PlaneGeometry(400, 240);
  // const geo = new THREE.PlaneGeometry(20,12);
  coverVideo = new THREE.Mesh(planeGeometry, planeMaterial);
  // Position the plane relative to the tracked marker's root node.
  coverVideo.position.set(140, 200, -50);
  root.add(coverVideo);
}
/**
 * Alternative cover-video path: copies each video frame onto an offscreen
 * canvas and uses a CanvasTexture (workaround for platforms where a direct
 * VideoTexture renders black).
 *
 * Fix: the offscreen canvas was never resized, so it kept the browser
 * default 300x150 while 400x240 (and later the real video dimensions) were
 * drawn into it, clipping the texture. The canvas is now sized to the draw
 * dimensions immediately and re-synced once the video reports its actual
 * size on 'canplay'.
 *
 * @param {string} videoUrl - URL assigned to the cover video element.
 */
function setVideo2(videoUrl){
  console.log('setVideo2')
  coverVideoTarget.src = videoUrl;
  let vWidth = 400, vHeight = 240;
  // Offscreen canvas; never appended to the document body.
  var canvas_process = document.createElement('canvas');
  var context_process = canvas_process.getContext('2d');
  canvas_process.width = vWidth;
  canvas_process.height = vHeight;
  coverVideoTarget.addEventListener('canplay', function () {
    vWidth = this.videoWidth;
    vHeight = this.videoHeight;
    // Keep the backing canvas in sync with the actual frame size so
    // drawImage is not clipped (resizing also clears the canvas).
    canvas_process.width = vWidth;
    canvas_process.height = vHeight;
  });
  const texture = new THREE.CanvasTexture(canvas_process);
  // Per-frame copy loop: video frame -> canvas -> texture upload.
  function update() {
    context_process.fillStyle = 'black';
    context_process.fillRect(0, 0, vWidth, vHeight);
    context_process.drawImage(coverVideoTarget, 0, 0, vWidth, vHeight);
    texture.needsUpdate = true;
    requestAnimationFrame(update);
  }
  update();
  var mat = new THREE.MeshBasicMaterial({ map: texture, transparent: true });
  const geo = new THREE.PlaneGeometry(400, 240);
  // const geo = new THREE.PlaneGeometry(20,12);
  coverVideo = new THREE.Mesh(geo, mat);
  coverVideo.position.set(140, 200, -50);
  root.add(coverVideo);
}
/**
 * (Re)start NFT image tracking in a dedicated web worker and wire its
 * messages to the renderer. Any previously running worker is terminated
 * first so only one tracker runs at a time.
 *
 * Fixes: the outer dead `noResultTime` variable is removed; `play()` is
 * only called when the video is paused and its promise rejection is
 * handled (mobile autoplay policies can reject it, which previously
 * produced an unhandled promise rejection); the redundant if/else that
 * called nftProcess() on both paths is collapsed.
 *
 * @param {*} marker - marker descriptor forwarded to the tracker worker.
 * @param {string} videoUrl - cover video URL (video aspect 240:360);
 *   currently unused here — the video is attached via setVideo elsewhere.
 */
function initNFT(marker, videoUrl) {
  if (typeof(NFTworker) !== 'undefined') {
    NFTworker.terminate();
    NFTworker = undefined;
  }
  // setVideo(videoUrl);
  NFTworker = new Worker('../js/imgTracker.worker.js');
  NFTworker.postMessage({ type: "initNFT", pw: trackImgWidth, ph: trackImgHeight, trackData: './../examples/Data/camera_para.dat', marker: marker });
  // Timestamp of the last successful detection; used to decide when to
  // fall back after a stretch of 'noResult' messages.
  var resultTime = new Date().getTime();
  NFTworker.onmessage = function (ev) {
    var msg = ev.data;
    switch (msg.type) {
      case "nftInitSuccess": {
        // Rescale the worker's projection matrix to the on-screen aspect.
        var proj = JSON.parse(msg.proj);
        proj[0] *= ratioW;
        proj[4] *= ratioW;
        proj[8] *= ratioW;
        proj[12] *= ratioW;
        proj[1] *= ratioH;
        proj[5] *= ratioH;
        proj[9] *= ratioH;
        proj[13] *= ratioH;
        setMatrix(camera.projectionMatrix, proj);
        nftProcess();
        break;
      }
      case 'found': {
        coverVideo && (coverVideo.visible = true);
        // play() returns a promise on modern browsers; guard against
        // autoplay-policy rejections instead of leaving them unhandled.
        if (coverVideoTarget.paused) {
          var playPromise = coverVideoTarget.play();
          playPromise && playPromise.catch(function (err) {
            console.log('coverVideo play rejected', err);
          });
        }
        setMatrix(root.matrix, JSON.parse(msg.matrixGL_RH));
        renderer.render(scene, camera);
        resultTime = new Date().getTime();
        nftProcess();
        break;
      }
      case 'noResult': {
        coverVideo && (coverVideo.visible = false);
        coverVideoTarget.pause();
        renderer.render(scene, camera);
        var noResultTime = new Date().getTime();
        if (noResultTime - resultTime > 3000) {
          console.log('go track');
          // trackProcess('track');
        }
        nftProcess();
        break;
      }
    }
  };
};
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>ALVA Image Tracker</title>
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=0.5, maximum-scale=1">
<link rel="stylesheet" href="css/nft-style.css">
<!-- Fix: the <style> element previously sat between </head> and <body>,
     which is invalid HTML; it now lives inside <head>. -->
<style>
#coverVideo{
/* visibility: hidden; */
position: fixed;
/* left: 0px; */
/* Parked far off-screen (rather than hidden) so the video keeps decoding
   frames that the canvas/texture loop can copy. */
left: -1000000px;
top:0px;
z-index: 10000;
width: 320px;
height: 240px;
background-color: pink;
}
</style>
</head>
<body>
<div id="loading" >
<span class="loading-text">Loading, please wait</span>
</div>
<div id="app">
<!-- Camera feed: muted+autoplay+playsinline are required for mobile autoplay. -->
<video
loop
autoplay
muted
playsinline
id="cameraVideo">
</video>
<!-- Cover video overlay; x5-video-player-type="h5" works around black video
     in the QQ/WeChat browser. -->
<video id="coverVideo" loop controls webkit-playsinline="true" x-webkit-airplay="true"
playsinline="true" x5-video-player-type="h5" preload="auto" src='./video/18912265449355602.mp4'></video>
<canvas id="canvas"></canvas>
</div>
<script src="../vconsole.min.js"></script>
<script src="js/third_party/three.js/three.js"></script>
<script src="js/third_party/three.js/GLTFLoader.js"></script>
<script src="https://res2.wx.qq.com/open/js/jweixin-1.6.0.js"></script>
<script src="threejs_worker.js"></script>
</body>
</html>
// Image-tracker handle; assigned elsewhere once tracking starts.
var imgTracker;
var cameraVideo = document.getElementById( 'cameraVideo' );
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
  var hint = { audio: false, video: true };
  if( window.innerWidth < 800 ) { // target a 2:3 (portrait) or 3:2 (landscape) capture
    var width = ( window.innerWidth < window.innerHeight ) ? 240 : 360;
    var height = ( window.innerWidth < window.innerHeight ) ? 360 : 240;
    width = width * window.devicePixelRatio;
    height = height * window.devicePixelRatio;
    hint = {
      audio: false,
      video: {
        facingMode: 'environment', // rear-facing camera
        width: { min: width, max: width },
        // Fix: `height` was computed but never applied, leaving the aspect
        // ratio up to the device. Use `ideal` (not min/max) so an
        // unsupported exact height cannot cause an OverconstrainedError.
        height: { ideal: height }
      },
    };
  }
  navigator.mediaDevices.getUserMedia( hint ).then( function( stream ) {
    cameraVideo.srcObject = stream;
    // Wait for dimensions before starting playback and tracking.
    cameraVideo.addEventListener( 'loadedmetadata', function() {
      cameraVideo.play();
      start(cameraVideo);
    } );
  } ).catch(err => {
    // Fix: previously logged only the literal 'catch', discarding the
    // actual failure reason (permission denied, overconstrained, ...).
    console.log('getUserMedia failed:', err);
  });
} else {
  // Fix: previously an empty branch — at least surface the lack of support.
  console.log('navigator.mediaDevices.getUserMedia is not supported');
}