提交 8cb48f81 编写于 作者: D Diego Marcos

Renderer Plugin to handle stereo rendering and head tracking for VR HMDs. It uses the native Firefox VR API.
上级 d3cb4e7c
<!DOCTYPE html>
<html lang="en">
<head>
<title>three.js webgl - native vr demo</title>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
<style>
body {
font-family: Monospace;
background-color: #f0f0f0;
margin: 0px;
overflow: hidden;
}
.button {
position: fixed;
top: 20px;
right: 20px;
padding: 8px;
color: #FFF;
background-color: #555;
}
.button:hover {
cursor: pointer;
background-color: rgb(18, 36, 70);
}
.button.error {
pointer-events: none;
background-color: red;
}
</style>
</head>
<body>
<div class="button">Start VR Mode</div>
<script src="../build/three.js"></script>
<script src="../src/extras/renderers/plugins/VRPlugin.js"></script>
<script src="js/libs/stats.min.js"></script>
<script>
var container, stats;
var camera, scene, projector, raycaster, renderer;
var vrPlugin = new THREE.VRPlugin();
var mouse = new THREE.Vector2(), INTERSECTED;
var radius = 100, theta = 0;
init();
animate();
function init() {
container = document.createElement( 'div' );
document.body.appendChild( container );
var info = document.createElement( 'div' );
info.style.position = 'absolute';
info.style.top = '10px';
info.style.width = '100%';
info.style.textAlign = 'center';
info.innerHTML = '<a href="http://threejs.org" target="_blank">three.js</a> webgl - interactive cubes';
container.appendChild( info );
camera = new THREE.PerspectiveCamera( 70, window.innerWidth / window.innerHeight, 1, 10000 );
scene = new THREE.Scene();
var light = new THREE.DirectionalLight( 0xffffff, 2 );
light.position.set( 1, 1, 1 ).normalize();
scene.add( light );
var light = new THREE.DirectionalLight( 0xffffff );
light.position.set( -1, -1, -1 ).normalize();
scene.add( light );
var geometry = new THREE.BoxGeometry( 20, 20, 20 );
for ( var i = 0; i < 2000; i ++ ) {
var object = new THREE.Mesh( geometry, new THREE.MeshLambertMaterial( { color: Math.random() * 0xffffff } ) );
object.position.x = Math.random() * 800 - 400;
object.position.y = Math.random() * 800 - 400;
object.position.z = Math.random() * 800 - 400;
object.rotation.x = Math.random() * 2 * Math.PI;
object.rotation.y = Math.random() * 2 * Math.PI;
object.rotation.z = Math.random() * 2 * Math.PI;
object.scale.x = Math.random() + 0.5;
object.scale.y = Math.random() + 0.5;
object.scale.z = Math.random() + 0.5;
scene.add( object );
}
projector = new THREE.Projector();
raycaster = new THREE.Raycaster();
renderer = new THREE.WebGLRenderer();
var fullScreenButton = document.querySelector( '.button' );
fullScreenButton.onclick = function() {
vrPlugin.enableVRMode( true );
};
vrPlugin.init( renderer, vrPluginStarted );
function vrPluginStarted(error) {
if (error) {
fullScreenButton.innerHTML = error;
fullScreenButton.classList.add('error');
}
}
renderer.setClearColor( 0xf0f0f0 );
renderer.setSize( window.innerWidth, window.innerHeight );
renderer.sortObjects = false;
container.appendChild( renderer.domElement );
stats = new Stats();
stats.domElement.style.position = 'absolute';
stats.domElement.style.top = '0px';
container.appendChild( stats.domElement );
document.addEventListener( 'mousemove', onDocumentMouseMove, false );
//
window.addEventListener( 'resize', onWindowResize, false );
}
function onWindowResize() {
camera.aspect = window.innerWidth / window.innerHeight;
camera.updateProjectionMatrix();
renderer.setSize( window.innerWidth, window.innerHeight );
}
function onDocumentMouseMove( event ) {
event.preventDefault();
mouse.x = ( event.clientX / window.innerWidth ) * 2 - 1;
mouse.y = - ( event.clientY / window.innerHeight ) * 2 + 1;
}
//
function animate() {
requestAnimationFrame( animate );
render();
stats.update();
}
function render() {
theta += 0.1;
camera.position.x = radius * Math.sin( THREE.Math.degToRad( theta ) );
camera.position.y = radius * Math.sin( THREE.Math.degToRad( theta ) );
camera.position.z = radius * Math.cos( THREE.Math.degToRad( theta ) );
camera.lookAt( scene.position );
// find intersections
var vector = new THREE.Vector3( mouse.x, mouse.y, 1 );
projector.unprojectVector( vector, camera );
raycaster.set( camera.position, vector.sub( camera.position ).normalize() );
var intersects = raycaster.intersectObjects( scene.children );
if ( intersects.length > 0 ) {
if ( INTERSECTED != intersects[ 0 ].object ) {
if ( INTERSECTED ) INTERSECTED.material.emissive.setHex( INTERSECTED.currentHex );
INTERSECTED = intersects[ 0 ].object;
INTERSECTED.currentHex = INTERSECTED.material.emissive.getHex();
INTERSECTED.material.emissive.setHex( 0xff0000 );
}
} else {
if ( INTERSECTED ) INTERSECTED.material.emissive.setHex( INTERSECTED.currentHex );
INTERSECTED = null;
}
vrPlugin.render( scene, camera );
}
</script>
</body>
</html>
/**
* @author Diego Marcos Segura : @dmarcos
*
* It handles stereo rendering and head tracking
* If the VR API is not available it gracefully falls back to a
* regular renderer
*
* The only supported HMD is the Oculus Rift DK1 and The API doesn't currently allow
* to query for the display resolution. The dimensions of the screen are currently
* hardcoded (1280 x 800).
*
* For VR mode to work it has to be used with the Oculus enabled builds of Firefox:
*
* OSX: http://people.mozilla.com/~vladimir/vr/firefox-33.0a1.en-US.mac.dmg
* WIN: http://people.mozilla.com/~vladimir/vr/firefox-33.0a1.en-US.win64-x86_64.zip
*
*/
THREE.VRPlugin = function() {
var _vrModeEnabled = false;
this.init = function ( renderer, done ) {
var self = this;
this._renderer = renderer;
if ( !navigator.mozGetVRDevices ) {
if (done) {
done("Your browser is not VR Ready");
}
return;
}
navigator.mozGetVRDevices( gotVRDevices );
function gotVRDevices( devices ) {
var vrHMD;
var error;
for ( var i = 0; i < devices.length; ++i ) {
if ( devices[i] instanceof PositionSensorVRDevice ) {
self.vrState = devices[i];
}
if ( devices[i] instanceof HMDVRDevice ) {
vrHMD = devices[i];
self._vrHMD = vrHMD;
self.leftEyeTranslation = vrHMD.getEyeTranslation( "left" );
self.rightEyeTranslation = vrHMD.getEyeTranslation( "right" );
self.leftEyeFOV = vrHMD.getRecommendedEyeFieldOfView( "left" );
self.rightEyeFOV = vrHMD.getRecommendedEyeFieldOfView( "right" );
}
}
if ( done ) {
// if ( vrHMD ) {
// error = 'HMD not available';
// }
done( error );
}
}
};
this.render = function( scene, camera, renderTarget, forceClear ) {
var renderer = this._renderer;
renderer.enableScissorTest( false );
// VR render mode
if ( _vrModeEnabled ) {
this.renderStereo.apply( this, arguments );
return;
}
// Regular render mode
this._renderer.render.apply( this._renderer , arguments );
};
this.renderStereo = function( scene, camera, renderTarget, forceClear ) {
var vrState = this.getVRState();
var cameraLeft;
var cameraRight;
var leftEyeTranslation = this.leftEyeTranslation;
var rightEyeTranslation = this.rightEyeTranslation;
var renderer = this._renderer;
renderer.enableScissorTest( true );
renderer.clear();
// Grab camera matrix from user.
// This is interpreted as the head base.
if ( camera.matrixAutoUpdate ) {
camera.updateMatrix();
}
var eyeWorldMatrix = camera.matrixWorld.clone();
cameraLeft = camera.clone();
cameraRight = camera.clone();
cameraLeft.projectionMatrix = this.FovToProjection( this.leftEyeFOV );
cameraRight.projectionMatrix = this.FovToProjection( this.rightEyeFOV );
cameraLeft.position.add(new THREE.Vector3(
leftEyeTranslation.x, leftEyeTranslation.y, leftEyeTranslation.z) );
cameraRight.position.add(new THREE.Vector3(
rightEyeTranslation.x, rightEyeTranslation.y, rightEyeTranslation.z) );
// Applies head rotation from sensors data.
if ( vrState ) {
var quat = new THREE.Quaternion(
vrState.hmd.rotation[1],
vrState.hmd.rotation[2],
vrState.hmd.rotation[3],
vrState.hmd.rotation[0]
);
var rotMat = new THREE.Matrix4();
cameraLeft.matrix.set( eyeWorldMatrix );
cameraRight.matrix.set( eyeWorldMatrix );
cameraLeft.setRotationFromQuaternion( quat );
cameraRight.setRotationFromQuaternion( quat );
}
// render left eye
renderer.setViewport( 0, 0, 640, 800 );
renderer.setScissor( 0, 0, 640, 800 );
renderer.render( scene, cameraLeft );
// render right eye
renderer.setViewport( 640, 0, 640, 800 );
renderer.setScissor( 640, 0, 640, 800 );
renderer.render( scene, cameraRight );
};
this.getVRState = function() {
var orientation = this.vrState.getState().orientation;
var state = {
hmd : {
rotation : [
orientation.w,
orientation.x,
orientation.y,
orientation.z
]
}
};
return state;
};
this.enableVRMode = function( enable ) {
var renderer = this._renderer;
var vrHMD = this._vrHMD;
var canvasOriginalSize = this._canvasOriginalSize;
if (!vrHMD) {
return;
}
// If state doesn't change we do nothing
if ( enable && _vrModeEnabled ||
!enable && !_vrModeEnabled ) {
return;
}
// VR Mode disabled
if ( !enable && _vrModeEnabled ) {
// Restores canvas original size
renderer.setSize( canvasOriginalSize.width, canvasOriginalSize.height );
_vrModeEnabled = false;
return;
}
// VR Mode enabled
this._canvasOriginalSize = {
width: renderer.domElement.width,
height: renderer.domElement.height
};
_vrModeEnabled = true;
// Hardcoded Rift display size
renderer.setSize( 1280, 800 );
this.startVRMode( vrHMD );
};
this.startVRMode = function( vrHMD ) {
var self = this;
var renderer = this._renderer;
vrHMD.xxxToggleElementVR( renderer.domElement );
document.addEventListener( "mozfullscreenchange", function() {
if ( !document.mozFullScreenElement ) {
self.enableVRMode( false );
}
},false );
renderer.domElement.mozRequestFullScreen( { vrDisplay: vrHMD } );
};
this.FovToNDCScaleOffset = function( fov ) {
var pxscale = 2.0 / (fov.leftTan + fov.rightTan);
var pxoffset = (fov.leftTan - fov.rightTan) * pxscale * 0.5;
var pyscale = 2.0 / (fov.upTan + fov.downTan);
var pyoffset = (fov.upTan - fov.downTan) * pyscale * 0.5;
return { scale: [pxscale, pyscale], offset: [pxoffset, pyoffset] };
};
this.FovPortToProjection = function(fov, rightHanded /* = true */, zNear /* = 0.01 */, zFar /* = 10000.0 */)
{
rightHanded = rightHanded === undefined ? true : rightHanded;
zNear = zNear === undefined ? 0.01 : zNear;
zFar = zFar === undefined ? 10000.0 : zFar;
var handednessScale = rightHanded ? -1.0 : 1.0;
// start with an identity matrix
var mobj = new THREE.Matrix4();
var m = mobj.elements;
// and with scale/offset info for normalized device coords
var scaleAndOffset = this.FovToNDCScaleOffset(fov);
// X result, map clip edges to [-w,+w]
m[0*4+0] = scaleAndOffset.scale[0];
m[0*4+1] = 0.0;
m[0*4+2] = scaleAndOffset.offset[0] * handednessScale;
m[0*4+3] = 0.0;
// Y result, map clip edges to [-w,+w]
// Y offset is negated because this proj matrix transforms from world coords with Y=up,
// but the NDC scaling has Y=down (thanks D3D?)
m[1*4+0] = 0.0;
m[1*4+1] = scaleAndOffset.scale[1];
m[1*4+2] = -scaleAndOffset.offset[1] * handednessScale;
m[1*4+3] = 0.0;
// Z result (up to the app)
m[2*4+0] = 0.0;
m[2*4+1] = 0.0;
m[2*4+2] = zFar / (zNear - zFar) * -handednessScale;
m[2*4+3] = (zFar * zNear) / (zNear - zFar);
// W result (= Z in)
m[3*4+0] = 0.0;
m[3*4+1] = 0.0;
m[3*4+2] = handednessScale;
m[3*4+3] = 0.0;
mobj.transpose();
return mobj;
};
this.FovToProjection = function( fov, rightHanded /* = true */, zNear /* = 0.01 */, zFar /* = 10000.0 */ )
{
var fovPort = {
upTan: Math.tan(fov.upDegrees * Math.PI / 180.0),
downTan: Math.tan(fov.downDegrees * Math.PI / 180.0),
leftTan: Math.tan(fov.leftDegrees * Math.PI / 180.0),
rightTan: Math.tan(fov.rightDegrees * Math.PI / 180.0)
};
return this.FovPortToProjection(fovPort, rightHanded, zNear, zFar);
};
};
\ No newline at end of file
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册