import { ArrayCamera } from '../../cameras/ArrayCamera.js';
import { EventDispatcher } from '../../core/EventDispatcher.js';
import { PerspectiveCamera } from '../../cameras/PerspectiveCamera.js';
import { Vector3 } from '../../math/Vector3.js';
import { Vector4 } from '../../math/Vector4.js';
import { WebGLAnimation } from '../webgl/WebGLAnimation.js';
import { WebGLRenderTarget } from '../WebGLRenderTarget.js';
import { WebXRController } from './WebXRController.js';
import { DepthTexture } from '../../textures/DepthTexture.js';
import { WebGLMultisampleRenderTarget } from '../WebGLMultisampleRenderTarget.js';
import { DepthFormat, DepthStencilFormat, RGBAFormat, sRGBEncoding, UnsignedByteType, UnsignedShortType, UnsignedInt248Type } from '../../constants.js';
class WebXRManager extends EventDispatcher {
  constructor(renderer, gl) {
    super();

    const scope = this;

    let session = null;
    let framebufferScaleFactor = 1.0;

    let referenceSpace = null;
    let referenceSpaceType = 'local-floor';

    const hasMultisampledRenderToTexture = renderer.extensions.has('WEBGL_multisampled_render_to_texture');

    let pose = null;
    let glBinding = null;
    let glProjLayer = null;
    let glBaseLayer = null;
    let isMultisample = false;
    let xrFrame = null;
    const attributes = gl.getContextAttributes();
    let initialRenderTarget = null;
    let newRenderTarget = null;

    const controllers = [];
    const inputSourcesMap = new Map();

    //

    const cameraL = new PerspectiveCamera();
    cameraL.layers.enable(1);
    cameraL.viewport = new Vector4();

    const cameraR = new PerspectiveCamera();
    cameraR.layers.enable(2);
    cameraR.viewport = new Vector4();

    const cameras = [cameraL, cameraR];

    const cameraVR = new ArrayCamera();
    cameraVR.layers.enable(1);
    cameraVR.layers.enable(2);

    let _currentDepthNear = null;
    let _currentDepthFar = null;

    //

    this.cameraAutoUpdate = true;
    this.enabled = false;

    this.isPresenting = false;
    this.getController = function (index) {
      let controller = controllers[index];

      if (controller === undefined) {
        controller = new WebXRController();
        controllers[index] = controller;
      }

      return controller.getTargetRaySpace();
    };

    this.getControllerGrip = function (index) {
      let controller = controllers[index];

      if (controller === undefined) {
        controller = new WebXRController();
        controllers[index] = controller;
      }

      return controller.getGripSpace();
    };

    this.getHand = function (index) {
      let controller = controllers[index];

      if (controller === undefined) {
        controller = new WebXRController();
        controllers[index] = controller;
      }

      return controller.getHandSpace();
    };
    //

    function onSessionEvent(event) {
      const controller = inputSourcesMap.get(event.inputSource);

      if (controller) {
        controller.dispatchEvent({ type: event.type, data: event.inputSource });
      }
    }

    function onSessionEnd() {
      inputSourcesMap.forEach(function (controller, inputSource) {
        controller.disconnect(inputSource);
      });

      inputSourcesMap.clear();

      _currentDepthNear = null;
      _currentDepthFar = null;

      // restore framebuffer/rendering state
      renderer.setRenderTarget(initialRenderTarget);

      glBaseLayer = null;
      glProjLayer = null;
      glBinding = null;
      session = null;
      newRenderTarget = null;

      //

      animation.stop();

      scope.isPresenting = false;

      scope.dispatchEvent({ type: 'sessionend' });
    }
    this.setFramebufferScaleFactor = function (value) {
      framebufferScaleFactor = value;

      if (scope.isPresenting === true) {
        console.warn('THREE.WebXRManager: Cannot change framebuffer scale while presenting.');
      }
    };

    this.setReferenceSpaceType = function (value) {
      referenceSpaceType = value;

      if (scope.isPresenting === true) {
        console.warn('THREE.WebXRManager: Cannot change reference space type while presenting.');
      }
    };

    this.getReferenceSpace = function () {
      return referenceSpace;
    };

    this.getBaseLayer = function () {
      return glProjLayer !== null ? glProjLayer : glBaseLayer;
    };

    this.getBinding = function () {
      return glBinding;
    };

    this.getFrame = function () {
      return xrFrame;
    };

    this.getSession = function () {
      return session;
    };
    this.setSession = async function (value) {
      session = value;

      if (session !== null) {
        initialRenderTarget = renderer.getRenderTarget();

        session.addEventListener('select', onSessionEvent);
        session.addEventListener('selectstart', onSessionEvent);
        session.addEventListener('selectend', onSessionEvent);
        session.addEventListener('squeeze', onSessionEvent);
        session.addEventListener('squeezestart', onSessionEvent);
        session.addEventListener('squeezeend', onSessionEvent);
        session.addEventListener('end', onSessionEnd);
        session.addEventListener('inputsourceschange', onInputSourcesChange);

        if (attributes.xrCompatible !== true) {
          await gl.makeXRCompatible();
        }

        if (session.renderState.layers === undefined || renderer.capabilities.isWebGL2 === false) {
          const layerInit = {
            antialias: session.renderState.layers === undefined ? attributes.antialias : true,
            alpha: attributes.alpha,
            depth: attributes.depth,
            stencil: attributes.stencil,
            framebufferScaleFactor: framebufferScaleFactor,
          };

          glBaseLayer = new XRWebGLLayer(session, gl, layerInit);

          session.updateRenderState({ baseLayer: glBaseLayer });

          newRenderTarget = new WebGLRenderTarget(glBaseLayer.framebufferWidth, glBaseLayer.framebufferHeight, {
            format: RGBAFormat,
            type: UnsignedByteType,
            encoding: renderer.outputEncoding,
          });
        } else {
          isMultisample = attributes.antialias;
          let depthFormat = null;
          let depthType = null;
          let glDepthFormat = null;

          if (attributes.depth) {
            glDepthFormat = attributes.stencil ? gl.DEPTH24_STENCIL8 : gl.DEPTH_COMPONENT24;
            depthFormat = attributes.stencil ? DepthStencilFormat : DepthFormat;
            depthType = attributes.stencil ? UnsignedInt248Type : UnsignedShortType;
          }

          const projectionlayerInit = {
            colorFormat: renderer.outputEncoding === sRGBEncoding ? gl.SRGB8_ALPHA8 : gl.RGBA8,
            depthFormat: glDepthFormat,
            scaleFactor: framebufferScaleFactor,
          };

          glBinding = new XRWebGLBinding(session, gl);

          glProjLayer = glBinding.createProjectionLayer(projectionlayerInit);

          session.updateRenderState({ layers: [glProjLayer] });

          if (isMultisample) {
            newRenderTarget = new WebGLMultisampleRenderTarget(glProjLayer.textureWidth, glProjLayer.textureHeight, {
              format: RGBAFormat,
              type: UnsignedByteType,
              depthTexture: new DepthTexture(
                glProjLayer.textureWidth,
                glProjLayer.textureHeight,
                depthType,
                undefined,
                undefined,
                undefined,
                undefined,
                undefined,
                undefined,
                depthFormat
              ),
              stencilBuffer: attributes.stencil,
              ignoreDepth: glProjLayer.ignoreDepthValues,
              useRenderToTexture: hasMultisampledRenderToTexture,
              encoding: renderer.outputEncoding,
            });
          } else {
            newRenderTarget = new WebGLRenderTarget(glProjLayer.textureWidth, glProjLayer.textureHeight, {
              format: RGBAFormat,
              type: UnsignedByteType,
              depthTexture: new DepthTexture(
                glProjLayer.textureWidth,
                glProjLayer.textureHeight,
                depthType,
                undefined,
                undefined,
                undefined,
                undefined,
                undefined,
                undefined,
                depthFormat
              ),
              stencilBuffer: attributes.stencil,
              ignoreDepth: glProjLayer.ignoreDepthValues,
              encoding: renderer.outputEncoding,
            });
          }
        }

        // Set foveation to maximum.
        this.setFoveation(1.0);

        referenceSpace = await session.requestReferenceSpace(referenceSpaceType);

        animation.setContext(session);
        animation.start();

        scope.isPresenting = true;

        scope.dispatchEvent({ type: 'sessionstart' });
      }
    };
    function onInputSourcesChange(event) {
      const inputSources = session.inputSources;

      // Assign inputSources to available controllers
      for (let i = 0; i < controllers.length; i++) {
        inputSourcesMap.set(inputSources[i], controllers[i]);
      }

      // Notify disconnected
      for (let i = 0; i < event.removed.length; i++) {
        const inputSource = event.removed[i];
        const controller = inputSourcesMap.get(inputSource);

        if (controller) {
          controller.dispatchEvent({ type: 'disconnected', data: inputSource });
          inputSourcesMap.delete(inputSource);
        }
      }

      // Notify connected
      for (let i = 0; i < event.added.length; i++) {
        const inputSource = event.added[i];
        const controller = inputSourcesMap.get(inputSource);

        if (controller) {
          controller.dispatchEvent({ type: 'connected', data: inputSource });
        }
      }
    }
    //

    const cameraLPos = new Vector3();
    const cameraRPos = new Vector3();

    /**
     * Assumes 2 cameras that are parallel and share an X-axis, that the
     * cameras' projection and world matrices have already been set, and
     * that the near and far planes are identical for both cameras.
     * Visualization of this technique: https://computergraphics.stackexchange.com/a/4765
     */
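    // For reference (assuming three.js' standard makePerspective() matrix
    // layout, where m[10] = -(far + near) / (far - near) and
    // m[14] = -2 * far * near / (far - near)), the code below recovers
    //   near = m[14] / (m[10] - 1)
    //   far  = m[14] / (m[10] + 1)
    // and the *Fov values are tangents of the frustum half-angles,
    // e.g. leftFov = (m[8] - 1) / m[0] = left / near.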
    function setProjectionFromUnion(camera, cameraL, cameraR) {
      cameraLPos.setFromMatrixPosition(cameraL.matrixWorld);
      cameraRPos.setFromMatrixPosition(cameraR.matrixWorld);

      const ipd = cameraLPos.distanceTo(cameraRPos);

      const projL = cameraL.projectionMatrix.elements;
      const projR = cameraR.projectionMatrix.elements;

      // VR systems will have identical far and near planes, and
      // most likely identical top and bottom frustum extents.
      // Use the left camera for these values.
      const near = projL[14] / (projL[10] - 1);
      const far = projL[14] / (projL[10] + 1);
      const topFov = (projL[9] + 1) / projL[5];
      const bottomFov = (projL[9] - 1) / projL[5];

      const leftFov = (projL[8] - 1) / projL[0];
      const rightFov = (projR[8] + 1) / projR[0];
      const left = near * leftFov;
      const right = near * rightFov;

      // Calculate the new camera's position offset from the
      // left camera. xOffset should be roughly half `ipd`.
      const zOffset = ipd / (-leftFov + rightFov);
      const xOffset = zOffset * -leftFov;

      // TODO: Better way to apply this offset?
      cameraL.matrixWorld.decompose(camera.position, camera.quaternion, camera.scale);
      camera.translateX(xOffset);
      camera.translateZ(zOffset);
      camera.matrixWorld.compose(camera.position, camera.quaternion, camera.scale);
      camera.matrixWorldInverse.copy(camera.matrixWorld).invert();

      // Find the union of the frustum values of the cameras and scale
      // the values so that the near plane's position does not change in world
      // space, although they must now be relative to the new union camera.
      const near2 = near + zOffset;
      const far2 = far + zOffset;
      const left2 = left - xOffset;
      const right2 = right + (ipd - xOffset);
      const top2 = ((topFov * far) / far2) * near2;
      const bottom2 = ((bottomFov * far) / far2) * near2;

      camera.projectionMatrix.makePerspective(left2, right2, top2, bottom2, near2, far2);
    }
    function updateCamera(camera, parent) {
      if (parent === null) {
        camera.matrixWorld.copy(camera.matrix);
      } else {
        camera.matrixWorld.multiplyMatrices(parent.matrixWorld, camera.matrix);
      }

      camera.matrixWorldInverse.copy(camera.matrixWorld).invert();
    }

    this.updateCamera = function (camera) {
      if (session === null) return;

      cameraVR.near = cameraR.near = cameraL.near = camera.near;
      cameraVR.far = cameraR.far = cameraL.far = camera.far;

      if (_currentDepthNear !== cameraVR.near || _currentDepthFar !== cameraVR.far) {
        // Note that the new renderState won't apply until the next frame. See #18320
        session.updateRenderState({
          depthNear: cameraVR.near,
          depthFar: cameraVR.far,
        });

        _currentDepthNear = cameraVR.near;
        _currentDepthFar = cameraVR.far;
      }

      const parent = camera.parent;
      const cameras = cameraVR.cameras;

      updateCamera(cameraVR, parent);

      for (let i = 0; i < cameras.length; i++) {
        updateCamera(cameras[i], parent);
      }

      cameraVR.matrixWorld.decompose(cameraVR.position, cameraVR.quaternion, cameraVR.scale);

      // update user camera and its children
      camera.position.copy(cameraVR.position);
      camera.quaternion.copy(cameraVR.quaternion);
      camera.scale.copy(cameraVR.scale);
      camera.matrix.copy(cameraVR.matrix);
      camera.matrixWorld.copy(cameraVR.matrixWorld);

      const children = camera.children;

      for (let i = 0, l = children.length; i < l; i++) {
        children[i].updateMatrixWorld(true);
      }

      // update projection matrix for proper view frustum culling
      if (cameras.length === 2) {
        setProjectionFromUnion(cameraVR, cameraL, cameraR);
      } else {
        // assume single camera setup (AR)
        cameraVR.projectionMatrix.copy(cameraL.projectionMatrix);
      }
    };
    this.getCamera = function () {
      return cameraVR;
    };

    this.getFoveation = function () {
      if (glProjLayer !== null) {
        return glProjLayer.fixedFoveation;
      }

      if (glBaseLayer !== null) {
        return glBaseLayer.fixedFoveation;
      }

      return undefined;
    };

    this.setFoveation = function (foveation) {
      // 0 = no foveation = full resolution
      // 1 = maximum foveation = the edges render at lower resolution
      if (glProjLayer !== null) {
        glProjLayer.fixedFoveation = foveation;
      }

      if (glBaseLayer !== null && glBaseLayer.fixedFoveation !== undefined) {
        glBaseLayer.fixedFoveation = foveation;
      }
    };
    // Animation Loop

    let onAnimationFrameCallback = null;

    function onAnimationFrame(time, frame) {
      pose = frame.getViewerPose(referenceSpace);
      xrFrame = frame;

      if (pose !== null) {
        const views = pose.views;

        if (glBaseLayer !== null) {
          renderer.setRenderTargetFramebuffer(newRenderTarget, glBaseLayer.framebuffer);
          renderer.setRenderTarget(newRenderTarget);
        }

        let cameraVRNeedsUpdate = false;

        // check if it's necessary to rebuild cameraVR's camera list
        if (views.length !== cameraVR.cameras.length) {
          cameraVR.cameras.length = 0;
          cameraVRNeedsUpdate = true;
        }

        for (let i = 0; i < views.length; i++) {
          const view = views[i];

          let viewport = null;

          if (glBaseLayer !== null) {
            viewport = glBaseLayer.getViewport(view);
          } else {
            const glSubImage = glBinding.getViewSubImage(glProjLayer, view);
            viewport = glSubImage.viewport;

            // For side-by-side projection, we only produce a single texture for both eyes.
            if (i === 0) {
              renderer.setRenderTargetTextures(
                newRenderTarget,
                glSubImage.colorTexture,
                glProjLayer.ignoreDepthValues ? undefined : glSubImage.depthStencilTexture
              );

              renderer.setRenderTarget(newRenderTarget);
            }
          }

          const camera = cameras[i];

          camera.matrix.fromArray(view.transform.matrix);
          camera.projectionMatrix.fromArray(view.projectionMatrix);
          camera.viewport.set(viewport.x, viewport.y, viewport.width, viewport.height);

          if (i === 0) {
            cameraVR.matrix.copy(camera.matrix);
          }

          if (cameraVRNeedsUpdate === true) {
            cameraVR.cameras.push(camera);
          }
        }
      }

      //

      const inputSources = session.inputSources;

      for (let i = 0; i < controllers.length; i++) {
        const controller = controllers[i];
        const inputSource = inputSources[i];

        controller.update(inputSource, frame, referenceSpace);
      }

      if (onAnimationFrameCallback) onAnimationFrameCallback(time, frame);

      xrFrame = null;
    }

    const animation = new WebGLAnimation();

    animation.setAnimationLoop(onAnimationFrame);

    this.setAnimationLoop = function (callback) {
      onAnimationFrameCallback = callback;
    };

    this.dispose = function () {};
  }
}

export { WebXRManager };
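
// A minimal usage sketch (assuming the standard three.js setup, where
// WebGLRenderer instantiates this manager and exposes it as `renderer.xr`,
// and an existing `scene` and `camera`):
//
//   renderer.xr.enabled = true;
//   renderer.xr.setReferenceSpaceType('local-floor');
//
//   const session = await navigator.xr.requestSession('immersive-vr', {
//     optionalFeatures: ['local-floor'],
//   });
//   await renderer.xr.setSession(session);
//
//   // Rendering must go through the renderer's animation loop so that
//   // onAnimationFrame() above receives the per-frame XRFrame callbacks.
//   renderer.setAnimationLoop(() => renderer.render(scene, camera));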