diff options
Diffstat (limited to 'chromium/third_party/webxr_test_pages/webxr-samples/input-tracking.html')
-rw-r--r-- | chromium/third_party/webxr_test_pages/webxr-samples/input-tracking.html | 258 |
1 files changed, 258 insertions, 0 deletions
diff --git a/chromium/third_party/webxr_test_pages/webxr-samples/input-tracking.html b/chromium/third_party/webxr_test_pages/webxr-samples/input-tracking.html new file mode 100644 index 00000000000..487289c9062 --- /dev/null +++ b/chromium/third_party/webxr_test_pages/webxr-samples/input-tracking.html @@ -0,0 +1,258 @@ +<!doctype html> +<!-- +Copyright 2018 The Immersive Web Community Group + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+--> +<html> + <head> + <meta charset='utf-8'> + <meta name='viewport' content='width=device-width, initial-scale=1, user-scalable=no'> + <meta name='mobile-web-app-capable' content='yes'> + <meta name='apple-mobile-web-app-capable' content='yes'> + + <!-- Origin Trial Token, feature = WebXR Device API, origin = https://immersive-web.github.io, expires = 2018-08-28 --> +<meta http-equiv="origin-trial" data-feature="WebXR Device API" data-expires="2018-08-28" content="AnNpu7ceXvLew05ccD8Zr1OZsdZiB2hLQKK82kTTMDwF7oRKtP3QEJ4RzkeHrmB8Sq0vSV6ZNmszpBCZ0I8p9gAAAABceyJvcmlnaW4iOiJodHRwczovL2ltbWVyc2l2ZS13ZWIuZ2l0aHViLmlvOjQ0MyIsImZlYXR1cmUiOiJXZWJYUkRldmljZSIsImV4cGlyeSI6MTUzNTQxNDQwMH0="> + + <title>Input Tracking</title> + + <link href='css/common.css' rel='stylesheet'></link> + + <!--The polyfill is not needed for browser that have native API support, + but is linked by these samples for wider compatibility.--> + <!--script src='https://cdn.jsdelivr.net/npm/webxr-polyfill@latest/build/webxr-polyfill.js'></script--> + <script src='js/webxr-polyfill.js'></script> + + <!--This script patches up around implementation differences in past browser versions + so that the samples can always be written against the most recent spec changes. + It won't be necessary after the API has been officially shipped for a bit.--> + <script src='js/webxr-version-shim.js'></script> + + <script src='js/cottontail/build/cottontail.js'></script> + + <script src='js/webxr-button.js'></script> + </head> + <body> + <header> + <details open> + <summary>Input Tracking</summary> + <p> + This sample demonstrates basic tracking and rendering of + XRInputSources. It does not respond to button presses or other + controller interactions. 
          <a class="back" href="./">Back</a>
        </p>
      </details>
    </header>
    <script>
      // Whole sample lives in an IIFE so nothing leaks onto the global scope.
      // NOTE(review): this page targets the 2018 draft WebXR API
      // (navigator.xr.requestDevice, 'xrpresent' output contexts,
      // frame.getInputPose); the version shim below papers over differences
      // with the spec revisions of the time.
      (function () {
      'use strict';

      // If requested, initialize the WebXR polyfill
      if (QueryArgs.getBool('allowPolyfill', false)) {
        var polyfill = new WebXRPolyfill();
      }
      // Apply the version shim after the polyfill is instantiated, to ensure
      // that the polyfill also gets patched if necessary.
      var versionShim = new WebXRVersionShim();

      // XR globals.
      let xrButton = null;
      // Separate 'stage' frames of reference for the immersive and the
      // in-page ("magic window") sessions; onXRFrame selects the right one
      // based on which session is delivering the frame.
      let xrImmersiveFrameOfRef = null;
      let xrNonImmersiveFrameOfRef = null;

      // WebGL scene globals.
      let gl = null;
      let renderer = null;
      let scene = new Scene();
      scene.addNode(new Gltf2Node({url: 'media/gltf/cave/cave.gltf'}));
      scene.addNode(new SkyboxNode({url: 'media/textures/eilenriede-park-2k.png'}));
      scene.standingStats(true);

      // Entry point. Creates the enter-XR button, then, if WebXR is
      // available, requests an XR device, advertises immersive support on
      // the button, and immediately starts a non-immersive session rendered
      // through an 'xrpresent' canvas appended to the page. Any failure to
      // get a device drops to the non-XR fallback path.
      function initXR() {
        xrButton = new XRDeviceButton({
          onRequestSession: onRequestSession,
          onEndSession: onEndSession
        });
        document.querySelector('header').appendChild(xrButton.domElement);

        if (navigator.xr) {
          navigator.xr.requestDevice().then((device) => {
            // Only enable the button once we know an immersive session is
            // actually supported on this device.
            device.supportsSession({immersive: true}).then(() => {
              xrButton.setDevice(device);
            });

            // Note: If you don't want dragging on the canvas to do things like
            // scroll or pull-to-refresh, you'll want to set touch-action: none;
            // on the canvas' CSS style, which this page does in common.css
            let outputCanvas = document.createElement('canvas');
            let ctx = outputCanvas.getContext('xrpresent');

            device.requestSession({ outputContext: ctx })
                .then((session) => {
                  // Canvas is only attached once the session is granted, so
                  // a rejected request leaves the page untouched.
                  document.body.appendChild(outputCanvas);
                  onSessionStarted(session);
                });
          }).catch(() => {
            initFallback();
          });
        } else {
          initFallback();
        }
      }

      // No WebXR at all: draw the scene into a plain WebGL canvas,
      // emulating a standing (stage-height) viewpoint.
      function initFallback() {
        initGL();
        document.body.appendChild(gl.canvas);
        let fallbackHelper = new FallbackHelper(scene, gl);
        fallbackHelper.emulateStage = true;
      }

      // Lazily creates the single WebGL context and renderer shared by all
      // sessions; a no-op on subsequent calls. When compatibleDevice is
      // given, the context is created compatible with that XR device so it
      // can back an XRWebGLLayer.
      function initGL(compatibleDevice) {
        if (gl)
          return;

        gl = createWebGLContext({
          compatibleXRDevice: compatibleDevice
        });

        renderer = new Renderer(gl);

        scene.setRenderer(renderer);

        // Loads a generic controller mesh.
        scene.inputRenderer.setControllerMesh(new Gltf2Node({url: 'media/gltf/controller/controller.gltf'}));
      }

      // XRDeviceButton callback: requests an immersive session that mirrors
      // its output into a freshly created page canvas (removed again in
      // onSessionEnded).
      function onRequestSession(device) {
        // Set up a mirror canvas
        let mirrorCanvas = document.createElement('canvas');
        let ctx = mirrorCanvas.getContext('xrpresent');
        mirrorCanvas.setAttribute('id', 'mirror-canvas');
        document.body.appendChild(mirrorCanvas);

        device.requestSession({ immersive: true, outputContext: ctx }).then((session) => {
          xrButton.setSession(session);
          onSessionStarted(session);
        });
      }

      // Shared setup for both immersive and in-page sessions: end-event
      // listener, GL layer, 'stage' frame of reference, and the first
      // animation frame request.
      function onSessionStarted(session) {
        session.addEventListener('end', onSessionEnded);

        initGL(session.device);

        session.baseLayer = new XRWebGLLayer(session, gl);

        session.requestFrameOfReference('stage').then((frameOfRef) => {
          // Stash the frame of reference in the slot matching this session
          // kind; onXRFrame reads it back per frame.
          if (session.immersive) {
            xrImmersiveFrameOfRef = frameOfRef;
          } else {
            xrNonImmersiveFrameOfRef = frameOfRef;
          }

          session.requestAnimationFrame(onXRFrame);
        });
      }

      // Button callback: just asks the session to end; actual teardown
      // happens in the 'end' event handler below.
      function onEndSession(session) {
        session.end();
      }

      // 'end' event handler: for the immersive session, tear down the
      // mirror canvas created in onRequestSession and reset the button.
      function onSessionEnded(event) {
        if (event.session.immersive) {
          document.body.removeChild(document.querySelector('#mirror-canvas'));
          xrButton.setSession(null);
        }
      }

      // Per-frame input visualization: for every tracked input source,
      // draws a controller mesh at its grip pose, a laser pointer for
      // hand-held pointers, and a cursor 2m down the target ray.
      function updateInputSources(session, frame, frameOfRef) {
        let inputSources = session.getInputSources();

        for (let inputSource of inputSources) {
          let inputPose = frame.getInputPose(inputSource, frameOfRef);

          // We may not get a pose back in cases where the input source has lost
          // tracking or does not know where it is relative to the given frame
          // of reference.
          if (!inputPose) {
            continue;
          }

          if (inputPose.gripMatrix) {
            // If we have a grip matrix use it to render a mesh showing the
            // position of the controller.
            scene.inputRenderer.addController(inputPose.gripMatrix);
          }

          if (inputPose.targetRay) {
            if (inputSource.targetRayMode == 'tracked-pointer') {
              // If we have a pointer matrix and the pointer origin is the user's
              // hand (as opposed to their head or the screen) use it to render
              // a ray coming out of the input device to indicate the pointer
              // direction.
              scene.inputRenderer.addLaserPointer(inputPose.targetRay);
            }

            // If we have a pointer matrix we can also use it to render a cursor
            // for both handheld and gaze-based input sources.

            // Statically render the cursor 2 meters down the ray since we're
            // not calculating any intersections in this sample.
            let cursorDistance = 2.0;
            let cursorPos = vec3.fromValues(
              inputPose.targetRay.origin.x,
              inputPose.targetRay.origin.y,
              inputPose.targetRay.origin.z
            );
            // cursorPos = origin + direction * cursorDistance
            vec3.add(cursorPos, cursorPos, [
              inputPose.targetRay.direction.x * cursorDistance,
              inputPose.targetRay.direction.y * cursorDistance,
              inputPose.targetRay.direction.z * cursorDistance,
            ]);
            // vec3.transformMat4(cursorPos, cursorPos, inputPose.targetRay.transformMatrix);

            scene.inputRenderer.addCursor(cursorPos);
          }
        }
      }

      // Per-frame callback for both session kinds. Picks the frame of
      // reference matching the delivering session, schedules the next frame
      // (before drawing, so an exception mid-draw doesn't stall the loop),
      // updates input visuals, and renders.
      function onXRFrame(t, frame) {
        let session = frame.session;
        let frameOfRef = session.immersive ?
            xrImmersiveFrameOfRef :
            xrNonImmersiveFrameOfRef;
        let pose = frame.getDevicePose(frameOfRef);

        scene.startFrame();

        session.requestAnimationFrame(onXRFrame);

        updateInputSources(session, frame, frameOfRef);

        scene.drawXRFrame(frame, pose);

        scene.endFrame();
      }

      // Start the XR application.
      initXR();
      })();
    </script>
  </body>
</html>