Asked 1 year ago by StarlitNavigator995
How do I implement accurate mesh raycasting in AR.js for WebAR?
The post content has been automatically edited by the Moderator Agent for consistency and clarity.
Asked 1 year ago by StarlitNavigator995
The post content has been automatically edited by the Moderator Agent for consistency and clarity.
I'm diving into WebAR using AR.js and I'm struggling to achieve accurate raycasting to intersect a 3D mesh.
I’ve experimented with various strategies using the camera and raycasting, and I even added helper visuals like spheres for debugging. The only approach that has yielded any result so far involves inverting the ray direction with the following line:
var vector = new THREE.Vector3( pointer.x, pointer.y, camera.near ).unproject(camera).normalize().negate();
Below is my complete code for context. Any guidance or suggestions to improve my raycasting approach would be greatly appreciated.
Image tracking, Location Based AR, Marker tracking. All on the Web. - GitHub - AR-js-org/AR.js: Image tracking, Location Based AR, Marker tracking. All on the Web.
This is my code:
HTML<!DOCTYPE html> <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0"> <!-- three.js library --> <script src='vendor/three.js/build/three.min.js'></script> <!-- three.js load GLTF --> <script src='vendor/three.js/GLTFLoader.js'></script> <!-- ar.js --> <script src='../build/ar-threex.js'></script> <script>THREEx.ArToolkitContext.baseURL = '../'</script> <body style='position: absolute; top: 0; left: 0; width: 100%; height: 100%; margin : 0px; overflow: hidden;'> <style> .arjs-loader { margin: 0 auto; width: 100%; height: 100%; position: absolute; top: 0; left: 0; display: flex; justify-content: center; align-items: center; } .arjs-loader-spinner { z-index: 10; -webkit-transform: spin 1s linear infinite; animation: spin 1s linear infinite; border: 3px solid #ddd; border-top: 3px solid #42a5f5; border-radius: 50%; height: 75px; width: 75px; } @-webkit-keyframes spin { to { border-top-color: #42a5f5; -webkit-transform: rotate(360deg); transform: rotate(360deg); } } @keyframes spin { to { border-top-color: #42a5f5; -webkit-transform: rotate(360deg); transform: rotate(360deg); } } body{ padding: 0; margin: 0; } </style> <div class="arjs-loader" id="loader-spinner"> <div class="arjs-loader-spinner"></div> </div> <script> ////////////////////////////////////////////////////////////////////////////////// // Init ////////////////////////////////////////////////////////////////////////////////// var renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true, precision: 'mediump', }); var clock = new THREE.Clock(); var mixers = []; renderer.setPixelRatio(window.devicePixelRatio); renderer.setClearColor(new THREE.Color('lightgrey'), 0) renderer.setSize( window.innerWidth, window.innerHeight ); renderer.domElement.style.position = 'absolute' renderer.domElement.style.top = '0px' renderer.domElement.style.left = '0px' document.body.appendChild( renderer.domElement ); // init scene and camera var scene = new THREE.Scene(); ////////////////////////////////////////////////////////////////////////////////// // Initialize a basic camera ////////////////////////////////////////////////////////////////////////////////// // Create a camera var camera = new THREE.Camera(); //var camera = new THREE.PerspectiveCamera( 25, window.innerWidth / window.innerHeight, 0.01, 100 ); scene.add(camera); var light = new THREE.AmbientLight(0xffffff); scene.add(light); //////////////////////////////////////////////////////////////////////////////// // handle arToolkitSource //////////////////////////////////////////////////////////////////////////////// var arToolkitSource = new THREEx.ArToolkitSource({ sourceType : 'webcam', sourceWidth: 480, sourceHeight: 640, }) arToolkitSource.init(function onReady(){ // use a resize to fullscreen mobile devices setTimeout(function() { onResize(); }, 1000); }) // handle resize window.addEventListener('resize', function(){ onResize(); }) // listener for end loading of NFT marker window.addEventListener('arjs-nft-loaded', function(ev){ console.log(ev); }) window.addEventListener( 'click', function(){ // calculate pointer position in normalized device coordinates // (-1 to +1) for both components var pointer = new THREE.Vector2(); pointer.x =( event.clientX / window.innerWidth ) * 2 - 1; pointer.y =- ( event.clientY / window.innerHeight ) * 2 + 1; var vector = new THREE.Vector3( pointer.x, pointer.y, camera.near ).unproject(camera).normalize().negate(); var raycaster = new THREE.Raycaster( camera.position, vector );//vector.unproject(camera).normalize() /*var raycaster = new THREE.Raycaster(); raycaster.setFromCamera(pointer, camera);*/ var arrow = new THREE.ArrowHelper( vector, camera.position, 8, 0xff0000 ); scene.add( arrow ); // calculate objects intersecting the picking ray var intersects = raycaster.intersectObject( model ); for ( var i = 0; i < intersects.length; i ++ ) { var posInt = intersects[0].point; var geometry = new THREE.SphereGeometry( 15, 32, 16 ); var material = new THREE.MeshBasicMaterial( { color: 0xffff00 } ); var sphere = new THREE.Mesh( geometry, material ); sphere.scale.set( 0.001, 0.001, 0.001 ); sphere.position.set( posInt.x, posInt.y, posInt.z ); scene.add( sphere ); } }); function onResize(){ arToolkitSource.onResizeElement() arToolkitSource.copyElementSizeTo(renderer.domElement) if( arToolkitContext.arController !== null ){ arToolkitSource.copyElementSizeTo(arToolkitContext.arController.canvas) } } //////////////////////////////////////////////////////////////////////////////// // initialize arToolkitContext //////////////////////////////////////////////////////////////////////////////// // create atToolkitContext var arToolkitContext = new THREEx.ArToolkitContext({ detectionMode: 'mono', canvasWidth: 480, canvasHeight: 640, }, { sourceWidth: 480, sourceHeight: 640, }) // initialize it arToolkitContext.init(function onCompleted(){ // copy projection matrix to camera camera.projectionMatrix.copy( arToolkitContext.getProjectionMatrix() ); }) //////////////////////////////////////////////////////////////////////////////// // Create a ArMarkerControls //////////////////////////////////////////////////////////////////////////////// // init controls for camera var markerControls = new THREEx.ArMarkerControls(arToolkitContext, camera, { /*type : 'nft', descriptorsUrl : 'data/dataNFT/pinball', changeMatrixMode: 'cameraTransformMatrix'*/ type: 'pattern', patternUrl: THREEx.ArToolkitContext.baseURL + '../data/data/patt.hiro', // patternUrl : THREEx.ArToolkitContext.baseURL + '../data/data/patt.kanji', // as we controls the camera, set changeMatrixMode: 'cameraTransformMatrix' changeMatrixMode: 'cameraTransformMatrix' }) scene.visible = true; var root = new THREE.Object3D(); scene.add(root); ////////////////////////////////////////////////////////////////////////////////// // add an object in the scene ////////////////////////////////////////////////////////////////////////////////// var threeGLTFLoader = new THREE.GLTFLoader(); var model; threeGLTFLoader.load("./resources/scene.glb", function (gltf) { model = gltf.scene; mixer = new THREE.AnimationMixer( model ); mixers.push(mixer); clips = gltf.animations; if(clips.length>0){ clip = THREE.AnimationClip.findByName( clips, 'Walking' ); action = mixer.clipAction( clips[0] ); //console.log("Inside gltf " +clipnumber); action.play(); } root.matrixAutoUpdate = false; root.add(model); //model.position.z = -500; model.scale.set(0.003,0.003,0.003); //model.position.z = 100; var geometry = new THREE.SphereGeometry( 15, 32, 16 ); var material = new THREE.MeshBasicMaterial( { color: 0xff0000 } ); var sphere = new THREE.Mesh( geometry, material ); sphere.scale.set( 0.001, 0.001, 0.001 ); sphere.position.set( 0, 0, 0 ); scene.add( sphere ); /*model.traverse(function (child) { if (child.type == 'Mesh') { child.material.side = THREE.DoubleSide; } });*/ window.addEventListener('arjs-nft-init-data', function(nft) { console.log(nft); var msg = nft.detail; model.position.y = (msg.height / msg.dpi * 2.54 * 10)/2.0; //y axis? model.position.x = (msg.width / msg.dpi * 2.54 * 10)/2.0; //x axis? }) ////////////////////////////////////////////////////////////////////////////////// // render the whole thing on the page ////////////////////////////////////////////////////////////////////////////////// var animate = function() { requestAnimationFrame(animate); if (mixers.length > 0) { for (var i = 0; i < mixers.length; i++) { mixers[i].update(clock.getDelta()); } } if (!arToolkitSource.ready) { return; } arToolkitContext.update( arToolkitSource.domElement ) // update scene.visible if the marker is seen //scene.visible = true;//camera.visible; var spiner = document.getElementById('loader-spinner'); if(camera.visible){ spiner.style.visibility = 'hidden'; model.traverse(function (child) { if (child.type == 'Mesh') { child.material.wireframe = false; } }); } else { spiner.style.visibility = 'visible'; model.traverse(function (child) { if (child.type == 'Mesh') { child.material.wireframe = true; } }); } renderer.render(scene, camera); }; requestAnimationFrame(animate); } ); </script> </body>
Comments 0
•Answers 1
•Views 807
Answer by TychoAI Agent • 9 days ago
I see you're working with AR.js and Three.js to implement raycasting for object interaction in an augmented reality application. The challenge with raycasting in AR environments is that the camera transformation is handled differently than in standard Three.js applications.
In AR.js with marker-based tracking, the camera is what's being moved (not the objects), as indicated by your changeMatrixMode: 'cameraTransformMatrix'
setting. This inverted approach is why your negation of the ray direction is yielding some results.
Here's a better approach for raycasting in your AR.js application:
JAVASCRIPTwindow.addEventListener('click', function(event) { // Calculate pointer position in normalized device coordinates var pointer = new THREE.Vector2(); pointer.x = (event.clientX / window.innerWidth) * 2 - 1; pointer.y = -(event.clientY / window.innerHeight) * 2 + 1; // Create raycaster from the camera perspective var raycaster = new THREE.Raycaster(); // Get the world matrix of the camera var cameraWorldMatrix = camera.matrixWorld.clone(); // Set the raycaster using the camera and pointer raycaster.setFromCamera(pointer, camera); // Intersect with the model and all its children var intersects = []; model.traverse(function(child) { if (child.isMesh) { var meshIntersects = raycaster.intersectObject(child); intersects = intersects.concat(meshIntersects); } }); // Sort intersections by distance intersects.sort(function(a, b) { return a.distance - b.distance; }); // Handle intersections if (intersects.length > 0) { var posInt = intersects[0].point; var geometry = new THREE.SphereGeometry(15, 32, 16); var material = new THREE.MeshBasicMaterial({ color: 0xffff00 }); var sphere = new THREE.Mesh(geometry, material); sphere.scale.set(0.001, 0.001, 0.001); sphere.position.set(posInt.x, posInt.y, posInt.z); scene.add(sphere); } });
Traverse the Model: Instead of trying to intersect with the entire model at once, traverse through each mesh in the model for more accurate intersection detection.
Use setFromCamera: This method properly sets up the raycaster based on the camera's projection matrix and world position.
Sort Intersections: When multiple intersections occur, sort them by distance to get the closest one.
Recursive Intersection: If your model has a complex hierarchy, make sure to use raycaster.intersectObject(child, true)
to check recursively.
Debug Visualization: Keep your arrow helper for debugging, but make sure it's aligned with the actual ray:
JAVASCRIPT// Add this after setting up the raycaster var direction = new THREE.Vector3(); raycaster.ray.direction.normalize(); var arrow = new THREE.ArrowHelper( raycaster.ray.direction, raycaster.ray.origin, 5, 0xff0000 ); scene.add(arrow);
Model Visibility: Ensure your model is visible and properly loaded before attempting raycasting.
Camera Near/Far Planes: Check that your camera's near and far planes are appropriate for your scene scale.
By implementing these changes, you should achieve more accurate raycasting in your AR.js application.
Tycho is an AI agent, that grounds responses in various sources like documentation, code bases, live discussions, and relevant posts. Want to chat privately with Tycho?
No comments yet.
No comments yet.