WebVR to WebXR #42


Merged · 19 commits · Apr 3, 2024
Changes from 9 commits
aquarium/aquarium-config.js (2 changes: 1 addition & 1 deletion)

@@ -2,5 +2,5 @@

var g_aquariumConfig = {
aquariumRoot: '',
-enableVR: false
+enableVR: true,
};
aquarium/aquarium.js (285 changes: 158 additions & 127 deletions)

@@ -19,7 +19,7 @@ tdl.require('tdl.webgl');
const g_query = parseQueryString(window.location.search);

function isMultiviewSupportEnabled() {
-return g_aquariumConfig.enableVR && g_query.enableMultiview && g_query.enableMultiview == 'true';
+return g_aquariumConfig.enableVR || (g_query.enableMultiview && g_query.enableMultiview == 'true');
}

if (isMultiviewSupportEnabled()) {
@@ -57,7 +57,6 @@ var g_getCount = 0;
var g_putCount = 0;

var g_frameData;
-var g_vrDisplay;
var g_vrUi;

var g_multiviewFb; // multiview framebuffer.
@@ -88,12 +87,16 @@ var g_lightRayRotRange = 1.0;
var g_lightRayRotLerp = 0.2;
var g_lightRayOffset = Math.PI * 2 / g_numLightRays;
var g_lightRayInfo = [];
+var g_session = null;
+var g_xrImmersiveRefSpace = null;
+var g_onXRFrame = ()=>{};
+var g_onAnimationFrame = () => {};

var g_ui = [
{ obj: 'globals', name: 'speed', value: 1, max: 4 },
{ obj: 'globals', name: 'targetHeight', value: 0, max: 150 },
{ obj: 'globals', name: 'targetRadius', value: 88, max: 200 },
-{ obj: 'globals', name: 'eyeHeight', value: 19, max: 150 },
+{ obj: 'globals', name: 'eyeHeight', value: 150, max: 150 },
{ obj: 'globals', name: 'eyeRadius', value: 60, max: 200 },
{ obj: 'globals', name: 'eyeSpeed', value: 0.06, max: 1 },
{ obj: 'globals', name: 'fieldOfView', value: 85, max: 179, min: 1},
@@ -883,7 +886,7 @@ function main() {

g_fpsTimer = new tdl.fps.FPSTimer();
if (isMultiviewSupportEnabled()) {
-gl = tdl.webgl.setupWebGL(canvas, {antialias: false}, undefined, 'webgl2');
+gl = tdl.webgl.setupWebGL(canvas, {antialias: false, xrCompatible: true}, undefined, 'webgl2');
multiview = gl.getExtension('OVR_multiview2');
} else {
gl = tdl.webgl.setupWebGL(canvas);
@@ -1259,7 +1262,7 @@ function initialize() {
gl.clearColor(0,0.8,1,0);
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT | gl.STENCIL_BUFFER_BIT);

-var presentingVR = g_vrDisplay && g_vrDisplay.isPresenting;
+var presentingVR = g_session && g_session.isImmersive;

var uiMatrix = new Float32Array(16);

@@ -1272,7 +1275,7 @@
}
fast.matrix4.copy(viewInverse, viewInverseMatrix);
// TODO: Support VRUI when using multiview? Would require adding multiview shaders to UI and changing UI matrices when multiview is on.
-if (!useMultiview && presentingVR && pose) {
+if (!useMultiview && presentingVR && pose && g_vrUi) {
// Hard coded FPS translation vector and pin the whole UI in front of the user in VR mode. This hard coded position
// vector used only once here.
calculateViewMatrix(uiMatrix, pose.orientation, [0, 0, 10]);
@@ -1701,6 +1704,106 @@ function initialize() {
render(monoProjection, viewInverseTemp);
}

function onXRFrame(now, frame) {
let session = frame.session;
let refSpace = g_xrImmersiveRefSpace;
let pose = frame.getViewerPose(refSpace);

var now = theClock.getTime();
var elapsedTime;
if(then == 0.0) {
elapsedTime = 0.0;
} else {
elapsedTime = now - then;
}
then = now;

if (g.net.sync) {
clock = now * g.globals.speed;
eyeClock = now * g.globals.eyeSpeed;
} else {
// we have our own clock.
clock += elapsedTime * g.globals.speed;
eyeClock += elapsedTime * g.globals.eyeSpeed;
}

frameCount++;
g_fpsTimer.update(elapsedTime);
fpsElem.innerHTML = g_fpsTimer.averageFPS;

if (g_shadersNeedUpdate) {
setShaders(true);
g_shadersNeedUpdate = false;
}

if (g_vrUi) {
// Set fps and prepare rendering it.
g_vrUi.setFps(g_fpsTimer.averageFPS);

// Query gamepad button clicked event.
g_vrUi.queryGamepadStatus();

// TODO: Support VRUI when doing multiview rendering.
if (!useMultiviewForStereo() && g_vrUi.isMenuMode) {

// When the VR UI is in menu mode, it needs a cursor to help the user make selections. Currently, the cursor
// uses a head-neck model, i.e. a point in front of the user that moves as the user rotates their head (with the HMD).
// A click event is triggered when the user stares at a label for 2 seconds.
// TODO: add gamepad support to control the cursor and trigger select events with VR controllers.

// jQuery selector description.
var selectorDescription;

// The VR UI reports whether an option has been selected in VR mode.
var clickedLabel = g_vrUi.queryClickedLabel([0, 0, 0], g_frameData.pose.orientation);
if (clickedLabel != null) {
if (clickedLabel.isAdvancedSettings) {
selectorDescription = "#optionsContainer > div:contains(" + clickedLabel.name + ")";
$(selectorDescription).click();
} else if (clickedLabel.name == "options") {
$("#options").click();
} else {
selectorDescription = "#setSetting" + clickedLabel.name;
$(selectorDescription).click();
}
}
}
}

// Using a head-neck model in VR mode because of unclear distance measurement (VR returns positions in meters);
// the user could see around but couldn't move around.
eyePosition[0] = g.globals.eyeRadius;
eyePosition[1] = g.globals.eyeHeight;
eyePosition[2] = g.globals.eyeRadius;
Contributor:
It's not clear to me what using the eye radius here is intended to do, but I can pretty confidently say the position from the appropriate pose view's transform should be used here instead. Something like:

const viewPos = pose.views[n].transform.position;
eyePosition = [viewPos.x, viewPos.y, viewPos.z];

The comment notes that the "user could see around but couldn't move around", but this code ignores all the positional data provided by the headset if it's capable of positional tracking. (If it's not, like a simple cardboard viewer, the WebXR implementation will provide its own neck model. You don't need to emulate it in your code.)

If you were previously using the view positions from WebXR and still not seeing any apparent movement, that may be because the scale of the aquarium is not in the same units as WebXR reports (I think this is what the comment is saying?). If so, the appropriate way to rectify that is to either scale the environment and the objects in it to match WebXR's scale (as the comment notes: 1 unit == 1 meter) or apply a scale to the view matrices computed from the WebXR data (position included) to match the environment.
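
A minimal sketch of that second option, assuming a hypothetical unitsPerMeter conversion factor and the column-major Float32Array matrices WebXR returns (illustrative only, not part of this PR):

for (const view of pose.views) {
  // Copy so the pose's own matrix isn't mutated, then rescale the translation
  // entries (indices 12-14 of a column-major 4x4) from meters to scene units.
  const viewMatrix = new Float32Array(view.transform.inverse.matrix);
  viewMatrix[12] *= unitsPerMeter;
  viewMatrix[13] *= unitsPerMeter;
  viewMatrix[14] *= unitsPerMeter;
  render(view.projectionMatrix, viewMatrix, false, view);
}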

Contributor Author:
Made this change but removed it later because it was unnecessary, since all matrices were coming from xrPose.


let vrPose = [pose.transform.orientation.x, pose.transform.orientation.y, pose.transform.orientation.z, 1]
Contributor:
This discards the w value from the original orientation quaternion, which will yield inaccurate results and is likely part of the cause for the issue with head rotation not responding correctly all the way around. Please use the full quaternion reported by the pose.

Suggested change:
-let vrPose = [pose.transform.orientation.x, pose.transform.orientation.y, pose.transform.orientation.z, 1]
+let vrPose = [pose.transform.orientation.x, pose.transform.orientation.y, pose.transform.orientation.z, pose.transform.orientation.w]
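
For intuition (illustrative numbers, not from the PR): a unit quaternion satisfies x^2 + y^2 + z^2 + w^2 == 1, so w carries real information. A 180-degree head turn about the Y axis is the quaternion (0, 1, 0, 0); hardcoding w = 1 turns that into (0, 1, 0, 1), which after normalization is only a 90-degree turn, consistent with rotation misbehaving "all the way around".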

Contributor Author:
Tried it in Oculus Browser and Wolvic on a Quest 2, and it does not work; going to investigate.
Tested with Cardboard on a Galaxy A34 in Chrome versions 120.0.6099.145 and .193.
Previously, older versions of Chrome crashed on entering XR, but this no longer happens.

I made this change but later removed this code because it was unnecessary since all matrices were coming from xrPose.

calculateViewMatrix(viewInverseTemp, vrPose, eyePosition);
Contributor:
There's no stereo effect in VR or when using the "stereo mode" on desktop, at least in part because the same view matrix is used for both views. This causes each "eye" to render from the same location. To get accurate results, the view matrix should be calculated per view.

Also, WebXR provides its own convenience method for computing the view matrices if you want to use it, instead of building these out manually.

viewInverseTemp = pose.views[n].transform.inverse.matrix;

Once this is fixed, if there still appears to be a lack of stereo depth, it could be related to the scale mismatch discussed in the comment above. Objects that are very large and far away will lack visible stereo depth.
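
A rough back-of-envelope check of that last point (illustrative numbers, not from the PR):

// With 1 unit == 1 meter, geometry ~60 m from the viewer produces almost no
// binocular disparity, so the scene looks mono even with correct per-eye views.
const ipdMeters = 0.063;       // typical adult interpupillary distance
const distanceMeters = 60;     // e.g. the default eyeRadius read as meters
const disparityDegrees = Math.atan(ipdMeters / distanceMeters) * 180 / Math.PI;
console.log(disparityDegrees.toFixed(3)); // ~"0.060" degrees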

Contributor Author:
ditto


let glLayer = session.renderState.baseLayer;

gl.bindFramebuffer(gl.FRAMEBUFFER, glLayer.framebuffer);
gl.enable(gl.SCISSOR_TEST);

let xrViewport = glLayer.getViewport(pose.views[0]);
gl.viewport(xrViewport.x, xrViewport.y, xrViewport.width, xrViewport.height);
gl.scissor(xrViewport.x, xrViewport.y, xrViewport.width, xrViewport.height);
render(pose.views[0].projectionMatrix, viewInverseTemp, false, pose);

xrViewport = glLayer.getViewport(pose.views[1]);
gl.viewport(xrViewport.x, xrViewport.y, xrViewport.width, xrViewport.height);
gl.scissor(xrViewport.x, xrViewport.y, xrViewport.width, xrViewport.height);
render(pose.views[1].projectionMatrix, viewInverseTemp, false, pose);
Contributor:
The fact that you're using the right viewports and projection matrices here is great, but as noted above you should be recalculating the view matrix as well for each eye. Also, you can make this more robust to a variety of XR devices (and make the code easier on yourself) by looping through all of the given views rather than assuming there are only ever 2. Something like the following:

for (const view of pose.views) {
  let xrViewport = glLayer.getViewport(view);
  gl.viewport(xrViewport.x, xrViewport.y, xrViewport.width, xrViewport.height);
  gl.scissor(xrViewport.x, xrViewport.y, xrViewport.width, xrViewport.height);
  // Not clear on what the final arg is used for here? Glancing at the code it looks like it should probably be view instead of pose.
  render(view.projectionMatrix, view.transform.inverse.matrix, false, view);
}

Contributor Author:
Done

Also removed the obsolete calculateViewMatrix block.

//renderStereo(pose.views[0].projectionMatrix, pose.views[1].projectionMatrix, viewInverseTemp, vrPose);
// gl.clearColor(Math.cos(now / 2000),
// Math.cos(now / 4000),
// Math.cos(now / 6000), 1.0);

// gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
session.requestAnimationFrame(onXRFrame);
}

g_onXRFrame = onXRFrame;

function onAnimationFrame() {
var now = theClock.getTime();
var elapsedTime;
@@ -1759,70 +1862,15 @@
if (!g.options.reflection.enabled) { g.options.reflection.toggle(); }
}

-if (g_vrDisplay) {
-  g_requestId = g_vrDisplay.requestAnimationFrame(onAnimationFrame);
-  g_vrDisplay.getFrameData(g_frameData);
-} else {
-  g_requestId = requestAnimationFrame(onAnimationFrame);
-}
+g_requestId = requestAnimationFrame(onAnimationFrame);

if (g_shadersNeedUpdate) {
-var isInStereoMode = (g_vrDisplay && g_vrDisplay.isPresenting) || g_stereoDemoActive;
+var isInStereoMode = g_stereoDemoActive;
setShaders(isInStereoMode && useMultiviewForStereo());
g_shadersNeedUpdate = false;
}

-if (g_vrDisplay && g_vrDisplay.isPresenting) {
-  /* VR UI is enabled in VR Mode. VR UI has two mode, menu mode is the mirror of control panel of
-   * aquarium and non-menu mode may presents fps(could be turn off) in front of user. These two
-   * mode is controlled by isMenuMode flag and this flag is set by any keyboard event or gamepad
-   * button click.
-   */

-  // Set fps and prepare rendering it.
-  g_vrUi.setFps(g_fpsTimer.averageFPS);

-  // Query gamepad button clicked event.
-  g_vrUi.queryGamepadStatus();

-  // TODO: Support VRUI when doing multiview rendering.
-  if (!useMultiviewForStereo() && g_vrUi.isMenuMode) {

-    // When VR UI in menu mode, UI need a cursor to help user do select operation. Currently, cursor uses
-    // head-neck model which means a point in front of user and user could move the point by rotating their head(with HMD).
-    // A click event will be triggered when user stare at a label 2 seconds.
-    // TODO : add gamepad support to control cursor and trigger select event with VR controllers.

-    // Jquery selector description.
-    var selectorDescription;

-    // VR UI return whether there is an option been selected in VR mode.
-    var clickedLabel = g_vrUi.queryClickedLabel([0, 0, 0], g_frameData.pose.orientation);
-    if (clickedLabel != null) {
-      if (clickedLabel.isAdvancedSettings) {
-        selectorDescription = "#optionsContainer > div:contains(" + clickedLabel.name + ")";
-        $(selectorDescription).click();
-      } else if (clickedLabel.name == "options") {
-        $("#options").click();
-      } else {
-        selectorDescription = "#setSetting" + clickedLabel.name;
-        $(selectorDescription).click();
-      }
-    }
-  }

-  // Using head-neck model in VR mode because of unclear distance measurement(vr return position using meters),
-  // user could see around but couldn't move around.
-  eyePosition[0] = g.globals.eyeRadius;
-  eyePosition[1] = g.globals.eyeHeight;
-  eyePosition[2] = g.globals.eyeRadius;

-  calculateViewMatrix(viewInverseTemp, g_frameData.pose.orientation, eyePosition);

-  renderStereo(g_frameData.leftProjectionMatrix, g_frameData.rightProjectionMatrix, viewInverseTemp, g_frameData.pose);

-  g_vrDisplay.submitFrame();
-} else if (g_stereoDemoActive) {
+if (g_stereoDemoActive) {
var near = 1;
var far = 25000;
var aspect = (canvas.clientWidth * 0.5) / canvas.clientHeight;
@@ -1862,6 +1910,7 @@
}
}

+g_onAnimationFrame = onAnimationFrame;
onAnimationFrame();
return true;
}
@@ -2136,38 +2185,48 @@ $(function(){
return path.substring(0, path.lastIndexOf('/'));
}

-function onPresentChange() {
-  // When we begin or end presenting, the canvas should be resized
-  // to the recommended dimensions for the display.
-  resize();

-  g_shadersNeedUpdate = true;

-  if (g_vrDisplay.isPresenting) {
-    if (g_vrDisplay.capabilities.hasExternalDisplay) {
-      removeButton(vrButton);
-      vrButton = addButton("Exit VR", "E", getCurrentUrl() + "/vr_assets/button.png", onExitPresent);
-    }
-  } else {
-    if (g_vrDisplay.capabilities.hasExternalDisplay) {
-      removeButton(vrButton);
-      vrButton = addButton("Enter VR", "E", getCurrentUrl() + "/vr_assets/button.png", onRequestPresent);
-    }
-  }
-}

function onRequestPresent() {
-  g_vrDisplay.requestPresent([{ source: canvas }]).then(function() {}, function() {
-    console.error("request present failed.");
-  });
+  return navigator.xr.requestSession('immersive-vr').then((session) => {
+    session.isImmersive = true;
+    g_session = session;

+    session.updateRenderState({ baseLayer: new XRWebGLLayer(session, gl) });

+    let refSpaceType = 'local';
+    session.requestReferenceSpace(refSpaceType).then((refSpace) => {
+      g_xrImmersiveRefSpace = refSpace;
+      g_shadersNeedUpdate = true;
+      g.globals.eyeHeight = 15;
+      g.globals.eyeRadius = 1;
+      session.requestAnimationFrame(g_onXRFrame);
+    });
+    session.addEventListener('end', onSessionEnded);
+  });
}

function onExitPresent() {
-  if (!g_vrDisplay.isPresenting)
-    return;
-  g_vrDisplay.exitPresent().then(function() {}, function() {
-    console.error("exit present failed.");
-  });
+  g_session.end();
+  location.reload();
}

+function onSessionEnded(event) {
+  if (event.session.isImmersive) {
+    onExitPresent();
+  }
+}

+function onDeviceChange() {
+  vrButton ?? removeButton(vrButton);
+  // Checks to ensure that 'immersive-vr' mode is available, and only
+  // enables the button if so.
+  navigator.xr.isSessionSupported('immersive-vr').then((supported) => {
+    if (supported) {
+      vrButton = addButton("Enter VR", "E", getCurrentUrl() + "/vr_assets/button.png", onRequestPresent);
+      g_vrUi = new Ui(gl, g_numFish);
+      g_vrUi.load("./vr_assets/ui/config.js");
+    }
+  });
+}

@@ -2177,19 +2236,9 @@
}

function resize() {
-  if (g_vrDisplay && g_vrDisplay.isPresenting) {
-    // If we're presenting we want to use the drawing buffer size
-    // recommended by the VRDisplay, since that will ensure the best
-    // results post-distortion.
-    var leftEye = g_vrDisplay.getEyeParameters("left");
-    var rightEye = g_vrDisplay.getEyeParameters("right");

-    setCanvasSize(canvas, Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2, Math.max(leftEye.renderHeight, rightEye.renderHeight));
-  } else {
-    // When we're not presenting, we want to change the size of the canvas
-    // to match the window dimensions.
-    setCanvasSize(canvas, g.globals.width, g.globals.height);
-  }
+  // When we're not presenting, we want to change the size of the canvas
+  // to match the window dimensions.
+  setCanvasSize(canvas, g.globals.width, g.globals.height);
}

function onResize() {
@@ -2202,35 +2251,17 @@

function initPostDOMLoaded() {
if (g_aquariumConfig.enableVR) {
-    if(navigator.getVRDisplays) {
-      g_frameData = new VRFrameData();

-      navigator.getVRDisplays().then(function(displays) {
-        if (displays.length > 0) {
-          g_vrDisplay = displays[0];
-          g_vrDisplay.depthNear = 0.1;
-          g_vrDisplay.depthFar = 1024.0;

-          if (g_vrDisplay.capabilities.canPresent) {
-            vrButton = addButton("Enter VR", "E", getCurrentUrl() + "/vr_assets/button.png", onRequestPresent);
-          }

-          g_vrUi = new Ui(gl, g_numFish);
-          g_vrUi.load("./vr_assets/ui/config.js");

-          window.addEventListener('vrdisplaypresentchange', onPresentChange, false);
-          window.addEventListener('vrdisplayactivate', onRequestPresent, false);
-          window.addEventListener('vrdisplaydeactivate', onExitPresent, false);
-          window.addEventListener('keydown', function() { g_vrUi.isMenuMode = !g_vrUi.isMenuMode; }, false);
-        } else {
-          console.log("WebVR supported, but no VRDisplays found.")
-        }
-      });
-    } else {
-      if (navigator.getVRDevices) {
-        console.log("Your browser supports WebVR but not the latest version. See webvr.info for more info.");
-      } else {
-        console.log("Your browser does not support WebVR. See webvr.info for assistance");
-      }
-    }
+    if (navigator.xr) {
+      // Checks to ensure that 'immersive-vr' mode is available, and only
+      // enables the button if so.
+      navigator.xr.isSessionSupported('immersive-vr').then((supported) => {
+        if (supported) {
+          vrButton = addButton("Enter VR", "E", getCurrentUrl() + "/vr_assets/button.png", onRequestPresent);
+          g_vrUi = new Ui(gl, g_numFish);
+          g_vrUi.load("./vr_assets/ui/config.js");
+        }
+      });
+      navigator.xr.addEventListener('devicechange', onDeviceChange);
+    }
// Regardless of if we have WebVR support, we can demonstrate stereo rendering inside the window.
stereoDemoButton = addButton("Toggle Stereo Demo", "", getCurrentUrl() + "/vr_assets/button.png", toggleStereoDemo);