Headset views;

- lovr.headset.getViewCount
- lovr.headset.getViewPose
- lovr.headset.getViewAngles
bjorn 2020-01-27 21:02:37 -08:00
parent b19a391c32
commit 0aa724116e
10 changed files with 264 additions and 37 deletions
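A minimal sketch of how the new functions might be called from Lua (a hypothetical snippet based on the bindings in this commit: view indices are 1-based on the Lua side, getViewPose returns a position plus an angle/axis orientation, and the pose/angle functions return nil for an out-of-range view):

for i = 1, lovr.headset.getViewCount() do
  local x, y, z, angle, ax, ay, az = lovr.headset.getViewPose(i)
  local left, right, up, down = lovr.headset.getViewAngles(i)
  print(string.format('view %d: position (%.2f, %.2f, %.2f), angles (%.2f, %.2f, %.2f, %.2f)', i, x, y, z, left, right, up, down))
end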

View File

@@ -181,6 +181,44 @@ static int l_lovrHeadsetGetDisplayMask(lua_State* L) {
return 1;
}
static int l_lovrHeadsetGetViewCount(lua_State* L) {
lua_pushinteger(L, lovrHeadsetDriver->getViewCount());
return 1;
}
static int l_lovrHeadsetGetViewPose(lua_State* L) {
float position[4], orientation[4];
uint32_t view = luaL_checkinteger(L, 1) - 1;
if (!lovrHeadsetDriver->getViewPose(view, position, orientation)) {
lua_pushnil(L);
return 1;
}
float angle, ax, ay, az;
quat_getAngleAxis(orientation, &angle, &ax, &ay, &az);
lua_pushnumber(L, position[0]);
lua_pushnumber(L, position[1]);
lua_pushnumber(L, position[2]);
lua_pushnumber(L, angle);
lua_pushnumber(L, ax);
lua_pushnumber(L, ay);
lua_pushnumber(L, az);
return 7;
}
static int l_lovrHeadsetGetViewAngles(lua_State* L) {
float left, right, up, down;
uint32_t view = luaL_checkinteger(L, 1) - 1;
if (!lovrHeadsetDriver->getViewAngles(view, &left, &right, &up, &down)) {
lua_pushnil(L);
return 1;
}
lua_pushnumber(L, left);
lua_pushnumber(L, right);
lua_pushnumber(L, up);
lua_pushnumber(L, down);
return 4;
}
static int l_lovrHeadsetGetClipDistance(lua_State* L) {
float clipNear, clipFar;
lovrHeadsetDriver->getClipDistance(&clipNear, &clipFar);
@@ -549,6 +587,9 @@ static const luaL_Reg lovrHeadset[] = {
{ "getDisplayDimensions", l_lovrHeadsetGetDisplayDimensions },
{ "getDisplayFrequency", l_lovrHeadsetGetDisplayFrequency },
{ "getDisplayMask", l_lovrHeadsetGetDisplayMask },
{ "getViewCount", l_lovrHeadsetGetViewCount },
{ "getViewPose", l_lovrHeadsetGetViewPose },
{ "getViewAngles", l_lovrHeadsetGetViewAngles },
{ "getClipDistance", l_lovrHeadsetGetClipDistance },
{ "setClipDistance", l_lovrHeadsetSetClipDistance },
{ "getBoundsWidth", l_lovrHeadsetGetBoundsWidth },

View File

@@ -576,6 +576,10 @@ MAF mat4 mat4_fov(mat4 m, float left, float right, float up, float down, float c
return m;
}
MAF void mat4_getFov(mat4 m, float* left, float* right, float* up, float* down) {
// TODO
}
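// A possible inverse for reference (a sketch only, not part of this commit):
// assuming the column-major layout mat4_fov writes, with all four half-angles
// positive, m[0] = 2 / (tan(right) + tan(left)) and
// m[8] = (tan(right) - tan(left)) / (tan(right) + tan(left)), and likewise
// m[5] and m[9] vertically. The angles could then be recovered as:
//   *left  = atanf((1.f - m[8]) / m[0]);
//   *right = atanf((1.f + m[8]) / m[0]);
//   *up    = atanf((1.f + m[9]) / m[5]);
//   *down  = atanf((1.f - m[9]) / m[5]);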
MAF mat4 mat4_lookAt(mat4 m, vec3 from, vec3 to, vec3 up) {
float x[4];
float y[4];

View File

@@ -30,12 +30,14 @@ static struct {
float clipFar;
float pitch;
float yaw;
float fov;
} state;
static bool desktop_init(float offset, uint32_t msaa) {
state.offset = offset;
state.clipNear = .1f;
state.clipFar = 100.f;
state.fov = 67.f * (float) M_PI / 180.f;
if (!state.initialized) {
mat4_identity(state.headTransform);
@@ -78,6 +80,28 @@ static const float* desktop_getDisplayMask(uint32_t* count) {
return NULL;
}
static uint32_t desktop_getViewCount() {
return 2;
}
static bool desktop_getViewPose(uint32_t view, float* position, float* orientation) {
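// Both views share the single simulated head pose; view indices above 1 are rejected via the return value.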
vec3_init(position, state.position);
quat_fromMat4(orientation, state.headTransform);
return view < 2;
}
static bool desktop_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down) {
float aspect;
uint32_t width, height;
desktop_getDisplayDimensions(&width, &height);
aspect = (float) width / 2.f / height;
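// Approximation: the horizontal half-angles scale the vertical angle by the per-eye aspect ratio (strictly it is the tangent, not the angle, that scales with aspect).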
*left = state.fov * aspect;
*right = state.fov * aspect;
*up = state.fov;
*down = state.fov;
return view < 2;
}
static void desktop_getClipDistance(float* clipNear, float* clipFar) {
*clipNear = state.clipNear;
*clipFar = state.clipFar;
@@ -148,10 +172,10 @@ static ModelData* desktop_newModelData(Device device) {
}
static void desktop_renderTo(void (*callback)(void*), void* userdata) {
uint32_t width, height;
desktop_getDisplayDimensions(&width, &height);
float left, right, up, down;
desktop_getViewAngles(0, &left, &right, &up, &down);
Camera camera = { .canvas = NULL, .viewMatrix = { MAT4_IDENTITY }, .stereo = true };
mat4_perspective(camera.projection[0], state.clipNear, state.clipFar, 67.f * (float) M_PI / 180.f, (float) width / 2.f / height);
mat4_fov(camera.projection[0], left, right, up, down, state.clipNear, state.clipFar);
mat4_multiply(camera.viewMatrix[0], state.headTransform);
mat4_invert(camera.viewMatrix[0]);
mat4_set(camera.projection[1], camera.projection[0]);
@@ -255,6 +279,9 @@ HeadsetInterface lovrHeadsetDesktopDriver = {
.getDisplayTime = desktop_getDisplayTime,
.getDisplayDimensions = desktop_getDisplayDimensions,
.getDisplayMask = desktop_getDisplayMask,
.getViewCount = desktop_getViewCount,
.getViewPose = desktop_getViewPose,
.getViewAngles = desktop_getViewAngles,
.getClipDistance = desktop_getClipDistance,
.setClipDistance = desktop_setClipDistance,
.getBoundsDimensions = desktop_getBoundsDimensions,

View File

@@ -51,6 +51,12 @@ typedef enum {
MAX_AXES
} DeviceAxis;
// Notes:
// - getDisplayFrequency may return 0.f if the information is unavailable.
// - For isDown, changed can be set to false if change information is unavailable or inconvenient.
// - getAxis may write 4 floats to the output value. The expected number is a constant (see axisCounts in l_headset).
// - In general, most input results should be kept constant between calls to update.
typedef struct HeadsetInterface {
struct HeadsetInterface* next;
HeadsetDriver driverType;
@@ -62,6 +68,9 @@ typedef struct HeadsetInterface {
float (*getDisplayFrequency)(void);
const float* (*getDisplayMask)(uint32_t* count);
double (*getDisplayTime)(void);
uint32_t (*getViewCount)(void);
bool (*getViewPose)(uint32_t view, float* position, float* orientation);
bool (*getViewAngles)(uint32_t view, float* left, float* right, float* up, float* down);
void (*getClipDistance)(float* clipNear, float* clipFar);
void (*setClipDistance)(float clipNear, float clipFar);
void (*getBoundsDimensions)(float* width, float* depth);

View File

@@ -12,10 +12,12 @@
#include <OVR_CAPI_GL.h>
#include <stdlib.h>
#include <stdbool.h>
#include <math.h>
static struct {
bool needRefreshTracking;
bool needRefreshButtons;
ovrHmdDesc desc;
ovrSession session;
ovrGraphicsLuid luid;
float clipNear;
@@ -85,6 +87,8 @@ static bool oculus_init(float offset, uint32_t msaa) {
return false;
}
state.desc = ovr_GetHmdDesc(state.session);
state.needRefreshTracking = true;
state.needRefreshButtons = true;
state.clipNear = .1f;
@@ -119,8 +123,7 @@ static void oculus_destroy(void) {
}
static bool oculus_getName(char* name, size_t length) {
ovrHmdDesc desc = ovr_GetHmdDesc(state.session);
strncpy(name, desc.ProductName, length - 1);
strncpy(name, state.desc.ProductName, length - 1);
name[length - 1] = '\0';
return true;
}
@@ -130,9 +133,7 @@ static HeadsetOrigin oculus_getOriginType(void) {
}
static void oculus_getDisplayDimensions(uint32_t* width, uint32_t* height) {
ovrHmdDesc desc = ovr_GetHmdDesc(state.session);
ovrSizei size = ovr_GetFovTextureSize(state.session, ovrEye_Left, desc.DefaultEyeFov[0], 1.0f);
ovrSizei size = ovr_GetFovTextureSize(state.session, ovrEye_Left, state.desc.DefaultEyeFov[0], 1.0f);
*width = size.w;
*height = size.h;
}
@@ -146,6 +147,44 @@ static double oculus_getDisplayTime(void) {
return ovr_GetPredictedDisplayTime(state.session, 0);
}
static void getEyePoses(ovrPosef poses[2], double* sensorSampleTime) {
ovrEyeRenderDesc eyeRenderDesc[2] = {
ovr_GetRenderDesc(state.session, ovrEye_Left, state.desc.DefaultEyeFov[0]),
ovr_GetRenderDesc(state.session, ovrEye_Right, state.desc.DefaultEyeFov[1])
};
ovrPosef offsets[2] = {
eyeRenderDesc[0].HmdToEyePose,
eyeRenderDesc[1].HmdToEyePose
};
ovr_GetEyePoses(state.session, 0, ovrFalse, offsets, poses, sensorSampleTime);
}
static uint32_t oculus_getViewCount(void) {
return 2;
}
static bool oculus_getViewPose(uint32_t view, float* position, float* orientation) {
if (view > 1) return false;
ovrPosef poses[2];
getEyePoses(poses, NULL);
ovrPosef* pose = &poses[view];
vec3_set(position, pose->Position.x, pose->Position.y, pose->Position.z);
quat_set(orientation, pose->Orientation.x, pose->Orientation.y, pose->Orientation.z, pose->Orientation.w);
return true;
}
static bool oculus_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down) {
if (view > 1) return false;
ovrFovPort* fov = &state.desc.DefaultEyeFov[view];
*left = atanf(fov->LeftTan);
*right = atanf(fov->RightTan);
*up = atanf(fov->UpTan);
*down = atanf(fov->DownTan);
return true;
}
static void oculus_getClipDistance(float* clipNear, float* clipFar) {
*clipNear = state.clipNear;
*clipFar = state.clipFar;
@@ -275,9 +314,8 @@ static ModelData* oculus_newModelData(Device device) {
}
static void oculus_renderTo(void (*callback)(void*), void* userdata) {
ovrHmdDesc desc = ovr_GetHmdDesc(state.session);
if (!state.canvas) {
state.size = ovr_GetFovTextureSize(state.session, ovrEye_Left, desc.DefaultEyeFov[ovrEye_Left], 1.0f);
state.size = ovr_GetFovTextureSize(state.session, ovrEye_Left, state.desc.DefaultEyeFov[ovrEye_Left], 1.0f);
ovrTextureSwapChainDesc swdesc = {
.Type = ovrTexture_2D,
@@ -305,16 +343,9 @@ static void oculus_renderTo(void (*callback)(void*), void* userdata) {
lovrPlatformSetSwapInterval(0);
}
ovrEyeRenderDesc eyeRenderDesc[2];
eyeRenderDesc[0] = ovr_GetRenderDesc(state.session, ovrEye_Left, desc.DefaultEyeFov[0]);
eyeRenderDesc[1] = ovr_GetRenderDesc(state.session, ovrEye_Right, desc.DefaultEyeFov[1]);
ovrPosef HmdToEyeOffset[2] = {
eyeRenderDesc[0].HmdToEyePose,
eyeRenderDesc[1].HmdToEyePose
};
ovrPosef EyeRenderPose[2];
double sensorSampleTime;
ovr_GetEyePoses(state.session, 0, ovrTrue, HmdToEyeOffset, EyeRenderPose, &sensorSampleTime);
getEyePoses(EyeRenderPose, &sensorSampleTime);
Camera camera = { .canvas = state.canvas };
@@ -328,7 +359,7 @@ static void oculus_renderTo(void (*callback)(void*), void* userdata) {
float pos[] = {
EyeRenderPose[eye].Position.x,
EyeRenderPose[eye].Position.y,
EyeRenderPose[eye].Position.z,
EyeRenderPose[eye].Position.z
};
mat4 transform = camera.viewMatrix[eye];
mat4_identity(transform);
@@ -337,7 +368,7 @@ static void oculus_renderTo(void (*callback)(void*), void* userdata) {
transform[13] = -(transform[1] * pos[0] + transform[5] * pos[1] + transform[9] * pos[2]);
transform[14] = -(transform[2] * pos[0] + transform[6] * pos[1] + transform[10] * pos[2]);
ovrMatrix4f projection = ovrMatrix4f_Projection(desc.DefaultEyeFov[eye], state.clipNear, state.clipFar, ovrProjection_ClipRangeOpenGL);
ovrMatrix4f projection = ovrMatrix4f_Projection(state.desc.DefaultEyeFov[eye], state.clipNear, state.clipFar, ovrProjection_ClipRangeOpenGL);
mat4_fromMat44(camera.projection[eye], projection.M);
}
@@ -365,7 +396,7 @@ static void oculus_renderTo(void (*callback)(void*), void* userdata) {
vp.Size.w = state.size.w;
vp.Size.h = state.size.h;
ld.Viewport[eye] = vp;
ld.Fov[eye] = desc.DefaultEyeFov[eye];
ld.Fov[eye] = state.desc.DefaultEyeFov[eye];
ld.RenderPose[eye] = EyeRenderPose[eye];
ld.SensorSampleTime = sensorSampleTime;
}
@@ -404,6 +435,9 @@ HeadsetInterface lovrHeadsetOculusDriver = {
.getDisplayDimensions = oculus_getDisplayDimensions,
.getDisplayMask = oculus_getDisplayMask,
.getDisplayTime = oculus_getDisplayTime,
.getViewCount = oculus_getViewCount,
.getViewPose = oculus_getViewPose,
.getViewAngles = oculus_getViewAngles,
.getClipDistance = oculus_getClipDistance,
.setClipDistance = oculus_setClipDistance,
.getBoundsDimensions = oculus_getBoundsDimensions,

View File

@@ -77,6 +77,26 @@ static const float* vrapi_getDisplayMask(uint32_t* count) {
return NULL;
}
static uint32_t vrapi_getViewCount(void) {
return 2;
}
static bool vrapi_getViewPose(uint32_t view, float* position, float* orientation) {
if (view > 1) return false;
float transform[16];
mat4_init(transform, bridgeLovrMobileData.updateData.eyeViewMatrix[view]);
mat4_invert(transform); // :(
mat4_getPosition(transform, position);
mat4_getOrientation(transform, orientation);
return true;
}
static bool vrapi_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down) {
if (view > 1) return false;
mat4_getFov(bridgeLovrMobileData.updateData.projectionMatrix[view], left, right, up, down);
return true;
}
static void vrapi_getClipDistance(float* clipNear, float* clipFar) {
// TODO
}
@@ -264,6 +284,9 @@ HeadsetInterface lovrHeadsetOculusMobileDriver = {
.getDisplayTime = vrapi_getDisplayTime,
.getDisplayDimensions = vrapi_getDisplayDimensions,
.getDisplayMask = vrapi_getDisplayMask,
.getViewCount = vrapi_getViewCount,
.getViewPose = vrapi_getViewPose,
.getViewAngles = vrapi_getViewAngles,
.getClipDistance = vrapi_getClipDistance,
.setClipDistance = vrapi_setClipDistance,
.getBoundsDimensions = vrapi_getBoundsDimensions,

View File

@@ -242,6 +242,28 @@ static double openvr_getDisplayTime(void) {
return lovrPlatformGetTime() + (double) (frameDuration - secondsSinceVsync + vsyncToPhotons);
}
static uint32_t openvr_getViewCount(void) {
return 2;
}
static bool openvr_getViewPose(uint32_t view, float* position, float* orientation) {
EVREye eye = view ? EVREye_Eye_Right : EVREye_Eye_Left;
float transform[16], offset[16];
mat4_fromMat34(transform, state.headPose.mDeviceToAbsoluteTracking.m);
mat4_multiply(transform, mat4_fromMat34(offset, state.system->GetEyeToHeadTransform(eye).m));
mat4_getPosition(transform, position);
mat4_getOrientation(transform, orientation);
return view < 2;
}
static bool openvr_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down) {
EVREye eye = view ? EVREye_Eye_Right : EVREye_Eye_Left;
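// Note: GetProjectionRaw returns the raw frustum extents, i.e. tangents of the half-angles rather than the angles themselves.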
state.system->GetProjectionRaw(eye, left, right, up, down);
return view < 2;
}
static void openvr_getClipDistance(float* clipNear, float* clipFar) {
*clipNear = state.clipNear;
*clipFar = state.clipFar;
@@ -467,7 +489,7 @@ static ModelData* openvr_newModelData(Device device) {
static void openvr_renderTo(void (*callback)(void*), void* userdata) {
if (!state.canvas) {
uint32_t width, height;
state.system->GetRecommendedRenderTargetSize(&width, &height);
openvr_getDisplayDimensions(&width, &height);
CanvasFlags flags = { .depth = { true, false, FORMAT_D24S8 }, .stereo = true, .mipmaps = true, .msaa = state.msaa };
state.canvas = lovrCanvasCreate(width, height, flags);
Texture* texture = lovrTextureCreate(TEXTURE_2D, NULL, 0, true, true, state.msaa);
@@ -478,15 +500,17 @@ static void openvr_renderTo(void (*callback)(void*), void* userdata) {
lovrPlatformSetSwapInterval(0);
}
Camera camera = { .canvas = state.canvas, .viewMatrix = { MAT4_IDENTITY, MAT4_IDENTITY } };
Camera camera = { .canvas = state.canvas };
float head[16], eye[16];
float head[16];
mat4_fromMat34(head, state.headPose.mDeviceToAbsoluteTracking.m);
for (int i = 0; i < 2; i++) {
float left, right, up, down, eye[16];
EVREye vrEye = (i == 0) ? EVREye_Eye_Left : EVREye_Eye_Right;
mat4_fromMat44(camera.projection[i], state.system->GetProjectionMatrix(vrEye, state.clipNear, state.clipFar).m);
mat4_multiply(camera.viewMatrix[i], head);
openvr_getViewAngles(i, &left, &right, &up, &down);
mat4_fov(camera.projection[i], left, right, up, down, state.clipNear, state.clipFar);
mat4_init(camera.viewMatrix[i], head);
mat4_multiply(camera.viewMatrix[i], mat4_fromMat34(eye, state.system->GetEyeToHeadTransform(vrEye).m));
mat4_invert(camera.viewMatrix[i]);
}
@@ -540,6 +564,9 @@ HeadsetInterface lovrHeadsetOpenVRDriver = {
.getDisplayFrequency = openvr_getDisplayFrequency,
.getDisplayMask = openvr_getDisplayMask,
.getDisplayTime = openvr_getDisplayTime,
.getViewCount = openvr_getViewCount,
.getViewPose = openvr_getViewPose,
.getViewAngles = openvr_getViewAngles,
.getClipDistance = openvr_getClipDistance,
.setClipDistance = openvr_setClipDistance,
.getBoundsDimensions = openvr_getBoundsDimensions,

View File

@@ -474,6 +474,53 @@ static double openxr_getDisplayTime(void) {
return state.frameState.predictedDisplayTime / 1e9;
}
static void getViews(XrView views[2], uint32_t* count) {
XrViewLocateInfo viewLocateInfo = {
.type = XR_TYPE_VIEW_LOCATE_INFO,
.viewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO,
.displayTime = state.frameState.predictedDisplayTime,
.space = state.referenceSpace
};
XrViewState viewState = { .type = XR_TYPE_VIEW_STATE };
views[0].type = views[1].type = XR_TYPE_VIEW;
views[0].next = views[1].next = NULL;
// "views" is a function parameter and decays to a pointer, so sizeof(views) / sizeof(views[0]) would not give the element count; the capacity is passed explicitly.
XR(xrLocateViews(state.session, &viewLocateInfo, &viewState, 2, count, views));
}
static uint32_t openxr_getViewCount(void) {
uint32_t count;
XrView views[2];
getViews(views, &count);
return count;
}
static bool openxr_getViewPose(uint32_t view, float* position, float* orientation) {
uint32_t count;
XrView views[2];
getViews(views, &count);
if (view < count) {
memcpy(position, &views[view].pose.position, 3 * sizeof(float));
memcpy(orientation, &views[view].pose.orientation, 4 * sizeof(float));
return true;
} else {
return false;
}
}
static bool openxr_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down) {
uint32_t count;
XrView views[2];
getViews(views, &count);
if (view < count) {
*left = views[view].fov.angleLeft;
*right = views[view].fov.angleRight;
*up = views[view].fov.angleUp;
*down = views[view].fov.angleDown;
return true;
} else {
return false;
}
}
static void openxr_getClipDistance(float* clipNear, float* clipFar) {
*clipNear = state.clipNear;
*clipFar = state.clipFar;
@@ -646,17 +693,6 @@ static void openxr_renderTo(void (*callback)(void*), void* userdata) {
XrSwapchainImageWaitInfo waitInfo = { XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO, .timeout = 1e9 };
if (XR(xrWaitSwapchainImage(state.swapchain, &waitInfo)) != XR_TIMEOUT_EXPIRED) {
XrViewLocateInfo viewLocateInfo = {
.type = XR_TYPE_VIEW_LOCATE_INFO,
.viewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO,
.displayTime = state.frameState.predictedDisplayTime,
.space = state.referenceSpace
};
XrView views[2];
XrViewState viewState;
XR(xrLocateViews(state.session, &viewLocateInfo, &viewState, 2, NULL, views));
if (!state.canvas) {
CanvasFlags flags = { .depth = { true, false, FORMAT_D24S8 }, .stereo = true, .mipmaps = true, .msaa = state.msaa };
state.canvas = lovrCanvasCreate(state.width, state.height, flags);
@@ -665,6 +701,10 @@ static void openxr_renderTo(void (*callback)(void*), void* userdata) {
Camera camera = { .canvas = state.canvas, .stereo = true };
uint32_t count;
XrView views[2];
getViews(views, &count);
for (int eye = 0; eye < 2; eye++) {
XrView* view = &views[eye];
XrVector3f* v = &view->pose.position;
@@ -759,6 +799,9 @@ HeadsetInterface lovrHeadsetOpenXRDriver = {
.getDisplayDimensions = openxr_getDisplayDimensions,
.getDisplayMask = openxr_getDisplayMask,
.getDisplayTime = openxr_getDisplayTime,
.getViewCount = openxr_getViewCount,
.getViewPose = openxr_getViewPose,
.getViewAngles = openxr_getViewAngles,
.getClipDistance = openxr_getClipDistance,
.setClipDistance = openxr_setClipDistance,
.getBoundsDimensions = openxr_getBoundsDimensions,

View File

@@ -1,6 +1,7 @@
#include "headset/headset.h"
#include "graphics/graphics.h"
#include <stdbool.h>
#include <stdint.h>
// Provided by resources/webvr.js
extern bool webvr_init(float offset, uint32_t msaa);
@@ -10,6 +11,9 @@ extern HeadsetOrigin webvr_getOriginType(void);
extern double webvr_getDisplayTime(void);
extern void webvr_getDisplayDimensions(uint32_t* width, uint32_t* height);
extern const float* webvr_getDisplayMask(uint32_t* count);
extern uint32_t webvr_getViewCount(void);
extern bool webvr_getViewPose(uint32_t view, float* position, float* orientation);
extern bool webvr_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down);
extern void webvr_getClipDistance(float* near, float* far);
extern void webvr_setClipDistance(float near, float far);
extern void webvr_getBoundsDimensions(float* width, float* depth);
@@ -53,6 +57,9 @@ HeadsetInterface lovrHeadsetWebVRDriver = {
.getDisplayTime = webvr_getDisplayTime,
.getDisplayDimensions = webvr_getDisplayDimensions,
.getDisplayMask = webvr_getDisplayMask,
.getViewCount = webvr_getViewCount,
.getViewPose = webvr_getViewPose,
.getViewAngles = webvr_getViewAngles,
.getClipDistance = webvr_getClipDistance,
.setClipDistance = webvr_setClipDistance,
.getBoundsDimensions = webvr_getBoundsDimensions,

View File

@@ -140,6 +140,18 @@ var LibraryLOVR = {
return 0;
},
webvr_getViewCount: function() {
return 2;
},
webvr_getViewPose: function(view, position, orientation) {
return false; // TODO
},
webvr_getViewAngles: function(view, left, right, up, down) {
return false; // TODO
},
webvr_getClipDistance: function(clipNear, clipFar) {
HEAPF32[clipNear >> 2] = webvr.display.depthNear;
HEAPF32[clipFar >> 2] = webvr.display.depthFar;