mirror of https://github.com/bjornbytes/lovr.git
Attempt to fix state.offset.
The goal is to ensure that (for 3DOF devices) all poses are reported to include the state.offset vertical transform applied to them. Previously, only rendering was using state.offset. This is important to fix spatial audio. Is doing this in every driver problematic? What if we did it in the Lua bindings? That way it could be handled in one place.
This commit is contained in:
parent
987b24fd19
commit
5f990d8c9f
|
@ -144,9 +144,8 @@ static const float* fakeGetBoundsGeometry(int* count) {
|
|||
}
|
||||
|
||||
static void fakeGetPose(float* x, float* y, float* z, float* angle, float* ax, float* ay, float* az) {
|
||||
*x = state.position[0];
|
||||
*y = state.position[1];
|
||||
*z = state.position[2];
|
||||
*x = *y = *z = 0;
|
||||
mat4_transform(state.transform, x, y, z);
|
||||
float q[4];
|
||||
quat_fromMat4(q, state.transform);
|
||||
quat_getAngleAxis(q, angle, ax, ay, az);
|
||||
|
@ -186,7 +185,6 @@ static void fakeControllerGetPose(Controller* controller, float* x, float* y, fl
|
|||
*y = 0;
|
||||
*z = -.75;
|
||||
mat4_transform(state.transform, x, y, z);
|
||||
*y += state.offset;
|
||||
|
||||
float q[4];
|
||||
quat_fromMat4(q, state.transform);
|
||||
|
@ -223,7 +221,6 @@ static void fakeRenderTo(void (*callback)(void*), void* userdata) {
|
|||
bool stereo = state.mirrorEye == EYE_BOTH;
|
||||
Camera camera = { .canvas = NULL, .viewMatrix = { MAT4_IDENTITY }, .stereo = stereo };
|
||||
mat4_perspective(camera.projection[0], state.clipNear, state.clipFar, 67 * M_PI / 180., (float) width / (1 + stereo) / height);
|
||||
mat4_translate(camera.viewMatrix[0], 0, state.offset, 0);
|
||||
mat4_multiply(camera.viewMatrix[0], state.transform);
|
||||
mat4_invertPose(camera.viewMatrix[0]);
|
||||
mat4_set(camera.projection[1], camera.projection[0]);
|
||||
|
@ -295,6 +292,7 @@ static void fakeUpdate(float dt) {
|
|||
|
||||
// Update transform
|
||||
mat4_identity(state.transform);
|
||||
mat4_translate(state.transform, 0, state.offset, 0);
|
||||
mat4_translate(state.transform, state.position[0], state.position[1], state.position[2]);
|
||||
mat4_rotate(state.transform, state.yaw, 0, 1, 0);
|
||||
mat4_rotate(state.transform, state.pitch, 1, 0, 0);
|
||||
|
|
|
@ -21,6 +21,7 @@ typedef struct {
|
|||
ovrGraphicsLuid luid;
|
||||
float clipNear;
|
||||
float clipFar;
|
||||
float offset;
|
||||
int lastButtonState;
|
||||
ovrSizei size;
|
||||
Canvas* canvas;
|
||||
|
@ -103,6 +104,7 @@ static bool oculusInit(float offset, int msaa) {
|
|||
state.mirrorEye = EYE_BOTH;
|
||||
state.clipNear = 0.1f;
|
||||
state.clipFar = 30.f;
|
||||
state.offset = offset;
|
||||
|
||||
vec_init(&state.controllers);
|
||||
|
||||
|
@ -205,7 +207,7 @@ static void oculusGetPose(float* x, float* y, float* z, float* angle, float* ax,
|
|||
ovrTrackingState *ts = refreshTracking();
|
||||
ovrVector3f pos = ts->HeadPose.ThePose.Position;
|
||||
*x = pos.x;
|
||||
*y = pos.y;
|
||||
*y = pos.y + state.offset;
|
||||
*z = pos.z;
|
||||
ovrQuatf oq = ts->HeadPose.ThePose.Orientation;
|
||||
float quat[] = { oq.x, oq.y, oq.z, oq.w };
|
||||
|
@ -260,7 +262,7 @@ static void oculusControllerGetPose(Controller* controller, float* x, float* y,
|
|||
ovrTrackingState *ts = refreshTracking();
|
||||
ovrVector3f pos = ts->HandPoses[controller->id].ThePose.Position;
|
||||
*x = pos.x;
|
||||
*y = pos.y;
|
||||
*y = pos.y + state.offset;
|
||||
*z = pos.z;
|
||||
ovrQuatf orient = ts->HandPoses[controller->id].ThePose.Orientation;
|
||||
float quat[4] = { orient.x, orient.y, orient.z, orient.w };
|
||||
|
@ -319,7 +321,6 @@ static ModelData* oculusControllerNewModelData(Controller* controller) {
|
|||
return NULL;
|
||||
}
|
||||
|
||||
// TODO: need to set up swap chain textures for the eyes and finish view transforms
|
||||
static void oculusRenderTo(void (*callback)(void*), void* userdata) {
|
||||
ovrHmdDesc desc = ovr_GetHmdDesc(state.session);
|
||||
if (!state.canvas) {
|
||||
|
@ -378,7 +379,7 @@ static void oculusRenderTo(void (*callback)(void*), void* userdata) {
|
|||
mat4_identity(transform);
|
||||
mat4_rotateQuat(transform, orient);
|
||||
transform[12] = -(transform[0] * pos[0] + transform[4] * pos[1] + transform[8] * pos[2]);
|
||||
transform[13] = -(transform[1] * pos[0] + transform[5] * pos[1] + transform[9] * pos[2]);
|
||||
transform[13] = -(transform[1] * pos[0] + transform[5] * pos[1] + transform[9] * pos[2] + state.offset);
|
||||
transform[14] = -(transform[2] * pos[0] + transform[6] * pos[1] + transform[10] * pos[2]);
|
||||
|
||||
ovrMatrix4f projection = ovrMatrix4f_Projection(desc.DefaultEyeFov[eye], state.clipNear, state.clipFar, ovrProjection_ClipRangeOpenGL);
|
||||
|
@ -416,10 +417,6 @@ static void oculusRenderTo(void (*callback)(void*), void* userdata) {
|
|||
|
||||
ovrLayerHeader* layers = &ld.Header;
|
||||
ovr_SubmitFrame(state.session, 0, NULL, &layers, 1);
|
||||
// apparently if this happens we should kill the session and reinit as long as we're getting ovrError_DisplayLost,
|
||||
// lest oculus get upset should you try to get anything on the store.
|
||||
// if (!OVR_SUCCESS(result))
|
||||
// goto Done;
|
||||
|
||||
state.needRefreshTracking = true;
|
||||
state.needRefreshButtons = true;
|
||||
|
|
|
@ -54,6 +54,7 @@ static void getTransform(unsigned int device, mat4 transform) {
|
|||
mat4_identity(transform);
|
||||
} else {
|
||||
mat4_fromMat34(transform, pose.mDeviceToAbsoluteTracking.m);
|
||||
transform[13] += state.offset;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -570,7 +571,6 @@ static void openvrRenderTo(void (*callback)(void*), void* userdata) {
|
|||
for (int i = 0; i < 2; i++) {
|
||||
EVREye vrEye = (i == 0) ? EVREye_Eye_Left : EVREye_Eye_Right;
|
||||
mat4_fromMat44(camera.projection[i], state.system->GetProjectionMatrix(vrEye, state.clipNear, state.clipFar).m);
|
||||
mat4_translate(camera.viewMatrix[i], 0, state.offset, 0);
|
||||
mat4_multiply(camera.viewMatrix[i], head);
|
||||
mat4_multiply(camera.viewMatrix[i], mat4_fromMat34(eye, state.system->GetEyeToHeadTransform(vrEye).m));
|
||||
mat4_invertPose(camera.viewMatrix[i]);
|
||||
|
|
|
@ -195,6 +195,8 @@ var LibraryLOVR = {
|
|||
if (sittingToStanding) {
|
||||
HEAPF32.set(sittingToStanding, matA >> 2);
|
||||
Module._mat4_transform(matA, x, y, z);
|
||||
} else {
|
||||
HEAPF32[y >> 2] += webvr.offset;
|
||||
}
|
||||
} else {
|
||||
HEAPF32[x >> 2] = HEAPF32[y >> 2] = HEAPF32[z >> 2] = 0;
|
||||
|
@ -233,6 +235,8 @@ var LibraryLOVR = {
|
|||
if (sittingToStanding) {
|
||||
HEAPF32.set(sittingToStanding, matB >> 2);
|
||||
Module._mat4_multiply(matA, matB);
|
||||
} else {
|
||||
HEAPF32[(matA + 4 * 13) >> 2] += webvr.offset;
|
||||
}
|
||||
|
||||
Module._mat4_translate(matA, pose.position[0], pose.position[1], pose.position[2]);
|
||||
|
@ -323,6 +327,7 @@ var LibraryLOVR = {
|
|||
Module._quat_fromMat4(quat, matA);
|
||||
Module._quat_getAngleAxis(quat, angle, ax, ay, az);
|
||||
} else {
|
||||
HEAPF32[y >> 2] += webvr.offset;
|
||||
Module._quat_getAngleAxis(quat, angle, ax, ay, az);
|
||||
}
|
||||
},
|
||||
|
|
Loading…
Reference in New Issue