Finish vrapi driver

This commit is contained in:
bjorn 2020-06-09 18:00:33 -06:00
parent b34c03bcc8
commit a1aa3c8ae8
12 changed files with 544 additions and 19 deletions

1
.gitignore vendored
View File

@ -31,3 +31,4 @@ bin
.DS_Store
.vs
/test
deps/Vrapi

View File

@ -25,7 +25,7 @@ SRC_@(HEADSET)@(SIMULATOR) += src/modules/headset/headset_desktop.c
SRC_@(HEADSET)@(OPENVR) += src/modules/headset/headset_openvr.c
SRC_@(HEADSET)@(OPENXR) += src/modules/headset/headset_openxr.c
SRC_@(HEADSET)@(OCULUS) += src/modules/headset/headset_oculus.c
SRC_@(HEADSET)@(VRAPI) += src/modules/headset/headset_oculus_mobile.c
SRC_@(HEADSET)@(VRAPI) += src/modules/headset/headset_vrapi.c
SRC_@(HEADSET)@(WEBVR) += src/modules/headset/headset_webvr.c
SRC_@(HEADSET)@(WEBXR) += src/modules/headset/headset_webxr.c
SRC_@(HEADSET)@(LEAP) += src/modules/headset/headset_leap.c

View File

@ -37,7 +37,8 @@ CFLAGS_@(OPENVR) += -DLOVR_USE_OPENVR
CFLAGS_@(OPENXR) += -DLOVR_USE_OPENXR
CFLAGS_@(OCULUS) += -DLOVR_USE_OCULUS
CFLAGS_@(OCULUS) += -I@(OCULUS_PATH)/LibOVR/Include
CFLAGS_@(VRAPI) += -DLOVR_USE_OCULUS_MOBILE
CFLAGS_@(VRAPI) += -DLOVR_USE_VRAPI
CFLAGS_@(VRAPI) += -I$(ROOT)/deps/VrApi/Include
CFLAGS_@(WEBXR) += -DLOVR_USE_WEBXR
CFLAGS_@(LEAP) += -DLOVR_USE_LEAP
@ -177,6 +178,7 @@ ifeq ($(PLATFORM),android)
TOOLS = @(ANDROID_SDK)/sdk/build-tools/@(ANDROID_BUILD_TOOLS_VERSION)
JAR = @(ANDROID_SDK)/sdk/platforms/android-@(ANDROID_API_VERSION)/android.jar
GLUE = @(ANDROID_SDK)/sdk/ndk-bundle/sources/android/native_app_glue
VRAPI_LIB_PATH = $(ROOT)/deps/VrApi/Libs/Android/@(ANDROID_ABI)/Release
CFLAGS += --target=@(ANDROID_TARGET)
CFLAGS += -I$(GLUE)
LDFLAGS += --target=@(ANDROID_TARGET)
@ -207,9 +209,12 @@ ifeq ($(PLATFORM),android)
LDFLAGS_@(DATA) += -L$(ROOT)/build/lib_msdfgen -lmsdfgen
LDFLAGS_@(PHYSICS) += -L$(ROOT)/build/ode -lode
LDFLAGS_@(ENET) += -L$(ROOT)/build/enet -lenet
LDFLAGS_@(VRAPI) += -L$(VRAPI_LIB_PATH)
LDFLAGS_@(VRAPI) += -lvrapi
LIBS_@(AUDIO) += $(ROOT)/build/openal/libopenal.*so*
LIBS_@(PHYSICS) += $(ROOT)/build/ode/libode.so
LIBS_@(VRAPI) += $(VRAPI_LIB_PATH)/libvrapi.so
endif
endif

View File

@ -19,6 +19,7 @@ StringEntry HeadsetDrivers[] = {
[DRIVER_OCULUS_MOBILE] = ENTRY("oculusmobile"),
[DRIVER_OPENVR] = ENTRY("openvr"),
[DRIVER_OPENXR] = ENTRY("openxr"),
[DRIVER_VRAPI] = ENTRY("vrapi"),
[DRIVER_WEBVR] = ENTRY("webvr"),
[DRIVER_WEBXR] = ENTRY("webxr"),
{ 0 }

View File

@ -48,7 +48,7 @@ typedef enum {
BUTTON_RELEASED
} ButtonAction;
typedef void (*windowCloseCallback)(void);
typedef void (*quitCallback)(void);
typedef void (*windowFocusCallback)(bool focused);
typedef void (*windowResizeCallback)(int width, int height);
typedef void (*mouseButtonCallback)(MouseButton button, ButtonAction action);
@ -69,7 +69,7 @@ void lovrPlatformGetFramebufferSize(int* width, int* height);
void lovrPlatformSetSwapInterval(int interval);
void lovrPlatformSwapBuffers(void);
void* lovrPlatformGetProcAddress(const char* function);
void lovrPlatformOnWindowClose(windowCloseCallback callback);
void lovrPlatformOnQuitRequest(quitCallback callback);
void lovrPlatformOnWindowFocus(windowFocusCallback callback);
void lovrPlatformOnWindowResize(windowResizeCallback callback);
void lovrPlatformOnMouseButton(mouseButtonCallback callback);

View File

@ -7,28 +7,48 @@
#include <android_native_app_glue.h>
#include <android/log.h>
// The activity is considered ready if it's resumed and there's an active window. This is just an
// artifact of how Oculus' app model works and could be the wrong abstraction, feel free to change.
typedef void (*activeCallback)(bool active);
#ifndef LOVR_USE_OCULUS_MOBILE
static struct {
struct android_app* app;
ANativeWindow* window;
bool resumed;
JNIEnv* jni;
EGLDisplay display;
EGLContext context;
EGLSurface surface;
activeCallback onActive;
quitCallback onQuit;
} state;
static JavaVM* lovrJavaVM;
static JNIEnv* lovrJNIEnv;
int main(int argc, char** argv);
// Android lifecycle handler.  The app is considered "active" when it is both
// resumed and has a window; the onActive callback fires only on transitions
// between active and inactive.
static void onAppCmd(struct android_app* app, int32_t cmd) {
bool wasActive = state.window && state.resumed;
switch (cmd) {
case APP_CMD_RESUME: state.resumed = true; break;
case APP_CMD_PAUSE: state.resumed = false; break;
case APP_CMD_INIT_WINDOW: state.window = app->window; break;
case APP_CMD_TERM_WINDOW: state.window = NULL; break;
default: break;
}
bool active = state.window && state.resumed;
if (state.onActive && wasActive != active) {
state.onActive(active);
}
}
// Entry point invoked by android_native_app_glue on its own thread.  Attaches
// the thread to the JVM (required before any JNI use), installs the lifecycle
// handler, runs lovr's normal main(), and detaches on exit.
// Fixed: the span contained leftover duplicate attach/detach lines referencing
// stale lovrJavaVM/lovrJNIEnv globals; only the state.jni path is kept.
void android_main(struct android_app* app) {
  state.app = app;
  (*app->activity->vm)->AttachCurrentThread(app->activity->vm, &state.jni, NULL);
  app->onAppCmd = onAppCmd;
  main(0, NULL);
  (*app->activity->vm)->DetachCurrentThread(app->activity->vm);
}
#endif
@ -42,8 +62,8 @@ void lovrPlatformDestroy() {
if (state.surface) eglDestroySurface(state.display, state.surface);
if (state.context) eglDestroyContext(state.display, state.context);
if (state.display) eglTerminate(state.display);
memset(&state, 0, sizeof(state));
#endif
memset(&state, 0, sizeof(state));
}
const char* lovrPlatformGetName() {
@ -79,7 +99,16 @@ void lovrPlatformSleep(double seconds) {
#endif
// Drains pending Android events.  When the app is active we return immediately
// once the queue is empty (timeout 0); when inactive we block (timeout -1) so
// the app doesn't spin while paused.
// Fixed: the original passed `active ? 0 : 0`, which busy-polls while paused.
void lovrPlatformPollEvents() {
#ifndef LOVR_USE_OCULUS_MOBILE
  int events;
  struct android_poll_source* source;
  bool active = state.window && state.resumed;
  while (ALooper_pollAll(active ? 0 : -1, NULL, &events, (void**) &source) >= 0) {
    if (source) {
      source->process(state.app, source);
    }
  }
#endif
}
void lovrPlatformOpenConsole() {
@ -197,8 +226,8 @@ void* lovrPlatformGetProcAddress(const char* function) {
return (void*) eglGetProcAddress(function);
}
void lovrPlatformOnWindowClose(windowCloseCallback callback) {
//
void lovrPlatformOnQuitRequest(quitCallback callback) {
state.onQuit = callback;
}
void lovrPlatformOnWindowFocus(windowFocusCallback callback) {
@ -232,3 +261,31 @@ bool lovrPlatformIsMouseDown(MouseButton button) {
bool lovrPlatformIsKeyDown(KeyCode key) {
return false;
}
// Registers the callback fired when the app transitions between active
// (resumed with a window) and inactive; used by the vrapi headset driver to
// enter/leave VR mode.
void lovrPlatformOnActive(activeCallback callback) {
state.onActive = callback;
}
// Accessors exposing Android/EGL state to other modules (notably headset_vrapi.c).
struct ANativeActivity* lovrPlatformGetActivity() {
return state.app->activity;
}
ANativeWindow* lovrPlatformGetNativeWindow() {
return state.window;
}
JNIEnv* lovrPlatformGetJNI() {
return state.jni;
}
EGLDisplay lovrPlatformGetEGLDisplay() {
return state.display;
}
EGLContext lovrPlatformGetEGLContext() {
return state.context;
}
EGLSurface lovrPlatformGetEGLSurface() {
return state.surface;
}

View File

@ -161,7 +161,7 @@ static void gammaCorrect(Color* color) {
color->b = lovrMathGammaToLinear(color->b);
}
// Pushes a quit event when the platform requests shutdown.
// Fixed: the span contained the stale signature of the renamed onCloseWindow
// interleaved with the new function; only the post-rename version is kept.
static void onQuitRequest(void) {
  lovrEventPush((Event) { .type = EVENT_QUIT, .data.quit = { .exitCode = 0 } });
}
@ -223,7 +223,7 @@ void lovrGraphicsPresent() {
void lovrGraphicsCreateWindow(WindowFlags* flags) {
lovrAssert(!state.initialized, "Window is already created");
lovrAssert(lovrPlatformCreateWindow(flags), "Could not create window");
lovrPlatformOnWindowClose(onCloseWindow);
lovrPlatformOnQuitRequest(onQuitRequest);
lovrPlatformOnWindowResize(onResizeWindow);
lovrPlatformGetFramebufferSize(&state.width, &state.height);
lovrGpuInit(lovrPlatformGetProcAddress);

View File

@ -33,6 +33,9 @@ bool lovrHeadsetInit(HeadsetDriver* drivers, size_t count, float offset, uint32_
#ifdef LOVR_USE_OPENXR
case DRIVER_OPENXR: interface = &lovrHeadsetOpenXRDriver; break;
#endif
#ifdef LOVR_USE_VRAPI
case DRIVER_VRAPI: interface = &lovrHeadsetVrApiDriver; break;
#endif
#ifdef LOVR_USE_WEBVR
case DRIVER_WEBVR: interface = &lovrHeadsetWebVRDriver; break;
#endif

View File

@ -16,6 +16,7 @@ typedef enum {
DRIVER_OCULUS_MOBILE,
DRIVER_OPENVR,
DRIVER_OPENXR,
DRIVER_VRAPI,
DRIVER_WEBVR,
DRIVER_WEBXR
} HeadsetDriver;
@ -116,6 +117,7 @@ typedef struct HeadsetInterface {
extern HeadsetInterface lovrHeadsetOculusDriver;
extern HeadsetInterface lovrHeadsetOpenVRDriver;
extern HeadsetInterface lovrHeadsetOpenXRDriver;
extern HeadsetInterface lovrHeadsetVrApiDriver;
extern HeadsetInterface lovrHeadsetWebVRDriver;
extern HeadsetInterface lovrHeadsetWebXRDriver;
extern HeadsetInterface lovrHeadsetDesktopDriver;

View File

@ -0,0 +1,456 @@
#include "headset/headset.h"
#include "graphics/canvas.h"
#include "graphics/graphics.h"
#include "core/maf.h"
#include "core/os.h"
#include "core/ref.h"
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <EGL/egl.h>
#include <android_native_app_glue.h>
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wc11-extensions"
#pragma clang diagnostic ignored "-Wgnu-empty-initializer"
#pragma clang diagnostic ignored "-Wpedantic"
#include <VrApi.h>
#include <VrApi_Helpers.h>
#include <VrApi_Input.h>
#pragma clang diagnostic pop
#define GL_SRGB8_ALPHA8 0x8C43
// Private platform functions
void lovrPlatformOnActive(void (*callback)(bool active));
JNIEnv* lovrPlatformGetJNI(void);
struct ANativeActivity* lovrPlatformGetActivity(void);
ANativeWindow* lovrPlatformGetNativeWindow(void);
EGLDisplay lovrPlatformGetEGLDisplay(void);
EGLContext lovrPlatformGetEGLContext(void);
// Global driver state; zeroed in vrapi_destroy.
static struct {
ovrJava java; // JNI/activity info passed to VrApi calls
ovrMobile* session; // Non-NULL only while in VR mode (see onActive)
ovrDeviceType deviceType; // Queried once at init (Go vs Quest input mapping)
uint64_t frameIndex; // Monotonic frame counter used for prediction and swapchain index
float offset; // Vertical offset added to tracked positions
uint32_t msaa;
ovrVector3f* rawBoundaryPoints; // Scratch buffer for vrapi_GetBoundaryGeometry
float* boundaryPoints; // Boundary points repacked as 4-float tuples
uint32_t boundaryPointCount; // Capacity of the two boundary buffers, in points
ovrTextureSwapChain* swapchain; // Created lazily on first renderTo
Canvas* canvases[3]; // One canvas per swapchain image
ovrInputTrackedRemoteCapabilities controllerInfo[2]; // Indexed by hand: [0]=left, [1]=right
ovrInputStateTrackedRemote controllers[2]; // Latest input state per hand
ovrInputStateHand hands[2];
} state;
// Platform activity callback: enters VR mode when the app becomes active and
// leaves it when the app is backgrounded.
static void onActive(bool active) {
  if (active && state.session == NULL) {
    // Becoming active with no session: configure and enter VR mode.
    ovrModeParms params = vrapi_DefaultModeParms(&state.java);
    params.Flags |= VRAPI_MODE_FLAG_NATIVE_WINDOW;
    params.Flags |= VRAPI_MODE_FLAG_FRONT_BUFFER_SRGB;
    params.Flags &= ~VRAPI_MODE_FLAG_RESET_WINDOW_FULLSCREEN;
    params.Display = (size_t) lovrPlatformGetEGLDisplay();
    params.WindowSurface = (size_t) lovrPlatformGetNativeWindow();
    params.ShareContext = (size_t) lovrPlatformGetEGLContext();
    state.frameIndex = 0;
    state.session = vrapi_EnterVrMode(&params);
  } else if (!active && state.session != NULL) {
    // Backgrounded while in VR: end the session.
    vrapi_LeaveVrMode(state.session);
    state.session = NULL;
  }
}
// Initializes the VrApi library and records device info.  VR mode itself is
// entered later, in onActive, once the activity is resumed and has a window.
// offset: vertical offset added to tracked positions (see vrapi_getPose).
static bool vrapi_init(float offset, uint32_t msaa) {
ANativeActivity* activity = lovrPlatformGetActivity();
state.java.Vm = activity->vm;
state.java.ActivityObject = activity->clazz;
state.java.Env = lovrPlatformGetJNI();
state.offset = offset;
state.msaa = msaa;
const ovrInitParms config = vrapi_DefaultInitParms(&state.java);
if (vrapi_Initialize(&config) != VRAPI_INITIALIZE_SUCCESS) {
return false;
}
state.deviceType = vrapi_GetSystemPropertyInt(&state.java, VRAPI_SYS_PROP_DEVICE_TYPE);
lovrPlatformOnActive(onActive);
return true;
}
// Tears down the driver: destroys the swapchain and canvases, shuts VrApi down,
// and zeroes all state.
static void vrapi_destroy() {
  // The swapchain is created lazily in renderTo, so it may still be NULL here
  // if no frame was ever rendered; don't hand VrApi a NULL handle.
  if (state.swapchain) {
    vrapi_DestroyTextureSwapChain(state.swapchain);
  }
  vrapi_Shutdown();
  for (uint32_t i = 0; i < 3; i++) {
    lovrRelease(Canvas, state.canvases[i]);
  }
  memset(&state, 0, sizeof(state));
}
// Writes the human-readable headset name into buffer (always NUL-terminated,
// truncated if necessary).  Returns false for unknown devices or an empty buffer.
static bool vrapi_getName(char* buffer, size_t length) {
  if (length == 0) {
    return false; // buffer[length - 1] below would underflow
  }
  switch (state.deviceType) {
    case VRAPI_DEVICE_TYPE_OCULUSGO: strncpy(buffer, "Oculus Go", length - 1); break;
    case VRAPI_DEVICE_TYPE_OCULUSQUEST: strncpy(buffer, "Oculus Quest", length - 1); break;
    default: return false;
  }
  buffer[length - 1] = '\0';
  return true;
}
// Origin is floor-relative only when the tracking space is local-floor.
static HeadsetOrigin vrapi_getOriginType(void) {
return vrapi_GetTrackingSpace(state.session) == VRAPI_TRACKING_SPACE_LOCAL_FLOOR ? ORIGIN_FLOOR : ORIGIN_HEAD;
}
// Returns the per-eye render target size suggested by VrApi.
static void vrapi_getDisplayDimensions(uint32_t* width, uint32_t* height) {
*width = (uint32_t) vrapi_GetSystemPropertyInt(&state.java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_WIDTH);
*height = (uint32_t) vrapi_GetSystemPropertyInt(&state.java, VRAPI_SYS_PROP_SUGGESTED_EYE_TEXTURE_HEIGHT);
}
// Display refresh rate, as reported by the system.
static float vrapi_getDisplayFrequency() {
return vrapi_GetSystemPropertyFloat(&state.java, VRAPI_SYS_PROP_DISPLAY_REFRESH_RATE);
}
// This driver does not provide a hidden-area mask, so report an empty one.
static const float* vrapi_getDisplayMask(uint32_t* count) {
  const float* mask = NULL;
  *count = 0;
  return mask;
}
// Predicted display time for the current frame index, in seconds.
static double vrapi_getDisplayTime() {
return vrapi_GetPredictedDisplayTime(state.session, state.frameIndex);
}
// Two views: one per eye.
static uint32_t vrapi_getViewCount() {
return 2;
}
// Writes the pose of the given eye view (0 = left, 1 = right).  Returns true
// only when both position and orientation tracking are valid.
static bool vrapi_getViewPose(uint32_t view, float* position, float* orientation) {
if (view >= 2) return false;
ovrTracking2 tracking = vrapi_GetPredictedTracking2(state.session, vrapi_getDisplayTime());
float transform[16];
// Invert the eye's view matrix to recover the eye's pose in tracking space.
mat4_init(transform, (float*) &tracking.Eye[view].ViewMatrix);
mat4_invert(transform);
mat4_getPosition(transform, position);
mat4_getOrientation(transform, orientation);
uint32_t mask = VRAPI_TRACKING_STATUS_POSITION_VALID | VRAPI_TRACKING_STATUS_ORIENTATION_VALID;
return (tracking.Status & mask) == mask;
}
// Extracts the per-eye FOV half-angles from the projection matrix.
static bool vrapi_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down) {
if (view >= 2) return false;
ovrTracking2 tracking = vrapi_GetPredictedTracking2(state.session, vrapi_getDisplayTime());
ovrMatrix4f_ExtractFov(&tracking.Eye[view].ProjectionMatrix, left, right, up, down);
uint32_t mask = VRAPI_TRACKING_STATUS_POSITION_VALID | VRAPI_TRACKING_STATUS_ORIENTATION_VALID;
return (tracking.Status & mask) == mask;
}
// Clip planes are not configurable through this driver.
static void vrapi_getClipDistance(float* clipNear, float* clipFar) {
// Unsupported
}
static void vrapi_setClipDistance(float clipNear, float clipFar) {
// Unsupported
}
// Play area size from the boundary's oriented bounding box; reports 0x0 when
// the query fails.  The scale is doubled per axis — presumably it is a
// half-extent; confirm against the VrApi docs.
static void vrapi_getBoundsDimensions(float* width, float* depth) {
ovrPosef pose;
ovrVector3f scale;
if (vrapi_GetBoundaryOrientedBoundingBox(state.session, &pose, &scale) == ovrSuccess) {
*width = scale.x * 2.f;
*depth = scale.z * 2.f;
} else {
*width = 0.f;
*depth = 0.f;
}
}
// Returns boundary points repacked as 4-float tuples (x, y, z, untouched 4th
// component), cached in state.  Returns NULL on failure.
// NOTE(review): *count is the point count reported by VrApi — confirm callers
// expect points rather than floats.
static const float* vrapi_getBoundsGeometry(uint32_t* count) {
// First call queries only the point count.
if (vrapi_GetBoundaryGeometry(state.session, 0, count, NULL) != ovrSuccess) {
return NULL;
}
// Grow the scratch buffers if the boundary grew (they never shrink; any
// realloc failure aborts via lovrAssert).
if (*count > state.boundaryPointCount) {
state.boundaryPointCount = *count;
state.boundaryPoints = realloc(state.boundaryPoints, 4 * *count * sizeof(float));
state.rawBoundaryPoints = realloc(state.rawBoundaryPoints, *count * sizeof(ovrVector3f));
lovrAssert(state.boundaryPoints && state.rawBoundaryPoints, "Out of memory");
}
if (vrapi_GetBoundaryGeometry(state.session, state.boundaryPointCount, count, state.rawBoundaryPoints) != ovrSuccess) {
return NULL;
}
// Repack vec3 -> vec4.
for (uint32_t i = 0; i < *count; i++) {
state.boundaryPoints[4 * i + 0] = state.rawBoundaryPoints[i].x;
state.boundaryPoints[4 * i + 1] = state.rawBoundaryPoints[i].y;
state.boundaryPoints[4 * i + 2] = state.rawBoundaryPoints[i].z;
}
return state.boundaryPoints;
}
// Fetches predicted tracking for a device.  The head always succeeds; a hand
// succeeds only when a tracked remote is currently enumerated for that hand.
static bool getTracking(Device device, ovrTracking* tracking) {
if (device == DEVICE_HEAD) {
*tracking = vrapi_GetPredictedTracking(state.session, vrapi_getDisplayTime());
return true;
} else if (device == DEVICE_HAND_LEFT || device == DEVICE_HAND_RIGHT) {
ovrInputCapabilityHeader* header = &state.controllerInfo[device - DEVICE_HAND_LEFT].Header;
if (header->Type == ovrControllerType_TrackedRemote) {
return vrapi_GetInputTrackingState(state.session, header->DeviceID, vrapi_getDisplayTime(), tracking) == ovrSuccess;
}
}
return false;
}
// Device pose in tracking space; y is raised by state.offset (the configured
// floor offset).  Returns true only when tracking is fully valid.
static bool vrapi_getPose(Device device, float* position, float* orientation) {
ovrTracking tracking;
if (!getTracking(device, &tracking)) {
return false;
}
ovrPosef* pose = &tracking.HeadPose.Pose;
vec3_set(position, pose->Position.x, pose->Position.y + state.offset, pose->Position.z);
quat_init(orientation, &pose->Orientation.x);
uint32_t mask = VRAPI_TRACKING_STATUS_POSITION_VALID | VRAPI_TRACKING_STATUS_ORIENTATION_VALID;
return (tracking.Status & mask) == mask;
}
// Linear and angular velocity of a device, straight from the tracking sample.
static bool vrapi_getVelocity(Device device, float* velocity, float* angularVelocity) {
ovrTracking tracking;
if (!getTracking(device, &tracking)) {
return false;
}
ovrVector3f* linear = &tracking.HeadPose.LinearVelocity;
ovrVector3f* angular = &tracking.HeadPose.AngularVelocity;
vec3_set(velocity, linear->x, linear->y, linear->z);
vec3_set(angularVelocity, angular->x, angular->y, angular->z);
uint32_t mask = VRAPI_TRACKING_STATUS_POSITION_VALID | VRAPI_TRACKING_STATUS_ORIENTATION_VALID;
return (tracking.Status & mask) == mask;
}
// Maps lovr buttons to VrApi button masks per device type.  *down is written
// only when a mapping exists (indicated by the return value).
// NOTE(review): the `changed` out-parameter is never written — confirm callers
// tolerate that.
static bool vrapi_isDown(Device device, DeviceButton button, bool* down, bool* changed) {
// The headset proximity sensor is exposed as a button.
if (device == DEVICE_HEAD && button == BUTTON_PROXIMITY) {
*down = vrapi_GetSystemStatusInt(&state.java, VRAPI_SYS_STATUS_MOUNTED);
return true;
}
if (device != DEVICE_HAND_LEFT && device != DEVICE_HAND_RIGHT) {
return false;
}
if (state.controllerInfo[device - DEVICE_HAND_LEFT].Header.Type != ovrControllerType_TrackedRemote) {
return false;
}
ovrInputStateTrackedRemote* input = &state.controllers[device - DEVICE_HAND_LEFT];
if (state.deviceType == VRAPI_DEVICE_TYPE_OCULUSGO) {
switch (button) {
case BUTTON_TRIGGER: *down = input->Buttons & ovrButton_Trigger; return true;
case BUTTON_TOUCHPAD: *down = input->Buttons & ovrButton_Enter; return true;
case BUTTON_MENU: *down = input->Buttons & ovrButton_Back; return true;
default: return false;
}
} else if (state.deviceType == VRAPI_DEVICE_TYPE_OCULUSQUEST) {
switch (button) {
case BUTTON_TRIGGER: *down = input->Buttons & ovrButton_Trigger; return true;
case BUTTON_THUMBSTICK: *down = input->Buttons & ovrButton_Joystick; return true;
case BUTTON_GRIP: *down = input->Buttons & ovrButton_GripTrigger; return true;
case BUTTON_MENU: *down = input->Buttons & ovrButton_Enter; return true;
case BUTTON_A: *down = input->Buttons & ovrButton_A; return true;
case BUTTON_B: *down = input->Buttons & ovrButton_B; return true;
case BUTTON_X: *down = input->Buttons & ovrButton_X; return true;
case BUTTON_Y: *down = input->Buttons & ovrButton_Y; return true;
default: return false;
}
}
return false;
}
// Maps lovr buttons to VrApi touch masks per device type.  *touched is written
// only when a mapping exists (indicated by the return value).
static bool vrapi_isTouched(Device device, DeviceButton button, bool* touched) {
  if (device != DEVICE_HAND_LEFT && device != DEVICE_HAND_RIGHT) {
    return false;
  }
  if (state.controllerInfo[device - DEVICE_HAND_LEFT].Header.Type != ovrControllerType_TrackedRemote) {
    return false;
  }
  ovrInputStateTrackedRemote* input = &state.controllers[device - DEVICE_HAND_LEFT];
  if (state.deviceType == VRAPI_DEVICE_TYPE_OCULUSGO) {
    switch (button) {
      // Fixed: the original did `return input->Touches & ovrTouch_TrackPad;`,
      // returning the mask as the success flag instead of storing it in *touched.
      case BUTTON_TOUCHPAD: *touched = input->Touches & ovrTouch_TrackPad; return true;
      default: return false;
    }
  } else if (state.deviceType == VRAPI_DEVICE_TYPE_OCULUSQUEST) {
    switch (button) {
      case BUTTON_TRIGGER: *touched = input->Touches & ovrTouch_IndexTrigger; return true;
      case BUTTON_THUMBSTICK: *touched = input->Touches & ovrTouch_Joystick; return true;
      case BUTTON_A: *touched = input->Touches & ovrTouch_A; return true;
      case BUTTON_B: *touched = input->Touches & ovrTouch_B; return true;
      case BUTTON_X: *touched = input->Touches & ovrTouch_X; return true;
      case BUTTON_Y: *touched = input->Touches & ovrTouch_Y; return true;
      default: return false;
    }
  }
  return false;
}
// Reads an analog axis into value (1 or 2 components depending on the axis).
// Returns true when the axis exists on the current device type.
static bool vrapi_getAxis(Device device, DeviceAxis axis, float* value) {
  if (device != DEVICE_HAND_LEFT && device != DEVICE_HAND_RIGHT) {
    return false;
  }
  ovrInputStateTrackedRemote* input = &state.controllers[device - DEVICE_HAND_LEFT];
  if (state.deviceType == VRAPI_DEVICE_TYPE_OCULUSGO) {
    switch (axis) {
      case AXIS_TOUCHPAD:
        // Remap the trackpad's pixel coordinates (centered at 160) to [-1, 1].
        value[0] = (input->TrackpadPosition.x - 160.f) / 160.f;
        value[1] = (input->TrackpadPosition.y - 160.f) / 160.f;
        return true;
      case AXIS_TRIGGER: value[0] = (input->Buttons & ovrButton_Trigger) ? 1.f : 0.f; return true;
      default: return false;
    }
  } else if (state.deviceType == VRAPI_DEVICE_TYPE_OCULUSQUEST) {
    switch (axis) {
      case AXIS_THUMBSTICK:
        // NOTE(review): reads TrackpadPosition for the Quest thumbstick; VrApi
        // also exposes a normalized Joystick field — confirm which is intended.
        value[0] = input->TrackpadPosition.x;
        value[1] = input->TrackpadPosition.y;
        break;
      case AXIS_TRIGGER: value[0] = input->IndexTrigger; break;
      case AXIS_GRIP: value[0] = input->GripTrigger; break;
      default: return false;
    }
    // Fixed: the original fell through to the final `return false` after a
    // successful Quest axis read, so every Quest axis reported failure.
    return true;
  }
  return false;
}
// Haptics are not implemented by this driver.
static bool vrapi_vibrate(Device device, float strength, float duration, float frequency) {
return false;
}
// Controller models are not implemented by this driver.
static struct ModelData* vrapi_newModelData(Device device) {
return NULL;
}
static void vrapi_renderTo(void (*callback)(void*), void* userdata) {
if (!state.swapchain) {
CanvasFlags flags = {
.depth.enabled = true,
.depth.readable = false,
.depth.format = FORMAT_D24S8,
.msaa = state.msaa,
.stereo = true,
.mipmaps = false
};
uint32_t width, height;
vrapi_getDisplayDimensions(&width, &height);
vrapi_CreateTextureSwapChain3(VRAPI_TEXTURE_TYPE_2D_ARRAY, GL_SRGB8_ALPHA8, width, height, 1, 3);
for (uint32_t i = 0; i < 3; i++) {
state.canvases[i] = lovrCanvasCreate(width, height, flags);
uint32_t handle = vrapi_GetTextureSwapChainHandle(state.swapchain, i);
Texture* texture = lovrTextureCreateFromHandle(handle, TEXTURE_ARRAY, 2);
lovrCanvasSetAttachments(state.canvases[i], &(Attachment) { .texture = texture }, 1);
lovrRelease(Texture, texture);
}
}
double displayTime = vrapi_getDisplayTime();
ovrTracking2 tracking = vrapi_GetPredictedTracking2(state.session, displayTime);
ovrLayerProjection2 layer = vrapi_DefaultLayerProjection2();
layer.HeadPose = tracking.HeadPose;
Camera camera;
camera.stereo = true;
camera.canvas = state.canvases[state.frameIndex % 3];
for (uint32_t i = 0; i < 2; i++) {
ovrMatrix4f* viewMatrix = &tracking.Eye[i].ViewMatrix;
ovrMatrix4f* projection = &tracking.Eye[i].ProjectionMatrix;
layer.Textures[i].ColorSwapChain = state.swapchain;
layer.Textures[i].SwapChainIndex = state.frameIndex % 3;
layer.Textures[i].TexCoordsFromTanAngles = ovrMatrix4f_TanAngleMatrixFromProjection(projection);
mat4_init(camera.viewMatrix[i], &viewMatrix->M[0][0]);
mat4_init(camera.projection[i], &projection->M[0][0]);
mat4_transpose(camera.viewMatrix[i]);
mat4_transpose(camera.projection[i]);
}
lovrGraphicsSetCamera(&camera, true);
callback(userdata);
lovrGraphicsSetCamera(NULL, false);
ovrSubmitFrameDescription2 frame = {
.SwapInterval = 1,
.FrameIndex = state.frameIndex,
.DisplayTime = displayTime,
.LayerCount = 1,
.Layers = (const ovrLayerHeader2*[]) { &layer.Header }
};
vrapi_SubmitFrame2(state.session, &frame);
state.frameIndex++;
}
// Re-enumerates input devices each frame, caching capabilities and current
// input state per hand slot (0 = left, 1 = right).
static void vrapi_update(float dt) {
  state.controllerInfo[0].Header.Type = ovrControllerType_None;
  state.controllerInfo[1].Header.Type = ovrControllerType_None;
  ovrInputCapabilityHeader header;
  for (uint32_t i = 0; vrapi_EnumerateInputDevices(state.session, i, &header) == ovrSuccess; i++) {
    if (header.Type == ovrControllerType_TrackedRemote) {
      ovrInputTrackedRemoteCapabilities info;
      info.Header = header;
      vrapi_GetInputDeviceCapabilities(state.session, &info.Header);
      // Fixed: the original indexed controllerInfo/controllers by the raw
      // Device enum value (DEVICE_HAND_LEFT/RIGHT); every other use in this
      // file indexes by hand slot (device - DEVICE_HAND_LEFT), so the raw
      // value is out of bounds unless DEVICE_HAND_LEFT happens to be 0.
      uint32_t index = (info.ControllerCapabilities & ovrControllerCaps_LeftHand) ? 0 : 1;
      state.controllerInfo[index] = info;
      vrapi_GetCurrentInputState(state.session, header.DeviceID, &state.controllers[index].Header);
    } else if (header.Type == ovrControllerType_Hand) {
      ovrInputHandCapabilities info;
      info.Header = header;
      vrapi_GetInputDeviceCapabilities(state.session, &info.Header);
      uint32_t index = (info.HandCapabilities & ovrHandCaps_LeftHand) ? 0 : 1;
      state.controllerInfo[index].Header.Type = header.Type;
      vrapi_GetCurrentInputState(state.session, header.DeviceID, &state.controllers[index].Header);
    }
  }
}
// Headset driver vtable for Oculus mobile devices via VrApi.
HeadsetInterface lovrHeadsetVrApiDriver = {
.driverType = DRIVER_VRAPI,
.init = vrapi_init,
.destroy = vrapi_destroy,
.getName = vrapi_getName,
.getOriginType = vrapi_getOriginType,
.getDisplayDimensions = vrapi_getDisplayDimensions,
.getDisplayFrequency = vrapi_getDisplayFrequency,
.getDisplayMask = vrapi_getDisplayMask,
.getDisplayTime = vrapi_getDisplayTime,
.getViewCount = vrapi_getViewCount,
.getViewPose = vrapi_getViewPose,
.getViewAngles = vrapi_getViewAngles,
.getClipDistance = vrapi_getClipDistance,
.setClipDistance = vrapi_setClipDistance,
.getBoundsDimensions = vrapi_getBoundsDimensions,
.getBoundsGeometry = vrapi_getBoundsGeometry,
.getPose = vrapi_getPose,
.getVelocity = vrapi_getVelocity,
.isDown = vrapi_isDown,
.isTouched = vrapi_isTouched,
.getAxis = vrapi_getAxis,
.vibrate = vrapi_vibrate,
.newModelData = vrapi_newModelData,
.renderTo = vrapi_renderTo,
.update = vrapi_update
};

View File

@ -5,6 +5,6 @@ import android.app.NativeActivity;
// Loads native libraries before NativeActivity starts; libvrapi is loaded
// explicitly so its symbols are available when liblovr loads.
public class LoadLibraries extends NativeActivity {
static {
System.loadLibrary("lovr");
System.loadLibrary("vrapi");
}
}

View File

@ -96,7 +96,7 @@ function lovr.boot()
timer = true
},
headset = {
drivers = { 'leap', 'openxr', 'oculus', 'oculusmobile', 'openvr', 'webxr', 'webvr', 'desktop' },
drivers = { 'leap', 'openxr', 'oculus', 'vrapi', 'oculusmobile', 'openvr', 'webxr', 'webvr', 'desktop' },
offset = 1.7,
msaa = 4
},