2019-04-11 20:47:25 +00:00
|
|
|
#include "headset/headset.h"
#include "event/event.h"
#include "filesystem/filesystem.h"
#include "graphics/graphics.h"
#include "graphics/canvas.h"
#include "graphics/texture.h"
#include "core/ref.h"
#include "core/util.h"
#include <stdlib.h>
#include <string.h>
#include <math.h>
|
2020-08-19 03:09:06 +00:00
|
|
|
#if defined(_WIN32)
|
2020-08-24 08:04:06 +00:00
|
|
|
#define XR_USE_PLATFORM_WIN32
|
|
|
|
#include <windows.h>
|
2020-08-19 03:09:06 +00:00
|
|
|
#elif defined(__ANDROID__)
|
2020-08-24 08:04:06 +00:00
|
|
|
#define XR_USE_PLATFORM_ANDROID
|
|
|
|
#include <EGL/egl.h>
|
|
|
|
#include <jni.h>
|
|
|
|
#endif
|
|
|
|
#if defined(LOVR_GL)
|
|
|
|
#define XR_USE_GRAPHICS_API_OPENGL
|
|
|
|
#define GRAPHICS_EXTENSION "XR_KHR_opengl_enable"
|
|
|
|
#elif defined(LOVR_GLES)
|
|
|
|
#define XR_USE_GRAPHICS_API_OPENGLES
|
|
|
|
#define GRAPHICS_EXTENSION "XR_KHR_opengl_es_enable"
|
2019-04-11 20:47:25 +00:00
|
|
|
#endif
|
|
|
|
#include <openxr/openxr.h>
|
|
|
|
#include <openxr/openxr_platform.h>
|
2020-08-22 21:40:52 +00:00
|
|
|
#include "resources/openxr_actions.h"
|
2019-04-11 20:47:25 +00:00
|
|
|
|
|
|
|
#define XR(f) handleResult(f, __FILE__, __LINE__)
|
2019-08-04 02:06:46 +00:00
|
|
|
#define XR_INIT(f) if (XR_FAILED(f)) return openxr_destroy(), false;
|
2019-04-11 20:47:25 +00:00
|
|
|
#define SESSION_VISIBLE(s) (s == XR_SESSION_STATE_VISIBLE || s == XR_SESSION_STATE_FOCUSED)
|
2019-08-04 02:06:46 +00:00
|
|
|
#define SESSION_SYNCHRONIZED(s) (s == XR_SESSION_STATE_SYNCHRONIZED || SESSION_VISIBLE(s))
|
2020-08-19 03:09:06 +00:00
|
|
|
#define GL_SRGB8_ALPHA8 0x8C43
|
2019-04-11 20:47:25 +00:00
|
|
|
#define MAX_IMAGES 4
|
|
|
|
|
2020-08-22 21:40:52 +00:00
|
|
|
#if defined(_WIN32)
|
|
|
|
HANDLE lovrPlatformGetWindow(void);
|
|
|
|
HGLRC lovrPlatformGetContext(void);
|
|
|
|
#elif defined(__ANDROID__)
|
|
|
|
EGLDisplay lovrPlatformGetEGLDisplay();
|
|
|
|
EGLContext lovrPlatformGetEGLContext();
|
|
|
|
EGLConfig lovrPlatformGetEGLConfig();
|
|
|
|
#endif
|
2019-04-11 20:47:25 +00:00
|
|
|
|
|
|
|
// Module-wide driver state.  Static storage, so everything starts zeroed;
// openxr_destroy memsets it back to zero so init can run again.
static struct {
  XrInstance instance;                        // The OpenXR instance (one per process)
  XrSystemId system;                          // The HMD system picked at init
  XrSession session;                          // The rendering session bound to the GL context
  XrSessionState sessionState;                // Last state reported by XrEventDataSessionStateChanged
  XrSpace referenceSpace;                     // World space: STAGE if available, else LOCAL
  XrReferenceSpaceType referenceSpaceType;    // Which of the two referenceSpace ended up being
  XrSpace spaces[MAX_DEVICES];                // Per-device tracking spaces (head + hands)
  XrSwapchain swapchain;                      // Single side-by-side color swapchain
  XrCompositionLayerProjection layers[1];     // The one projection layer submitted each frame
  XrCompositionLayerProjectionView layerViews[2]; // Left/right halves of the swapchain
  XrFrameState frameState;                    // Filled by xrWaitFrame; holds predictedDisplayTime
  Canvas* canvas;                             // Lazily-created stereo render target
  Texture* textures[MAX_IMAGES];              // Wrappers around the swapchain's GL textures
  uint32_t imageCount;                        // How many swapchain images the runtime gave us
  uint32_t msaa;                              // Recommended sample count from the runtime
  uint32_t width;                             // Per-eye render width
  uint32_t height;                            // Per-eye render height
  float clipNear;                             // Near clip plane, settable from Lua
  float clipFar;                              // Far clip plane, settable from Lua
  XrActionSet actionSet;                      // The single "default" action set
  XrAction actions[MAX_ACTIONS];              // Actions declared in resources/openxr_actions.h
  XrPath actionFilters[2];                    // Subaction paths: [0]=left hand, [1]=right hand
} state;
|
|
|
|
|
2020-08-22 21:40:52 +00:00
|
|
|
// Backing implementation for the XR() macro: success codes pass through
// untouched, failures abort with a human-readable message and location.
static XrResult handleResult(XrResult result, const char* file, int line) {
  if (!XR_FAILED(result)) {
    return result;
  }

  char buffer[XR_MAX_RESULT_STRING_SIZE];
  xrResultToString(XR_NULL_HANDLE, result, buffer);
  lovrThrow("OpenXR Error: %s at %s:%d", buffer, file, line);
  return result; // Unreachable if lovrThrow doesn't return, but keeps the signature honest
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static void openxr_destroy();
|
2019-04-11 20:47:25 +00:00
|
|
|
|
2019-05-21 03:35:07 +00:00
|
|
|
static bool openxr_init(float offset, uint32_t msaa) {
|
2019-04-11 20:47:25 +00:00
|
|
|
|
|
|
|
{ // Instance
|
|
|
|
XrInstanceCreateInfo info = {
|
|
|
|
.type = XR_TYPE_INSTANCE_CREATE_INFO,
|
|
|
|
.applicationInfo.engineName = "LÖVR",
|
2019-08-04 02:06:46 +00:00
|
|
|
.applicationInfo.engineVersion = ((LOVR_VERSION_MAJOR & 0xff) << 24) + ((LOVR_VERSION_MINOR & 0xff) << 16) + (LOVR_VERSION_PATCH & 0xffff),
|
2019-10-02 23:29:09 +00:00
|
|
|
.applicationInfo.applicationName = "LÖVR",
|
|
|
|
.applicationInfo.applicationVersion = 0,
|
2019-08-04 02:06:46 +00:00
|
|
|
.applicationInfo.apiVersion = XR_CURRENT_API_VERSION,
|
2019-04-11 20:47:25 +00:00
|
|
|
.enabledExtensionCount = 1,
|
2020-08-24 08:04:06 +00:00
|
|
|
.enabledExtensionNames = (const char*[1]) { GRAPHICS_EXTENSION }
|
2019-04-11 20:47:25 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
XR_INIT(xrCreateInstance(&info, &state.instance));
|
|
|
|
}
|
|
|
|
|
|
|
|
{ // System
|
2019-08-04 02:06:46 +00:00
|
|
|
XrSystemGetInfo info = {
|
|
|
|
.type = XR_TYPE_SYSTEM_GET_INFO,
|
|
|
|
.formFactor = XR_FORM_FACTOR_HEAD_MOUNTED_DISPLAY
|
|
|
|
};
|
|
|
|
|
2019-04-11 20:47:25 +00:00
|
|
|
XR_INIT(xrGetSystem(state.instance, &info, &state.system));
|
|
|
|
|
2019-10-02 23:29:09 +00:00
|
|
|
uint32_t viewConfigurationCount;
|
|
|
|
XrViewConfigurationType viewConfigurations[2];
|
|
|
|
XR_INIT(xrEnumerateViewConfigurations(state.instance, state.system, 2, &viewConfigurationCount, viewConfigurations));
|
|
|
|
|
2019-04-11 20:47:25 +00:00
|
|
|
uint32_t viewCount;
|
2019-10-02 23:29:09 +00:00
|
|
|
XrViewConfigurationView views[2] = { [0].type = XR_TYPE_VIEW_CONFIGURATION_VIEW, [1].type = XR_TYPE_VIEW_CONFIGURATION_VIEW };
|
|
|
|
XR_INIT(xrEnumerateViewConfigurationViews(state.instance, state.system, XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO, 0, &viewCount, NULL));
|
2019-04-11 20:47:25 +00:00
|
|
|
XR_INIT(xrEnumerateViewConfigurationViews(state.instance, state.system, XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO, 2, &viewCount, views));
|
|
|
|
|
|
|
|
if ( // Only 2 views are supported, and since they're rendered together they must be identical
|
|
|
|
viewCount != 2 ||
|
|
|
|
views[0].recommendedSwapchainSampleCount != views[1].recommendedSwapchainSampleCount ||
|
|
|
|
views[0].recommendedImageRectWidth != views[1].recommendedImageRectWidth ||
|
|
|
|
views[0].recommendedImageRectHeight != views[1].recommendedImageRectHeight
|
|
|
|
) {
|
2019-08-04 02:06:46 +00:00
|
|
|
openxr_destroy();
|
2019-07-11 01:45:36 +00:00
|
|
|
return false;
|
2019-04-11 20:47:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
state.msaa = views[0].recommendedSwapchainSampleCount;
|
|
|
|
state.width = views[0].recommendedImageRectWidth;
|
|
|
|
state.height = views[0].recommendedImageRectHeight;
|
|
|
|
}
|
|
|
|
|
2020-08-22 21:40:52 +00:00
|
|
|
{ // Actions
|
2019-04-11 20:47:25 +00:00
|
|
|
XrActionSetCreateInfo info = {
|
|
|
|
.type = XR_TYPE_ACTION_SET_CREATE_INFO,
|
|
|
|
.actionSetName = "default",
|
|
|
|
.localizedActionSetName = "Default",
|
|
|
|
.priority = 0
|
|
|
|
};
|
|
|
|
|
2019-08-04 02:06:46 +00:00
|
|
|
XR_INIT(xrCreateActionSet(state.instance, &info, &state.actionSet));
|
2019-04-11 20:47:25 +00:00
|
|
|
XR_INIT(xrStringToPath(state.instance, "/user/hand/left", &state.actionFilters[0]));
|
|
|
|
XR_INIT(xrStringToPath(state.instance, "/user/hand/right", &state.actionFilters[1]));
|
|
|
|
|
2020-08-22 21:40:52 +00:00
|
|
|
for (uint32_t i = 0; i < MAX_ACTIONS; i++) {
|
|
|
|
actionCreateInfo[i].subactionPaths = state.actionFilters;
|
|
|
|
XR_INIT(xrCreateAction(state.actionSet, &actionCreateInfo[i], &state.actions[i]));
|
2019-04-11 20:47:25 +00:00
|
|
|
}
|
|
|
|
|
2020-08-22 21:40:52 +00:00
|
|
|
XrActionSuggestedBinding suggestedBindings[2 * MAX_ACTIONS];
|
|
|
|
for (uint32_t profile = 0, count = 0; profile < MAX_PROFILES; profile++, count = 0) {
|
|
|
|
for (uint32_t action = 0; action < MAX_ACTIONS; action++) {
|
|
|
|
for (uint32_t hand = 0; hand < 2; hand++) {
|
|
|
|
if (bindings[profile][action][hand]) {
|
|
|
|
suggestedBindings[count].action = state.actions[action];
|
|
|
|
XR_INIT(xrStringToPath(state.instance, bindings[profile][action][hand], &suggestedBindings[count].binding));
|
|
|
|
count++;
|
2019-04-11 20:47:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
XrPath profilePath;
|
2020-08-22 21:40:52 +00:00
|
|
|
XR_INIT(xrStringToPath(state.instance, interactionProfiles[profile], &profilePath));
|
2019-08-04 02:06:46 +00:00
|
|
|
XR_INIT(xrSuggestInteractionProfileBindings(state.instance, &(XrInteractionProfileSuggestedBinding) {
|
2019-04-11 20:47:25 +00:00
|
|
|
.type = XR_TYPE_INTERACTION_PROFILE_SUGGESTED_BINDING,
|
|
|
|
.interactionProfile = profilePath,
|
2020-08-22 21:40:52 +00:00
|
|
|
.countSuggestedBindings = count,
|
|
|
|
.suggestedBindings = suggestedBindings
|
2019-04-11 20:47:25 +00:00
|
|
|
}));
|
|
|
|
}
|
2019-08-04 02:06:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
{ // Session
|
|
|
|
XrSessionCreateInfo info = {
|
|
|
|
.type = XR_TYPE_SESSION_CREATE_INFO,
|
2020-08-19 03:09:06 +00:00
|
|
|
#if defined(_WIN32)
|
2019-10-02 23:29:09 +00:00
|
|
|
.next = &(XrGraphicsBindingOpenGLWin32KHR) {
|
|
|
|
.type = XR_TYPE_GRAPHICS_BINDING_OPENGL_WIN32_KHR,
|
|
|
|
.hDC = lovrPlatformGetWindow(),
|
|
|
|
.hGLRC = lovrPlatformGetContext()
|
|
|
|
},
|
2020-08-19 03:09:06 +00:00
|
|
|
#elif defined(__ANDROID__)
|
|
|
|
.next = &(XrGraphicsBindingOpenGLESAndroidKHR) {
|
|
|
|
.type = XR_TYPE_GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR,
|
|
|
|
.display = lovrPlatformGetEGLDisplay(),
|
|
|
|
.config = lovrPlatformGetEGLConfig(),
|
|
|
|
.context = lovrPlatformGetEGLContext()
|
|
|
|
},
|
2019-10-02 23:29:09 +00:00
|
|
|
#else
|
2020-08-22 21:40:52 +00:00
|
|
|
#error "OpenXR is not supported on this platform"
|
2019-10-02 23:29:09 +00:00
|
|
|
#endif
|
2019-08-04 02:06:46 +00:00
|
|
|
.systemId = state.system
|
|
|
|
};
|
|
|
|
|
|
|
|
XrSessionActionSetsAttachInfo attachInfo = {
|
|
|
|
.type = XR_TYPE_SESSION_ACTION_SETS_ATTACH_INFO,
|
|
|
|
.countActionSets = 1,
|
|
|
|
.actionSets = &state.actionSet
|
|
|
|
};
|
|
|
|
|
|
|
|
XR_INIT(xrCreateSession(state.instance, &info, &state.session));
|
|
|
|
XR_INIT(xrAttachSessionActionSets(state.session, &attachInfo));
|
|
|
|
}
|
|
|
|
|
|
|
|
{ // Spaaaaace
|
|
|
|
|
|
|
|
// Main reference space (can be stage or local)
|
|
|
|
XrReferenceSpaceCreateInfo info = {
|
|
|
|
.type = XR_TYPE_REFERENCE_SPACE_CREATE_INFO,
|
|
|
|
.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_STAGE
|
|
|
|
};
|
|
|
|
|
|
|
|
if (XR_FAILED(xrCreateReferenceSpace(state.session, &info, &state.referenceSpace))) {
|
|
|
|
info.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_LOCAL;
|
|
|
|
info.poseInReferenceSpace.position.y = -offset;
|
|
|
|
XR_INIT(xrCreateReferenceSpace(state.session, &info, &state.referenceSpace));
|
|
|
|
}
|
|
|
|
|
|
|
|
state.referenceSpaceType = info.referenceSpaceType;
|
2019-04-11 20:47:25 +00:00
|
|
|
|
2019-08-04 02:06:46 +00:00
|
|
|
// Head space (for head pose)
|
2019-04-11 20:47:25 +00:00
|
|
|
XrReferenceSpaceCreateInfo headSpaceInfo = {
|
|
|
|
.type = XR_TYPE_REFERENCE_SPACE_CREATE_INFO,
|
|
|
|
.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_VIEW
|
|
|
|
};
|
|
|
|
|
2019-08-04 02:06:46 +00:00
|
|
|
XR_INIT(xrCreateReferenceSpace(state.session, &headSpaceInfo, &state.spaces[DEVICE_HEAD]));
|
|
|
|
|
|
|
|
// Left hand space
|
2019-04-11 20:47:25 +00:00
|
|
|
XrActionSpaceCreateInfo leftHandSpaceInfo = {
|
|
|
|
.type = XR_TYPE_ACTION_SPACE_CREATE_INFO,
|
2019-08-04 02:06:46 +00:00
|
|
|
.action = state.actions[ACTION_HAND_POSE],
|
2019-04-11 20:47:25 +00:00
|
|
|
.subactionPath = state.actionFilters[0]
|
|
|
|
};
|
|
|
|
|
2019-08-04 02:06:46 +00:00
|
|
|
XR_INIT(xrCreateActionSpace(state.session, &leftHandSpaceInfo, &state.spaces[DEVICE_HAND_LEFT]));
|
|
|
|
|
|
|
|
// Right hand space
|
2019-04-11 20:47:25 +00:00
|
|
|
XrActionSpaceCreateInfo rightHandSpaceInfo = {
|
|
|
|
.type = XR_TYPE_ACTION_SPACE_CREATE_INFO,
|
2019-08-04 02:06:46 +00:00
|
|
|
.action = state.actions[ACTION_HAND_POSE],
|
2019-04-11 20:47:25 +00:00
|
|
|
.subactionPath = state.actionFilters[1]
|
|
|
|
};
|
|
|
|
|
2019-08-04 02:06:46 +00:00
|
|
|
XR_INIT(xrCreateActionSpace(state.session, &rightHandSpaceInfo, &state.spaces[DEVICE_HAND_RIGHT]));
|
|
|
|
}
|
|
|
|
|
|
|
|
{ // Swapchain
|
|
|
|
XrSwapchainCreateInfo info = {
|
|
|
|
.type = XR_TYPE_SWAPCHAIN_CREATE_INFO,
|
|
|
|
.usageFlags = XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT | XR_SWAPCHAIN_USAGE_SAMPLED_BIT,
|
|
|
|
.format = GL_SRGB8_ALPHA8,
|
|
|
|
.sampleCount = state.msaa,
|
|
|
|
.width = state.width * 2,
|
|
|
|
.height = state.height,
|
|
|
|
.faceCount = 1,
|
|
|
|
.arraySize = 1,
|
|
|
|
.mipCount = 1
|
|
|
|
};
|
|
|
|
|
2020-08-22 21:40:52 +00:00
|
|
|
#if defined(XR_USE_GRAPHICS_API_OPENGL)
|
2020-08-23 22:10:57 +00:00
|
|
|
XrSwapchainImageOpenGLKHR images[MAX_IMAGES];
|
2020-08-22 21:40:52 +00:00
|
|
|
#elif defined(XR_USE_GRAPHICS_API_OPENGLES)
|
|
|
|
XrSwapchainImageOpenGLESKHR images[MAX_IMAGES];
|
|
|
|
#endif
|
2019-08-04 02:06:46 +00:00
|
|
|
XR_INIT(xrCreateSwapchain(state.session, &info, &state.swapchain));
|
|
|
|
XR_INIT(xrEnumerateSwapchainImages(state.swapchain, MAX_IMAGES, &state.imageCount, (XrSwapchainImageBaseHeader*) images));
|
|
|
|
|
|
|
|
for (uint32_t i = 0; i < state.imageCount; i++) {
|
2020-02-23 06:03:20 +00:00
|
|
|
state.textures[i] = lovrTextureCreateFromHandle(images[i].image, TEXTURE_2D, 1);
|
2019-08-04 02:06:46 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Pre-init composition layer
|
|
|
|
state.layers[0] = (XrCompositionLayerProjection) {
|
|
|
|
.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION,
|
|
|
|
.space = state.referenceSpace,
|
|
|
|
.viewCount = 2,
|
|
|
|
.views = state.layerViews
|
|
|
|
};
|
|
|
|
|
|
|
|
// Pre-init composition layer views
|
|
|
|
state.layerViews[0] = (XrCompositionLayerProjectionView) {
|
|
|
|
.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW,
|
|
|
|
.subImage = { state.swapchain, { { 0, 0 }, { state.width, state.height } }, 0 }
|
|
|
|
};
|
|
|
|
|
|
|
|
// Copy the left view to the right view and offset for side-by-side submission
|
|
|
|
state.layerViews[1] = state.layerViews[0];
|
|
|
|
state.layerViews[1].subImage.imageRect.offset.x += state.width;
|
2019-04-11 20:47:25 +00:00
|
|
|
}
|
|
|
|
|
2020-01-23 19:25:38 +00:00
|
|
|
state.clipNear = .1f;
|
|
|
|
state.clipNear = 100.f;
|
|
|
|
|
2019-04-11 20:47:25 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
// Tears down all graphics resources and OpenXR handles, then zeroes the
// module state so init can run again.  Every handle is checked before
// destruction, so this is safe to call on a partially-initialized state
// (XR_INIT relies on that as its failure path).
static void openxr_destroy() {
  lovrRelease(Canvas, state.canvas);
  for (uint32_t i = 0; i < state.imageCount; i++) {
    lovrRelease(Texture, state.textures[i]);
  }

  for (size_t i = 0; i < MAX_ACTIONS; i++) {
    if (state.actions[i]) {
      xrDestroyAction(state.actions[i]);
    }
  }

  for (size_t i = 0; i < MAX_DEVICES; i++) {
    if (state.spaces[i]) {
      xrDestroySpace(state.spaces[i]);
    }
  }

  if (state.actionSet) xrDestroyActionSet(state.actionSet);
  if (state.swapchain) xrDestroySwapchain(state.swapchain);
  if (state.referenceSpace) xrDestroySpace(state.referenceSpace);
  // NOTE(review): the session is ended rather than destroyed here; the
  // xrDestroyInstance call below destroys all child handles including the
  // session — confirm this ordering is intentional (xrDestroySession would
  // be the explicit alternative)
  if (state.session) xrEndSession(state.session);
  if (state.instance) xrDestroyInstance(state.instance);
  memset(&state, 0, sizeof(state));
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static bool openxr_getName(char* name, size_t length) {
|
2019-04-11 20:47:25 +00:00
|
|
|
XrSystemProperties properties;
|
|
|
|
XR(xrGetSystemProperties(state.instance, state.system, &properties));
|
|
|
|
strncpy(name, properties.systemName, length - 1);
|
|
|
|
name[length - 1] = '\0';
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static HeadsetOrigin openxr_getOriginType() {
|
2019-08-04 02:06:46 +00:00
|
|
|
return state.referenceSpaceType == XR_REFERENCE_SPACE_TYPE_STAGE ? ORIGIN_FLOOR : ORIGIN_HEAD;
|
2019-04-11 20:47:25 +00:00
|
|
|
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static void openxr_getDisplayDimensions(uint32_t* width, uint32_t* height) {
|
2019-04-11 20:47:25 +00:00
|
|
|
*width = state.width;
|
|
|
|
*height = state.height;
|
|
|
|
}
|
|
|
|
|
2019-06-28 07:13:30 +00:00
|
|
|
// No hidden-area mesh is exposed by this driver; report an empty mask.
static const float* openxr_getDisplayMask(uint32_t* count) {
  *count = 0;
  return NULL;
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static double openxr_getDisplayTime(void) {
|
2019-08-04 02:06:46 +00:00
|
|
|
return state.frameState.predictedDisplayTime / 1e9;
|
2019-04-30 22:31:38 +00:00
|
|
|
}
|
|
|
|
|
2020-01-28 05:02:37 +00:00
|
|
|
// Locates both eye views in the reference space at the predicted display
// time, filling `views` and storing how many views were written in `count`.
static void getViews(XrView views[2], uint32_t* count) {
  XrViewLocateInfo viewLocateInfo = {
    .type = XR_TYPE_VIEW_LOCATE_INFO,
    .viewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO,
    .displayTime = state.frameState.predictedDisplayTime,
    .space = state.referenceSpace
  };

  // Output structs must have their type field initialized per the spec
  views[0] = (XrView) { .type = XR_TYPE_VIEW };
  views[1] = (XrView) { .type = XR_TYPE_VIEW };
  XrViewState viewState = { .type = XR_TYPE_VIEW_STATE };

  // viewCapacityInput is a count of elements, not a byte size; the previous
  // `2 * sizeof(XrView)` overstated the capacity of the 2-element array
  XR(xrLocateViews(state.session, &viewLocateInfo, &viewState, 2, count, views));
}
|
|
|
|
|
|
|
|
static uint32_t openxr_getViewCount(void) {
|
|
|
|
uint32_t count;
|
|
|
|
XrView views[2];
|
|
|
|
getViews(views, &count);
|
|
|
|
return count;
|
|
|
|
}
|
|
|
|
|
|
|
|
static bool openxr_getViewPose(uint32_t view, float* position, float* orientation) {
|
|
|
|
uint32_t count;
|
|
|
|
XrView views[2];
|
|
|
|
getViews(views, &count);
|
|
|
|
if (view < count) {
|
2020-08-19 03:09:06 +00:00
|
|
|
memcpy(position, &views[view].pose.position.x, 3 * sizeof(float));
|
|
|
|
memcpy(orientation, &views[view].pose.orientation.x, 4 * sizeof(float));
|
2020-01-28 05:02:37 +00:00
|
|
|
return true;
|
|
|
|
} else {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
static bool openxr_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down) {
|
|
|
|
uint32_t count;
|
|
|
|
XrView views[2];
|
|
|
|
getViews(views, &count);
|
|
|
|
if (view < count) {
|
|
|
|
*left = views[view].fov.angleLeft;
|
|
|
|
*right = views[view].fov.angleRight;
|
|
|
|
*up = views[view].fov.angleUp;
|
|
|
|
*down = views[view].fov.angleDown;
|
|
|
|
return true;
|
|
|
|
} else {
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static void openxr_getClipDistance(float* clipNear, float* clipFar) {
|
2019-04-11 20:47:25 +00:00
|
|
|
*clipNear = state.clipNear;
|
|
|
|
*clipFar = state.clipFar;
|
|
|
|
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static void openxr_setClipDistance(float clipNear, float clipFar) {
|
2019-04-11 20:47:25 +00:00
|
|
|
state.clipNear = clipNear;
|
|
|
|
state.clipFar = clipFar;
|
|
|
|
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static void openxr_getBoundsDimensions(float* width, float* depth) {
|
2019-04-11 20:47:25 +00:00
|
|
|
XrExtent2Df bounds;
|
2019-08-04 02:06:46 +00:00
|
|
|
if (XR_SUCCEEDED(xrGetReferenceSpaceBoundsRect(state.session, state.referenceSpaceType, &bounds))) {
|
|
|
|
*width = bounds.width;
|
|
|
|
*depth = bounds.height;
|
|
|
|
} else {
|
|
|
|
*width = 0.f;
|
|
|
|
*depth = 0.f;
|
|
|
|
}
|
2019-04-11 20:47:25 +00:00
|
|
|
}
|
|
|
|
|
2019-05-21 03:35:07 +00:00
|
|
|
// Boundary polygon geometry is not exposed through this driver.
static const float* openxr_getBoundsGeometry(uint32_t* count) {
  *count = 0;
  return NULL;
}
|
|
|
|
|
2019-05-08 03:04:59 +00:00
|
|
|
// Locates `device` in the reference space at the predicted display time,
// writing its position and orientation.  Returns true only when the runtime
// flags the position or orientation as valid.
static bool openxr_getPose(Device device, vec3 position, quat orientation) {
  if (!state.spaces[device]) {
    return false;
  }

  // Previously declared uninitialized; the spec requires type/next to be set
  XrSpaceLocation location = { .type = XR_TYPE_SPACE_LOCATION };
  XR(xrLocateSpace(state.spaces[device], state.referenceSpace, state.frameState.predictedDisplayTime, &location));
  memcpy(orientation, &location.pose.orientation, 4 * sizeof(float));
  memcpy(position, &location.pose.position, 3 * sizeof(float));
  return location.locationFlags & (XR_SPACE_LOCATION_POSITION_VALID_BIT | XR_SPACE_LOCATION_ORIENTATION_VALID_BIT);
}
|
|
|
|
|
2019-08-04 02:06:46 +00:00
|
|
|
// Queries `device`'s linear and angular velocity by chaining an
// XrSpaceVelocity onto the location query.  Returns true only when the
// runtime flags linear or angular velocity as valid.
static bool openxr_getVelocity(Device device, vec3 linearVelocity, vec3 angularVelocity) {
  if (!state.spaces[device]) {
    return false;
  }

  XrSpaceVelocity velocity = { .type = XR_TYPE_SPACE_VELOCITY };
  // Previously only .next was initialized, leaving type zeroed; the spec
  // requires XR_TYPE_SPACE_LOCATION
  XrSpaceLocation location = { .type = XR_TYPE_SPACE_LOCATION, .next = &velocity };
  XR(xrLocateSpace(state.spaces[device], state.referenceSpace, state.frameState.predictedDisplayTime, &location));
  memcpy(linearVelocity, &velocity.linearVelocity, 3 * sizeof(float));
  memcpy(angularVelocity, &velocity.angularVelocity, 3 * sizeof(float));
  return velocity.velocityFlags & (XR_SPACE_VELOCITY_LINEAR_VALID_BIT | XR_SPACE_VELOCITY_ANGULAR_VALID_BIT);
}
|
|
|
|
|
2019-05-08 03:04:59 +00:00
|
|
|
// Maps a LÖVR device to its hand subaction path; non-hand devices have no
// input actions and map to XR_NULL_PATH.
static XrPath getActionFilter(Device device) {
  if (device == DEVICE_HAND_LEFT) {
    return state.actionFilters[0];
  } else if (device == DEVICE_HAND_RIGHT) {
    return state.actionFilters[1];
  }
  return XR_NULL_PATH;
}
|
|
|
|
|
2020-08-19 03:09:06 +00:00
|
|
|
// Shared backend for isDown/isTouched.  `touch` selects the *_TOUCH action
// (declared in openxr_actions.h immediately after the matching *_DOWN
// action, which is what makes the `+ touch` offset work).  Returns whether
// the action is active; `value`/`changed` receive the boolean state.
static bool getButtonState(Device device, DeviceButton button, bool* value, bool* changed, bool touch) {
  XrActionStateGetInfo info = {
    .type = XR_TYPE_ACTION_STATE_GET_INFO,
    .subactionPath = getActionFilter(device)
  };

  if (info.subactionPath == XR_NULL_PATH) {
    return false;
  }

  switch (button) {
    case BUTTON_TRIGGER: info.action = state.actions[ACTION_TRIGGER_DOWN + touch]; break;
    case BUTTON_TOUCHPAD: info.action = state.actions[ACTION_TRACKPAD_DOWN + touch]; break;
    case BUTTON_MENU: info.action = state.actions[ACTION_MENU_DOWN + touch]; break;
    case BUTTON_GRIP: info.action = state.actions[ACTION_GRIP_DOWN + touch]; break;
    default: return false;
  }

  // Previously declared uninitialized; the spec requires type/next to be set
  XrActionStateBoolean actionState = { .type = XR_TYPE_ACTION_STATE_BOOLEAN };
  XR(xrGetActionStateBoolean(state.session, &info, &actionState));
  *value = actionState.currentState;
  *changed = actionState.changedSinceLastSync;
  return actionState.isActive;
}
|
|
|
|
|
2020-01-25 06:46:42 +00:00
|
|
|
// Press state: touch = false selects the *_DOWN actions in getButtonState.
static bool openxr_isDown(Device device, DeviceButton button, bool* down, bool* changed) {
  bool touch = false;
  return getButtonState(device, button, down, changed, touch);
}
|
|
|
|
|
2019-05-08 03:04:59 +00:00
|
|
|
// Touch state: touch = true selects the *_TOUCH actions.  The change flag
// isn't part of the touch API, so it's discarded.
static bool openxr_isTouched(Device device, DeviceButton button, bool* touched) {
  bool discardedChange;
  return getButtonState(device, button, touched, &discardedChange, true);
}
|
|
|
|
|
2020-08-23 22:11:20 +00:00
|
|
|
// Reads one float action for the hand selected by `filter`, storing the
// current value.  Returns whether the action is currently active.
static bool getAxis(uint32_t action, XrPath filter, float* value) {
  XrActionStateGetInfo info = {
    .type = XR_TYPE_ACTION_STATE_GET_INFO,
    .action = state.actions[action],
    .subactionPath = filter
  };

  // Previously declared uninitialized; the spec requires type/next to be set
  XrActionStateFloat actionState = { .type = XR_TYPE_ACTION_STATE_FLOAT };
  XR(xrGetActionStateFloat(state.session, &info, &actionState));
  *value = actionState.currentState;
  return actionState.isActive;
}
|
2019-04-11 20:47:25 +00:00
|
|
|
|
2020-08-23 22:11:20 +00:00
|
|
|
// Reads a device axis into `value` (1 float for trigger/grip, 2 for the
// stick/touchpad).  Returns false for non-hand devices or unknown axes.
static bool openxr_getAxis(Device device, DeviceAxis axis, float* value) {
  XrPath filter = getActionFilter(device);

  if (filter == XR_NULL_PATH) {
    return false;
  }

  if (axis == AXIS_TRIGGER) {
    return getAxis(ACTION_TRIGGER_AXIS, filter, &value[0]);
  } else if (axis == AXIS_THUMBSTICK) {
    return getAxis(ACTION_THUMBSTICK_X, filter, &value[0]) && getAxis(ACTION_THUMBSTICK_Y, filter, &value[1]);
  } else if (axis == AXIS_TOUCHPAD) {
    return getAxis(ACTION_TRACKPAD_X, filter, &value[0]) && getAxis(ACTION_TRACKPAD_Y, filter, &value[1]);
  } else if (axis == AXIS_GRIP) {
    return getAxis(ACTION_GRIP_AXIS, filter, &value[0]);
  }

  return false;
}
|
|
|
|
|
2019-05-08 03:04:59 +00:00
|
|
|
// Triggers a haptic pulse on a hand device.  `duration` is in seconds
// (converted to XrDuration nanoseconds, rounded), `frequency` in Hz, and
// `power` is the 0-1 amplitude.  Returns false for non-hand devices.
static bool openxr_vibrate(Device device, float power, float duration, float frequency) {
  XrPath filter = getActionFilter(device);

  if (filter == XR_NULL_PATH) {
    return false;
  }

  XrHapticActionInfo info = {
    .type = XR_TYPE_HAPTIC_ACTION_INFO,
    .action = state.actions[ACTION_VIBRATE],
    .subactionPath = filter
  };

  XrHapticVibration vibration = {
    .type = XR_TYPE_HAPTIC_VIBRATION,
    .duration = (XrDuration) (duration * 1e9f + .5f), // seconds -> ns, rounded to nearest
    .frequency = frequency,
    .amplitude = power
  };

  XR(xrApplyHapticFeedback(state.session, &info, (XrHapticBaseHeader*) &vibration));
  return true;
}
|
|
|
|
|
2020-08-02 23:25:51 +00:00
|
|
|
// Controller models aren't available through core OpenXR; nothing to load.
static struct ModelData* openxr_newModelData(Device device, bool animated) {
  return NULL;
}
|
|
|
|
|
2020-08-02 23:25:51 +00:00
|
|
|
// Model animation (e.g. articulated hands) is not supported by this driver.
static bool openxr_animate(Device device, struct Model* model) {
  return false;
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
// Renders one frame: begins the OpenXR frame, and if the runtime wants
// content, acquires a swapchain image, renders both eyes side-by-side into
// it via `callback`, then submits the projection layer.  Skipped entirely
// until the session has reached a synchronized state.
static void openxr_renderTo(void (*callback)(void*), void* userdata) {
  if (!SESSION_SYNCHRONIZED(state.sessionState)) { return; }

  // endInfo defaults to zero layers; layers are only attached if we render
  XrFrameBeginInfo beginInfo = { XR_TYPE_FRAME_BEGIN_INFO, NULL };
  XrFrameEndInfo endInfo = { XR_TYPE_FRAME_END_INFO, NULL, state.frameState.predictedDisplayTime, XR_ENVIRONMENT_BLEND_MODE_OPAQUE, 0, NULL };

  XR(xrBeginFrame(state.session, &beginInfo));

  if (state.frameState.shouldRender) {
    uint32_t imageIndex;
    XR(xrAcquireSwapchainImage(state.swapchain, NULL, &imageIndex));
    // Wait up to 1 second for the compositor to release the image
    XrSwapchainImageWaitInfo waitInfo = { XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO, .timeout = 1e9 };

    if (XR(xrWaitSwapchainImage(state.swapchain, &waitInfo)) != XR_TIMEOUT_EXPIRED) {
      // Lazily create the stereo canvas on first rendered frame; vsync is
      // disabled so the headset's compositor paces us instead of the monitor
      if (!state.canvas) {
        CanvasFlags flags = { .depth = { true, false, FORMAT_D24S8 }, .stereo = true, .mipmaps = true, .msaa = state.msaa };
        state.canvas = lovrCanvasCreate(state.width, state.height, flags);
        lovrPlatformSetSwapInterval(0);
      }

      Camera camera = { .canvas = state.canvas, .stereo = true };

      uint32_t count;
      XrView views[2];
      getViews(views, &count);

      // Build per-eye projection (from the asymmetric FOV) and view
      // matrices (inverse of the eye pose)
      for (int eye = 0; eye < 2; eye++) {
        XrView* view = &views[eye];
        XrVector3f* v = &view->pose.position;
        XrQuaternionf* q = &view->pose.orientation;
        XrFovf* fov = &view->fov;
        float left = tanf(fov->angleLeft);
        float right = tanf(fov->angleRight);
        float up = tanf(fov->angleUp);
        float down = tanf(fov->angleDown);
        mat4_fov(camera.projection[eye], left, right, up, down, state.clipNear, state.clipFar);
        mat4_identity(camera.viewMatrix[eye]);
        mat4_translate(camera.viewMatrix[eye], v->x, v->y, v->z);
        mat4_rotateQuat(camera.viewMatrix[eye], (float[4]) { q->x, q->y, q->z, q->w });
        mat4_invert(camera.viewMatrix[eye]);
      }

      // Point the canvas at this frame's swapchain texture and run the
      // user's render callback
      lovrCanvasSetAttachments(state.canvas, &(Attachment) { state.textures[imageIndex], 0, 0 }, 1);
      lovrGraphicsSetCamera(&camera, true);
      callback(userdata);
      lovrGraphicsSetCamera(NULL, false);

      // Stamp the located poses/FOVs into the composition layer and attach
      // it to the frame submission
      state.layerViews[0].pose = views[0].pose;
      state.layerViews[0].fov = views[0].fov;
      state.layerViews[1].pose = views[1].pose;
      state.layerViews[1].fov = views[1].fov;
      endInfo.layerCount = 1;
      endInfo.layers = (const XrCompositionLayerBaseHeader**) &state.layers;
    }

    XR(xrReleaseSwapchainImage(state.swapchain, NULL));
  }

  XR(xrEndFrame(state.session, &endInfo));
}
|
|
|
|
|
2019-04-30 23:59:15 +00:00
|
|
|
static void openxr_update(float dt) {
|
2019-08-04 02:06:46 +00:00
|
|
|
if (SESSION_SYNCHRONIZED(state.sessionState)) {
|
|
|
|
XR(xrWaitFrame(state.session, NULL, &state.frameState));
|
|
|
|
|
|
|
|
XrActionsSyncInfo syncInfo = {
|
|
|
|
.type = XR_TYPE_ACTIONS_SYNC_INFO,
|
|
|
|
.countActiveActionSets = 1,
|
|
|
|
.activeActionSets = (XrActiveActionSet[]) {
|
|
|
|
{ state.actionSet, state.actionFilters[0] },
|
|
|
|
{ state.actionSet, state.actionFilters[1] }
|
|
|
|
}
|
|
|
|
};
|
2019-04-11 20:47:25 +00:00
|
|
|
|
2019-08-04 02:06:46 +00:00
|
|
|
XR(xrSyncActions(state.session, &syncInfo));
|
2019-04-11 20:47:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Not using designated initializers here to avoid an implicit 4k zero
|
|
|
|
XrEventDataBuffer e;
|
|
|
|
e.type = XR_TYPE_EVENT_DATA_BUFFER;
|
|
|
|
e.next = NULL;
|
|
|
|
|
|
|
|
while (XR_SUCCEEDED(xrPollEvent(state.instance, &e))) {
|
|
|
|
switch (e.type) {
|
|
|
|
case XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED: {
|
|
|
|
XrEventDataSessionStateChanged* event = (XrEventDataSessionStateChanged*) &e;
|
|
|
|
|
|
|
|
switch (event->state) {
|
2019-05-26 03:15:42 +00:00
|
|
|
case XR_SESSION_STATE_READY:
|
2019-04-11 20:47:25 +00:00
|
|
|
XR(xrBeginSession(state.session, &(XrSessionBeginInfo) {
|
|
|
|
.type = XR_TYPE_SESSION_BEGIN_INFO,
|
|
|
|
.primaryViewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO
|
|
|
|
}));
|
|
|
|
break;
|
|
|
|
|
|
|
|
case XR_SESSION_STATE_STOPPING:
|
|
|
|
XR(xrEndSession(state.session));
|
|
|
|
break;
|
|
|
|
|
|
|
|
case XR_SESSION_STATE_EXITING:
|
|
|
|
case XR_SESSION_STATE_LOSS_PENDING:
|
2020-08-19 03:09:06 +00:00
|
|
|
lovrEventPush((Event) { .type = EVENT_QUIT, .data.quit.exitCode = 0 });
|
2019-04-11 20:47:25 +00:00
|
|
|
break;
|
|
|
|
|
|
|
|
default: break;
|
|
|
|
}
|
2020-08-24 08:10:12 +00:00
|
|
|
|
|
|
|
bool wasFocused = state.sessionState == XR_SESSION_STATE_FOCUSED;
|
|
|
|
bool isFocused = event->state == XR_SESSION_STATE_FOCUSED;
|
|
|
|
if (wasFocused != isFocused) {
|
|
|
|
lovrEventPush((Event) { .type = EVENT_FOCUS, .data.boolean = isFocused });
|
|
|
|
}
|
|
|
|
|
|
|
|
state.sessionState = event->state;
|
2019-04-11 20:47:25 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
default: break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// The HeadsetInterface vtable exported to LÖVR's headset module; the core
// selects this driver by its DRIVER_OPENXR tag and dispatches through
// these function pointers.
HeadsetInterface lovrHeadsetOpenXRDriver = {
  .driverType = DRIVER_OPENXR,
  .init = openxr_init,
  .destroy = openxr_destroy,
  .getName = openxr_getName,
  .getOriginType = openxr_getOriginType,
  .getDisplayDimensions = openxr_getDisplayDimensions,
  .getDisplayMask = openxr_getDisplayMask,
  .getDisplayTime = openxr_getDisplayTime,
  .getViewCount = openxr_getViewCount,
  .getViewPose = openxr_getViewPose,
  .getViewAngles = openxr_getViewAngles,
  .getClipDistance = openxr_getClipDistance,
  .setClipDistance = openxr_setClipDistance,
  .getBoundsDimensions = openxr_getBoundsDimensions,
  .getBoundsGeometry = openxr_getBoundsGeometry,
  .getPose = openxr_getPose,
  .getVelocity = openxr_getVelocity,
  .isDown = openxr_isDown,
  .isTouched = openxr_isTouched,
  .getAxis = openxr_getAxis,
  .vibrate = openxr_vibrate,
  .newModelData = openxr_newModelData,
  .animate = openxr_animate,
  .renderTo = openxr_renderTo,
  .update = openxr_update
};
|