Headset support;

bjorn 2022-06-05 20:38:14 -07:00
parent e36cc6482d
commit d5325b87b4
8 changed files with 236 additions and 195 deletions

View File

@@ -135,16 +135,33 @@ function lovr.run()
     if lovr.headset then dt = lovr.headset.update() end
     if lovr.update then lovr.update(dt) end
     if lovr.graphics then
-      if lovr.system.isWindowOpen() then lovr.mirror() end
+      local headset = lovr.headset and lovr.headset.getTexture()
+      if headset then
+        local pass = lovr.graphics.getPass('render', headset)
+        for i = 1, lovr.headset.getViewCount() do
+          pass:setViewPose(i, lovr.headset.getViewPose(i))
+          pass:setProjection(i, lovr.headset.getViewAngles(i))
+        end
+        if lovr.draw then lovr.draw(pass) end
+        lovr.graphics.submit(pass)
+      end
+      if lovr.system.isWindowOpen() then
+        lovr.mirror(pass)
+      end
     end
+    if lovr.headset then lovr.headset.submit() end
     if lovr.math then lovr.math.drain() end
   end
 end

-function lovr.mirror()
-  local pass = lovr.graphics.getPass('render', 'window')
-  lovr.draw(pass)
-  lovr.graphics.submit(pass)
+function lovr.mirror(pass)
+  if lovr.headset then
+    --
+  else
+    local pass = lovr.graphics.getPass('render', 'window')
+    lovr.draw(pass)
+    lovr.graphics.submit(pass)
+  end
 end

 local function formatTraceback(s)
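The new frame flow in lovr.run maps onto the HeadsetInterface entry points introduced later in this commit. A hypothetical sketch of the per-frame call order on the native side (not code from the commit; `interface` stands for the active headset driver):

    // Hypothetical per-frame order implied by the new lovr.run loop.
    double dt = interface->update();                   // pump the runtime, advance the headset clock
    struct Texture* texture = interface->getTexture(); // texture for this frame, or NULL if nothing should be rendered
    if (texture) {
      // record a render pass targeting texture (one view per array layer) and submit it to the GPU
    }
    interface->submit();                               // hand the finished frame back to the compositor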

View File

@ -1,6 +1,7 @@
#include "api.h" #include "api.h"
#include "headset/headset.h" #include "headset/headset.h"
#include "data/modelData.h" #include "data/modelData.h"
#include "graphics/graphics.h"
#include "core/maf.h" #include "core/maf.h"
#include <lua.h> #include <lua.h>
#include <lauxlib.h> #include <lauxlib.h>
@ -66,13 +67,6 @@ StringEntry lovrDeviceAxis[] = {
{ 0 } { 0 }
}; };
static void renderHelper(void* userdata) {
lua_State* L = userdata;
if (lua_isfunction(L, -1)) {
lua_call(L, 0, 0);
}
}
static Device luax_optdevice(lua_State* L, int index) { static Device luax_optdevice(lua_State* L, int index) {
const char* str = luaL_optstring(L, 1, "head"); const char* str = luaL_optstring(L, 1, "head");
if (!strcmp(str, "left")) { if (!strcmp(str, "left")) {
@ -468,9 +462,14 @@ static int l_lovrHeadsetAnimate(lua_State* L) {
return 1; return 1;
} }
static int l_lovrHeadsetRenderTo(lua_State* L) { static int l_lovrHeadsetGetTexture(lua_State* L) {
lua_settop(L, 1); Texture* texture = lovrHeadsetInterface->getTexture();
lovrHeadsetInterface->renderTo(renderHelper, L); luax_pushtype(L, Texture, texture);
return 1;
}
static int l_lovrHeadsetSubmit(lua_State* L) {
lovrHeadsetInterface->submit();
return 0; return 0;
} }
@ -500,10 +499,6 @@ static int l_lovrHeadsetGetDeltaTime(lua_State* L) {
return 1; return 1;
} }
static int l_lovrHeadsetGetMirrorTexture(lua_State* L) {
return 0; // TODO
}
static int l_lovrHeadsetGetHands(lua_State* L) { static int l_lovrHeadsetGetHands(lua_State* L) {
if (lua_istable(L, 1)) { if (lua_istable(L, 1)) {
lua_settop(L, 1); lua_settop(L, 1);
@ -554,16 +549,16 @@ static const luaL_Reg lovrHeadset[] = {
{ "wasReleased", l_lovrHeadsetWasReleased }, { "wasReleased", l_lovrHeadsetWasReleased },
{ "isTouched", l_lovrHeadsetIsTouched }, { "isTouched", l_lovrHeadsetIsTouched },
{ "getAxis", l_lovrHeadsetGetAxis }, { "getAxis", l_lovrHeadsetGetAxis },
{ "getSkeleton", l_lovrHeadsetGetSkeleton },
{ "vibrate", l_lovrHeadsetVibrate }, { "vibrate", l_lovrHeadsetVibrate },
{ "newModel", l_lovrHeadsetNewModel }, { "newModel", l_lovrHeadsetNewModel },
{ "animate", l_lovrHeadsetAnimate }, { "animate", l_lovrHeadsetAnimate },
{ "getSkeleton", l_lovrHeadsetGetSkeleton }, { "getTexture", l_lovrHeadsetGetTexture },
{ "renderTo", l_lovrHeadsetRenderTo }, { "submit", l_lovrHeadsetSubmit },
{ "isFocused", l_lovrHeadsetIsFocused }, { "isFocused", l_lovrHeadsetIsFocused },
{ "update", l_lovrHeadsetUpdate }, { "update", l_lovrHeadsetUpdate },
{ "getTime", l_lovrHeadsetGetTime }, { "getTime", l_lovrHeadsetGetTime },
{ "getDeltaTime", l_lovrHeadsetGetDeltaTime }, { "getDeltaTime", l_lovrHeadsetGetDeltaTime },
{ "getMirrorTexture", l_lovrHeadsetGetMirrorTexture },
{ "getHands", l_lovrHeadsetGetHands }, { "getHands", l_lovrHeadsetGetHands },
{ NULL, NULL } { NULL, NULL }
}; };

View File

@@ -633,6 +633,9 @@ typedef struct {
   gpu_limits* limits;
   struct {
     const char** (*getInstanceExtensions)(uint32_t* count);
+    uint32_t (*createInstance)(void* instanceCreateInfo, void* allocator, uintptr_t instance, void* getInstanceProcAddr);
+    void (*getPhysicalDevice)(void* instance, uintptr_t physicalDevice);
+    uint32_t (*createDevice)(void* instance, void* deviceCreateInfo, void* allocator, uintptr_t device, void* getInstanceProcAddr);
     uint32_t (*createSurface)(void* instance, void** surface);
     bool surface;
     int vsync;
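These hooks let an external runtime (here, the OpenXR driver via XR_KHR_vulkan_enable2) take over creation of the Vulkan instance and device and selection of the physical device. Handles travel as uintptr_t out-parameters and results as plain uint32_t so gpu.h stays free of Vulkan headers. As a rough illustration of the calling convention only, a hypothetical pass-through createInstance hook might look like:

    #include <vulkan/vulkan.h>

    // Hypothetical hook matching gpu_config.vk.createInstance: `instance` is really a
    // VkInstance* and the uint32_t return value carries the VkResult.
    static uint32_t passthrough_createInstance(void* instanceCreateInfo, void* allocator, uintptr_t instance, void* getInstanceProcAddr) {
      (void) getInstanceProcAddr; // a real runtime would load its Vulkan entry points through this
      return (uint32_t) vkCreateInstance(instanceCreateInfo, allocator, (VkInstance*) instance);
    }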

View File

@@ -1679,7 +1679,11 @@ bool gpu_init(gpu_config* config) {
     .ppEnabledExtensionNames = extensions
   };

-  VK(vkCreateInstance(&instanceInfo, NULL, &state.instance), "Instance creation failed") return gpu_destroy(), false;
+  if (state.config.vk.createInstance) {
+    VK(state.config.vk.createInstance(&instanceInfo, NULL, (uintptr_t) &state.instance, (void*) vkGetInstanceProcAddr), "Instance creation failed") return gpu_destroy(), false;
+  } else {
+    VK(vkCreateInstance(&instanceInfo, NULL, &state.instance), "Instance creation failed") return gpu_destroy(), false;
+  }

   GPU_FOREACH_INSTANCE(GPU_LOAD_INSTANCE);
@@ -1701,8 +1705,12 @@ bool gpu_init(gpu_config* config) {
   }

   { // Device
-    uint32_t deviceCount = 1;
-    VK(vkEnumeratePhysicalDevices(state.instance, &deviceCount, &state.adapter), "Physical device enumeration failed") return gpu_destroy(), false;
+    if (state.config.vk.getPhysicalDevice) {
+      state.config.vk.getPhysicalDevice(state.instance, (uintptr_t) &state.adapter);
+    } else {
+      uint32_t deviceCount = 1;
+      VK(vkEnumeratePhysicalDevices(state.instance, &deviceCount, &state.adapter), "Physical device enumeration failed") return gpu_destroy(), false;
+    }

     VkPhysicalDeviceMultiviewProperties multiviewProperties = { .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES };
     VkPhysicalDeviceSubgroupProperties subgroupProperties = { .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES, .pNext = &multiviewProperties };
@@ -2215,6 +2223,22 @@ void gpu_wait() {
   vkDeviceWaitIdle(state.device);
 }

+uintptr_t gpu_vk_get_instance() {
+  return (uintptr_t) state.instance;
+}
+
+uintptr_t gpu_vk_get_physical_device() {
+  return (uintptr_t) state.adapter;
+}
+
+uintptr_t gpu_vk_get_device() {
+  return (uintptr_t) state.device;
+}
+
+uintptr_t gpu_vk_get_queue(uint32_t* queueFamilyIndex, uint32_t* queueIndex) {
+  return *queueFamilyIndex = state.queueFamilyIndex, *queueIndex = 0, (uintptr_t) state.queue;
+}
+
 // Helpers

 static uint32_t hash32(uint32_t initial, void* data, uint32_t size) {
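The gpu_vk_get_* accessors expose the backend's raw Vulkan handles (as uintptr_t, so callers outside the Vulkan backend do not need vulkan.h) for the OpenXR session binding. A hedged usage sketch, mirroring what openxr_start does later in this commit:

    #include <stdint.h>
    #include <vulkan/vulkan.h>

    // Declarations as added by this commit; callers normally forward-declare them.
    uintptr_t gpu_vk_get_device(void);
    uintptr_t gpu_vk_get_queue(uint32_t* queueFamilyIndex, uint32_t* queueIndex);

    static void bindExample(void) {
      uint32_t queueFamilyIndex, queueIndex;
      VkQueue queue = (VkQueue) gpu_vk_get_queue(&queueFamilyIndex, &queueIndex); // also fills both indices
      VkDevice device = (VkDevice) gpu_vk_get_device();
      // device, queueFamilyIndex, and queueIndex then go into an XrGraphicsBindingVulkanKHR
      (void) queue; (void) device;
    }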

View File

@ -1,6 +1,7 @@
#include "graphics/graphics.h" #include "graphics/graphics.h"
#include "data/blob.h" #include "data/blob.h"
#include "data/image.h" #include "data/image.h"
#include "headset/headset.h"
#include "math/math.h" #include "math/math.h"
#include "core/gpu.h" #include "core/gpu.h"
#include "core/maf.h" #include "core/maf.h"
@ -284,6 +285,14 @@ bool lovrGraphicsInit(bool debug, bool vsync) {
} }
#endif #endif
#if defined LOVR_VK && !defined LOVR_DISABLE_HEADSET
if (lovrHeadsetInterface) {
config.vk.getPhysicalDevice = lovrHeadsetInterface->getVulkanPhysicalDevice;
config.vk.createInstance = lovrHeadsetInterface->createVulkanInstance;
config.vk.createDevice = lovrHeadsetInterface->createVulkanDevice;
}
#endif
if (!gpu_init(&config)) { if (!gpu_init(&config)) {
lovrThrow("Failed to initialize GPU"); lovrThrow("Failed to initialize GPU");
} }

View File

@@ -107,6 +107,9 @@ typedef enum {
 typedef struct HeadsetInterface {
   HeadsetDriver driverType;
+  void (*getVulkanPhysicalDevice)(void* instance, uintptr_t physicalDevice);
+  uint32_t (*createVulkanInstance)(void* instanceCreateInfo, void* allocator, uintptr_t instance, void* getInstanceProcAddr);
+  uint32_t (*createVulkanDevice)(void* instance, void* deviceCreateInfo, void* allocator, uintptr_t device, void* getInstanceProcAddr);
   bool (*init)(float supersample, float offset, uint32_t msaa, bool overlay);
   void (*start)(void);
   void (*destroy)(void);
@@ -132,8 +135,8 @@ typedef struct HeadsetInterface {
   bool (*vibrate)(Device device, float strength, float duration, float frequency);
   struct ModelData* (*newModelData)(Device device, bool animated);
   bool (*animate)(Device device, struct Model* model);
-  void (*renderTo)(void (*callback)(void*), void* userdata);
-  struct Texture* (*getMirrorTexture)(void);
+  struct Texture* (*getTexture)(void);
+  void (*submit)(void);
   bool (*isFocused)(void);
   double (*update)(void);
 } HeadsetInterface;
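The push-style renderTo(callback, userdata) entry point becomes a pull-style pair: getTexture returns the texture the current frame should be rendered into (or NULL when no frame should be rendered) and submit hands the finished frame back to the runtime; getMirrorTexture is dropped since mirroring now goes through the regular window pass in boot.lua. A minimal hypothetical stub, only to illustrate the new contract:

    struct Texture;

    // Hypothetical driver stubs for the new entry points.
    static struct Texture* stub_getTexture(void) {
      return NULL; // nothing to render this frame
    }

    static void stub_submit(void) {
      // nothing was acquired, so nothing to present
    }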

View File

@@ -84,19 +84,19 @@ static double desktop_getDeltaTime(void) {
 static void desktop_getDisplayDimensions(uint32_t* width, uint32_t* height) {
   int w, h;
   os_window_get_fbsize(&w, &h);
-  *width = (uint32_t) w / 2;
+  *width = (uint32_t) w;
   *height = (uint32_t) h;
 }

 static uint32_t desktop_getViewCount(void) {
-  return 2;
+  return 1;
 }

 static bool desktop_getViewPose(uint32_t view, float* position, float* orientation) {
   vec3_init(position, state.position);
   quat_fromMat4(orientation, state.headTransform);
   position[1] += state.offset;
-  return view < 2;
+  return view == 0;
 }

 static bool desktop_getViewAngles(uint32_t view, float* left, float* right, float* up, float* down) {
@@ -104,12 +104,12 @@ static bool desktop_getViewAngles(uint32_t view, float* left, float* right, floa
   uint32_t width, height;
   desktop_getDisplayDimensions(&width, &height);
   aspect = (float) width / height;
-  fov = 67.f * (float) M_PI / 180.f * .5f;
-  *left = fov * aspect;
-  *right = fov * aspect;
+  fov = .5f;
+  *left = atanf(tanf(fov) * aspect);
+  *right = atanf(tanf(fov) * aspect);
   *up = fov;
   *down = fov;
-  return view < 2;
+  return view == 0;
 }

 static void desktop_getClipDistance(float* clipNear, float* clipFar) {
@@ -188,7 +188,11 @@ static bool desktop_animate(Device device, struct Model* model) {
   return false;
 }

-static void desktop_renderTo(void (*callback)(void*), void* userdata) {
+static Texture* desktop_getTexture(void) {
+  return lovrGraphicsGetWindowTexture();
+}
+
+static void desktop_submit(void) {
   //
 }

@@ -214,7 +218,7 @@ static double desktop_update(void) {
   int width, height;
   double mx, my;
-  os_window_get_size(&width, &height);
+  os_window_get_fbsize(&width, &height);
   os_get_mouse_position(&mx, &my);

   double aspect = (width > 0 && height > 0) ? ((double) width / height) : 1.;
@@ -315,7 +319,8 @@ HeadsetInterface lovrHeadsetDesktopDriver = {
   .vibrate = desktop_vibrate,
   .newModelData = desktop_newModelData,
   .animate = desktop_animate,
-  .renderTo = desktop_renderTo,
+  .getTexture = desktop_getTexture,
+  .submit = desktop_submit,
   .isFocused = desktop_isFocused,
   .update = desktop_update
 };

View File

@ -1,9 +1,14 @@
#include "headset/headset.h" #include "headset/headset.h"
#include "data/blob.h" #include "data/blob.h"
#include "data/image.h"
#include "data/modelData.h"
#include "event/event.h" #include "event/event.h"
#include "graphics/graphics.h"
#include "core/maf.h"
#include "core/os.h" #include "core/os.h"
#include "util.h" #include "util.h"
#include <stdlib.h> #include <stdlib.h>
#include <string.h>
#include <math.h> #include <math.h>
#if defined(_WIN32) #if defined(_WIN32)
@ -13,30 +18,18 @@
#include <windows.h> #include <windows.h>
#elif defined(__ANDROID__) #elif defined(__ANDROID__)
#define XR_USE_PLATFORM_ANDROID #define XR_USE_PLATFORM_ANDROID
struct ANativeActivity* os_get_activity(void);
#include <android_native_app_glue.h> #include <android_native_app_glue.h>
#include <EGL/egl.h>
#include <jni.h> #include <jni.h>
#elif defined(LOVR_LINUX_X11)
#define XR_USE_PLATFORM_XLIB
typedef unsigned long XID;
typedef struct Display Display;
typedef XID GLXFBConfig;
typedef XID GLXDrawable;
typedef XID GLXContext;
#elif defined(LOVR_LINUX_EGL)
#define XR_USE_PLATFORM_EGL
#define EGL_NO_X11
#include <EGL/egl.h>
#endif #endif
#if defined(LOVR_GL) #ifdef LOVR_VK
#define XR_USE_GRAPHICS_API_OPENGL #define XR_USE_GRAPHICS_API_VULKAN
#define GRAPHICS_EXTENSION "XR_KHR_opengl_enable" uintptr_t gpu_vk_get_instance(void);
#elif defined(LOVR_GLES) uintptr_t gpu_vk_get_physical_device(void);
#define XR_USE_GRAPHICS_API_OPENGL_ES uintptr_t gpu_vk_get_device(void);
#define GRAPHICS_EXTENSION "XR_KHR_opengl_es_enable" uintptr_t gpu_vk_get_queue(uint32_t* queueFamilyIndex, uint32_t* queueIndex);
#else #include <vulkan/vulkan.h>
#error "Unsupported renderer"
#endif #endif
#define XR_NO_PROTOTYPES #define XR_NO_PROTOTYPES
@@ -46,34 +39,17 @@ typedef XID GLXContext;
 #define XR(f) handleResult(f, __FILE__, __LINE__)
 #define XR_INIT(f) if (XR_FAILED(f)) return openxr_destroy(), false;
 #define SESSION_ACTIVE(s) (s >= XR_SESSION_STATE_READY && s <= XR_SESSION_STATE_FOCUSED)
-#define GL_SRGB8_ALPHA8 0x8C43
 #define MAX_IMAGES 4

-#if defined(_WIN32)
-HANDLE os_get_win32_window(void);
-HGLRC os_get_win32_context(void);
-#elif defined(__ANDROID__)
-struct ANativeActivity* os_get_activity(void);
-EGLDisplay os_get_egl_display(void);
-EGLContext os_get_egl_context(void);
-EGLConfig os_get_egl_config(void);
-#elif defined(LOVR_LINUX_X11)
-Display* os_get_x11_display(void);
-GLXDrawable os_get_glx_drawable(void);
-GLXContext os_get_glx_context(void);
-#elif defined(LOVR_LINUX_EGL)
-PFNEGLGETPROCADDRESSPROC os_get_egl_proc_addr(void);
-EGLDisplay os_get_egl_display(void);
-EGLContext os_get_egl_context(void);
-EGLConfig os_get_egl_config(void);
-#endif
-
 #define XR_FOREACH(X)\
   X(xrDestroyInstance)\
   X(xrPollEvent)\
   X(xrResultToString)\
   X(xrGetSystem)\
   X(xrGetSystemProperties)\
+  X(xrCreateVulkanInstanceKHR)\
+  X(xrGetVulkanGraphicsDevice2KHR)\
+  X(xrCreateVulkanDeviceKHR)\
   X(xrCreateSession)\
   X(xrDestroySession)\
   X(xrCreateReferenceSpace)\
@@ -166,7 +142,7 @@ static struct {
   XrCompositionLayerProjection layers[1];
   XrCompositionLayerProjectionView layerViews[2];
   XrFrameState frameState;
-  Canvas* canvases[MAX_IMAGES];
+  Texture* textures[MAX_IMAGES];
   double lastDisplayTime;
   uint32_t imageIndex;
   uint32_t imageCount;
@@ -265,6 +241,45 @@ static XrHandTrackerEXT getHandTracker(Device device) {
   return *tracker;
 }

+static void openxr_getVulkanPhysicalDevice(void* instance, uintptr_t physicalDevice) {
+  XrVulkanGraphicsDeviceGetInfoKHR info = {
+    .type = XR_TYPE_VULKAN_GRAPHICS_DEVICE_GET_INFO_KHR,
+    .systemId = state.system,
+    .vulkanInstance = (VkInstance) instance
+  };
+
+  XR(xrGetVulkanGraphicsDevice2KHR(state.instance, &info, (VkPhysicalDevice*) physicalDevice));
+}
+
+static uint32_t openxr_createVulkanInstance(void* instanceCreateInfo, void* allocator, uintptr_t instance, void* getInstanceProcAddr) {
+  XrVulkanInstanceCreateInfoKHR info = {
+    .type = XR_TYPE_VULKAN_INSTANCE_CREATE_INFO_KHR,
+    .systemId = state.system,
+    .pfnGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr) getInstanceProcAddr,
+    .vulkanCreateInfo = instanceCreateInfo,
+    .vulkanAllocator = allocator
+  };
+
+  VkResult result;
+  XR(xrCreateVulkanInstanceKHR(state.instance, &info, (VkInstance*) instance, &result));
+  return result;
+}
+
+static uint32_t openxr_createVulkanDevice(void* instance, void* deviceCreateInfo, void* allocator, uintptr_t device, void* getInstanceProcAddr) {
+  XrVulkanDeviceCreateInfoKHR info = {
+    .type = XR_TYPE_VULKAN_DEVICE_CREATE_INFO_KHR,
+    .systemId = state.system,
+    .pfnGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr) getInstanceProcAddr,
+    .vulkanPhysicalDevice = (VkPhysicalDevice) gpu_vk_get_physical_device(),
+    .vulkanCreateInfo = deviceCreateInfo,
+    .vulkanAllocator = allocator
+  };
+
+  VkResult result;
+  XR(xrCreateVulkanDeviceKHR(state.instance, &info, (VkDevice*) device, &result));
+  return result;
+}
+
 static void openxr_destroy();

 static bool openxr_init(float supersample, float offset, uint32_t msaa, bool overlay) {
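With XR_KHR_vulkan_enable2 the runtime wraps vkCreateInstance/vkCreateDevice so it can append the extensions it requires, and xrGetVulkanGraphicsDevice2KHR reports which physical device the headset is attached to. Note the two error domains here: the XR() macro checks the XrResult of the wrapper call, while the wrapped call's VkResult comes back to the caller as a uint32_t. A hypothetical caller-side sketch (in this commit the real caller is gpu_init, through the gpu_config.vk hooks):

    // Hypothetical: driving the instance hook directly.
    static void createInstanceExample(void) {
      VkInstanceCreateInfo createInfo = { .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
      VkInstance instance = VK_NULL_HANDLE;
      uint32_t result = openxr_createVulkanInstance(&createInfo, NULL, (uintptr_t) &instance, (void*) vkGetInstanceProcAddr);
      if (result != (uint32_t) VK_SUCCESS) {
        // Vulkan-level failure; OpenXR-level failures were already caught by XR() inside the hook
      }
    }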
@@ -303,10 +318,9 @@ static bool openxr_init(float supersample, float offset, uint32_t msaa, bool ove
 #ifdef __ANDROID__
     { "XR_KHR_android_create_instance", NULL, false },
 #endif
-#ifdef LOVR_LINUX_EGL
-    { "XR_MNDX_egl_enable", NULL, false },
+#ifdef LOVR_VK
+    { "XR_KHR_vulkan_enable2", NULL, false },
 #endif
-    { GRAPHICS_EXTENSION, NULL, false },
     { "XR_EXT_eye_gaze_interaction", &state.features.gaze, false },
     { "XR_EXT_hand_tracking", &state.features.handTracking, false },
     { "XR_FB_display_refresh_rate", &state.features.refreshRate, false },
@@ -768,56 +782,28 @@ static bool openxr_init(float supersample, float offset, uint32_t msaa, bool ove
 static void openxr_start(void) {
   { // Session
-#if defined(LOVR_GL)
-    XrGraphicsRequirementsOpenGLKHR requirements = { .type = XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_KHR, NULL };
-    PFN_xrGetOpenGLGraphicsRequirementsKHR xrGetOpenGLGraphicsRequirementsKHR;
-    XR_LOAD(xrGetOpenGLGraphicsRequirementsKHR);
-    xrGetOpenGLGraphicsRequirementsKHR(state.instance, state.system, &requirements);
-    // TODO validate OpenGL versions, potentially in init
-#elif defined(LOVR_GLES)
-    XrGraphicsRequirementsOpenGLESKHR requirements = { .type = XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_ES_KHR, NULL };
-    PFN_xrGetOpenGLESGraphicsRequirementsKHR xrGetOpenGLESGraphicsRequirementsKHR;
-    XR_LOAD(xrGetOpenGLESGraphicsRequirementsKHR);
-    xrGetOpenGLESGraphicsRequirementsKHR(state.instance, state.system, &requirements);
-    // TODO validate OpenGLES versions, potentially in init
+#ifdef LOVR_VK
+    XrGraphicsRequirementsVulkanKHR requirements = { .type = XR_TYPE_GRAPHICS_REQUIREMENTS_VULKAN_KHR, NULL };
+    PFN_xrGetVulkanGraphicsRequirements2KHR xrGetVulkanGraphicsRequirements2KHR;
+    XR_LOAD(xrGetVulkanGraphicsRequirements2KHR);
+    XR(xrGetVulkanGraphicsRequirements2KHR(state.instance, state.system, &requirements));
+    if (XR_VERSION_MAJOR(requirements.minApiVersionSupported) > 1 || XR_VERSION_MINOR(requirements.minApiVersionSupported) > 1) {
+      lovrThrow("OpenXR Vulkan version not supported");
+    }
+
+    uint32_t queueFamilyIndex, queueIndex;
+    gpu_vk_get_queue(&queueFamilyIndex, &queueIndex);
+
+    XrGraphicsBindingVulkanKHR graphicsBinding = {
+      .type = XR_TYPE_GRAPHICS_BINDING_VULKAN_KHR,
+      .instance = (VkInstance) gpu_vk_get_instance(),
+      .physicalDevice = (VkPhysicalDevice) gpu_vk_get_physical_device(),
+      .device = (VkDevice) gpu_vk_get_device(),
+      .queueFamilyIndex = queueFamilyIndex,
+      .queueIndex = queueIndex
+    };
 #else
 #error "Unsupported renderer"
-#endif
-
-#if defined(_WIN32) && defined(LOVR_GL)
-    XrGraphicsBindingOpenGLWin32KHR graphicsBinding = {
-      .type = XR_TYPE_GRAPHICS_BINDING_OPENGL_WIN32_KHR,
-      .hDC = GetDC(os_get_win32_window()),
-      .hGLRC = os_get_win32_context()
-    };
-#elif defined(__ANDROID__) && defined(LOVR_GLES)
-    XrGraphicsBindingOpenGLESAndroidKHR graphicsBinding = {
-      .type = XR_TYPE_GRAPHICS_BINDING_OPENGL_ES_ANDROID_KHR,
-      .display = os_get_egl_display(),
-      .config = os_get_egl_config(),
-      .context = os_get_egl_context()
-    };
-#elif defined(LOVR_LINUX_X11)
-    XrGraphicsBindingOpenGLXlibKHR graphicsBinding = {
-      .type = XR_TYPE_GRAPHICS_BINDING_OPENGL_XLIB_KHR,
-      .next = NULL,
-      .xDisplay = os_get_x11_display(),
-      .visualid = 0,
-      .glxFBConfig = 0,
-      .glxDrawable = os_get_glx_drawable(),
-      .glxContext = os_get_glx_context(),
-    };
-#elif defined(LOVR_LINUX_EGL)
-    XrGraphicsBindingEGLMNDX graphicsBinding = {
-      .type = XR_TYPE_GRAPHICS_BINDING_EGL_MNDX,
-      .next = NULL,
-      .getProcAddress = os_get_egl_proc_addr(),
-      .display = os_get_egl_display(),
-      .config = os_get_egl_config(),
-      .context = os_get_egl_context(),
-    };
-#else
-#error "Unsupported OpenXR platform/graphics combination"
 #endif

     XrSessionCreateInfo info = {
@@ -891,47 +877,42 @@ static void openxr_start(void) {
   }

   { // Swapchain
-#if defined(XR_USE_GRAPHICS_API_OPENGL)
-    TextureType textureType = TEXTURE_2D;
-    uint32_t width = state.width * 2;
-    uint32_t arraySize = 1;
-    XrSwapchainImageOpenGLKHR images[MAX_IMAGES];
+#ifdef LOVR_VK
+    XrSwapchainImageVulkanKHR images[MAX_IMAGES];
     for (uint32_t i = 0; i < MAX_IMAGES; i++) {
-      images[i].type = XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_KHR;
-      images[i].next = NULL;
-    }
-#elif defined(XR_USE_GRAPHICS_API_OPENGL_ES)
-    TextureType textureType = TEXTURE_ARRAY;
-    uint32_t width = state.width;
-    uint32_t arraySize = 2;
-    XrSwapchainImageOpenGLESKHR images[MAX_IMAGES];
-    for (uint32_t i = 0; i < MAX_IMAGES; i++) {
-      images[i].type = XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_ES_KHR;
+      images[i].type = XR_TYPE_SWAPCHAIN_IMAGE_VULKAN_KHR;
       images[i].next = NULL;
     }
 #endif

     XrSwapchainCreateInfo info = {
       .type = XR_TYPE_SWAPCHAIN_CREATE_INFO,
-      .usageFlags = XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT | XR_SWAPCHAIN_USAGE_SAMPLED_BIT,
-      .format = GL_SRGB8_ALPHA8,
-      .width = width,
+      .usageFlags = XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT,
+      .format = VK_FORMAT_R8G8B8A8_SRGB,
+      .width = state.width,
       .height = state.height,
       .sampleCount = 1,
       .faceCount = 1,
-      .arraySize = arraySize,
+      .arraySize = 2,
       .mipCount = 1
     };

     XR(xrCreateSwapchain(state.session, &info, &state.swapchain));
     XR(xrEnumerateSwapchainImages(state.swapchain, MAX_IMAGES, &state.imageCount, (XrSwapchainImageBaseHeader*) images));

-    CanvasFlags flags = { .depth = { true, false, FORMAT_D24S8 }, .stereo = true, .mipmaps = false, .msaa = state.msaa };
-
     for (uint32_t i = 0; i < state.imageCount; i++) {
-      Texture* texture = lovrTextureCreateFromHandle(images[i].image, textureType, arraySize, state.msaa);
-      state.canvases[i] = lovrCanvasCreate(state.width, state.height, flags);
-      lovrCanvasSetAttachments(state.canvases[i], &(Attachment) { texture, 0, 0 }, 1);
-      lovrRelease(texture, lovrTextureDestroy);
+      state.textures[i] = lovrTextureCreate(&(TextureInfo) {
+        .type = TEXTURE_ARRAY,
+        .format = FORMAT_RGBA8,
+        .srgb = true,
+        .width = state.width,
+        .height = state.height,
+        .depth = 2,
+        .mipmaps = 1,
+        .samples = 1,
+        .usage = TEXTURE_RENDER,
+        .handle = (uintptr_t) images[i].image
+      });
     }

     XrCompositionLayerFlags layerFlags = 0;
@@ -955,21 +936,16 @@ static void openxr_start(void) {
       .subImage = { state.swapchain, { { 0, 0 }, { state.width, state.height } }, 0 }
     };

-    // Copy the left view to the right view and offset either the viewport or array index
-    state.layerViews[1] = state.layerViews[0];
-#if defined(XR_USE_GRAPHICS_API_OPENGL)
-    state.layerViews[1].subImage.imageRect.offset.x += state.width;
-#elif defined(XR_USE_GRAPHICS_API_OPENGL_ES)
-    state.layerViews[1].subImage.imageArrayIndex = 1;
-#endif
+    state.layerViews[1] = (XrCompositionLayerProjectionView) {
+      .type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW,
+      .subImage = { state.swapchain, { { 0, 0 }, { state.width, state.height } }, 1 }
+    };
   }
-
-  os_window_set_vsync(0);
 }

 static void openxr_destroy(void) {
   for (uint32_t i = 0; i < state.imageCount; i++) {
-    lovrRelease(state.canvases[i], lovrCanvasDestroy);
+    lovrRelease(state.textures[i], lovrTextureDestroy);
   }

   for (size_t i = 0; i < MAX_ACTIONS; i++) {
@@ -1104,7 +1080,7 @@ static const float* openxr_getBoundsGeometry(uint32_t* count) {
   return NULL;
 }

-static bool openxr_getPose(Device device, vec3 position, quat orientation) {
+static bool openxr_getPose(Device device, float* position, float* orientation) {
   if (!state.spaces[device]) {
     return false;
   }
@@ -1179,7 +1155,7 @@ static bool openxr_getPose(Device device, vec3 position, quat orientation) {
   return location.locationFlags & (XR_SPACE_LOCATION_POSITION_VALID_BIT | XR_SPACE_LOCATION_ORIENTATION_VALID_BIT);
 }

-static bool openxr_getVelocity(Device device, vec3 linearVelocity, vec3 angularVelocity) {
+static bool openxr_getVelocity(Device device, float* linearVelocity, float* angularVelocity) {
   if (!state.spaces[device]) {
     return false;
   }
@@ -1563,7 +1539,7 @@ static struct ModelData* openxr_newModelData(Device device, bool animated) {
 }

 static bool openxr_animate(Device device, struct Model* model) {
-  XrHandTrackerEXT tracker = getHandTracker(device);
+  /*XrHandTrackerEXT tracker = getHandTracker(device);

   if (!tracker) {
     return false;
@@ -1643,44 +1619,54 @@ static bool openxr_animate(Device device, struct Model* model) {
       lovrModelPose(model, i, position, orientation, 1.f);
     }
-  }
+  }*/

   return true;
 }

-static void openxr_renderTo(void (*callback)(void*), void* userdata) {
+static Texture* openxr_getTexture(void) {
+  if (!SESSION_ACTIVE(state.sessionState)) {
+    return NULL;
+  }
+
+  if (state.hasImage) {
+    return state.textures[state.imageIndex];
+  }
+
+  XrFrameBeginInfo beginfo = { .type = XR_TYPE_FRAME_BEGIN_INFO };
+  XR(xrBeginFrame(state.session, &beginfo));
+
+  XrSwapchainImageWaitInfo waitInfo = { XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO, .timeout = XR_INFINITE_DURATION };
+  XR(xrAcquireSwapchainImage(state.swapchain, NULL, &state.imageIndex));
+  XR(xrWaitSwapchainImage(state.swapchain, &waitInfo));
+
+  uint32_t count;
+  XrView views[2];
+  getViews(views, &count);
+  state.layerViews[0].pose = views[0].pose;
+  state.layerViews[0].fov = views[0].fov;
+  state.layerViews[1].pose = views[1].pose;
+  state.layerViews[1].fov = views[1].fov;
+
+  state.hasImage = true;
+  return state.textures[state.imageIndex];
+}
+
+static void openxr_submit(void) {
   if (!SESSION_ACTIVE(state.sessionState)) {
     state.waited = false;
     return;
   }

-  XrFrameBeginInfo beginInfo = {
-    .type = XR_TYPE_FRAME_BEGIN_INFO
-  };
-
   XrFrameEndInfo endInfo = {
     .type = XR_TYPE_FRAME_END_INFO,
     .displayTime = state.frameState.predictedDisplayTime,
     .environmentBlendMode = XR_ENVIRONMENT_BLEND_MODE_OPAQUE,
-    .layers = (const XrCompositionLayerBaseHeader*[1]) { (XrCompositionLayerBaseHeader*) &state.layers[0] }
+    .layers = (const XrCompositionLayerBaseHeader*[1]) { (XrCompositionLayerBaseHeader*) &state.layers[0] },
+    .layerCount = state.hasImage ? 1 : 0
   };

-  XR(xrBeginFrame(state.session, &beginInfo));
-  if (state.frameState.shouldRender) {
-    if (state.hasImage) {
-      XR(xrReleaseSwapchainImage(state.swapchain, NULL));
-    }
-    XrSwapchainImageWaitInfo waitInfo = {
-      .type = XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO,
-      .timeout = XR_INFINITE_DURATION
-    };
-    XR(xrAcquireSwapchainImage(state.swapchain, NULL, &state.imageIndex));
-    XR(xrWaitSwapchainImage(state.swapchain, &waitInfo));
-    state.hasImage = true;
+  if (state.hasImage) {
     XR(xrReleaseSwapchainImage(state.swapchain, NULL));
     state.hasImage = false;
   }
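xrBeginFrame and the swapchain acquire/wait now happen lazily inside getTexture (guarded by state.hasImage, so repeated calls within one frame reuse the same image), while submit releases the image and ends the frame, submitting zero layers when nothing was acquired. A hypothetical sketch of the resulting ordering contract:

    // Hypothetical per-frame usage of the two new entry points.
    static void frameExample(void) {
      Texture* texture = openxr_getTexture(); // begins the XR frame and acquires an image, at most once per frame
      if (texture) {
        // render into texture; array layer 0 holds the first view, layer 1 the second
      }
      openxr_submit(); // releases the image (if any) and ends the frame with 0 or 1 layers
    }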
@@ -1689,10 +1675,6 @@ static void openxr_renderTo(void (*callback)(void*), void* userdata) {
   state.waited = false;
 }

-static Texture* openxr_getMirrorTexture(void) {
-  return NULL;
-}
-
 static bool openxr_isFocused(void) {
   return state.sessionState == XR_SESSION_STATE_FOCUSED;
 }
@@ -1771,6 +1753,9 @@ static double openxr_update(void) {
 HeadsetInterface lovrHeadsetOpenXRDriver = {
   .driverType = DRIVER_OPENXR,
+  .getVulkanPhysicalDevice = openxr_getVulkanPhysicalDevice,
+  .createVulkanInstance = openxr_createVulkanInstance,
+  .createVulkanDevice = openxr_createVulkanDevice,
   .init = openxr_init,
   .start = openxr_start,
   .destroy = openxr_destroy,
@@ -1796,8 +1781,8 @@ HeadsetInterface lovrHeadsetOpenXRDriver = {
   .vibrate = openxr_vibrate,
   .newModelData = openxr_newModelData,
   .animate = openxr_animate,
-  .renderTo = openxr_renderTo,
-  .getMirrorTexture = openxr_getMirrorTexture,
+  .getTexture = openxr_getTexture,
+  .submit = openxr_submit,
   .isFocused = openxr_isFocused,
   .update = openxr_update
 };