Merge pull request #190 from bjornbytes/android-canvas

Android Canvas
This commit is contained in:
Bjorn 2020-01-17 12:19:49 -08:00 committed by GitHub
commit c5edc6dedb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 48 additions and 19 deletions

View File

@@ -31,7 +31,9 @@ void lovrCanvasSetAttachments(Canvas* canvas, Attachment* attachments, uint32_t
lovrAssert(level < mipmaps, "Invalid attachment mipmap level (Texture has %d, got %d)", mipmaps, level + 1);
lovrAssert(!hasDepthBuffer || width == canvas->width, "Texture width of %d does not match Canvas width (%d)", width, canvas->width);
lovrAssert(!hasDepthBuffer || height == canvas->height, "Texture height of %d does not match Canvas height (%d)", height, canvas->height);
#ifndef __ANDROID__ // On multiview canvases, the multisample settings can be different
lovrAssert(texture->msaa == canvas->flags.msaa, "Texture MSAA does not match Canvas MSAA");
#endif
lovrRetain(texture);
}

View File

@@ -666,11 +666,6 @@ static void lovrGpuBindCanvas(Canvas* canvas, bool willDraw) {
}
#endif
// Use the read framebuffer as a binding point to bind resolve textures
if (canvas->flags.msaa) {
glBindFramebuffer(GL_READ_FRAMEBUFFER, canvas->resolveBuffer);
}
GLenum buffers[MAX_CANVAS_ATTACHMENTS] = { GL_NONE };
for (uint32_t i = 0; i < canvas->attachmentCount; i++) {
GLenum drawBuffer = buffers[i] = GL_COLOR_ATTACHMENT0 + i;
@@ -683,11 +678,12 @@ static void lovrGpuBindCanvas(Canvas* canvas, bool willDraw) {
#ifdef LOVR_WEBGL
lovrThrow("Unreachable");
#else
glFramebufferTextureMultisampleMultiviewOVR(GL_READ_FRAMEBUFFER, drawBuffer, texture->id, level, canvas->flags.msaa, slice, 2);
glFramebufferTextureMultisampleMultiviewOVR(GL_FRAMEBUFFER, drawBuffer, texture->id, level, canvas->flags.msaa, slice, 2);
#endif
} else {
if (canvas->flags.msaa) {
glFramebufferRenderbuffer(GL_FRAMEBUFFER, drawBuffer, GL_RENDERBUFFER, texture->msaaId);
glBindFramebuffer(GL_READ_FRAMEBUFFER, canvas->resolveBuffer);
}
switch (texture->type) {
@@ -1484,7 +1480,7 @@ void lovrTextureAllocate(Texture* texture, uint32_t width, uint32_t height, uint
GLenum glFormat = convertTextureFormat(format);
GLenum internalFormat = convertTextureFormatInternal(format, texture->srgb);
#ifndef LOVR_WEBGL
#ifdef LOVR_GL
if (GLAD_GL_ARB_texture_storage) {
#endif
if (texture->type == TEXTURE_ARRAY) {
@@ -1492,7 +1488,7 @@ void lovrTextureAllocate(Texture* texture, uint32_t width, uint32_t height, uint
} else {
glTexStorage2D(texture->target, texture->mipmapCount, internalFormat, width, height);
}
#ifndef LOVR_WEBGL
#ifdef LOVR_GL
} else {
for (uint32_t i = 0; i < texture->mipmapCount; i++) {
switch (texture->type) {
@@ -1672,7 +1668,8 @@ Canvas* lovrCanvasInit(Canvas* canvas, uint32_t width, uint32_t height, CanvasFl
lovrAssert(isTextureFormatDepth(flags.depth.format), "Canvas depth buffer can't use a color TextureFormat");
GLenum attachment = flags.depth.format == FORMAT_D24S8 ? GL_DEPTH_STENCIL_ATTACHMENT : GL_DEPTH_ATTACHMENT;
if (flags.stereo && state.singlepass == MULTIVIEW) {
canvas->depth.texture = lovrTextureCreate(TEXTURE_ARRAY, NULL, 0, false, flags.mipmaps, flags.msaa);
// Zero MSAA is intentional here, we attach it to the Canvas using legacy MSAA technique
canvas->depth.texture = lovrTextureCreate(TEXTURE_ARRAY, NULL, 0, false, flags.mipmaps, 0);
lovrTextureAllocate(canvas->depth.texture, width, height, 2, flags.depth.format);
#ifdef LOVR_WEBGL
lovrThrow("Unreachable");
@@ -1692,7 +1689,7 @@ Canvas* lovrCanvasInit(Canvas* canvas, uint32_t width, uint32_t height, CanvasFl
}
}
if (flags.msaa) {
if (flags.msaa && (!flags.stereo || state.singlepass != MULTIVIEW)) {
glGenFramebuffers(1, &canvas->resolveBuffer);
}
@@ -1732,7 +1729,9 @@ void lovrCanvasResolve(Canvas* canvas) {
lovrGraphicsFlushCanvas(canvas);
if (canvas->flags.msaa) {
// We don't need to resolve a multiview Canvas because it uses the legacy multisampling method in
// which the driver does an implicit multisample resolve whenever the canvas textures are read.
if (canvas->flags.msaa && (!canvas->flags.stereo || state.singlepass != MULTIVIEW)) {
uint32_t w = canvas->width;
uint32_t h = canvas->height;
glBindFramebuffer(GL_READ_FRAMEBUFFER, canvas->framebuffer);

View File

@@ -3,6 +3,7 @@
#include "graphics/graphics.h"
#include "graphics/canvas.h"
#include "core/os.h"
#include "core/ref.h"
#include "lib/glad/glad.h"
#include <android/log.h>
#include <assert.h>
@@ -20,6 +21,9 @@ static struct {
BridgeLovrDevice deviceType;
BridgeLovrVibrateFunction* vibrateFunction;
BridgeLovrUpdateData updateData;
uint32_t textureHandles[4];
uint32_t textureCount;
Canvas* canvases[4];
} bridgeLovrMobileData;
// Headset
@@ -438,6 +442,8 @@ void bridgeLovrInit(BridgeLovrInitData *initData) {
bridgeLovrMobileData.updateData.displayTime = initData->zeroDisplayTime;
bridgeLovrMobileData.deviceType = initData->deviceType;
bridgeLovrMobileData.vibrateFunction = initData->vibrateFunction;
memcpy(bridgeLovrMobileData.textureHandles, initData->textureHandles, initData->textureCount * sizeof(uint32_t));
bridgeLovrMobileData.textureCount = initData->textureCount;
free(apkPath);
size_t length = strlen(initData->apkPath);
@@ -491,15 +497,30 @@ void bridgeLovrDraw(BridgeLovrDrawData *drawData) {
lovrGpuDirtyTexture(); // Clear texture state since LÖVR doesn't completely own the GL context
// Initialize a temporary Canvas from the framebuffer handle created by lovr-oculus-mobile
Canvas canvas = { 0 };
CanvasFlags flags = { .stereo = true };
uint32_t width = bridgeLovrMobileData.displayDimensions.width;
uint32_t height = bridgeLovrMobileData.displayDimensions.height;
lovrCanvasInitFromHandle(&canvas, width, height, flags, drawData->framebuffer, 0, 0, 1, true);
// Lazily create Canvas objects on the first frame
if (!bridgeLovrMobileData.canvases[0]) {
for (uint32_t i = 0; i < bridgeLovrMobileData.textureCount; i++) {
uint32_t width = bridgeLovrMobileData.displayDimensions.width;
uint32_t height = bridgeLovrMobileData.displayDimensions.height;
bridgeLovrMobileData.canvases[i] = lovrCanvasCreate(width, height, (CanvasFlags) {
.depth.enabled = true,
.depth.readable = false,
.depth.format = FORMAT_D24S8,
.msaa = 4,
.stereo = true,
.mipmaps = false
});
uint32_t handle = bridgeLovrMobileData.textureHandles[i];
Texture* texture = lovrTextureCreateFromHandle(handle, TEXTURE_ARRAY, 2);
lovrCanvasSetAttachments(bridgeLovrMobileData.canvases[i], &(Attachment) { .texture = texture }, 1);
lovrRelease(Texture, texture);
}
}
// Set up a camera using the view and projection matrices from lovr-oculus-mobile
Camera camera = { .canvas = &canvas };
Camera camera = { .canvas = bridgeLovrMobileData.canvases[drawData->textureIndex] };
mat4_init(camera.viewMatrix[0], bridgeLovrMobileData.updateData.eyeViewMatrix[0]);
mat4_init(camera.viewMatrix[1], bridgeLovrMobileData.updateData.eyeViewMatrix[1]);
mat4_init(camera.projection[0], bridgeLovrMobileData.updateData.projectionMatrix[0]);
@@ -512,8 +533,8 @@ void bridgeLovrDraw(BridgeLovrDrawData *drawData) {
lovrSetErrorCallback(luax_vthrow, L);
state.renderCallback(state.renderUserdata);
lovrGraphicsDiscard(false, true, true);
lovrGraphicsSetCamera(NULL, false);
lovrCanvasDestroy(&canvas);
}
// Android activity has been stopped or resumed
@@ -536,4 +557,8 @@ void bridgeLovrClose() {
pauseState = PAUSESTATE_NONE;
lua_close(L);
free(lovrOculusMobileWritablePath);
for (uint32_t i = 0; i < bridgeLovrMobileData.textureCount; i++) {
lovrRelease(Canvas, bridgeLovrMobileData.canvases[i]);
}
memset(&bridgeLovrMobileData, 0, sizeof(bridgeLovrMobileData));
}

View File

@@ -126,6 +126,8 @@ typedef struct {
double zeroDisplayTime;
BridgeLovrDevice deviceType;
BridgeLovrVibrateFunction* vibrateFunction; // Returns true on success
unsigned int textureHandles[4];
unsigned int textureCount;
} BridgeLovrInitData;
LOVR_EXPORT void bridgeLovrInit(BridgeLovrInitData *initData);
@@ -135,6 +137,7 @@ LOVR_EXPORT void bridgeLovrUpdate(BridgeLovrUpdateData *updateData);
typedef struct {
int eye;
int framebuffer;
unsigned int textureIndex;
} BridgeLovrDrawData;
LOVR_EXPORT void bridgeLovrDraw(BridgeLovrDrawData *drawData);