Add some descriptor set limits

This commit is contained in:
bjorn 2022-05-31 20:16:16 -07:00
parent 42a924b0ee
commit 99e45c22ad
5 changed files with 39 additions and 5 deletions

View File

@@ -545,6 +545,13 @@ static int l_lovrGraphicsGetLimits(lua_State* L) {
lua_pushinteger(L, limits.renderSize[2]), lua_rawseti(L, -2, 3);
lua_setfield(L, -2, "renderSize");
lua_pushinteger(L, limits.uniformBuffersPerStage), lua_setfield(L, -2, "uniformBuffersPerStage");
lua_pushinteger(L, limits.storageBuffersPerStage), lua_setfield(L, -2, "storageBuffersPerStage");
lua_pushinteger(L, limits.sampledTexturesPerStage), lua_setfield(L, -2, "sampledTexturesPerStage");
lua_pushinteger(L, limits.storageTexturesPerStage), lua_setfield(L, -2, "storageTexturesPerStage");
lua_pushinteger(L, limits.samplersPerStage), lua_setfield(L, -2, "samplersPerStage");
lua_pushinteger(L, limits.resourcesPerShader), lua_setfield(L, -2, "resourcesPerShader");
lua_pushinteger(L, limits.uniformBufferRange), lua_setfield(L, -2, "uniformBufferRange");
lua_pushinteger(L, limits.storageBufferRange), lua_setfield(L, -2, "storageBufferRange");
lua_pushinteger(L, limits.uniformBufferAlign), lua_setfield(L, -2, "uniformBufferAlign");

View File

@@ -551,6 +551,11 @@ typedef struct {
uint32_t textureSizeCube;
uint32_t textureLayers;
uint32_t renderSize[3];
uint32_t uniformBuffersPerStage;
uint32_t storageBuffersPerStage;
uint32_t sampledTexturesPerStage;
uint32_t storageTexturesPerStage;
uint32_t samplersPerStage;
uint32_t uniformBufferRange;
uint32_t storageBufferRange;
uint32_t uniformBufferAlign;

View File

@@ -900,7 +900,7 @@ bool gpu_bundle_pool_init(gpu_bundle_pool* pool, gpu_bundle_pool_info* info) {
return false;
}
VkDescriptorSetLayout layouts[256];
VkDescriptorSetLayout layouts[512];
for (uint32_t i = 0; i < info->count; i+= COUNTOF(layouts)) {
uint32_t chunk = MIN(info->count - i, COUNTOF(layouts));
@@ -1689,13 +1689,18 @@ bool gpu_init(gpu_config* config) {
if (config->limits) {
VkPhysicalDeviceLimits* limits = &properties2.properties.limits;
config->limits->textureSize2D = MIN(limits->maxImageDimension2D, UINT16_MAX);
config->limits->textureSize3D = MIN(limits->maxImageDimension3D, UINT16_MAX);
config->limits->textureSizeCube = MIN(limits->maxImageDimensionCube, UINT16_MAX);
config->limits->textureLayers = MIN(limits->maxImageArrayLayers, UINT16_MAX);
config->limits->textureSize2D = limits->maxImageDimension2D;
config->limits->textureSize3D = limits->maxImageDimension3D;
config->limits->textureSizeCube = limits->maxImageDimensionCube;
config->limits->textureLayers = limits->maxImageArrayLayers;
config->limits->renderSize[0] = limits->maxFramebufferWidth;
config->limits->renderSize[1] = limits->maxFramebufferHeight;
config->limits->renderSize[2] = multiviewProperties.maxMultiviewViewCount;
config->limits->uniformBuffersPerStage = limits->maxPerStageDescriptorUniformBuffers;
config->limits->storageBuffersPerStage = limits->maxPerStageDescriptorStorageBuffers;
config->limits->sampledTexturesPerStage = limits->maxPerStageDescriptorSampledImages;
config->limits->storageTexturesPerStage = limits->maxPerStageDescriptorStorageImages;
config->limits->samplersPerStage = limits->maxPerStageDescriptorSamplers;
config->limits->uniformBufferRange = limits->maxUniformBufferRange;
config->limits->storageBufferRange = limits->maxStorageBufferRange;
config->limits->uniformBufferAlign = limits->minUniformBufferOffsetAlignment;

View File

@@ -20,6 +20,7 @@ uint32_t os_vk_create_surface(void* instance, void** surface);
const char** os_vk_get_instance_extensions(uint32_t* count);
#define MAX_FRAME_MEMORY (1 << 30)
#define MAX_RESOURCES_PER_SHADER 32
typedef struct {
gpu_vertex_format gpu;
@@ -411,6 +412,12 @@ void lovrGraphicsGetLimits(GraphicsLimits* limits) {
limits->renderSize[0] = state.limits.renderSize[0];
limits->renderSize[1] = state.limits.renderSize[1];
limits->renderSize[2] = state.limits.renderSize[2];
limits->uniformBuffersPerStage = MIN(state.limits.uniformBuffersPerStage - 2, MAX_RESOURCES_PER_SHADER);
limits->storageBuffersPerStage = MIN(state.limits.storageBuffersPerStage, MAX_RESOURCES_PER_SHADER);
limits->sampledTexturesPerStage = MIN(state.limits.sampledTexturesPerStage, MAX_RESOURCES_PER_SHADER);
limits->storageTexturesPerStage = MIN(state.limits.storageTexturesPerStage, MAX_RESOURCES_PER_SHADER);
limits->samplersPerStage = MIN(state.limits.samplersPerStage - 1, MAX_RESOURCES_PER_SHADER);
limits->resourcesPerShader = MAX_RESOURCES_PER_SHADER;
limits->uniformBufferRange = state.limits.uniformBufferRange;
limits->storageBufferRange = state.limits.storageBufferRange;
limits->uniformBufferAlign = state.limits.uniformBufferAlign;
@@ -1110,6 +1117,10 @@ Shader* lovrShaderCreate(ShaderInfo* info) {
uint32_t index = shader->resourceCount++;
if (shader->resourceCount > MAX_RESOURCES_PER_SHADER) {
lovrThrow("Shader resource count exceeds resourcesPerShader limit (%d)", MAX_RESOURCES_PER_SHADER);
}
slots[index] = (gpu_slot) {
.number = resource->binding,
.type = resourceTypes[resource->type],

View File

@@ -39,6 +39,12 @@ typedef struct {
uint32_t textureSizeCube;
uint32_t textureLayers;
uint32_t renderSize[3];
uint32_t uniformBuffersPerStage;
uint32_t storageBuffersPerStage;
uint32_t sampledTexturesPerStage;
uint32_t storageTexturesPerStage;
uint32_t samplersPerStage;
uint32_t resourcesPerShader;
uint32_t uniformBufferRange;
uint32_t storageBufferRange;
uint32_t uniformBufferAlign;