Fixed wrong cast: uint16_t to uint32_t (#3187)
The new Google Pixel 8 device reports a maxTextureSize of 65536. The current implementation truncates this value to 0, because 65536 does not fit in 16 bits and the uint16_t cast wraps it around, which crashes my app. With the cast changed to uint32_t the app works correctly again.
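For illustration (not part of the commit), a minimal standalone C++ sketch of the truncation; 65536 is the limit the Pixel 8 is assumed to report:

#include <cstdint>
#include <cstdio>

int main()
{
	// Hypothetical GL_MAX_TEXTURE_SIZE value as reported by the driver.
	int32_t reported = 65536;

	// Old code path: 65536 does not fit in 16 bits, so the value wraps to 0.
	uint16_t truncated = uint16_t(reported);

	// Fixed code path: a 32-bit cast preserves the value.
	uint32_t preserved = uint32_t(reported);

	printf("uint16_t cast: %u\n", unsigned(truncated)); // prints 0
	printf("uint32_t cast: %u\n", preserved);           // prints 65536
	return 0;
}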
parent 448a42a8c6
commit e8f51dea4c
@@ -2910,7 +2910,7 @@ namespace bgfx { namespace gl
 				: 0
 				;

-			g_caps.limits.maxTextureSize = uint16_t(glGet(GL_MAX_TEXTURE_SIZE) );
+			g_caps.limits.maxTextureSize = uint32_t(glGet(GL_MAX_TEXTURE_SIZE) );
 			g_caps.limits.maxTextureLayers = BX_ENABLED(BGFX_CONFIG_RENDERER_OPENGL >= 30) || BX_ENABLED(BGFX_CONFIG_RENDERER_OPENGLES >= 30) || s_extension[Extension::EXT_texture_array].m_supported ? uint16_t(bx::max(glGet(GL_MAX_ARRAY_TEXTURE_LAYERS), 1) ) : 1;
 			g_caps.limits.maxComputeBindings = computeSupport ? BGFX_MAX_COMPUTE_BINDINGS : 0;
 			g_caps.limits.maxVertexStreams = BGFX_CONFIG_MAX_VERTEX_STREAMS;
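On the application side, the corrected limit surfaces through bgfx::getCaps(); a minimal sketch of reading it (assuming bgfx::init() has already succeeded on the device in question):

#include <bgfx/bgfx.h>
#include <cstdio>

// Call after bgfx::init(). With the old uint16_t cast, a device reporting
// 65536 would show maxTextureSize == 0 here; with the fix, the full value
// comes through.
void printMaxTextureSize()
{
	const bgfx::Caps* caps = bgfx::getCaps();
	printf("maxTextureSize: %u\n", caps->limits.maxTextureSize);
}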