shader: Ignore global memory ops on devices lacking int64 support

commit 11f04f1022
parent 55233c2861
Author: ameerj
Date:   2021-07-04 00:34:53 -04:00

8 changed files with 79 additions and 30 deletions
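
Global memory instructions in the shader IR carry 64-bit guest addresses, so a host GPU that cannot use 64-bit integers in shaders has no way to execute them. The sketch below illustrates the idea with hypothetical stand-in types (Profile, Inst, Opcode are not yuzu's actual IR); one plausible policy, matching the commit title, is to fold unsupported global loads to zero and silently drop global writes.

#include <vector>

// Hypothetical stand-ins for the recompiler's IR types.
struct Profile {
    bool support_int64{};
};

enum class Opcode { LoadGlobal32, WriteGlobal32 };

struct Inst {
    Opcode opcode{};
    bool live{true};

    bool IsLoad() const { return opcode == Opcode::LoadGlobal32; }
    void ReplaceUsesWithZero() { live = false; }  // load results read back as zero
    void Discard() { live = false; }              // write is dropped entirely
};

// Neutralize global memory ops when the device cannot address them
// (no 64-bit integers in shaders); otherwise leave them for codegen.
void LowerGlobalMemory(std::vector<Inst>& insts, const Profile& profile) {
    if (profile.support_int64) {
        return;  // the backend can emit these ops as-is
    }
    for (Inst& inst : insts) {
        if (inst.IsLoad()) {
            inst.ReplaceUsesWithZero();
        } else {
            inst.Discard();
        }
    }
}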

@@ -168,6 +168,7 @@ ShaderCache::ShaderCache(RasterizerOpenGL& rasterizer_, Core::Frontend::EmuWindo
 .support_descriptor_aliasing = false,
 .support_int8 = false,
 .support_int16 = false,
+.support_int64 = device.HasShaderInt64(),
 .support_vertex_instance_id = true,
 .support_float_controls = false,
 .support_separate_denorm_behavior = false,

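On the OpenGL backend, the new profile bit comes from device.HasShaderInt64(). A plausible implementation (illustrative only, assuming the Device caches the driver's extension strings at creation) checks for GL_ARB_gpu_shader_int64, the extension that adds 64-bit integer types to GLSL:

#include <algorithm>
#include <string>
#include <vector>

// Returns true if `name` appears in the cached extension list.
bool HasExtension(const std::vector<std::string>& extensions, const std::string& name) {
    return std::find(extensions.begin(), extensions.end(), name) != extensions.end();
}

// At device creation, the capability would be cached once, e.g.:
//   has_shader_int64 = HasExtension(extensions, "GL_ARB_gpu_shader_int64");
// and HasShaderInt64() would simply return that cached flag.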

@@ -280,6 +280,7 @@ PipelineCache::PipelineCache(RasterizerVulkan& rasterizer_, Tegra::Engines::Maxw
 .support_descriptor_aliasing = true,
 .support_int8 = true,
 .support_int16 = device.IsShaderInt16Supported(),
+.support_int64 = device.IsShaderInt64Supported(),
 .support_vertex_instance_id = false,
 .support_float_controls = true,
 .support_separate_denorm_behavior = float_control.denormBehaviorIndependence ==
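
On the Vulkan backend, device.IsShaderInt64Supported() would typically reflect the standard shaderInt64 feature bit of VkPhysicalDeviceFeatures, queried once from the physical device. A minimal sketch of that query (the function name is hypothetical):

#include <vulkan/vulkan.h>

// Ask the physical device whether shaders may use 64-bit integers.
bool QueryShaderInt64(VkPhysicalDevice physical_device) {
    VkPhysicalDeviceFeatures features{};
    vkGetPhysicalDeviceFeatures(physical_device, &features);
    return features.shaderInt64 == VK_TRUE;
}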