shader: Add integer attribute get optimization pass

Works around an NVIDIA driver bug where casting integer attributes to float and back to integer always returned 0.
ameerj 2021-12-24 20:00:28 -05:00
parent 640fc1418b
commit 14ac0c2923
9 changed files with 86 additions and 0 deletions


@@ -53,6 +53,7 @@ Id EmitGetCbufU32(EmitContext& ctx, const IR::Value& binding, const IR::Value& o
 Id EmitGetCbufF32(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
 Id EmitGetCbufU32x2(EmitContext& ctx, const IR::Value& binding, const IR::Value& offset);
 Id EmitGetAttribute(EmitContext& ctx, IR::Attribute attr, Id vertex);
+Id EmitGetAttributeU32(EmitContext& ctx, IR::Attribute attr, Id vertex);
 void EmitSetAttribute(EmitContext& ctx, IR::Attribute attr, Id value, Id vertex);
 Id EmitGetAttributeIndexed(EmitContext& ctx, Id offset, Id vertex);
 void EmitSetAttributeIndexed(EmitContext& ctx, Id offset, Id value, Id vertex);
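
For context, a minimal self-contained sketch of the idea behind the pass, written against a hypothetical simplified IR rather than yuzu's actual shader_recompiler classes: it rewrites BitCastU32F32(GetAttribute(attr)) into a direct GetAttributeU32(attr) read, which the EmitGetAttributeU32 declaration added above would back in the SPIR-V emitter. The Opcode, Inst, and IntegerAttributeGetPass names below are illustrative assumptions, not the commit's code.

// Hypothetical, simplified stand-in for the shader IR; the real pass
// operates on yuzu's IR blocks and instructions.
#include <cstdint>
#include <vector>

enum class Opcode {
    GetAttribute,     // reads an attribute as a 32-bit float
    GetAttributeU32,  // reads the same attribute as a raw 32-bit integer
    BitCastU32F32,    // reinterprets a float result as an integer
    Other,
};

struct Inst {
    Opcode opcode{Opcode::Other};
    Inst* arg{nullptr};   // single operand, enough for this sketch
    std::uint32_t attr{}; // attribute index, used only by the Get* opcodes
};

// Replace BitCastU32F32(GetAttribute(attr)) with GetAttributeU32(attr), so the
// backend never round-trips an integer attribute through a float, which is the
// pattern the commit message says the buggy driver folds to 0.
void IntegerAttributeGetPass(std::vector<Inst>& block) {
    for (Inst& inst : block) {
        if (inst.opcode != Opcode::BitCastU32F32 || inst.arg == nullptr) {
            continue;
        }
        if (inst.arg->opcode != Opcode::GetAttribute) {
            continue;
        }
        // Fold the cast away: this instruction now reads the attribute
        // directly as an integer and no longer uses the float-typed read.
        // A later dead-code pass would drop the now-unused GetAttribute.
        inst.attr = inst.arg->attr;
        inst.arg = nullptr;
        inst.opcode = Opcode::GetAttributeU32;
    }
}

int main() {
    // Usage example: GetAttribute(5) followed by a bitcast of its result.
    std::vector<Inst> block(2);
    block[0] = {Opcode::GetAttribute, nullptr, 5};
    block[1] = {Opcode::BitCastU32F32, &block[0], 0};
    IntegerAttributeGetPass(block);
    return block[1].opcode == Opcode::GetAttributeU32 ? 0 : 1;
}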