gl_rasterizer_cache: Use dirty flags for the depth buffer

ReinUsesLisp 2019-01-07 02:09:39 -03:00
parent 179ee963db
commit b683e41fca
4 changed files with 23 additions and 3 deletions


@@ -144,6 +144,16 @@ void Maxwell3D::CallMethod(const GPU::MethodCall& method_call) {
        dirty_flags.color_buffer |= 1u << static_cast<u32>(rt_index);
    }

    // Zeta buffer
    constexpr u32 registers_in_zeta = sizeof(regs.zeta) / sizeof(u32);
    if (method_call.method == MAXWELL3D_REG_INDEX(zeta_enable) ||
        method_call.method == MAXWELL3D_REG_INDEX(zeta_width) ||
        method_call.method == MAXWELL3D_REG_INDEX(zeta_height) ||
        (method_call.method >= MAXWELL3D_REG_INDEX(zeta) &&
         method_call.method < MAXWELL3D_REG_INDEX(zeta) + registers_in_zeta)) {
        dirty_flags.zeta_buffer = true;
    }

    // Shader
    constexpr u32 shader_registers_count =
        sizeof(regs.shader_config[0]) * Regs::MaxShaderProgram / sizeof(u32);
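For context on how a flag like this is typically consumed, the rasterizer-cache side of this commit is not shown in the hunks above, so the following is only a minimal standalone sketch of the dirty-flag pattern. The class and function names (RasterizerCacheSketch, ConfigureDepthBuffer) are hypothetical and not the actual yuzu API; the point is simply that the consumer skips depth-buffer reconfiguration while zeta_buffer is clear and resets the flag once it has acted on it.

```cpp
#include <cstdint>
#include <iostream>

// Minimal model of the dirty-flag pattern (hypothetical names, not the yuzu API):
// the producer sets zeta_buffer when a zeta register is written, and the consumer
// only reconfigures the depth buffer while the flag is set.
struct DirtyFlags {
    std::uint8_t color_buffer = 0xFF;
    bool zeta_buffer = true;
};

class RasterizerCacheSketch {
public:
    void ConfigureDepthBuffer(DirtyFlags& dirty) {
        if (!dirty.zeta_buffer) {
            // Zeta registers untouched since the last configuration; reuse cached state.
            return;
        }
        dirty.zeta_buffer = false;
        std::cout << "Reconfiguring depth buffer from zeta registers\n";
        // ... look up or create the depth surface here ...
    }
};

int main() {
    DirtyFlags dirty;
    RasterizerCacheSketch cache;
    cache.ConfigureDepthBuffer(dirty); // flag starts dirty, so this reconfigures
    cache.ConfigureDepthBuffer(dirty); // flag now clear, so this is skipped
    dirty.zeta_buffer = true;          // a write to a zeta register would set this again
    cache.ConfigureDepthBuffer(dirty); // reconfigures once more
}
```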


@@ -1090,6 +1090,7 @@ public:
    struct DirtyFlags {
        u8 color_buffer = 0xFF;
        bool zeta_buffer = true;
        bool shaders = true;
@@ -1098,6 +1099,7 @@ public:
        void OnMemoryWrite() {
            color_buffer = 0xFF;
            zeta_buffer = true;
            shaders = true;
            vertex_array = 0xFFFFFFFF;
        }
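To make the header change concrete, here is a self-contained sketch of the intended semantics: every flag defaults to dirty so the first use of each resource forces a (re)configuration, consumers clear the flags they have handled, and OnMemoryWrite() conservatively re-dirties everything because a raw guest-memory write may have invalidated any cached state. The struct below is a trimmed copy of the one in the hunk above (with standard integer types substituted for yuzu's u8/u32 aliases), and the usage in main is hypothetical.

```cpp
#include <cassert>
#include <cstdint>

// Trimmed copy of the DirtyFlags struct from the hunk above, using standard
// integer types. All flags start dirty; OnMemoryWrite() re-dirties everything.
struct DirtyFlags {
    std::uint8_t color_buffer = 0xFF;
    bool zeta_buffer = true;
    bool shaders = true;
    std::uint32_t vertex_array = 0xFFFFFFFF;

    void OnMemoryWrite() {
        color_buffer = 0xFF;
        zeta_buffer = true;
        shaders = true;
        vertex_array = 0xFFFFFFFF;
    }
};

int main() {
    DirtyFlags flags;
    assert(flags.zeta_buffer); // dirty by default

    // A consumer clears the flags it has acted upon...
    flags.zeta_buffer = false;
    flags.color_buffer = 0;

    // ...and an untracked guest-memory write marks everything dirty again.
    flags.OnMemoryWrite();
    assert(flags.zeta_buffer && flags.color_buffer == 0xFF);
}
```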