configure_graphics: Add Accelerate ASTC decoding setting
parent c4ff7ecf51
commit b2955479e5

9 changed files with 32 additions and 2 deletions
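Nine files changed, but only two files' hunks are rendered below; judging by the includes, they are the OpenGL and Vulkan texture caches. The remaining files presumably declare the new accelerate_astc option in common/settings.h and expose it in the configure_graphics UI. As a rough, hypothetical sketch of the shape that plumbing takes, assuming the Setting<T> wrapper that Settings::values.accelerate_astc.GetValue() implies (the class layout and the default value here are illustrative, not taken from the commit):

#include <cstdio>

namespace Settings {

// Hypothetical sketch; the real declaration lives in common/settings.h,
// which is not shown in this view.
template <typename T>
class Setting {
public:
    Setting() = default;
    explicit Setting(const T& value) : value_{value} {}
    const T& GetValue() const { return value_; }      // read by both renderers below
    void SetValue(const T& value) { value_ = value; } // written by the settings UI

private:
    T value_{};
};

struct Values {
    Setting<bool> accelerate_astc{true}; // default value is an assumption
};

inline Values values;

} // namespace Settings

int main() {
    std::printf("%d\n", Settings::values.accelerate_astc.GetValue()); // prints 1
}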
@@ -9,6 +9,8 @@
 #include <glad/glad.h>
 
+#include "common/settings.h"
+
 #include "video_core/renderer_opengl/gl_device.h"
 #include "video_core/renderer_opengl/gl_shader_manager.h"
 #include "video_core/renderer_opengl/gl_state_tracker.h"
@@ -307,7 +309,9 @@ void ApplySwizzle(GLuint handle, PixelFormat format, std::array<SwizzleSource, 4
 
 [[nodiscard]] bool CanBeAccelerated(const TextureCacheRuntime& runtime,
                                     const VideoCommon::ImageInfo& info) {
-    return !runtime.HasNativeASTC() && IsPixelFormatASTC(info.format);
+    if (IsPixelFormatASTC(info.format)) {
+        return !runtime.HasNativeASTC() && Settings::values.accelerate_astc.GetValue();
+    }
     // Disable other accelerated uploads for now as they don't implement swizzled uploads
     return false;
     switch (info.type) {
@@ -8,6 +8,7 @@
 #include <vector>
 
 #include "common/bit_cast.h"
+#include "common/settings.h"
 
 #include "video_core/engines/fermi_2d.h"
 #include "video_core/renderer_vulkan/blit_image.h"
@@ -828,7 +829,11 @@ Image::Image(TextureCacheRuntime& runtime, const ImageInfo& info_, GPUVAddr gpu_
         commit = runtime.memory_allocator.Commit(buffer, MemoryUsage::DeviceLocal);
     }
     if (IsPixelFormatASTC(info.format) && !runtime.device.IsOptimalAstcSupported()) {
-        flags |= VideoCommon::ImageFlagBits::AcceleratedUpload;
+        if (Settings::values.accelerate_astc.GetValue()) {
+            flags |= VideoCommon::ImageFlagBits::AcceleratedUpload;
+        } else {
+            flags |= VideoCommon::ImageFlagBits::Converted;
+        }
     }
     if (runtime.device.HasDebuggingToolAttached()) {
         if (image) {
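The Vulkan side makes the fallback explicit: when the device lacks optimal ASTC support, an ASTC image is either flagged for the accelerated (GPU-decoded) upload path when the setting is on, or flagged Converted when it is off, handing it to the texture cache's existing conversion route instead. A standalone sketch of that two-way choice, with an illustrative flag enum modeled on VideoCommon::ImageFlagBits (the bit values and helper name are assumptions):

#include <cstdint>
#include <cstdio>

// Illustrative flag bits; the real VideoCommon::ImageFlagBits has more members.
enum class ImageFlagBits : std::uint32_t {
    AcceleratedUpload = 1U << 0,
    Converted = 1U << 1,
};

// Mirrors the if/else added in Image::Image above.
ImageFlagBits SelectAstcFlag(bool accelerate_astc) {
    return accelerate_astc ? ImageFlagBits::AcceleratedUpload : ImageFlagBits::Converted;
}

int main() {
    std::printf("setting on  -> %u\n", static_cast<std::uint32_t>(SelectAstcFlag(true)));  // 1
    std::printf("setting off -> %u\n", static_cast<std::uint32_t>(SelectAstcFlag(false))); // 2
}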