config: Make high GPU accuracy the default
This is a better default for most games, yielding better performance and fewer graphical issues.
commit 13becdf18a
parent 5b35b01070
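The defaults changed below are the raw integer values of the GPU accuracy setting. As a point of reference, a minimal sketch of the mapping those integers are assumed to follow (names mirror yuzu's Settings::GPUAccuracy levels; treat the exact spelling as an assumption, not a verbatim copy):

#include <cstdint>

// Assumed mapping of the integer defaults touched by this commit to
// yuzu's GPU accuracy levels (names are illustrative).
enum class GPUAccuracy : std::uint32_t {
    Normal = 0,  // previous default: fastest, loosest tracking
    High = 1,    // new default introduced by this commit
    Extreme = 2, // strictest, most expensive level
};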
@@ -782,7 +782,7 @@ void Config::ReadRendererValues() {
     ReadSettingGlobal(Settings::values.frame_limit, QStringLiteral("frame_limit"), 100);
     ReadSettingGlobal(Settings::values.use_disk_shader_cache,
                       QStringLiteral("use_disk_shader_cache"), true);
-    ReadSettingGlobal(Settings::values.gpu_accuracy, QStringLiteral("gpu_accuracy"), 0);
+    ReadSettingGlobal(Settings::values.gpu_accuracy, QStringLiteral("gpu_accuracy"), 1);
     ReadSettingGlobal(Settings::values.use_asynchronous_gpu_emulation,
                       QStringLiteral("use_asynchronous_gpu_emulation"), true);
     ReadSettingGlobal(Settings::values.use_nvdec_emulation, QStringLiteral("use_nvdec_emulation"),
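As the surrounding context lines show (frame_limit falls back to 100, use_disk_shader_cache to true), the trailing argument of ReadSettingGlobal is the fallback used when the key is missing, which is why changing it flips the behavior of fresh configs. A hypothetical simplification of such a helper, assuming the usual QSettings pattern (the real helper also handles per-game overrides):

#include <QSettings>
#include <QString>
#include <QVariant>

// Hypothetical simplification of ReadSettingGlobal: read one key from
// the active QSettings group, falling back to `default_value` when the
// key is absent. Illustrative only, not yuzu's actual implementation.
template <typename T>
void ReadSettingSketch(T& setting, QSettings& qt_config, const QString& name,
                       const QVariant& default_value) {
    setting = qt_config.value(name, default_value).value<T>();
}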
@@ -1351,7 +1351,7 @@ void Config::SaveRendererValues() {
                        Settings::values.use_disk_shader_cache, true);
     WriteSettingGlobal(QStringLiteral("gpu_accuracy"),
                        static_cast<int>(Settings::values.gpu_accuracy.GetValue(global)),
-                       Settings::values.gpu_accuracy.UsingGlobal(), 0);
+                       Settings::values.gpu_accuracy.UsingGlobal(), 1);
     WriteSettingGlobal(QStringLiteral("use_asynchronous_gpu_emulation"),
                        Settings::values.use_asynchronous_gpu_emulation, true);
     WriteSettingGlobal(QStringLiteral("use_nvdec_emulation"), Settings::values.use_nvdec_emulation,
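The save path receives the same default as its trailing argument, so the read and write sides must agree (hence both hunks change 0 to 1). One assumed reason a writer wants the default at save time is to drop keys that still hold it, keeping the generated config file minimal; a hypothetical sketch of that pattern, not a verbatim copy of yuzu's WriteSettingGlobal:

#include <QSettings>
#include <QString>
#include <QVariant>

// Assumed pattern: only persist a value when it differs from its
// default, so unchanged settings do not clutter the config file.
void WriteSettingSketch(QSettings& qt_config, const QString& name,
                        const QVariant& value, const QVariant& default_value) {
    if (value == default_value) {
        qt_config.remove(name); // still at default: no need to persist
    } else {
        qt_config.setValue(name, value);
    }
}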
@@ -388,7 +388,7 @@ void Config::ReadValues() {
         static_cast<u16>(sdl2_config->GetInteger("Renderer", "frame_limit", 100)));
     Settings::values.use_disk_shader_cache.SetValue(
         sdl2_config->GetBoolean("Renderer", "use_disk_shader_cache", false));
-    const int gpu_accuracy_level = sdl2_config->GetInteger("Renderer", "gpu_accuracy", 0);
+    const int gpu_accuracy_level = sdl2_config->GetInteger("Renderer", "gpu_accuracy", 1);
     Settings::values.gpu_accuracy.SetValue(static_cast<Settings::GPUAccuracy>(gpu_accuracy_level));
     Settings::values.use_asynchronous_gpu_emulation.SetValue(
         sdl2_config->GetBoolean("Renderer", "use_asynchronous_gpu_emulation", true));
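On the SDL frontend the same default now applies when reading the INI file, so omitting the key behaves like writing it explicitly. An illustrative snippet (section and key names taken from the hunk above; the comment reflects the assumed 0/1/2 mapping sketched earlier):

[Renderer]
# 0 = Normal, 1 = High (new default), 2 = Extreme
gpu_accuracy = 1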