Merge branch 'master' into feature/savestates-2
commit 828f88d20a
@@ -140,7 +140,7 @@ if (ENABLE_SDL2)
     if (CITRA_USE_BUNDLED_SDL2)
         # Detect toolchain and platform
         if ((MSVC_VERSION GREATER_EQUAL 1910 AND MSVC_VERSION LESS 1930) AND ARCHITECTURE_x86_64)
-            set(SDL2_VER "SDL2-2.0.8")
+            set(SDL2_VER "SDL2-2.0.10")
        else()
            message(FATAL_ERROR "No bundled SDL2 binaries for your toolchain. Disable CITRA_USE_BUNDLED_SDL2 and provide your own.")
        endif()
@@ -6,10 +6,10 @@ function(copy_citra_Qt5_deps target_dir)
     set(Qt5_MEDIASERVICE_DIR "${Qt5_DIR}/../../../plugins/mediaservice/")
     set(Qt5_STYLES_DIR "${Qt5_DIR}/../../../plugins/styles/")
     set(Qt5_IMAGEFORMATS_DIR "${Qt5_DIR}/../../../plugins/imageformats/")
-    set(PLATFORMS ${DLL_DEST}platforms/)
-    set(MEDIASERVICE ${DLL_DEST}mediaservice/)
-    set(STYLES ${DLL_DEST}styles/)
-    set(IMAGEFORMATS ${DLL_DEST}imageformats/)
+    set(PLATFORMS ${DLL_DEST}plugins/platforms/)
+    set(MEDIASERVICE ${DLL_DEST}plugins/mediaservice/)
+    set(STYLES ${DLL_DEST}plugins/styles/)
+    set(IMAGEFORMATS ${DLL_DEST}plugins/imageformats/)
     windows_copy_files(${target_dir} ${Qt5_DLL_DIR} ${DLL_DEST}
         icudt*.dll
         icuin*.dll

@@ -38,4 +38,10 @@ function(copy_citra_Qt5_deps target_dir)
         qwbmp$<$<CONFIG:Debug>:d>.dll
         qwebp$<$<CONFIG:Debug>:d>.dll
     )
+
+    # Create an empty qt.conf file. Qt will detect that this file exists, and use the folder that its in as the root folder.
+    # This way it'll look for plugins in the root/plugins/ folder
+    add_custom_command(TARGET citra-qt POST_BUILD
+        COMMAND ${CMAKE_COMMAND} -E touch ${DLL_DEST}qt.conf
+    )
 endfunction(copy_citra_Qt5_deps)
@@ -27,7 +27,7 @@ install:
   - ps: |
      if ($env:BUILD_TYPE -eq 'mingw') {
        $dependencies = "mingw64/mingw-w64-x86_64-cmake mingw64/mingw-w64-x86_64-qt5 mingw64/mingw-w64-x86_64-ffmpeg"
-       C:\msys64\usr\bin\bash -lc "pacman --noconfirm -U http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-SDL2-2.0.5-2-any.pkg.tar.xz"
+       C:\msys64\usr\bin\bash -lc "pacman --noconfirm -U http://repo.msys2.org/mingw/x86_64/mingw-w64-x86_64-SDL2-2.0.10-1-any.pkg.tar.xz"
        C:\msys64\usr\bin\bash -lc "pacman --noconfirm -S $dependencies"
        # (HACK) ignore errors
        0
@@ -38,14 +38,14 @@ void DspInterface::EnableStretching(bool enable) {
     perform_time_stretching = enable;
 }

-void DspInterface::OutputFrame(StereoFrame16& frame) {
+void DspInterface::OutputFrame(StereoFrame16 frame) {
     if (!sink)
         return;

     fifo.Push(frame.data(), frame.size());

     if (Core::System::GetInstance().VideoDumper().IsDumping()) {
-        Core::System::GetInstance().VideoDumper().AddAudioFrame(frame);
+        Core::System::GetInstance().VideoDumper().AddAudioFrame(std::move(frame));
     }
 }

@@ -56,7 +56,7 @@ void DspInterface::OutputSample(std::array<s16, 2> sample) {
     fifo.Push(&sample, 1);

     if (Core::System::GetInstance().VideoDumper().IsDumping()) {
-        Core::System::GetInstance().VideoDumper().AddAudioSample(sample);
+        Core::System::GetInstance().VideoDumper().AddAudioSample(std::move(sample));
     }
 }
@@ -101,7 +101,7 @@ public:
     void EnableStretching(bool enable);

 protected:
-    void OutputFrame(StereoFrame16& frame);
+    void OutputFrame(StereoFrame16 frame);
     void OutputSample(std::array<s16, 2> sample);

 private:
@@ -431,7 +431,7 @@ bool DspHle::Impl::Tick() {
     // shared memory region)
     current_frame = GenerateCurrentFrame();

-    parent.OutputFrame(current_frame);
+    parent.OutputFrame(std::move(current_frame));

     return true;
 }
@@ -289,7 +289,9 @@ void Source::GenerateFrame() {
             break;
         }
     }
-    state.next_sample_number += static_cast<u32>(frame_position);
+    // TODO(jroweboy): Keep track of frame_position independently so that it doesn't lose precision
+    // over time
+    state.next_sample_number += static_cast<u32>(frame_position * state.rate_multiplier);

     state.filters.ProcessFrame(current_frame);
 }
@@ -483,7 +483,8 @@ DspLle::DspLle(Memory::MemorySystem& memory, bool multithread)
         *memory.GetFCRAMPointer(address - Memory::FCRAM_PADDR) = value;
     };
     impl->teakra.SetAHBMCallback(ahbm);
-    impl->teakra.SetAudioCallback([this](std::array<s16, 2> sample) { OutputSample(sample); });
+    impl->teakra.SetAudioCallback(
+        [this](std::array<s16, 2> sample) { OutputSample(std::move(sample)); });
 }
 DspLle::~DspLle() = default;

@@ -409,7 +409,7 @@ int main(int argc, char** argv) {
     if (!dump_video.empty()) {
         Layout::FramebufferLayout layout{
             Layout::FrameLayoutFromResolutionScale(VideoCore::GetResolutionScaleFactor())};
-        system.VideoDumper().StartDumping(dump_video, "webm", layout);
+        system.VideoDumper().StartDumping(dump_video, layout);
     }

     std::thread render_thread([&emu_window] { emu_window->Present(); });
@@ -132,8 +132,6 @@ void Config::ReadValues() {
         static_cast<u16>(sdl2_config->GetInteger("Renderer", "use_vsync_new", 1));
     Settings::values.texture_filter_name =
         sdl2_config->GetString("Renderer", "texture_filter_name", "none");
-    Settings::values.texture_filter_factor =
-        sdl2_config->GetInteger("Renderer", "texture_filter_factor", 1);

     Settings::values.render_3d = static_cast<Settings::StereoRenderOption>(
         sdl2_config->GetInteger("Renderer", "render_3d", 0));

@@ -201,7 +199,7 @@ void Config::ReadValues() {
         sdl2_config->GetBoolean("Data Storage", "use_virtual_sd", true);

     // System
-    Settings::values.is_new_3ds = sdl2_config->GetBoolean("System", "is_new_3ds", false);
+    Settings::values.is_new_3ds = sdl2_config->GetBoolean("System", "is_new_3ds", true);
     Settings::values.region_value =
         sdl2_config->GetInteger("System", "region_value", Settings::REGION_VALUE_AUTO_SELECT);
     Settings::values.init_clock =

@@ -270,6 +268,33 @@ void Config::ReadValues() {
         sdl2_config->GetString("WebService", "web_api_url", "https://api.citra-emu.org");
     Settings::values.citra_username = sdl2_config->GetString("WebService", "citra_username", "");
     Settings::values.citra_token = sdl2_config->GetString("WebService", "citra_token", "");
+
+    // Video Dumping
+    Settings::values.output_format =
+        sdl2_config->GetString("Video Dumping", "output_format", "webm");
+    Settings::values.format_options = sdl2_config->GetString("Video Dumping", "format_options", "");
+
+    Settings::values.video_encoder =
+        sdl2_config->GetString("Video Dumping", "video_encoder", "libvpx-vp9");
+
+    // Options for variable bit rate live streaming taken from here:
+    // https://developers.google.com/media/vp9/live-encoding
+    std::string default_video_options;
+    if (Settings::values.video_encoder == "libvpx-vp9") {
+        default_video_options =
+            "quality:realtime,speed:6,tile-columns:4,frame-parallel:1,threads:8,row-mt:1";
+    }
+    Settings::values.video_encoder_options =
+        sdl2_config->GetString("Video Dumping", "video_encoder_options", default_video_options);
+    Settings::values.video_bitrate =
+        sdl2_config->GetInteger("Video Dumping", "video_bitrate", 2500000);
+
+    Settings::values.audio_encoder =
+        sdl2_config->GetString("Video Dumping", "audio_encoder", "libvorbis");
+    Settings::values.audio_encoder_options =
+        sdl2_config->GetString("Video Dumping", "audio_encoder_options", "");
+    Settings::values.audio_bitrate =
+        sdl2_config->GetInteger("Video Dumping", "audio_bitrate", 64000);
 }

 void Config::Reload() {
@@ -132,9 +132,8 @@ use_disk_shader_cache =
 # factor for the 3DS resolution
 resolution_factor =

-# Texture filter name and scale factor
+# Texture filter name
 texture_filter_name =
-texture_filter_factor =

 # Turns on the frame limiter, which will limit frames output to the target game speed
 # 0: Off, 1: On (default)

@@ -244,7 +243,7 @@ use_virtual_sd =

 [System]
 # The system model that Citra will try to emulate
-# 0: Old 3DS (default), 1: New 3DS
+# 0: Old 3DS, 1: New 3DS (default)
 is_new_3ds =

 # The system region that Citra will use during emulation

@@ -305,5 +304,31 @@ web_api_url = https://api.citra-emu.org
 # See https://profile.citra-emu.org/ for more info
 citra_username =
 citra_token =
+
+[Video Dumping]
+# Format of the video to output, default: webm
+output_format =
+
+# Options passed to the muxer (optional)
+# This is a param package, format: [key1]:[value1],[key2]:[value2],...
+format_options =
+
+# Video encoder used, default: libvpx-vp9
+video_encoder =
+
+# Options passed to the video codec (optional)
+video_encoder_options =
+
+# Video bitrate, default: 2500000
+video_bitrate =
+
+# Audio encoder used, default: libvorbis
+audio_encoder =
+
+# Options passed to the audio codec (optional)
+audio_encoder_options =
+
+# Audio bitrate, default: 64000
+audio_bitrate =
 )";
 }
@@ -162,6 +162,20 @@ add_executable(citra-qt
     util/util.h
 )

+if (ENABLE_FFMPEG_VIDEO_DUMPER)
+    target_sources(citra-qt PRIVATE
+        dumping/dumping_dialog.cpp
+        dumping/dumping_dialog.h
+        dumping/dumping_dialog.ui
+        dumping/option_set_dialog.cpp
+        dumping/option_set_dialog.h
+        dumping/option_set_dialog.ui
+        dumping/options_dialog.cpp
+        dumping/options_dialog.h
+        dumping/options_dialog.ui
+    )
+endif()
+
 file(GLOB COMPAT_LIST
      ${PROJECT_BINARY_DIR}/dist/compatibility_list/compatibility_list.qrc
      ${PROJECT_BINARY_DIR}/dist/compatibility_list/compatibility_list.json)
@@ -104,8 +104,8 @@ void EmuThread::run() {
 }

 OpenGLWindow::OpenGLWindow(QWindow* parent, QWidget* event_handler, QOpenGLContext* shared_context)
-    : QWindow(parent), event_handler(event_handler),
-      context(new QOpenGLContext(shared_context->parent())) {
+    : QWindow(parent), context(new QOpenGLContext(shared_context->parent())),
+      event_handler(event_handler) {

     // disable vsync for any shared contexts
     auto format = shared_context->format();

@@ -201,6 +201,8 @@ GRenderWindow::GRenderWindow(QWidget* parent_, EmuThread* emu_thread)
     setLayout(layout);
     InputCommon::Init();

+    this->setMouseTracking(true);
+
     GMainWindow* parent = GetMainWindow();
     connect(this, &GRenderWindow::FirstFrameDisplayed, parent, &GMainWindow::OnLoadComplete);
 }

@@ -297,6 +299,7 @@ void GRenderWindow::mousePressEvent(QMouseEvent* event) {
     } else if (event->button() == Qt::RightButton) {
         InputCommon::GetMotionEmu()->BeginTilt(pos.x(), pos.y());
     }
+    QWidget::mouseMoveEvent(event);
 }

 void GRenderWindow::mouseMoveEvent(QMouseEvent* event) {

@@ -307,6 +310,7 @@ void GRenderWindow::mouseMoveEvent(QMouseEvent* event) {
     const auto [x, y] = ScaleTouch(pos);
     this->TouchMoved(x, y);
     InputCommon::GetMotionEmu()->Tilt(pos.x(), pos.y());
+    QWidget::mouseMoveEvent(event);
 }

 void GRenderWindow::mouseReleaseEvent(QMouseEvent* event) {
@@ -95,6 +95,7 @@ void Config::ReadValues() {
     ReadMiscellaneousValues();
     ReadDebuggingValues();
     ReadWebServiceValues();
+    ReadVideoDumpingValues();
     ReadUIValues();
     ReadUtilityValues();
 }

@@ -456,8 +457,6 @@ void Config::ReadRendererValues() {
         ReadSetting(QStringLiteral("texture_filter_name"), QStringLiteral("none"))
             .toString()
             .toStdString();
-    Settings::values.texture_filter_factor =
-        ReadSetting(QStringLiteral("texture_filter_factor"), 1).toInt();

     qt_config->endGroup();
 }

@@ -484,7 +483,7 @@ void Config::ReadShortcutValues() {
 void Config::ReadSystemValues() {
     qt_config->beginGroup(QStringLiteral("System"));

-    Settings::values.is_new_3ds = ReadSetting(QStringLiteral("is_new_3ds"), false).toBool();
+    Settings::values.is_new_3ds = ReadSetting(QStringLiteral("is_new_3ds"), true).toBool();
     Settings::values.region_value =
         ReadSetting(QStringLiteral("region_value"), Settings::REGION_VALUE_AUTO_SELECT).toInt();
     Settings::values.init_clock = static_cast<Settings::InitClock>(

@@ -496,6 +495,49 @@ void Config::ReadSystemValues() {
     qt_config->endGroup();
 }

+// Options for variable bit rate live streaming taken from here:
+// https://developers.google.com/media/vp9/live-encoding
+const QString DEFAULT_VIDEO_ENCODER_OPTIONS =
+    QStringLiteral("quality:realtime,speed:6,tile-columns:4,frame-parallel:1,threads:8,row-mt:1");
+const QString DEFAULT_AUDIO_ENCODER_OPTIONS = QString{};
+
+void Config::ReadVideoDumpingValues() {
+    qt_config->beginGroup(QStringLiteral("VideoDumping"));
+
+    Settings::values.output_format =
+        ReadSetting(QStringLiteral("output_format"), QStringLiteral("webm"))
+            .toString()
+            .toStdString();
+    Settings::values.format_options =
+        ReadSetting(QStringLiteral("format_options")).toString().toStdString();
+
+    Settings::values.video_encoder =
+        ReadSetting(QStringLiteral("video_encoder"), QStringLiteral("libvpx-vp9"))
+            .toString()
+            .toStdString();
+
+    Settings::values.video_encoder_options =
+        ReadSetting(QStringLiteral("video_encoder_options"), DEFAULT_VIDEO_ENCODER_OPTIONS)
+            .toString()
+            .toStdString();
+
+    Settings::values.video_bitrate =
+        ReadSetting(QStringLiteral("video_bitrate"), 2500000).toULongLong();
+
+    Settings::values.audio_encoder =
+        ReadSetting(QStringLiteral("audio_encoder"), QStringLiteral("libvorbis"))
+            .toString()
+            .toStdString();
+    Settings::values.audio_encoder_options =
+        ReadSetting(QStringLiteral("audio_encoder_options"), DEFAULT_AUDIO_ENCODER_OPTIONS)
+            .toString()
+            .toStdString();
+    Settings::values.audio_bitrate =
+        ReadSetting(QStringLiteral("audio_bitrate"), 64000).toULongLong();
+
+    qt_config->endGroup();
+}
+
 void Config::ReadUIValues() {
     qt_config->beginGroup(QStringLiteral("UI"));

@@ -530,6 +572,8 @@ void Config::ReadUIValues() {
     UISettings::values.show_console = ReadSetting(QStringLiteral("showConsole"), false).toBool();
     UISettings::values.pause_when_in_background =
         ReadSetting(QStringLiteral("pauseWhenInBackground"), false).toBool();
+    UISettings::values.hide_mouse =
+        ReadSetting(QStringLiteral("hideInactiveMouse"), false).toBool();

     qt_config->endGroup();
 }

@@ -628,6 +672,7 @@ void Config::SaveValues() {
     SaveMiscellaneousValues();
     SaveDebuggingValues();
     SaveWebServiceValues();
+    SaveVideoDumpingValues();
     SaveUIValues();
     SaveUtilityValues();
 }

@@ -895,8 +940,6 @@ void Config::SaveRendererValues() {
     WriteSetting(QStringLiteral("texture_filter_name"),
                  QString::fromStdString(Settings::values.texture_filter_name),
                  QStringLiteral("none"));
-    WriteSetting(QStringLiteral("texture_filter_factor"), Settings::values.texture_filter_factor,
-                 1);

     qt_config->endGroup();
 }

@@ -923,7 +966,7 @@ void Config::SaveShortcutValues() {
 void Config::SaveSystemValues() {
     qt_config->beginGroup(QStringLiteral("System"));

-    WriteSetting(QStringLiteral("is_new_3ds"), Settings::values.is_new_3ds, false);
+    WriteSetting(QStringLiteral("is_new_3ds"), Settings::values.is_new_3ds, true);
     WriteSetting(QStringLiteral("region_value"), Settings::values.region_value,
                  Settings::REGION_VALUE_AUTO_SELECT);
     WriteSetting(QStringLiteral("init_clock"), static_cast<u32>(Settings::values.init_clock),

@@ -934,6 +977,33 @@ void Config::SaveSystemValues() {
     qt_config->endGroup();
 }

+void Config::SaveVideoDumpingValues() {
+    qt_config->beginGroup(QStringLiteral("VideoDumping"));
+
+    WriteSetting(QStringLiteral("output_format"),
+                 QString::fromStdString(Settings::values.output_format), QStringLiteral("webm"));
+    WriteSetting(QStringLiteral("format_options"),
+                 QString::fromStdString(Settings::values.format_options));
+    WriteSetting(QStringLiteral("video_encoder"),
+                 QString::fromStdString(Settings::values.video_encoder),
+                 QStringLiteral("libvpx-vp9"));
+    WriteSetting(QStringLiteral("video_encoder_options"),
+                 QString::fromStdString(Settings::values.video_encoder_options),
+                 DEFAULT_VIDEO_ENCODER_OPTIONS);
+    WriteSetting(QStringLiteral("video_bitrate"),
+                 static_cast<unsigned long long>(Settings::values.video_bitrate), 2500000);
+    WriteSetting(QStringLiteral("audio_encoder"),
+                 QString::fromStdString(Settings::values.audio_encoder),
+                 QStringLiteral("libvorbis"));
+    WriteSetting(QStringLiteral("audio_encoder_options"),
+                 QString::fromStdString(Settings::values.audio_encoder_options),
+                 DEFAULT_AUDIO_ENCODER_OPTIONS);
+    WriteSetting(QStringLiteral("audio_bitrate"),
+                 static_cast<unsigned long long>(Settings::values.audio_bitrate), 64000);
+
+    qt_config->endGroup();
+}
+
 void Config::SaveUIValues() {
     qt_config->beginGroup(QStringLiteral("UI"));

@@ -962,6 +1032,7 @@ void Config::SaveUIValues() {
     WriteSetting(QStringLiteral("showConsole"), UISettings::values.show_console, false);
     WriteSetting(QStringLiteral("pauseWhenInBackground"),
                  UISettings::values.pause_when_in_background, false);
+    WriteSetting(QStringLiteral("hideInactiveMouse"), UISettings::values.hide_mouse, false);

     qt_config->endGroup();
 }
@@ -44,6 +44,7 @@ private:
     void ReadUpdaterValues();
     void ReadUtilityValues();
     void ReadWebServiceValues();
+    void ReadVideoDumpingValues();

     void SaveValues();
     void SaveAudioValues();

@@ -65,6 +66,7 @@ private:
     void SaveUpdaterValues();
     void SaveUtilityValues();
     void SaveWebServiceValues();
+    void SaveVideoDumpingValues();

     QVariant ReadSetting(const QString& name) const;
     QVariant ReadSetting(const QString& name, const QVariant& default_value) const;
@@ -8,17 +8,14 @@
 #include "core/settings.h"
 #include "ui_configure_enhancements.h"
 #include "video_core/renderer_opengl/post_processing_opengl.h"
-#include "video_core/renderer_opengl/texture_filters/texture_filter_manager.h"
+#include "video_core/renderer_opengl/texture_filters/texture_filterer.h"

 ConfigureEnhancements::ConfigureEnhancements(QWidget* parent)
     : QWidget(parent), ui(new Ui::ConfigureEnhancements) {
     ui->setupUi(this);

-    for (const auto& filter : OpenGL::TextureFilterManager::TextureFilterMap())
-        ui->texture_filter_combobox->addItem(QString::fromStdString(filter.first.data()));
-
-    connect(ui->texture_filter_combobox, QOverload<int>::of(&QComboBox::currentIndexChanged), this,
-            &ConfigureEnhancements::updateTextureFilter);
+    for (const auto& filter : OpenGL::TextureFilterer::GetFilterNames())
+        ui->texture_filter_combobox->addItem(QString::fromStdString(filter.data()));

     SetConfiguration();

@@ -60,7 +57,6 @@ void ConfigureEnhancements::SetConfiguration() {
     ui->factor_3d->setValue(Settings::values.factor_3d);
     updateShaders(Settings::values.render_3d);
     ui->toggle_linear_filter->setChecked(Settings::values.filter_mode);
-    ui->texture_scale_spinbox->setValue(Settings::values.texture_filter_factor);
     int tex_filter_idx = ui->texture_filter_combobox->findText(
         QString::fromStdString(Settings::values.texture_filter_name));
     if (tex_filter_idx == -1) {

@@ -68,7 +64,6 @@ void ConfigureEnhancements::SetConfiguration() {
     } else {
         ui->texture_filter_combobox->setCurrentIndex(tex_filter_idx);
     }
-    updateTextureFilter(tex_filter_idx);
     ui->layout_combobox->setCurrentIndex(static_cast<int>(Settings::values.layout_option));
     ui->swap_screen->setChecked(Settings::values.swap_screen);
     ui->toggle_disk_shader_cache->setChecked(Settings::values.use_hw_shader &&

@@ -105,17 +100,6 @@ void ConfigureEnhancements::updateShaders(Settings::StereoRenderOption stereo_op
     }
 }

-void ConfigureEnhancements::updateTextureFilter(int index) {
-    if (index == -1)
-        return;
-    ui->texture_filter_group->setEnabled(index != 0);
-    const auto& clamp = OpenGL::TextureFilterManager::TextureFilterMap()
-                            .at(ui->texture_filter_combobox->currentText().toStdString())
-                            .clamp_scale;
-    ui->texture_scale_spinbox->setMinimum(clamp.min);
-    ui->texture_scale_spinbox->setMaximum(clamp.max);
-}
-
 void ConfigureEnhancements::RetranslateUI() {
     ui->retranslateUi(this);
 }

@@ -130,7 +114,6 @@ void ConfigureEnhancements::ApplyConfiguration() {
         ui->shader_combobox->itemText(ui->shader_combobox->currentIndex()).toStdString();
     Settings::values.filter_mode = ui->toggle_linear_filter->isChecked();
     Settings::values.texture_filter_name = ui->texture_filter_combobox->currentText().toStdString();
-    Settings::values.texture_filter_factor = ui->texture_scale_spinbox->value();
     Settings::values.layout_option =
         static_cast<Settings::LayoutOption>(ui->layout_combobox->currentIndex());
     Settings::values.swap_screen = ui->swap_screen->isChecked();
@@ -131,42 +131,6 @@
        </item>
       </layout>
      </item>
-     <item>
-      <widget class="QWidget" name="texture_filter_group" native="true">
-       <layout class="QVBoxLayout" name="verticalLayout_7">
-        <property name="leftMargin">
-         <number>16</number>
-        </property>
-        <property name="topMargin">
-         <number>0</number>
-        </property>
-        <property name="rightMargin">
-         <number>0</number>
-        </property>
-        <property name="bottomMargin">
-         <number>0</number>
-        </property>
-        <item>
-         <layout class="QHBoxLayout" name="horizontalLayout_8">
-          <item>
-           <widget class="QLabel" name="label_6">
-            <property name="text">
-             <string>Texture Scale Factor</string>
-            </property>
-           </widget>
-          </item>
-          <item>
-           <widget class="QSpinBox" name="texture_scale_spinbox">
-            <property name="minimum">
-             <number>1</number>
-            </property>
-           </widget>
-          </item>
-         </layout>
-        </item>
-       </layout>
-      </widget>
-     </item>
     </layout>
    </widget>
   </item>
@@ -27,6 +27,7 @@ ConfigureGeneral::~ConfigureGeneral() = default;
 void ConfigureGeneral::SetConfiguration() {
     ui->toggle_check_exit->setChecked(UISettings::values.confirm_before_closing);
     ui->toggle_background_pause->setChecked(UISettings::values.pause_when_in_background);
+    ui->toggle_hide_mouse->setChecked(UISettings::values.hide_mouse);

     ui->toggle_update_check->setChecked(UISettings::values.check_for_update_on_start);
     ui->toggle_auto_update->setChecked(UISettings::values.update_on_close);

@@ -55,6 +56,7 @@ void ConfigureGeneral::ResetDefaults() {
 void ConfigureGeneral::ApplyConfiguration() {
     UISettings::values.confirm_before_closing = ui->toggle_check_exit->isChecked();
     UISettings::values.pause_when_in_background = ui->toggle_background_pause->isChecked();
+    UISettings::values.hide_mouse = ui->toggle_hide_mouse->isChecked();

     UISettings::values.check_for_update_on_start = ui->toggle_update_check->isChecked();
     UISettings::values.update_on_close = ui->toggle_auto_update->isChecked();
@@ -36,6 +36,13 @@
       </property>
      </widget>
     </item>
+    <item>
+     <widget class="QCheckBox" name="toggle_hide_mouse">
+      <property name="text">
+       <string>Hide mouse on inactivity</string>
+      </property>
+     </widget>
+    </item>
    </layout>
   </widget>
  </item>
@@ -30,7 +30,7 @@ CalibrationConfigurationDialog::CalibrationConfigurationDialog(QWidget* parent,
     setLayout(layout);

     using namespace InputCommon::CemuhookUDP;
-    job = std::move(std::make_unique<CalibrationConfigurationJob>(
+    job = std::make_unique<CalibrationConfigurationJob>(
         host, port, pad_index, client_id,
         [this](CalibrationConfigurationJob::Status status) {
             QString text;

@@ -56,7 +56,7 @@ CalibrationConfigurationDialog::CalibrationConfigurationDialog(QWidget* parent,
             min_y = min_y_;
             max_x = max_x_;
             max_y = max_y_;
-        }));
+        });
 }

 CalibrationConfigurationDialog::~CalibrationConfigurationDialog() = default;
@@ -277,6 +277,8 @@ void ConfigureSystem::SetConfiguration() {
     ui->slider_clock_speed->setValue(SettingsToSlider(Settings::values.cpu_clock_percentage));
     ui->clock_display_label->setText(
         QStringLiteral("%1%").arg(Settings::values.cpu_clock_percentage));
+
+    ui->toggle_new_3ds->setChecked(Settings::values.is_new_3ds);
 }

 void ConfigureSystem::ReadSystemSettings() {

@@ -374,6 +376,8 @@ void ConfigureSystem::ApplyConfiguration() {
         Settings::values.init_clock =
             static_cast<Settings::InitClock>(ui->combo_init_clock->currentIndex());
         Settings::values.init_time = ui->edit_init_time->dateTime().toTime_t();
+
+        Settings::values.is_new_3ds = ui->toggle_new_3ds->isChecked();
     }

     Settings::values.cpu_clock_percentage = SliderToSettings(ui->slider_clock_speed->value());
@@ -22,14 +22,74 @@
<string>System Settings</string>
</property>
<layout class="QGridLayout" name="gridLayout">
<item row="0" column="0">
<widget class="QLabel" name="label_username">
<property name="text">
<string>Username</string>
<item row="4" column="1">
<widget class="QComboBox" name="combo_language">
<property name="toolTip">
<string>Note: this can be overridden when region setting is auto-select</string>
</property>
<item>
<property name="text">
<string>Japanese (日本語)</string>
</property>
</item>
<item>
<property name="text">
<string>English</string>
</property>
</item>
<item>
<property name="text">
<string>French (français)</string>
</property>
</item>
<item>
<property name="text">
<string>German (Deutsch)</string>
</property>
</item>
<item>
<property name="text">
<string>Italian (italiano)</string>
</property>
</item>
<item>
<property name="text">
<string>Spanish (español)</string>
</property>
</item>
<item>
<property name="text">
<string>Simplified Chinese (简体中文)</string>
</property>
</item>
<item>
<property name="text">
<string>Korean (한국어)</string>
</property>
</item>
<item>
<property name="text">
<string>Dutch (Nederlands)</string>
</property>
</item>
<item>
<property name="text">
<string>Portuguese (português)</string>
</property>
</item>
<item>
<property name="text">
<string>Russian (Русский)</string>
</property>
</item>
<item>
<property name="text">
<string>Traditional Chinese (正體中文)</string>
</property>
</item>
</widget>
</item>
<item row="0" column="1">
<item row="2" column="1">
<widget class="QLineEdit" name="edit_username">
<property name="sizePolicy">
<sizepolicy hsizetype="Preferred" vsizetype="Fixed">

@@ -42,14 +102,33 @@
</property>
</widget>
</item>
<item row="1" column="0">
<widget class="QLabel" name="label_birthday">
<item row="2" column="0">
<widget class="QLabel" name="label_username">
<property name="text">
<string>Birthday</string>
<string>Username</string>
</property>
</widget>
</item>
<item row="1" column="1">
<item row="5" column="1">
<widget class="QComboBox" name="combo_sound">
<item>
<property name="text">
<string>Mono</string>
</property>
</item>
<item>
<property name="text">
<string>Stereo</string>
</property>
</item>
<item>
<property name="text">
<string>Surround</string>
</property>
</item>
</widget>
</item>
<item row="3" column="1">
<layout class="QHBoxLayout" name="horizontalLayout_birthday2">
<item>
<widget class="QComboBox" name="combo_birthmonth">

@@ -120,124 +199,38 @@
</item>
</layout>
</item>
<item row="2" column="0">
<item row="4" column="0">
<widget class="QLabel" name="label_language">
<property name="text">
<string>Language</string>
</property>
</widget>
</item>
<item row="2" column="1">
<widget class="QComboBox" name="combo_language">
<property name="toolTip">
<string>Note: this can be overridden when region setting is auto-select</string>
</property>
<item>
<item row="3" column="0">
<widget class="QLabel" name="label_birthday">
<property name="text">
<string>Japanese (日本語)</string>
<string>Birthday</string>
</property>
</item>
<item>
<property name="text">
<string>English</string>
</property>
</item>
<item>
<property name="text">
<string>French (français)</string>
</property>
</item>
<item>
<property name="text">
<string>German (Deutsch)</string>
</property>
</item>
<item>
<property name="text">
<string>Italian (italiano)</string>
</property>
</item>
<item>
<property name="text">
<string>Spanish (español)</string>
</property>
</item>
<item>
<property name="text">
<string>Simplified Chinese (简体中文)</string>
</property>
</item>
<item>
<property name="text">
<string>Korean (한국어)</string>
</property>
</item>
<item>
<property name="text">
<string>Dutch (Nederlands)</string>
</property>
</item>
<item>
<property name="text">
<string>Portuguese (português)</string>
</property>
</item>
<item>
<property name="text">
<string>Russian (Русский)</string>
</property>
</item>
<item>
<property name="text">
<string>Traditional Chinese (正體中文)</string>
</property>
</item>
</widget>
</item>
<item row="3" column="0">
<item row="5" column="0">
<widget class="QLabel" name="label_sound">
<property name="text">
<string>Sound output mode</string>
</property>
</widget>
</item>
<item row="3" column="1">
<widget class="QComboBox" name="combo_sound">
<item>
<property name="text">
<string>Mono</string>
</property>
<item row="6" column="1">
<widget class="QComboBox" name="combo_country"/>
</item>
<item>
<property name="text">
<string>Stereo</string>
</property>
</item>
<item>
<property name="text">
<string>Surround</string>
</property>
</item>
</widget>
</item>
<item row="4" column="0">
<item row="6" column="0">
<widget class="QLabel" name="label_country">
<property name="text">
<string>Country</string>
</property>
</widget>
</item>
<item row="4" column="1">
<widget class="QComboBox" name="combo_country"/>
</item>
<item row="5" column="0">
<widget class="QLabel" name="label_init_clock">
<property name="text">
<string>Clock</string>
</property>
</widget>
</item>
<item row="5" column="1">
<item row="7" column="1">
<widget class="QComboBox" name="combo_init_clock">
<item>
<property name="text">

@@ -251,42 +244,35 @@
</item>
</widget>
</item>
<item row="6" column="0">
<item row="7" column="0">
<widget class="QLabel" name="label_init_clock">
<property name="text">
<string>Clock</string>
</property>
</widget>
</item>
<item row="8" column="0">
<widget class="QLabel" name="label_init_time">
<property name="text">
<string>Startup time</string>
</property>
</widget>
</item>
<item row="6" column="1">
<widget class="QDateTimeEdit" name="edit_init_time">
<property name="displayFormat">
<string>yyyy-MM-ddTHH:mm:ss</string>
</property>
</widget>
</item>
<item row="7" column="0">
<widget class="QLabel" name="label_play_coins">
<property name="text">
<string>Play Coins:</string>
</property>
</widget>
</item>
<item row="7" column="1">
<item row="9" column="1">
<widget class="QSpinBox" name="spinBox_play_coins">
<property name="maximum">
<number>300</number>
</property>
</widget>
</item>
<item row="8" column="0">
<widget class="QLabel" name="label_console_id">
<item row="9" column="0">
<widget class="QLabel" name="label_play_coins">
<property name="text">
<string>Console ID:</string>
<string>Play Coins:</string>
</property>
</widget>
</item>
<item row="8" column="1">
<item row="12" column="1">
<widget class="QPushButton" name="button_regenerate_console_id">
<property name="sizePolicy">
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">

@@ -302,6 +288,27 @@
</property>
</widget>
</item>
<item row="12" column="0">
<widget class="QLabel" name="label_console_id">
<property name="text">
<string>Console ID:</string>
</property>
</widget>
</item>
<item row="8" column="1">
<widget class="QDateTimeEdit" name="edit_init_time">
<property name="displayFormat">
<string>yyyy-MM-ddTHH:mm:ss</string>
</property>
</widget>
</item>
<item row="1" column="0">
<widget class="QCheckBox" name="toggle_new_3ds">
<property name="text">
<string>Enable New 3DS mode</string>
</property>
</widget>
</item>
</layout>
</widget>
</item>
@@ -0,0 +1,220 @@
// Copyright 2020 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <QFileDialog>
#include <QMessageBox>
#include "citra_qt/dumping/dumping_dialog.h"
#include "citra_qt/dumping/options_dialog.h"
#include "citra_qt/uisettings.h"
#include "core/settings.h"
#include "ui_dumping_dialog.h"

DumpingDialog::DumpingDialog(QWidget* parent)
    : QDialog(parent), ui(std::make_unique<Ui::DumpingDialog>()) {

    ui->setupUi(this);

    format_generic_options = VideoDumper::GetFormatGenericOptions();
    encoder_generic_options = VideoDumper::GetEncoderGenericOptions();

    connect(ui->pathExplore, &QToolButton::clicked, this, &DumpingDialog::OnToolButtonClicked);
    connect(ui->buttonBox, &QDialogButtonBox::accepted, [this] {
        if (ui->pathLineEdit->text().isEmpty()) {
            QMessageBox::critical(this, tr("Citra"), tr("Please specify the output path."));
            return;
        }
        ApplyConfiguration();
        accept();
    });
    connect(ui->buttonBox, &QDialogButtonBox::rejected, this, &DumpingDialog::reject);
    connect(ui->formatOptionsButton, &QToolButton::clicked, [this] {
        OpenOptionsDialog(formats.at(ui->formatComboBox->currentData().toUInt()).options,
                          format_generic_options, ui->formatOptionsLineEdit);
    });
    connect(ui->videoEncoderOptionsButton, &QToolButton::clicked, [this] {
        OpenOptionsDialog(
            video_encoders.at(ui->videoEncoderComboBox->currentData().toUInt()).options,
            encoder_generic_options, ui->videoEncoderOptionsLineEdit);
    });
    connect(ui->audioEncoderOptionsButton, &QToolButton::clicked, [this] {
        OpenOptionsDialog(
            audio_encoders.at(ui->audioEncoderComboBox->currentData().toUInt()).options,
            encoder_generic_options, ui->audioEncoderOptionsLineEdit);
    });

    SetConfiguration();

    connect(ui->formatComboBox, qOverload<int>(&QComboBox::currentIndexChanged), [this] {
        ui->pathLineEdit->setText(QString{});
        ui->formatOptionsLineEdit->clear();
        PopulateEncoders();
    });

    connect(ui->videoEncoderComboBox, qOverload<int>(&QComboBox::currentIndexChanged),
            [this] { ui->videoEncoderOptionsLineEdit->clear(); });
    connect(ui->audioEncoderComboBox, qOverload<int>(&QComboBox::currentIndexChanged),
            [this] { ui->audioEncoderOptionsLineEdit->clear(); });
}

DumpingDialog::~DumpingDialog() = default;

QString DumpingDialog::GetFilePath() const {
    return ui->pathLineEdit->text();
}

void DumpingDialog::Populate() {
    formats = VideoDumper::ListFormats();
    video_encoders = VideoDumper::ListEncoders(AVMEDIA_TYPE_VIDEO);
    audio_encoders = VideoDumper::ListEncoders(AVMEDIA_TYPE_AUDIO);

    // Check that these are not empty
    QString missing;
    if (formats.empty()) {
        missing = tr("output formats");
    }
    if (video_encoders.empty()) {
        missing = tr("video encoders");
    }
    if (audio_encoders.empty()) {
        missing = tr("audio encoders");
    }

    if (!missing.isEmpty()) {
        QMessageBox::critical(this, tr("Citra"),
                              tr("Could not find any available %1.\nPlease check your FFmpeg "
                                 "installation used for compilation.")
                                  .arg(missing));
        reject();
        return;
    }

    // Populate formats
    for (std::size_t i = 0; i < formats.size(); ++i) {
        const auto& format = formats[i];

        // Check format: only formats that have video encoders and audio encoders are displayed
        bool has_video = false;
        for (const auto& video_encoder : video_encoders) {
            if (format.supported_video_codecs.count(video_encoder.codec)) {
                has_video = true;
                break;
            }
        }
        if (!has_video)
            continue;

        bool has_audio = false;
        for (const auto& audio_encoder : audio_encoders) {
            if (format.supported_audio_codecs.count(audio_encoder.codec)) {
                has_audio = true;
                break;
            }
        }
        if (!has_audio)
            continue;

        ui->formatComboBox->addItem(tr("%1 (%2)").arg(QString::fromStdString(format.long_name),
                                                      QString::fromStdString(format.name)),
                                    static_cast<unsigned long long>(i));
        if (format.name == Settings::values.output_format) {
            ui->formatComboBox->setCurrentIndex(ui->formatComboBox->count() - 1);
        }
    }
    PopulateEncoders();
}

void DumpingDialog::PopulateEncoders() {
    const auto& format = formats.at(ui->formatComboBox->currentData().toUInt());

    ui->videoEncoderComboBox->clear();
    for (std::size_t i = 0; i < video_encoders.size(); ++i) {
        const auto& video_encoder = video_encoders[i];
        if (!format.supported_video_codecs.count(video_encoder.codec)) {
            continue;
        }

        ui->videoEncoderComboBox->addItem(
            tr("%1 (%2)").arg(QString::fromStdString(video_encoder.long_name),
                              QString::fromStdString(video_encoder.name)),
            static_cast<unsigned long long>(i));
        if (video_encoder.name == Settings::values.video_encoder) {
            ui->videoEncoderComboBox->setCurrentIndex(ui->videoEncoderComboBox->count() - 1);
        }
    }

    ui->audioEncoderComboBox->clear();
    for (std::size_t i = 0; i < audio_encoders.size(); ++i) {
        const auto& audio_encoder = audio_encoders[i];
        if (!format.supported_audio_codecs.count(audio_encoder.codec)) {
            continue;
        }

        ui->audioEncoderComboBox->addItem(
            tr("%1 (%2)").arg(QString::fromStdString(audio_encoder.long_name),
                              QString::fromStdString(audio_encoder.name)),
            static_cast<unsigned long long>(i));
        if (audio_encoder.name == Settings::values.audio_encoder) {
            ui->audioEncoderComboBox->setCurrentIndex(ui->audioEncoderComboBox->count() - 1);
        }
    }
}

void DumpingDialog::OnToolButtonClicked() {
    const auto& format = formats.at(ui->formatComboBox->currentData().toUInt());

    QString extensions;
    for (const auto& ext : format.extensions) {
        if (!extensions.isEmpty()) {
            extensions.append(QLatin1Char{' '});
        }
        extensions.append(QStringLiteral("*.%1").arg(QString::fromStdString(ext)));
    }

    const auto path = QFileDialog::getSaveFileName(
        this, tr("Select Video Output Path"), last_path,
        tr("%1 (%2)").arg(QString::fromStdString(format.long_name), extensions));
    if (!path.isEmpty()) {
        last_path = QFileInfo(ui->pathLineEdit->text()).path();
        ui->pathLineEdit->setText(path);
    }
}

void DumpingDialog::OpenOptionsDialog(const std::vector<VideoDumper::OptionInfo>& specific_options,
                                      const std::vector<VideoDumper::OptionInfo>& generic_options,
                                      QLineEdit* line_edit) {
    OptionsDialog dialog(this, specific_options, generic_options, line_edit->text().toStdString());
    if (dialog.exec() != QDialog::DialogCode::Accepted) {
        return;
    }

    line_edit->setText(QString::fromStdString(dialog.GetCurrentValue()));
}

void DumpingDialog::SetConfiguration() {
    Populate();

    ui->formatOptionsLineEdit->setText(QString::fromStdString(Settings::values.format_options));
    ui->videoEncoderOptionsLineEdit->setText(
        QString::fromStdString(Settings::values.video_encoder_options));
    ui->audioEncoderOptionsLineEdit->setText(
        QString::fromStdString(Settings::values.audio_encoder_options));
    last_path = UISettings::values.video_dumping_path;
    ui->videoBitrateSpinBox->setValue(static_cast<int>(Settings::values.video_bitrate));
    ui->audioBitrateSpinBox->setValue(static_cast<int>(Settings::values.audio_bitrate));
}

void DumpingDialog::ApplyConfiguration() {
    Settings::values.output_format = formats.at(ui->formatComboBox->currentData().toUInt()).name;
    Settings::values.format_options = ui->formatOptionsLineEdit->text().toStdString();
    Settings::values.video_encoder =
        video_encoders.at(ui->videoEncoderComboBox->currentData().toUInt()).name;
    Settings::values.video_encoder_options = ui->videoEncoderOptionsLineEdit->text().toStdString();
    Settings::values.video_bitrate = ui->videoBitrateSpinBox->value();
    Settings::values.audio_encoder =
        audio_encoders.at(ui->audioEncoderComboBox->currentData().toUInt()).name;
    Settings::values.audio_encoder_options = ui->audioEncoderOptionsLineEdit->text().toStdString();
    Settings::values.audio_bitrate = ui->audioBitrateSpinBox->value();
    UISettings::values.video_dumping_path = last_path;
    Settings::Apply();
}
@@ -0,0 +1,43 @@
// Copyright 2020 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <memory>
#include <QDialog>
#include "core/dumping/ffmpeg_backend.h"

namespace Ui {
class DumpingDialog;
}

class QLineEdit;

class DumpingDialog : public QDialog {
    Q_OBJECT

public:
    explicit DumpingDialog(QWidget* parent);
    ~DumpingDialog() override;

    QString GetFilePath() const;
    void ApplyConfiguration();

private:
    void Populate();
    void PopulateEncoders();
    void SetConfiguration();
    void OnToolButtonClicked();
    void OpenOptionsDialog(const std::vector<VideoDumper::OptionInfo>& specific_options,
                           const std::vector<VideoDumper::OptionInfo>& generic_options,
                           QLineEdit* line_edit);

    std::unique_ptr<Ui::DumpingDialog> ui;

    QString last_path;

    std::vector<VideoDumper::FormatInfo> formats;
    std::vector<VideoDumper::OptionInfo> format_generic_options;
    std::vector<VideoDumper::EncoderInfo> video_encoders;
    std::vector<VideoDumper::EncoderInfo> audio_encoders;
    std::vector<VideoDumper::OptionInfo> encoder_generic_options;
};
@@ -0,0 +1,213 @@
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
 <class>DumpingDialog</class>
 <widget class="QDialog" name="DumpingDialog">
  <property name="geometry">
   <rect>
    <x>0</x>
    <y>0</y>
    <width>600</width>
    <height>420</height>
   </rect>
  </property>
  <property name="windowTitle">
   <string>Dump Video</string>
  </property>
  <layout class="QVBoxLayout">
   <item>
    <widget class="QGroupBox">
     <property name="title">
      <string>Output</string>
     </property>
     <layout class="QGridLayout">
      <item row="0" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Format:</string>
        </property>
       </widget>
      </item>
      <item row="0" column="1">
       <widget class="QComboBox" name="formatComboBox"/>
      </item>
      <item row="1" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Options:</string>
        </property>
       </widget>
      </item>
      <item row="1" column="1">
       <widget class="QLineEdit" name="formatOptionsLineEdit"/>
      </item>
      <item row="1" column="2">
       <widget class="QToolButton" name="formatOptionsButton">
        <property name="text">
         <string>...</string>
        </property>
       </widget>
      </item>
      <item row="2" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Path:</string>
        </property>
       </widget>
      </item>
      <item row="2" column="1">
       <widget class="QLineEdit" name="pathLineEdit"/>
      </item>
      <item row="2" column="2">
       <widget class="QToolButton" name="pathExplore">
        <property name="text">
         <string>...</string>
        </property>
       </widget>
      </item>
     </layout>
    </widget>
   </item>
   <item>
    <widget class="QGroupBox">
     <property name="title">
      <string>Video</string>
     </property>
     <layout class="QGridLayout">
      <item row="0" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Encoder:</string>
        </property>
       </widget>
      </item>
      <item row="0" column="1">
       <widget class="QComboBox" name="videoEncoderComboBox">
        <property name="sizePolicy">
         <sizepolicy hsizetype="MinimumExpanding" vsizetype="Fixed">
          <horstretch>0</horstretch>
          <verstretch>0</verstretch>
         </sizepolicy>
        </property>
       </widget>
      </item>
      <item row="1" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Options:</string>
        </property>
       </widget>
      </item>
      <item row="1" column="1">
       <widget class="QLineEdit" name="videoEncoderOptionsLineEdit"/>
      </item>
      <item row="1" column="2">
       <widget class="QToolButton" name="videoEncoderOptionsButton">
        <property name="text">
         <string>...</string>
        </property>
       </widget>
      </item>
      <item row="2" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Bitrate:</string>
        </property>
       </widget>
      </item>
      <item row="2" column="1">
       <widget class="QSpinBox" name="videoBitrateSpinBox">
        <property name="maximum">
         <number>10000000</number>
        </property>
        <property name="singleStep">
         <number>1000</number>
        </property>
       </widget>
      </item>
      <item row="2" column="2">
       <widget class="QLabel">
        <property name="text">
         <string>bps</string>
        </property>
       </widget>
      </item>
     </layout>
    </widget>
   </item>
   <item>
    <widget class="QGroupBox">
     <property name="title">
      <string>Audio</string>
     </property>
     <layout class="QGridLayout">
      <item row="0" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Encoder:</string>
        </property>
       </widget>
      </item>
      <item row="0" column="1">
       <widget class="QComboBox" name="audioEncoderComboBox">
        <property name="sizePolicy">
         <sizepolicy hsizetype="MinimumExpanding" vsizetype="Fixed">
          <horstretch>0</horstretch>
          <verstretch>0</verstretch>
         </sizepolicy>
        </property>
       </widget>
      </item>
      <item row="1" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Options:</string>
        </property>
       </widget>
      </item>
      <item row="1" column="1">
       <widget class="QLineEdit" name="audioEncoderOptionsLineEdit"/>
      </item>
      <item row="1" column="2">
       <widget class="QToolButton" name="audioEncoderOptionsButton">
        <property name="text">
         <string>...</string>
        </property>
       </widget>
      </item>
      <item row="2" column="0">
       <widget class="QLabel">
        <property name="text">
         <string>Bitrate:</string>
        </property>
       </widget>
      </item>
      <item row="2" column="1">
       <widget class="QSpinBox" name="audioBitrateSpinBox">
        <property name="maximum">
         <number>1000000</number>
        </property>
        <property name="singleStep">
         <number>100</number>
        </property>
       </widget>
      </item>
      <item row="2" column="2">
       <widget class="QLabel">
        <property name="text">
         <string>bps</string>
        </property>
       </widget>
      </item>
     </layout>
    </widget>
   </item>
   <item>
    <widget class="QDialogButtonBox" name="buttonBox">
     <property name="standardButtons">
      <set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
     </property>
    </widget>
   </item>
  </layout>
 </widget>
</ui>
@ -0,0 +1,299 @@
|
|||
// Copyright 2020 Citra Emulator Project
|
||||
// Licensed under GPLv2 or any later version
|
||||
// Refer to the license.txt file included.
|
||||
|
||||
#include <unordered_map>
|
||||
#include <QCheckBox>
|
||||
#include <QStringList>
|
||||
#include "citra_qt/dumping/option_set_dialog.h"
|
||||
#include "common/logging/log.h"
|
||||
#include "common/string_util.h"
|
||||
#include "ui_option_set_dialog.h"
|
||||
|
||||
extern "C" {
|
||||
#include <libavutil/pixdesc.h>
|
||||
}
|
||||
|
||||
static const std::unordered_map<AVOptionType, const char*> TypeNameMap{{
|
||||
{AV_OPT_TYPE_BOOL, QT_TR_NOOP("boolean")},
|
||||
{AV_OPT_TYPE_FLAGS, QT_TR_NOOP("flags")},
|
||||
{AV_OPT_TYPE_DURATION, QT_TR_NOOP("duration")},
|
||||
{AV_OPT_TYPE_INT, QT_TR_NOOP("int")},
|
||||
{AV_OPT_TYPE_UINT64, QT_TR_NOOP("uint64")},
|
||||
{AV_OPT_TYPE_INT64, QT_TR_NOOP("int64")},
|
||||
{AV_OPT_TYPE_DOUBLE, QT_TR_NOOP("double")},
|
||||
{AV_OPT_TYPE_FLOAT, QT_TR_NOOP("float")},
|
||||
{AV_OPT_TYPE_RATIONAL, QT_TR_NOOP("rational")},
|
||||
{AV_OPT_TYPE_PIXEL_FMT, QT_TR_NOOP("pixel format")},
|
||||
{AV_OPT_TYPE_SAMPLE_FMT, QT_TR_NOOP("sample format")},
|
||||
{AV_OPT_TYPE_COLOR, QT_TR_NOOP("color")},
|
||||
{AV_OPT_TYPE_IMAGE_SIZE, QT_TR_NOOP("image size")},
|
||||
{AV_OPT_TYPE_STRING, QT_TR_NOOP("string")},
|
||||
{AV_OPT_TYPE_DICT, QT_TR_NOOP("dictionary")},
|
||||
{AV_OPT_TYPE_VIDEO_RATE, QT_TR_NOOP("video rate")},
|
||||
{AV_OPT_TYPE_CHANNEL_LAYOUT, QT_TR_NOOP("channel layout")},
|
||||
}};
|
||||
|
||||
static const std::unordered_map<AVOptionType, const char*> TypeDescriptionMap{{
|
||||
{AV_OPT_TYPE_DURATION, QT_TR_NOOP("[<hours (integer)>:][<minutes (integer):]<seconds "
|
||||
"(decimal)> e.g. 03:00.5 (3min 500ms)")},
|
||||
{AV_OPT_TYPE_RATIONAL, QT_TR_NOOP("<num>/<den>")},
|
||||
{AV_OPT_TYPE_COLOR, QT_TR_NOOP("0xRRGGBBAA")},
|
||||
{AV_OPT_TYPE_IMAGE_SIZE, QT_TR_NOOP("<width>x<height>, or preset values like 'vga'.")},
|
||||
{AV_OPT_TYPE_DICT,
|
||||
QT_TR_NOOP("Comma-splitted list of <key>=<value>. Do not put spaces.")},
|
||||
{AV_OPT_TYPE_VIDEO_RATE, QT_TR_NOOP("<num>/<den>, or preset values like 'pal'.")},
|
||||
{AV_OPT_TYPE_CHANNEL_LAYOUT, QT_TR_NOOP("Hexadecimal channel layout mask starting with '0x'.")},
|
||||
}};
|
||||
|
||||
/// Get the preset values of an option. returns {display value, real value}
|
||||
std::vector<std::pair<QString, QString>> GetPresetValues(const VideoDumper::OptionInfo& option) {
|
||||
switch (option.type) {
|
||||
case AV_OPT_TYPE_BOOL: {
|
||||
return {{QObject::tr("auto"), QStringLiteral("auto")},
|
||||
{QObject::tr("true"), QStringLiteral("true")},
|
||||
{QObject::tr("false"), QStringLiteral("false")}};
|
||||
}
|
||||
case AV_OPT_TYPE_PIXEL_FMT: {
|
||||
std::vector<std::pair<QString, QString>> out{{QObject::tr("none"), QStringLiteral("none")}};
|
||||
// List all pixel formats
|
||||
const AVPixFmtDescriptor* current = nullptr;
|
||||
while ((current = av_pix_fmt_desc_next(current))) {
|
||||
out.emplace_back(QString::fromUtf8(current->name), QString::fromUtf8(current->name));
|
||||
}
|
||||
return out;
|
||||
}
|
||||
case AV_OPT_TYPE_SAMPLE_FMT: {
|
||||
std::vector<std::pair<QString, QString>> out{{QObject::tr("none"), QStringLiteral("none")}};
|
||||
// List all sample formats
|
||||
int current = 0;
|
||||
while (true) {
|
||||
const char* name = av_get_sample_fmt_name(static_cast<AVSampleFormat>(current));
|
||||
if (name == nullptr)
|
||||
break;
|
||||
out.emplace_back(QString::fromUtf8(name), QString::fromUtf8(name));
|
||||
}
|
||||
return out;
|
||||
}
|
||||
case AV_OPT_TYPE_INT:
|
||||
case AV_OPT_TYPE_INT64:
|
||||
case AV_OPT_TYPE_UINT64: {
|
||||
std::vector<std::pair<QString, QString>> out;
|
||||
// Add in all named constants
|
||||
for (const auto& constant : option.named_constants) {
|
||||
out.emplace_back(QObject::tr("%1 (0x%2)")
|
||||
.arg(QString::fromStdString(constant.name))
|
||||
.arg(constant.value, 0, 16),
|
||||
QString::fromStdString(constant.name));
|
||||
}
|
||||
return out;
|
||||
}
|
||||
default:
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
void OptionSetDialog::InitializeUI(const std::string& initial_value) {
    const QString type_name =
        TypeNameMap.count(option.type) ? tr(TypeNameMap.at(option.type)) : tr("unknown");
    ui->nameLabel->setText(tr("%1 <%2> %3")
                               .arg(QString::fromStdString(option.name), type_name,
                                    QString::fromStdString(option.description)));
    if (TypeDescriptionMap.count(option.type)) {
        ui->formatLabel->setVisible(true);
        ui->formatLabel->setText(tr(TypeDescriptionMap.at(option.type)));
    }

    if (option.type == AV_OPT_TYPE_INT || option.type == AV_OPT_TYPE_INT64 ||
        option.type == AV_OPT_TYPE_UINT64 || option.type == AV_OPT_TYPE_FLOAT ||
        option.type == AV_OPT_TYPE_DOUBLE || option.type == AV_OPT_TYPE_DURATION ||
        option.type == AV_OPT_TYPE_RATIONAL) { // scalar types

        ui->formatLabel->setVisible(true);
        if (!ui->formatLabel->text().isEmpty()) {
            ui->formatLabel->text().append(QStringLiteral("\n"));
        }
        ui->formatLabel->setText(
            ui->formatLabel->text().append(tr("Range: %1 - %2").arg(option.min).arg(option.max)));
    }

    // Decide and initialize layout
    if (option.type == AV_OPT_TYPE_BOOL || option.type == AV_OPT_TYPE_PIXEL_FMT ||
        option.type == AV_OPT_TYPE_SAMPLE_FMT ||
        ((option.type == AV_OPT_TYPE_INT || option.type == AV_OPT_TYPE_INT64 ||
          option.type == AV_OPT_TYPE_UINT64) &&
         !option.named_constants.empty())) { // Use the combobox layout

        layout_type = 1;
        ui->comboBox->setVisible(true);
        ui->comboBoxHelpLabel->setVisible(true);

        QString real_initial_value = QString::fromStdString(initial_value);
        if (option.type == AV_OPT_TYPE_INT || option.type == AV_OPT_TYPE_INT64 ||
            option.type == AV_OPT_TYPE_UINT64) {

            // Get the name of the initial value
            try {
                s64 initial_value_integer = std::stoll(initial_value, nullptr, 0);
                for (const auto& constant : option.named_constants) {
                    if (constant.value == initial_value_integer) {
                        real_initial_value = QString::fromStdString(constant.name);
                        break;
                    }
                }
            } catch (...) {
                // Not convertible to integer, ignore
            }
        }

        bool found = false;
        for (const auto& [display, value] : GetPresetValues(option)) {
            ui->comboBox->addItem(display, value);
            if (value == real_initial_value) {
                found = true;
                ui->comboBox->setCurrentIndex(ui->comboBox->count() - 1);
            }
        }
        ui->comboBox->addItem(tr("custom"));

        if (!found) {
            ui->comboBox->setCurrentIndex(ui->comboBox->count() - 1);
            ui->lineEdit->setText(QString::fromStdString(initial_value));
        }

        UpdateUIDisplay();

        connect(ui->comboBox, &QComboBox::currentTextChanged, this,
                &OptionSetDialog::UpdateUIDisplay);
    } else if (option.type == AV_OPT_TYPE_FLAGS &&
               !option.named_constants.empty()) { // Use the check boxes layout

        layout_type = 2;

        for (const auto& constant : option.named_constants) {
            auto* checkBox = new QCheckBox(tr("%1 (0x%2) %3")
                                               .arg(QString::fromStdString(constant.name))
                                               .arg(constant.value, 0, 16)
                                               .arg(QString::fromStdString(constant.description)));
            checkBox->setProperty("value", static_cast<unsigned long long>(constant.value));
            checkBox->setProperty("name", QString::fromStdString(constant.name));
            ui->checkBoxLayout->addWidget(checkBox);
        }
        SetCheckBoxDefaults(initial_value);
    } else { // Use the line edit layout
        layout_type = 0;
        ui->lineEdit->setVisible(true);
        ui->lineEdit->setText(QString::fromStdString(initial_value));
    }

    adjustSize();
}
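// Example (illustrative): if an integer option has a named constant "foo" with value 100, an
// initial_value of "100" is mapped back to the "foo (0x64)" combo box entry above instead of
// falling through to the "custom" line edit.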
void OptionSetDialog::SetCheckBoxDefaults(const std::string& initial_value) {
    if (initial_value.size() >= 2 &&
        (initial_value.substr(0, 2) == "0x" || initial_value.substr(0, 2) == "0X")) {
        // This is a hex mask
        try {
            u64 value = std::stoull(initial_value, nullptr, 16);
            for (int i = 0; i < ui->checkBoxLayout->count(); ++i) {
                auto* checkBox = qobject_cast<QCheckBox*>(ui->checkBoxLayout->itemAt(i)->widget());
                if (checkBox) {
                    checkBox->setChecked(value & checkBox->property("value").toULongLong());
                }
            }
        } catch (...) {
            LOG_ERROR(Frontend, "Could not convert {} to number", initial_value);
        }
    } else {
        // This is a combination of constants, split with + or |
        std::vector<std::string> tmp;
        Common::SplitString(initial_value, '+', tmp);

        std::vector<std::string> out;
        std::vector<std::string> tmp2;
        for (const auto& str : tmp) {
            Common::SplitString(str, '|', tmp2);
            out.insert(out.end(), tmp2.begin(), tmp2.end());
        }
        for (int i = 0; i < ui->checkBoxLayout->count(); ++i) {
            auto* checkBox = qobject_cast<QCheckBox*>(ui->checkBoxLayout->itemAt(i)->widget());
            if (checkBox) {
                checkBox->setChecked(
                    std::find(out.begin(), out.end(),
                              checkBox->property("name").toString().toStdString()) != out.end());
            }
        }
    }
}
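// Worked example (illustrative): for an AV_OPT_TYPE_FLAGS option with constants a=0x1 and b=0x4,
// an initial value of either "0x5" or "a+b" leaves exactly the 'a' and 'b' boxes checked.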
void OptionSetDialog::UpdateUIDisplay() {
    if (layout_type != 1)
        return;

    if (ui->comboBox->currentIndex() == ui->comboBox->count() - 1) { // custom
        ui->comboBoxHelpLabel->setVisible(false);
        ui->lineEdit->setVisible(true);
        adjustSize();
        return;
    }

    ui->lineEdit->setVisible(false);
    for (const auto& constant : option.named_constants) {
        if (constant.name == ui->comboBox->currentData().toString().toStdString()) {
            ui->comboBoxHelpLabel->setVisible(true);
            ui->comboBoxHelpLabel->setText(QString::fromStdString(constant.description));
            return;
        }
    }
}

std::pair<bool, std::string> OptionSetDialog::GetCurrentValue() {
    if (!is_set) {
        return {};
    }

    switch (layout_type) {
    case 0: // line edit layout
        return {true, ui->lineEdit->text().toStdString()};
    case 1: // combo box layout
        if (ui->comboBox->currentIndex() == ui->comboBox->count() - 1) {
            return {true, ui->lineEdit->text().toStdString()}; // custom
        }
        return {true, ui->comboBox->currentData().toString().toStdString()};
    case 2: { // check boxes layout
        std::string out;
        for (int i = 0; i < ui->checkBoxLayout->count(); ++i) {
            auto* checkBox = qobject_cast<QCheckBox*>(ui->checkBoxLayout->itemAt(i)->widget());
            if (checkBox && checkBox->isChecked()) {
                if (!out.empty()) {
                    out.append("+");
                }
                out.append(checkBox->property("name").toString().toStdString());
            }
        }
        if (out.empty()) {
            out = "0x0";
        }
        return {true, out};
    }
    default:
        return {};
    }
}

OptionSetDialog::OptionSetDialog(QWidget* parent, VideoDumper::OptionInfo option_,
                                 const std::string& initial_value)
    : QDialog(parent), ui(std::make_unique<Ui::OptionSetDialog>()), option(std::move(option_)) {

    ui->setupUi(this);
    InitializeUI(initial_value);

    connect(ui->unsetButton, &QPushButton::clicked, [this] {
        is_set = false;
        accept();
    });
    connect(ui->buttonBox, &QDialogButtonBox::accepted, this, &OptionSetDialog::accept);
    connect(ui->buttonBox, &QDialogButtonBox::rejected, this, &OptionSetDialog::reject);
}

OptionSetDialog::~OptionSetDialog() = default;
@ -0,0 +1,33 @@
// Copyright 2020 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <memory>
#include <QDialog>
#include "core/dumping/ffmpeg_backend.h"

namespace Ui {
class OptionSetDialog;
}

class OptionSetDialog : public QDialog {
    Q_OBJECT

public:
    explicit OptionSetDialog(QWidget* parent, VideoDumper::OptionInfo option,
                             const std::string& initial_value);
    ~OptionSetDialog() override;

    // {is_set, value}
    std::pair<bool, std::string> GetCurrentValue();

private:
    void InitializeUI(const std::string& initial_value);
    void SetCheckBoxDefaults(const std::string& initial_value);
    void UpdateUIDisplay();

    std::unique_ptr<Ui::OptionSetDialog> ui;
    VideoDumper::OptionInfo option;
    bool is_set = true;
    int layout_type = -1; // 0 - line edit, 1 - combo box, 2 - flags (check boxes)
};
@ -0,0 +1,89 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>OptionSetDialog</class>
|
||||
<widget class="QDialog" name="OptionSetDialog">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>600</width>
|
||||
<height>150</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>Options</string>
|
||||
</property>
|
||||
<layout class="QVBoxLayout">
|
||||
<item>
|
||||
<widget class="QLabel" name="nameLabel"/>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QLabel" name="formatLabel">
|
||||
<property name="visible">
|
||||
<bool>false</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<layout class="QVBoxLayout" name="comboBoxLayout">
|
||||
<item>
|
||||
<widget class="QComboBox" name="comboBox">
|
||||
<property name="visible">
|
||||
<bool>false</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QLabel" name="comboBoxHelpLabel">
|
||||
<property name="visible">
|
||||
<bool>false</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QLineEdit" name="lineEdit">
|
||||
<property name="visible">
|
||||
<bool>false</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<layout class="QVBoxLayout" name="checkBoxLayout"/>
|
||||
</item>
|
||||
<item>
|
||||
<spacer>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Vertical</enum>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<layout class="QHBoxLayout">
|
||||
<item>
|
||||
<widget class="QPushButton" name="unsetButton">
|
||||
<property name="text">
|
||||
<string>Unset</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QDialogButtonBox" name="buttonBox">
|
||||
<property name="standardButtons">
|
||||
<set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
</ui>
|
|
@ -0,0 +1,68 @@
// Copyright 2020 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <QTreeWidgetItem>
#include "citra_qt/dumping/option_set_dialog.h"
#include "citra_qt/dumping/options_dialog.h"
#include "ui_options_dialog.h"

constexpr char UNSET_TEXT[] = QT_TR_NOOP("[not set]");

void OptionsDialog::PopulateOptions() {
    const auto& options = ui->specificRadioButton->isChecked() ? specific_options : generic_options;
    ui->main->clear();
    ui->main->setSortingEnabled(false);
    for (std::size_t i = 0; i < options.size(); ++i) {
        const auto& option = options.at(i);
        auto* item = new QTreeWidgetItem(
            {QString::fromStdString(option.name), QString::fromStdString(current_values.Get(
                                                      option.name, tr(UNSET_TEXT).toStdString()))});
        item->setData(1, Qt::UserRole, static_cast<unsigned long long>(i)); // ID
        ui->main->addTopLevelItem(item);
    }
    ui->main->setSortingEnabled(true);
    ui->main->sortItems(0, Qt::AscendingOrder);
}

void OptionsDialog::OnSetOptionValue(QTreeWidgetItem* item) {
    const auto& options = ui->specificRadioButton->isChecked() ? specific_options : generic_options;
    const int id = item->data(1, Qt::UserRole).toInt();
    OptionSetDialog dialog(this, options[id],
                           current_values.Get(options[id].name, options[id].default_value));
    if (dialog.exec() != QDialog::DialogCode::Accepted) {
        return;
    }

    const auto& [is_set, value] = dialog.GetCurrentValue();
    if (is_set) {
        current_values.Set(options[id].name, value);
    } else {
        current_values.Erase(options[id].name);
    }
    item->setText(1, is_set ? QString::fromStdString(value) : tr(UNSET_TEXT));
}

std::string OptionsDialog::GetCurrentValue() const {
    return current_values.Serialize();
}

OptionsDialog::OptionsDialog(QWidget* parent,
                             std::vector<VideoDumper::OptionInfo> specific_options_,
                             std::vector<VideoDumper::OptionInfo> generic_options_,
                             const std::string& current_value)
    : QDialog(parent), ui(std::make_unique<Ui::OptionsDialog>()),
      specific_options(std::move(specific_options_)), generic_options(std::move(generic_options_)),
      current_values(current_value) {

    ui->setupUi(this);
    PopulateOptions();

    connect(ui->main, &QTreeWidget::itemDoubleClicked,
            [this](QTreeWidgetItem* item, int column) { OnSetOptionValue(item); });
    connect(ui->buttonBox, &QDialogButtonBox::accepted, this, &OptionsDialog::accept);
    connect(ui->buttonBox, &QDialogButtonBox::rejected, this, &OptionsDialog::reject);
    connect(ui->specificRadioButton, &QRadioButton::toggled, this, &OptionsDialog::PopulateOptions);
}

OptionsDialog::~OptionsDialog() = default;
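A minimal sketch of how a caller might consume this dialog (illustrative only; the real call site
is the dumping dialog added elsewhere in this change, and the particular settings field used here
is an assumption):

    OptionsDialog dialog(this, std::move(specific), std::move(generic),
                         Settings::values.video_encoder_options);
    if (dialog.exec() == QDialog::DialogCode::Accepted) {
        // GetCurrentValue() returns the serialized ParamPackage string
        Settings::values.video_encoder_options = dialog.GetCurrentValue();
    }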
@ -0,0 +1,36 @@
// Copyright 2020 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <memory>
#include <vector>
#include <QDialog>
#include "common/param_package.h"
#include "core/dumping/ffmpeg_backend.h"

class QTreeWidgetItem;

namespace Ui {
class OptionsDialog;
}

class OptionsDialog : public QDialog {
    Q_OBJECT

public:
    explicit OptionsDialog(QWidget* parent, std::vector<VideoDumper::OptionInfo> specific_options,
                           std::vector<VideoDumper::OptionInfo> generic_options,
                           const std::string& current_value);
    ~OptionsDialog() override;

    std::string GetCurrentValue() const;

private:
    void PopulateOptions();
    void OnSetOptionValue(QTreeWidgetItem* item);

    std::unique_ptr<Ui::OptionsDialog> ui;
    std::vector<VideoDumper::OptionInfo> specific_options;
    std::vector<VideoDumper::OptionInfo> generic_options;
    Common::ParamPackage current_values;
};
@ -0,0 +1,71 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<ui version="4.0">
|
||||
<class>OptionsDialog</class>
|
||||
<widget class="QDialog" name="OptionsDialog">
|
||||
<property name="geometry">
|
||||
<rect>
|
||||
<x>0</x>
|
||||
<y>0</y>
|
||||
<width>650</width>
|
||||
<height>350</height>
|
||||
</rect>
|
||||
</property>
|
||||
<property name="windowTitle">
|
||||
<string>Options</string>
|
||||
</property>
|
||||
<layout class="QVBoxLayout">
|
||||
<item>
|
||||
<widget class="QLabel">
|
||||
<property name="wordWrap">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Double click to see the description and change the values of the options.</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<layout class="QHBoxLayout">
|
||||
<item>
|
||||
<widget class="QRadioButton" name="specificRadioButton">
|
||||
<property name="text">
|
||||
<string>Specific</string>
|
||||
</property>
|
||||
<property name="checked">
|
||||
<bool>true</bool>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QRadioButton" name="genericRadioButton">
|
||||
<property name="text">
|
||||
<string>Generic</string>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QTreeWidget" name="main">
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Name</string>
|
||||
</property>
|
||||
</column>
|
||||
<column>
|
||||
<property name="text">
|
||||
<string>Value</string>
|
||||
</property>
|
||||
</column>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QDialogButtonBox" name="buttonBox">
|
||||
<property name="standardButtons">
|
||||
<set>QDialogButtonBox::Cancel|QDialogButtonBox::Ok</set>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</widget>
|
||||
</ui>
|
|
@ -89,6 +89,10 @@
|
|||
#include "citra_qt/discord_impl.h"
|
||||
#endif
|
||||
|
||||
#ifdef ENABLE_FFMPEG_VIDEO_DUMPER
|
||||
#include "citra_qt/dumping/dumping_dialog.h"
|
||||
#endif
|
||||
|
||||
#ifdef QT_STATICPLUGIN
|
||||
Q_IMPORT_PLUGIN(QWindowsIntegrationPlugin);
|
||||
#endif
|
||||
|
@ -100,6 +104,8 @@ __declspec(dllexport) unsigned long NvOptimusEnablement = 0x00000001;
|
|||
}
|
||||
#endif
|
||||
|
||||
constexpr int default_mouse_timeout = 2500;
|
||||
|
||||
/**
|
||||
* "Callouts" are one-time instructional messages shown to the user. In the config settings, there
|
||||
* is a bitfield "callout_flags" options, used to track if a message has already been shown to the
|
||||
|
@ -193,6 +199,14 @@ GMainWindow::GMainWindow() : config(new Config()), emu_thread(nullptr) {
|
|||
// Show one-time "callout" messages to the user
|
||||
ShowTelemetryCallout();
|
||||
|
||||
// make sure menubar has the arrow cursor instead of inheriting from this
|
||||
ui.menubar->setCursor(QCursor());
|
||||
statusBar()->setCursor(QCursor());
|
||||
|
||||
mouse_hide_timer.setInterval(default_mouse_timeout);
|
||||
connect(&mouse_hide_timer, &QTimer::timeout, this, &GMainWindow::HideMouseCursor);
|
||||
connect(ui.menubar, &QMenuBar::hovered, this, &GMainWindow::ShowMouseCursor);
|
||||
|
||||
if (UISettings::values.check_for_update_on_start) {
|
||||
CheckForUpdates();
|
||||
}
|
||||
|
@ -713,9 +727,7 @@ void GMainWindow::ConnectMenuEvents() {
|
|||
connect(ui.action_Capture_Screenshot, &QAction::triggered, this,
|
||||
&GMainWindow::OnCaptureScreenshot);
|
||||
|
||||
#ifndef ENABLE_FFMPEG_VIDEO_DUMPER
|
||||
ui.action_Dump_Video->setEnabled(false);
|
||||
#endif
|
||||
#ifdef ENABLE_FFMPEG_VIDEO_DUMPER
|
||||
connect(ui.action_Dump_Video, &QAction::triggered, [this] {
|
||||
if (ui.action_Dump_Video->isChecked()) {
|
||||
OnStartVideoDumping();
|
||||
|
@ -723,6 +735,9 @@ void GMainWindow::ConnectMenuEvents() {
|
|||
OnStopVideoDumping();
|
||||
}
|
||||
});
|
||||
#else
|
||||
ui.action_Dump_Video->setEnabled(false);
|
||||
#endif
|
||||
|
||||
// Help
|
||||
connect(ui.action_Open_Citra_Folder, &QAction::triggered, this,
|
||||
|
@ -994,6 +1009,13 @@ void GMainWindow::BootGame(const QString& filename) {
|
|||
}
|
||||
status_bar_update_timer.start(2000);
|
||||
|
||||
if (UISettings::values.hide_mouse) {
|
||||
mouse_hide_timer.start();
|
||||
setMouseTracking(true);
|
||||
ui.centralwidget->setMouseTracking(true);
|
||||
ui.menubar->setMouseTracking(true);
|
||||
}
|
||||
|
||||
// show and hide the render_window to create the context
|
||||
render_window->show();
|
||||
render_window->hide();
|
||||
|
@ -1009,8 +1031,14 @@ void GMainWindow::BootGame(const QString& filename) {
|
|||
if (video_dumping_on_start) {
|
||||
Layout::FramebufferLayout layout{
|
||||
Layout::FrameLayoutFromResolutionScale(VideoCore::GetResolutionScaleFactor())};
|
||||
Core::System::GetInstance().VideoDumper().StartDumping(video_dumping_path.toStdString(),
|
||||
"webm", layout);
|
||||
if (!Core::System::GetInstance().VideoDumper().StartDumping(
|
||||
video_dumping_path.toStdString(), layout)) {
|
||||
|
||||
QMessageBox::critical(
|
||||
this, tr("Citra"),
|
||||
tr("Could not start video dumping.<br>Refer to the log for details."));
|
||||
ui.action_Dump_Video->setChecked(false);
|
||||
}
|
||||
video_dumping_on_start = false;
|
||||
video_dumping_path.clear();
|
||||
}
|
||||
|
@ -1026,11 +1054,13 @@ void GMainWindow::ShutdownGame() {
|
|||
HideFullscreen();
|
||||
}
|
||||
|
||||
#ifdef ENABLE_FFMPEG_VIDEO_DUMPER
|
||||
if (Core::System::GetInstance().VideoDumper().IsDumping()) {
|
||||
game_shutdown_delayed = true;
|
||||
OnStopVideoDumping();
|
||||
return;
|
||||
}
|
||||
#endif
|
||||
|
||||
AllowOSSleep();
|
||||
|
||||
|
@ -1084,6 +1114,10 @@ void GMainWindow::ShutdownGame() {
|
|||
game_list->show();
|
||||
game_list->setFilterFocus();
|
||||
|
||||
setMouseTracking(false);
|
||||
ui.centralwidget->setMouseTracking(false);
|
||||
ui.menubar->setMouseTracking(false);
|
||||
|
||||
// Disable status bar updates
|
||||
status_bar_update_timer.stop();
|
||||
message_label->setVisible(false);
|
||||
|
@ -1290,7 +1324,7 @@ void GMainWindow::OnGameListDumpRomFS(QString game_path, u64 program_id) {
|
|||
using FutureWatcher = QFutureWatcher<std::pair<Loader::ResultStatus, Loader::ResultStatus>>;
|
||||
auto* future_watcher = new FutureWatcher(this);
|
||||
connect(future_watcher, &FutureWatcher::finished,
|
||||
[this, program_id, dialog, base_path, update_path, future_watcher] {
|
||||
[this, dialog, base_path, update_path, future_watcher] {
|
||||
dialog->hide();
|
||||
const auto& [base, update] = future_watcher->result();
|
||||
if (base != Loader::ResultStatus::Success) {
|
||||
|
@ -1676,6 +1710,16 @@ void GMainWindow::OnConfigure() {
|
|||
SyncMenuUISettings();
|
||||
game_list->RefreshGameDirectory();
|
||||
config->Save();
|
||||
if (UISettings::values.hide_mouse && emulation_running) {
|
||||
setMouseTracking(true);
|
||||
ui.centralwidget->setMouseTracking(true);
|
||||
ui.menubar->setMouseTracking(true);
|
||||
mouse_hide_timer.start();
|
||||
} else {
|
||||
setMouseTracking(false);
|
||||
ui.centralwidget->setMouseTracking(false);
|
||||
ui.menubar->setMouseTracking(false);
|
||||
}
|
||||
} else {
|
||||
Settings::values.input_profiles = old_input_profiles;
|
||||
Settings::LoadProfile(old_input_profile_index);
|
||||
|
@ -1915,18 +1959,23 @@ void GMainWindow::OnCaptureScreenshot() {
|
|||
OnStartGame();
|
||||
}
|
||||
|
||||
#ifdef ENABLE_FFMPEG_VIDEO_DUMPER
|
||||
void GMainWindow::OnStartVideoDumping() {
|
||||
const QString path = QFileDialog::getSaveFileName(
|
||||
this, tr("Save Video"), UISettings::values.video_dumping_path, tr("WebM Videos (*.webm)"));
|
||||
if (path.isEmpty()) {
|
||||
DumpingDialog dialog(this);
|
||||
if (dialog.exec() != QDialog::DialogCode::Accepted) {
|
||||
ui.action_Dump_Video->setChecked(false);
|
||||
return;
|
||||
}
|
||||
UISettings::values.video_dumping_path = QFileInfo(path).path();
|
||||
const auto path = dialog.GetFilePath();
|
||||
if (emulation_running) {
|
||||
Layout::FramebufferLayout layout{
|
||||
Layout::FrameLayoutFromResolutionScale(VideoCore::GetResolutionScaleFactor())};
|
||||
Core::System::GetInstance().VideoDumper().StartDumping(path.toStdString(), "webm", layout);
|
||||
if (!Core::System::GetInstance().VideoDumper().StartDumping(path.toStdString(), layout)) {
|
||||
QMessageBox::critical(
|
||||
this, tr("Citra"),
|
||||
tr("Could not start video dumping.<br>Refer to the log for details."));
|
||||
ui.action_Dump_Video->setChecked(false);
|
||||
}
|
||||
} else {
|
||||
video_dumping_on_start = true;
|
||||
video_dumping_path = path;
|
||||
|
@ -1943,6 +1992,8 @@ void GMainWindow::OnStopVideoDumping() {
|
|||
const bool was_dumping = Core::System::GetInstance().VideoDumper().IsDumping();
|
||||
if (!was_dumping)
|
||||
return;
|
||||
|
||||
game_paused_for_dumping = emu_thread->IsRunning();
|
||||
OnPauseGame();
|
||||
|
||||
auto future =
|
||||
|
@ -1952,13 +2003,15 @@ void GMainWindow::OnStopVideoDumping() {
|
|||
if (game_shutdown_delayed) {
|
||||
game_shutdown_delayed = false;
|
||||
ShutdownGame();
|
||||
} else {
|
||||
} else if (game_paused_for_dumping) {
|
||||
game_paused_for_dumping = false;
|
||||
OnStartGame();
|
||||
}
|
||||
});
|
||||
future_watcher->setFuture(future);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
void GMainWindow::UpdateStatusBar() {
|
||||
if (emu_thread == nullptr) {
|
||||
|
@ -1983,6 +2036,30 @@ void GMainWindow::UpdateStatusBar() {
|
|||
emu_frametime_label->setVisible(true);
|
||||
}
|
||||
|
||||
void GMainWindow::HideMouseCursor() {
|
||||
if (emu_thread == nullptr || UISettings::values.hide_mouse == false) {
|
||||
mouse_hide_timer.stop();
|
||||
ShowMouseCursor();
|
||||
return;
|
||||
}
|
||||
setCursor(QCursor(Qt::BlankCursor));
|
||||
}
|
||||
|
||||
void GMainWindow::ShowMouseCursor() {
|
||||
unsetCursor();
|
||||
if (emu_thread != nullptr && UISettings::values.hide_mouse) {
|
||||
mouse_hide_timer.start();
|
||||
}
|
||||
}
|
||||
|
||||
void GMainWindow::mouseMoveEvent(QMouseEvent* event) {
|
||||
ShowMouseCursor();
|
||||
}
|
||||
|
||||
void GMainWindow::mousePressEvent(QMouseEvent* event) {
|
||||
ShowMouseCursor();
|
||||
}
|
||||
|
||||
void GMainWindow::OnCoreError(Core::System::ResultStatus result, std::string details) {
|
||||
QString status_message;
|
||||
|
||||
|
|
|
@ -207,8 +207,10 @@ private slots:
|
|||
void OnPlayMovie();
|
||||
void OnStopRecordingPlayback();
|
||||
void OnCaptureScreenshot();
|
||||
#ifdef ENABLE_FFMPEG_VIDEO_DUMPER
|
||||
void OnStartVideoDumping();
|
||||
void OnStopVideoDumping();
|
||||
#endif
|
||||
void OnCoreError(Core::System::ResultStatus, std::string);
|
||||
/// Called whenever a user selects Help->About Citra
|
||||
void OnMenuAboutCitra();
|
||||
|
@ -225,6 +227,8 @@ private:
|
|||
void UpdateWindowTitle();
|
||||
void RetranslateStatusBar();
|
||||
void InstallCIA(QStringList filepaths);
|
||||
void HideMouseCursor();
|
||||
void ShowMouseCursor();
|
||||
|
||||
Ui::MainWindow ui;
|
||||
|
||||
|
@ -253,6 +257,7 @@ private:
|
|||
QString game_path;
|
||||
|
||||
bool auto_paused = false;
|
||||
QTimer mouse_hide_timer;
|
||||
|
||||
// Movie
|
||||
bool movie_record_on_start = false;
|
||||
|
@ -263,6 +268,8 @@ private:
|
|||
QString video_dumping_path;
|
||||
// Whether game shutdown is delayed due to video dumping
|
||||
bool game_shutdown_delayed = false;
|
||||
// Whether game was paused due to stopping video dumping
|
||||
bool game_paused_for_dumping = false;
|
||||
|
||||
// Debugger panes
|
||||
ProfilerWidget* profilerWidget;
|
||||
|
@ -301,6 +308,8 @@ protected:
|
|||
void dropEvent(QDropEvent* event) override;
|
||||
void dragEnterEvent(QDragEnterEvent* event) override;
|
||||
void dragMoveEvent(QDragMoveEvent* event) override;
|
||||
void mouseMoveEvent(QMouseEvent* event) override;
|
||||
void mousePressEvent(QMouseEvent* event) override;
|
||||
};
|
||||
|
||||
Q_DECLARE_METATYPE(std::size_t);
|
||||
|
|
|
@ -76,6 +76,7 @@ struct Values {
    bool confirm_before_closing;
    bool first_start;
    bool pause_when_in_background;
    bool hide_mouse;

    bool updater_found;
    bool update_on_close;
@ -135,4 +135,20 @@ void ParamPackage::Clear() {
    data.clear();
}

ParamPackage::DataType::iterator ParamPackage::begin() {
    return data.begin();
}

ParamPackage::DataType::const_iterator ParamPackage::begin() const {
    return data.begin();
}

ParamPackage::DataType::iterator ParamPackage::end() {
    return data.end();
}

ParamPackage::DataType::const_iterator ParamPackage::end() const {
    return data.end();
}

} // namespace Common
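With DataType switched to std::map (see the header change below), iteration and Serialize() now
visit keys in sorted order, so the serialized option string is deterministic. An illustrative
round-trip, not part of this change (the key names are made up):

    Common::ParamPackage pkg;
    pkg.Set("preset", "good");
    pkg.Set("crf", "30");
    for (const auto& [key, value] : pkg) {
        // visits "crf" before "preset"
    }
    const std::string serialized = pkg.Serialize(); // stable across runs
    Common::ParamPackage restored{serialized};      // parses back to the same contents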
@ -5,15 +5,15 @@
#pragma once

#include <initializer_list>
#include <map>
#include <string>
#include <unordered_map>

namespace Common {

/// A string-based key-value container supporting serializing to and deserializing from a string
class ParamPackage {
public:
    using DataType = std::unordered_map<std::string, std::string>;
    using DataType = std::map<std::string, std::string>;

    ParamPackage() = default;
    explicit ParamPackage(const std::string& serialized);

@ -35,6 +35,12 @@ public:
    void Erase(const std::string& key);
    void Clear();

    // For range-based for
    DataType::iterator begin();
    DataType::const_iterator begin() const;
    DataType::iterator end();
    DataType::const_iterator end() const;

private:
    DataType data;
};
@ -495,5 +495,5 @@ if (ARCHITECTURE_x86_64)
endif()

if (ENABLE_FFMPEG_VIDEO_DUMPER)
    target_link_libraries(core PRIVATE FFmpeg::avcodec FFmpeg::avformat FFmpeg::swscale FFmpeg::swresample FFmpeg::avutil)
    target_link_libraries(core PUBLIC FFmpeg::avcodec FFmpeg::avformat FFmpeg::swscale FFmpeg::swresample FFmpeg::avutil)
endif()
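# Presumably needed because the new citra_qt dumping dialogs include
# core/dumping/ffmpeg_backend.h (which pulls in the FFmpeg headers), so FFmpeg's usage
# requirements now have to propagate to targets that link against core.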
@ -377,6 +377,12 @@ System::ResultStatus System::Init(Frontend::EmuWindow& emu_window, u32 system_mo
    Service::Init(*this);
    GDBStub::DeferStart();

#ifdef ENABLE_FFMPEG_VIDEO_DUMPER
    video_dumper = std::make_unique<VideoDumper::FFmpegBackend>();
#else
    video_dumper = std::make_unique<VideoDumper::NullBackend>();
#endif

    VideoCore::ResultStatus result = VideoCore::Init(emu_window, *memory);
    if (result != VideoCore::ResultStatus::Success) {
        switch (result) {

@ -389,12 +395,6 @@ System::ResultStatus System::Init(Frontend::EmuWindow& emu_window, u32 system_mo
        }
    }

#ifdef ENABLE_FFMPEG_VIDEO_DUMPER
    video_dumper = std::make_unique<VideoDumper::FFmpegBackend>();
#else
    video_dumper = std::make_unique<VideoDumper::NullBackend>();
#endif

    LOG_DEBUG(Core, "Initialized OK");

    initalized = true;
@ -8,17 +8,7 @@
namespace VideoDumper {

VideoFrame::VideoFrame(std::size_t width_, std::size_t height_, u8* data_)
    : width(width_), height(height_), stride(width * 4), data(width * height * 4) {
    // While copying, rotate the image to put the pixels in correct order
    // (As OpenGL returns pixel data starting from the lowest position)
    for (std::size_t i = 0; i < height; i++) {
        for (std::size_t j = 0; j < width; j++) {
            for (std::size_t k = 0; k < 4; k++) {
                data[i * stride + j * 4 + k] = data_[(height - i - 1) * stride + j * 4 + k];
            }
        }
    }
}
    : width(width_), height(height_), stride(width * 4), data(data_, data_ + width * height * 4) {}

Backend::~Backend() = default;
NullBackend::~NullBackend() = default;
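// Note (illustrative): the constructor no longer flips the image, so `data` now holds the rows
// exactly as glReadPixels produced them (bottom-up). A consumer can still hand FFmpeg a top-down
// view without copying by using a negative stride, e.g.:
//   frame->data[0] = video_frame.data.data() + (video_frame.height - 1) * video_frame.stride;
//   frame->linesize[0] = -static_cast<int>(video_frame.stride);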
@ -28,10 +28,9 @@ public:
class Backend {
public:
    virtual ~Backend();
    virtual bool StartDumping(const std::string& path, const std::string& format,
                              const Layout::FramebufferLayout& layout) = 0;
    virtual void AddVideoFrame(const VideoFrame& frame) = 0;
    virtual void AddAudioFrame(const AudioCore::StereoFrame16& frame) = 0;
    virtual bool StartDumping(const std::string& path, const Layout::FramebufferLayout& layout) = 0;
    virtual void AddVideoFrame(VideoFrame frame) = 0;
    virtual void AddAudioFrame(AudioCore::StereoFrame16 frame) = 0;
    virtual void AddAudioSample(const std::array<s16, 2>& sample) = 0;
    virtual void StopDumping() = 0;
    virtual bool IsDumping() const = 0;

@ -41,12 +40,12 @@ public:
class NullBackend : public Backend {
public:
    ~NullBackend() override;
    bool StartDumping(const std::string& /*path*/, const std::string& /*format*/,
    bool StartDumping(const std::string& /*path*/,
                      const Layout::FramebufferLayout& /*layout*/) override {
        return false;
    }
    void AddVideoFrame(const VideoFrame& /*frame*/) override {}
    void AddAudioFrame(const AudioCore::StereoFrame16& /*frame*/) override {}
    void AddVideoFrame(VideoFrame /*frame*/) override {}
    void AddAudioFrame(AudioCore::StereoFrame16 /*frame*/) override {}
    void AddAudioSample(const std::array<s16, 2>& /*sample*/) override {}
    void StopDumping() override {}
    bool IsDumping() const override {
@ -2,15 +2,19 @@
|
|||
// Licensed under GPLv2 or any later version
|
||||
// Refer to the license.txt file included.
|
||||
|
||||
#include <unordered_set>
|
||||
#include "common/assert.h"
|
||||
#include "common/file_util.h"
|
||||
#include "common/logging/log.h"
|
||||
#include "common/param_package.h"
|
||||
#include "common/string_util.h"
|
||||
#include "core/dumping/ffmpeg_backend.h"
|
||||
#include "core/settings.h"
|
||||
#include "video_core/renderer_base.h"
|
||||
#include "video_core/video_core.h"
|
||||
|
||||
extern "C" {
|
||||
#include <libavutil/opt.h>
|
||||
#include <libavutil/pixdesc.h>
|
||||
}
|
||||
|
||||
namespace VideoDumper {
|
||||
|
@ -27,14 +31,25 @@ void InitializeFFmpegLibraries() {
|
|||
initialized = true;
|
||||
}
|
||||
|
||||
AVDictionary* ToAVDictionary(const std::string& serialized) {
|
||||
Common::ParamPackage param_package{serialized};
|
||||
AVDictionary* result = nullptr;
|
||||
for (const auto& [key, value] : param_package) {
|
||||
av_dict_set(&result, key.c_str(), value.c_str(), 0);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
FFmpegStream::~FFmpegStream() {
|
||||
Free();
|
||||
}
|
||||
|
||||
bool FFmpegStream::Init(AVFormatContext* format_context_) {
|
||||
bool FFmpegStream::Init(FFmpegMuxer& muxer) {
|
||||
InitializeFFmpegLibraries();
|
||||
|
||||
format_context = format_context_;
|
||||
format_context = muxer.format_context.get();
|
||||
format_context_mutex = &muxer.format_context_mutex;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -47,14 +62,12 @@ void FFmpegStream::Flush() {
|
|||
}
|
||||
|
||||
void FFmpegStream::WritePacket(AVPacket& packet) {
|
||||
if (packet.pts != static_cast<s64>(AV_NOPTS_VALUE)) {
|
||||
packet.pts = av_rescale_q(packet.pts, codec_context->time_base, stream->time_base);
|
||||
}
|
||||
if (packet.dts != static_cast<s64>(AV_NOPTS_VALUE)) {
|
||||
packet.dts = av_rescale_q(packet.dts, codec_context->time_base, stream->time_base);
|
||||
}
|
||||
av_packet_rescale_ts(&packet, codec_context->time_base, stream->time_base);
|
||||
packet.stream_index = stream->index;
|
||||
{
|
||||
std::lock_guard lock{*format_context_mutex};
|
||||
av_interleaved_write_frame(format_context, &packet);
|
||||
}
|
||||
}
|
||||
|
||||
void FFmpegStream::SendFrame(AVFrame* frame) {
|
||||
|
@ -88,21 +101,18 @@ FFmpegVideoStream::~FFmpegVideoStream() {
|
|||
Free();
|
||||
}
|
||||
|
||||
bool FFmpegVideoStream::Init(AVFormatContext* format_context, AVOutputFormat* output_format,
|
||||
const Layout::FramebufferLayout& layout_) {
|
||||
bool FFmpegVideoStream::Init(FFmpegMuxer& muxer, const Layout::FramebufferLayout& layout_) {
|
||||
|
||||
InitializeFFmpegLibraries();
|
||||
|
||||
if (!FFmpegStream::Init(format_context))
|
||||
if (!FFmpegStream::Init(muxer))
|
||||
return false;
|
||||
|
||||
layout = layout_;
|
||||
frame_count = 0;
|
||||
|
||||
// Initialize video codec
|
||||
// Ensure VP9 codec here, also to avoid patent issues
|
||||
constexpr AVCodecID codec_id = AV_CODEC_ID_VP9;
|
||||
const AVCodec* codec = avcodec_find_encoder(codec_id);
|
||||
const AVCodec* codec = avcodec_find_encoder_by_name(Settings::values.video_encoder.c_str());
|
||||
codec_context.reset(avcodec_alloc_context3(codec));
|
||||
if (!codec || !codec_context) {
|
||||
LOG_ERROR(Render, "Could not find video encoder or allocate video codec context");
|
||||
|
@ -111,23 +121,28 @@ bool FFmpegVideoStream::Init(AVFormatContext* format_context, AVOutputFormat* ou
|
|||
|
||||
// Configure video codec context
|
||||
codec_context->codec_type = AVMEDIA_TYPE_VIDEO;
|
||||
codec_context->bit_rate = 2500000;
|
||||
codec_context->bit_rate = Settings::values.video_bitrate;
|
||||
codec_context->width = layout.width;
|
||||
codec_context->height = layout.height;
|
||||
codec_context->time_base.num = 1;
|
||||
codec_context->time_base.den = 60;
|
||||
codec_context->gop_size = 12;
|
||||
codec_context->pix_fmt = AV_PIX_FMT_YUV420P;
|
||||
codec_context->thread_count = 8;
|
||||
if (output_format->flags & AVFMT_GLOBALHEADER)
|
||||
codec_context->pix_fmt = codec->pix_fmts ? codec->pix_fmts[0] : AV_PIX_FMT_YUV420P;
|
||||
if (format_context->oformat->flags & AVFMT_GLOBALHEADER)
|
||||
codec_context->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
|
||||
av_opt_set_int(codec_context.get(), "cpu-used", 5, 0);
|
||||
|
||||
if (avcodec_open2(codec_context.get(), codec, nullptr) < 0) {
|
||||
AVDictionary* options = ToAVDictionary(Settings::values.video_encoder_options);
|
||||
if (avcodec_open2(codec_context.get(), codec, &options) < 0) {
|
||||
LOG_ERROR(Render, "Could not open video codec");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (av_dict_count(options) != 0) { // Successfully set options are removed from the dict
|
||||
char* buf = nullptr;
|
||||
av_dict_get_string(options, &buf, ':', ';');
|
||||
LOG_WARNING(Render, "Video encoder options not found: {}", buf);
|
||||
}
|
||||
|
||||
// Create video stream
|
||||
stream = avformat_new_stream(format_context, codec);
|
||||
if (!stream || avcodec_parameters_from_context(stream->codecpar, codec_context.get()) < 0) {
|
||||
|
@ -141,7 +156,7 @@ bool FFmpegVideoStream::Init(AVFormatContext* format_context, AVOutputFormat* ou
|
|||
scaled_frame->format = codec_context->pix_fmt;
|
||||
scaled_frame->width = layout.width;
|
||||
scaled_frame->height = layout.height;
|
||||
if (av_frame_get_buffer(scaled_frame.get(), 1) < 0) {
|
||||
if (av_frame_get_buffer(scaled_frame.get(), 0) < 0) {
|
||||
LOG_ERROR(Render, "Could not allocate frame buffer");
|
||||
return false;
|
||||
}
|
||||
|
@ -177,6 +192,10 @@ void FFmpegVideoStream::ProcessFrame(VideoFrame& frame) {
|
|||
current_frame->height = layout.height;
|
||||
|
||||
// Scale the frame
|
||||
if (av_frame_make_writable(scaled_frame.get()) < 0) {
|
||||
LOG_ERROR(Render, "Video frame dropped: Could not prepare frame");
|
||||
return;
|
||||
}
|
||||
if (sws_context) {
|
||||
sws_scale(sws_context.get(), current_frame->data, current_frame->linesize, 0, layout.height,
|
||||
scaled_frame->data, scaled_frame->linesize);
|
||||
|
@ -191,17 +210,16 @@ FFmpegAudioStream::~FFmpegAudioStream() {
|
|||
Free();
|
||||
}
|
||||
|
||||
bool FFmpegAudioStream::Init(AVFormatContext* format_context) {
|
||||
bool FFmpegAudioStream::Init(FFmpegMuxer& muxer) {
|
||||
InitializeFFmpegLibraries();
|
||||
|
||||
if (!FFmpegStream::Init(format_context))
|
||||
if (!FFmpegStream::Init(muxer))
|
||||
return false;
|
||||
|
||||
sample_count = 0;
|
||||
frame_count = 0;
|
||||
|
||||
// Initialize audio codec
|
||||
constexpr AVCodecID codec_id = AV_CODEC_ID_VORBIS;
|
||||
const AVCodec* codec = avcodec_find_encoder(codec_id);
|
||||
const AVCodec* codec = avcodec_find_encoder_by_name(Settings::values.audio_encoder.c_str());
|
||||
codec_context.reset(avcodec_alloc_context3(codec));
|
||||
if (!codec || !codec_context) {
|
||||
LOG_ERROR(Render, "Could not find audio encoder or allocate audio codec context");
|
||||
|
@ -210,17 +228,52 @@ bool FFmpegAudioStream::Init(AVFormatContext* format_context) {
|
|||
|
||||
// Configure audio codec context
|
||||
codec_context->codec_type = AVMEDIA_TYPE_AUDIO;
|
||||
codec_context->bit_rate = 64000;
|
||||
codec_context->bit_rate = Settings::values.audio_bitrate;
|
||||
if (codec->sample_fmts) {
|
||||
codec_context->sample_fmt = codec->sample_fmts[0];
|
||||
} else {
|
||||
codec_context->sample_fmt = AV_SAMPLE_FMT_S16P;
|
||||
}
|
||||
|
||||
if (codec->supported_samplerates) {
|
||||
codec_context->sample_rate = codec->supported_samplerates[0];
|
||||
// Prefer native sample rate if supported
|
||||
const int* ptr = codec->supported_samplerates;
|
||||
while ((*ptr)) {
|
||||
if ((*ptr) == AudioCore::native_sample_rate) {
|
||||
codec_context->sample_rate = AudioCore::native_sample_rate;
|
||||
break;
|
||||
}
|
||||
ptr++;
|
||||
}
|
||||
} else {
|
||||
codec_context->sample_rate = AudioCore::native_sample_rate;
|
||||
}
|
||||
codec_context->time_base.num = 1;
|
||||
codec_context->time_base.den = codec_context->sample_rate;
|
||||
codec_context->channel_layout = AV_CH_LAYOUT_STEREO;
|
||||
codec_context->channels = 2;
|
||||
if (format_context->oformat->flags & AVFMT_GLOBALHEADER)
|
||||
codec_context->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
|
||||
|
||||
if (avcodec_open2(codec_context.get(), codec, nullptr) < 0) {
|
||||
AVDictionary* options = ToAVDictionary(Settings::values.audio_encoder_options);
|
||||
if (avcodec_open2(codec_context.get(), codec, &options) < 0) {
|
||||
LOG_ERROR(Render, "Could not open audio codec");
|
||||
return false;
|
||||
}
|
||||
|
||||
if (av_dict_count(options) != 0) { // Successfully set options are removed from the dict
|
||||
char* buf = nullptr;
|
||||
av_dict_get_string(options, &buf, ':', ';');
|
||||
LOG_WARNING(Render, "Audio encoder options not found: {}", buf);
|
||||
}
|
||||
|
||||
if (codec_context->frame_size) {
|
||||
frame_size = static_cast<u64>(codec_context->frame_size);
|
||||
} else { // variable frame size support
|
||||
frame_size = std::tuple_size<AudioCore::StereoFrame16>::value;
|
||||
}
|
||||
|
||||
// Create audio stream
|
||||
stream = avformat_new_stream(format_context, codec);
|
||||
if (!stream || avcodec_parameters_from_context(stream->codecpar, codec_context.get()) < 0) {
|
||||
|
@ -234,6 +287,7 @@ bool FFmpegAudioStream::Init(AVFormatContext* format_context) {
|
|||
audio_frame->format = codec_context->sample_fmt;
|
||||
audio_frame->channel_layout = codec_context->channel_layout;
|
||||
audio_frame->channels = codec_context->channels;
|
||||
audio_frame->sample_rate = codec_context->sample_rate;
|
||||
|
||||
// Allocate SWR context
|
||||
auto* context =
|
||||
|
@ -253,7 +307,7 @@ bool FFmpegAudioStream::Init(AVFormatContext* format_context) {
|
|||
// Allocate resampled data
|
||||
int error =
|
||||
av_samples_alloc_array_and_samples(&resampled_data, nullptr, codec_context->channels,
|
||||
codec_context->frame_size, codec_context->sample_fmt, 0);
|
||||
frame_size, codec_context->sample_fmt, 0);
|
||||
if (error < 0) {
|
||||
LOG_ERROR(Render, "Could not allocate samples storage");
|
||||
return false;
|
||||
|
@ -274,39 +328,79 @@ void FFmpegAudioStream::Free() {
|
|||
av_freep(&resampled_data);
|
||||
}
|
||||
|
||||
void FFmpegAudioStream::ProcessFrame(VariableAudioFrame& channel0, VariableAudioFrame& channel1) {
|
||||
void FFmpegAudioStream::ProcessFrame(const VariableAudioFrame& channel0,
|
||||
const VariableAudioFrame& channel1) {
|
||||
ASSERT_MSG(channel0.size() == channel1.size(),
|
||||
"Frames of the two channels must have the same number of samples");
|
||||
std::array<const u8*, 2> src_data = {reinterpret_cast<u8*>(channel0.data()),
|
||||
reinterpret_cast<u8*>(channel1.data())};
|
||||
if (swr_convert(swr_context.get(), resampled_data, channel0.size(), src_data.data(),
|
||||
channel0.size()) < 0) {
|
||||
|
||||
const auto sample_size = av_get_bytes_per_sample(codec_context->sample_fmt);
|
||||
std::array<const u8*, 2> src_data = {reinterpret_cast<const u8*>(channel0.data()),
|
||||
reinterpret_cast<const u8*>(channel1.data())};
|
||||
|
||||
std::array<u8*, 2> dst_data;
|
||||
if (av_sample_fmt_is_planar(codec_context->sample_fmt)) {
|
||||
dst_data = {resampled_data[0] + sample_size * offset,
|
||||
resampled_data[1] + sample_size * offset};
|
||||
} else {
|
||||
dst_data = {resampled_data[0] + sample_size * offset * 2}; // 2 channels
|
||||
}
|
||||
|
||||
auto resampled_count = swr_convert(swr_context.get(), dst_data.data(), frame_size - offset,
|
||||
src_data.data(), channel0.size());
|
||||
if (resampled_count < 0) {
|
||||
LOG_ERROR(Render, "Audio frame dropped: Could not resample data");
|
||||
return;
|
||||
}
|
||||
|
||||
offset += resampled_count;
|
||||
if (offset < frame_size) { // Still not enough to form a frame
|
||||
return;
|
||||
}
|
||||
|
||||
while (true) {
|
||||
// Prepare frame
|
||||
audio_frame->nb_samples = channel0.size();
|
||||
audio_frame->nb_samples = frame_size;
|
||||
audio_frame->data[0] = resampled_data[0];
|
||||
if (av_sample_fmt_is_planar(codec_context->sample_fmt)) {
|
||||
audio_frame->data[1] = resampled_data[1];
|
||||
audio_frame->pts = sample_count;
|
||||
sample_count += channel0.size();
|
||||
}
|
||||
audio_frame->pts = frame_count * frame_size;
|
||||
frame_count++;
|
||||
|
||||
SendFrame(audio_frame.get());
|
||||
|
||||
// swr_convert buffers input internally. Try to get more resampled data
|
||||
resampled_count = swr_convert(swr_context.get(), resampled_data, frame_size, nullptr, 0);
|
||||
if (resampled_count < 0) {
|
||||
LOG_ERROR(Render, "Audio frame dropped: Could not resample data");
|
||||
return;
|
||||
}
|
||||
if (static_cast<u64>(resampled_count) < frame_size) {
|
||||
offset = resampled_count;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
std::size_t FFmpegAudioStream::GetAudioFrameSize() const {
|
||||
ASSERT_MSG(codec_context, "Codec context is not initialized yet!");
|
||||
return codec_context->frame_size;
|
||||
void FFmpegAudioStream::Flush() {
|
||||
// Send the last samples
|
||||
audio_frame->nb_samples = offset;
|
||||
audio_frame->data[0] = resampled_data[0];
|
||||
if (av_sample_fmt_is_planar(codec_context->sample_fmt)) {
|
||||
audio_frame->data[1] = resampled_data[1];
|
||||
}
|
||||
audio_frame->pts = frame_count * frame_size;
|
||||
|
||||
SendFrame(audio_frame.get());
|
||||
|
||||
FFmpegStream::Flush();
|
||||
}
|
||||
|
||||
FFmpegMuxer::~FFmpegMuxer() {
|
||||
Free();
|
||||
}
|
||||
|
||||
bool FFmpegMuxer::Init(const std::string& path, const std::string& format,
|
||||
const Layout::FramebufferLayout& layout) {
|
||||
bool FFmpegMuxer::Init(const std::string& path, const Layout::FramebufferLayout& layout) {
|
||||
|
||||
InitializeFFmpegLibraries();
|
||||
|
||||
|
@ -315,9 +409,8 @@ bool FFmpegMuxer::Init(const std::string& path, const std::string& format,
|
|||
}
|
||||
|
||||
// Get output format
|
||||
// Ensure webm here to avoid patent issues
|
||||
ASSERT_MSG(format == "webm", "Only webm is allowed for frame dumping");
|
||||
auto* output_format = av_guess_format(format.c_str(), path.c_str(), "video/webm");
|
||||
const auto format = Settings::values.output_format;
|
||||
auto* output_format = av_guess_format(format.c_str(), path.c_str(), nullptr);
|
||||
if (!output_format) {
|
||||
LOG_ERROR(Render, "Could not get format {}", format);
|
||||
return false;
|
||||
|
@ -333,18 +426,24 @@ bool FFmpegMuxer::Init(const std::string& path, const std::string& format,
|
|||
}
|
||||
format_context.reset(format_context_raw);
|
||||
|
||||
if (!video_stream.Init(format_context.get(), output_format, layout))
|
||||
if (!video_stream.Init(*this, layout))
|
||||
return false;
|
||||
if (!audio_stream.Init(format_context.get()))
|
||||
if (!audio_stream.Init(*this))
|
||||
return false;
|
||||
|
||||
AVDictionary* options = ToAVDictionary(Settings::values.format_options);
|
||||
// Open video file
|
||||
if (avio_open(&format_context->pb, path.c_str(), AVIO_FLAG_WRITE) < 0 ||
|
||||
avformat_write_header(format_context.get(), nullptr)) {
|
||||
avformat_write_header(format_context.get(), &options)) {
|
||||
|
||||
LOG_ERROR(Render, "Could not open {}", path);
|
||||
return false;
|
||||
}
|
||||
if (av_dict_count(options) != 0) { // Successfully set options are removed from the dict
|
||||
char* buf = nullptr;
|
||||
av_dict_get_string(options, &buf, ':', ';');
|
||||
LOG_WARNING(Render, "Format options not found: {}", buf);
|
||||
}
|
||||
|
||||
LOG_INFO(Render, "Dumping frames to {} ({}x{})", path, layout.width, layout.height);
|
||||
return true;
|
||||
|
@ -360,7 +459,8 @@ void FFmpegMuxer::ProcessVideoFrame(VideoFrame& frame) {
|
|||
video_stream.ProcessFrame(frame);
|
||||
}
|
||||
|
||||
void FFmpegMuxer::ProcessAudioFrame(VariableAudioFrame& channel0, VariableAudioFrame& channel1) {
|
||||
void FFmpegMuxer::ProcessAudioFrame(const VariableAudioFrame& channel0,
|
||||
const VariableAudioFrame& channel1) {
|
||||
audio_stream.ProcessFrame(channel0, channel1);
|
||||
}
|
||||
|
||||
|
@ -372,11 +472,9 @@ void FFmpegMuxer::FlushAudio() {
|
|||
audio_stream.Flush();
|
||||
}
|
||||
|
||||
std::size_t FFmpegMuxer::GetAudioFrameSize() const {
|
||||
return audio_stream.GetAudioFrameSize();
|
||||
}
|
||||
|
||||
void FFmpegMuxer::WriteTrailer() {
|
||||
std::lock_guard lock{format_context_mutex};
|
||||
av_interleaved_write_frame(format_context.get(), nullptr);
|
||||
av_write_trailer(format_context.get());
|
||||
}
|
||||
|
||||
|
@ -392,12 +490,11 @@ FFmpegBackend::~FFmpegBackend() {
|
|||
ffmpeg.Free();
|
||||
}
|
||||
|
||||
bool FFmpegBackend::StartDumping(const std::string& path, const std::string& format,
|
||||
const Layout::FramebufferLayout& layout) {
|
||||
bool FFmpegBackend::StartDumping(const std::string& path, const Layout::FramebufferLayout& layout) {
|
||||
|
||||
InitializeFFmpegLibraries();
|
||||
|
||||
if (!ffmpeg.Init(path, format, layout)) {
|
||||
if (!ffmpeg.Init(path, layout)) {
|
||||
ffmpeg.Free();
|
||||
return false;
|
||||
}
|
||||
|
@ -450,31 +547,29 @@ bool FFmpegBackend::StartDumping(const std::string& path, const std::string& for
|
|||
return true;
|
||||
}
|
||||
|
||||
void FFmpegBackend::AddVideoFrame(const VideoFrame& frame) {
|
||||
void FFmpegBackend::AddVideoFrame(VideoFrame frame) {
|
||||
event1.Wait();
|
||||
video_frame_buffers[next_buffer] = std::move(frame);
|
||||
event2.Set();
|
||||
}
|
||||
|
||||
void FFmpegBackend::AddAudioFrame(const AudioCore::StereoFrame16& frame) {
|
||||
std::array<std::array<s16, 160>, 2> refactored_frame;
|
||||
void FFmpegBackend::AddAudioFrame(AudioCore::StereoFrame16 frame) {
|
||||
std::array<VariableAudioFrame, 2> refactored_frame;
|
||||
for (auto& channel : refactored_frame) {
|
||||
channel.resize(frame.size());
|
||||
}
|
||||
for (std::size_t i = 0; i < frame.size(); i++) {
|
||||
refactored_frame[0][i] = frame[i][0];
|
||||
refactored_frame[1][i] = frame[i][1];
|
||||
}
|
||||
|
||||
for (auto i : {0, 1}) {
|
||||
audio_buffers[i].insert(audio_buffers[i].end(), refactored_frame[i].begin(),
|
||||
refactored_frame[i].end());
|
||||
}
|
||||
CheckAudioBuffer();
|
||||
audio_frame_queues[0].Push(std::move(refactored_frame[0]));
|
||||
audio_frame_queues[1].Push(std::move(refactored_frame[1]));
|
||||
}
|
||||
|
||||
void FFmpegBackend::AddAudioSample(const std::array<s16, 2>& sample) {
|
||||
for (auto i : {0, 1}) {
|
||||
audio_buffers[i].push_back(sample[i]);
|
||||
}
|
||||
CheckAudioBuffer();
|
||||
audio_frame_queues[0].Push(VariableAudioFrame{sample[0]});
|
||||
audio_frame_queues[1].Push(VariableAudioFrame{sample[1]});
|
||||
}
|
||||
|
||||
void FFmpegBackend::StopDumping() {
|
||||
|
@ -484,12 +579,6 @@ void FFmpegBackend::StopDumping() {
|
|||
// Flush the video processing queue
|
||||
AddVideoFrame(VideoFrame());
|
||||
for (auto i : {0, 1}) {
|
||||
// Add remaining data to audio queue
|
||||
if (audio_buffers[i].size() >= 0) {
|
||||
VariableAudioFrame buffer(audio_buffers[i].begin(), audio_buffers[i].end());
|
||||
audio_frame_queues[i].Push(std::move(buffer));
|
||||
audio_buffers[i].clear();
|
||||
}
|
||||
// Flush the audio processing queue
|
||||
audio_frame_queues[i].Push(VariableAudioFrame());
|
||||
}
|
||||
|
@ -513,18 +602,234 @@ void FFmpegBackend::EndDumping() {
|
|||
processing_ended.Set();
|
||||
}
|
||||
|
||||
void FFmpegBackend::CheckAudioBuffer() {
|
||||
for (auto i : {0, 1}) {
|
||||
const std::size_t frame_size = ffmpeg.GetAudioFrameSize();
|
||||
// Add audio data to the queue when there is enough to form a frame
|
||||
while (audio_buffers[i].size() >= frame_size) {
|
||||
VariableAudioFrame buffer(audio_buffers[i].begin(),
|
||||
audio_buffers[i].begin() + frame_size);
|
||||
audio_frame_queues[i].Push(std::move(buffer));
|
||||
// To std string, but handles nullptr
|
||||
std::string ToStdString(const char* str, const std::string& fallback = "") {
|
||||
return str ? std::string{str} : fallback;
|
||||
}
|
||||
|
||||
audio_buffers[i].erase(audio_buffers[i].begin(), audio_buffers[i].begin() + frame_size);
|
||||
std::string FormatDuration(s64 duration) {
|
||||
// The following is implemented according to libavutil code (opt.c)
|
||||
std::string out;
|
||||
if (duration < 0 && duration != std::numeric_limits<s64>::min()) {
|
||||
out.append("-");
|
||||
duration = -duration;
|
||||
}
|
||||
if (duration == std::numeric_limits<s64>::max()) {
|
||||
return "INT64_MAX";
|
||||
} else if (duration == std::numeric_limits<s64>::min()) {
|
||||
return "INT64_MIN";
|
||||
} else if (duration > 3600ll * 1000000ll) {
|
||||
out.append(fmt::format("{}:{:02d}:{:02d}.{:06d}", duration / 3600000000ll,
|
||||
((duration / 60000000ll) % 60), ((duration / 1000000ll) % 60),
|
||||
duration % 1000000));
|
||||
} else if (duration > 60ll * 1000000ll) {
|
||||
out.append(fmt::format("{}:{:02d}.{:06d}", duration / 60000000ll,
|
||||
((duration / 1000000ll) % 60), duration % 1000000));
|
||||
} else {
|
||||
out.append(fmt::format("{}.{:06d}", duration / 1000000ll, duration % 1000000));
|
||||
}
|
||||
while (out.back() == '0') {
|
||||
out.erase(out.size() - 1, 1);
|
||||
}
|
||||
if (out.back() == '.') {
|
||||
out.erase(out.size() - 1, 1);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
std::string FormatDefaultValue(const AVOption* option,
|
||||
const std::vector<OptionInfo::NamedConstant>& named_constants) {
|
||||
// The following is taken and modified from libavutil code (opt.c)
|
||||
switch (option->type) {
|
||||
case AV_OPT_TYPE_BOOL: {
|
||||
const auto value = option->default_val.i64;
|
||||
if (value < 0) {
|
||||
return "auto";
|
||||
}
|
||||
return value ? "true" : "false";
|
||||
}
|
||||
case AV_OPT_TYPE_FLAGS: {
|
||||
const auto value = option->default_val.i64;
|
||||
std::string out;
|
||||
for (const auto& constant : named_constants) {
|
||||
if (!(value & constant.value)) {
|
||||
continue;
|
||||
}
|
||||
if (!out.empty()) {
|
||||
out.append("+");
|
||||
}
|
||||
out.append(constant.name);
|
||||
}
|
||||
return out.empty() ? fmt::format("{}", value) : out;
|
||||
}
|
||||
case AV_OPT_TYPE_DURATION: {
|
||||
return FormatDuration(option->default_val.i64);
|
||||
}
|
||||
case AV_OPT_TYPE_INT:
|
||||
case AV_OPT_TYPE_UINT64:
|
||||
case AV_OPT_TYPE_INT64: {
|
||||
const auto value = option->default_val.i64;
|
||||
for (const auto& constant : named_constants) {
|
||||
if (constant.value == value) {
|
||||
return constant.name;
|
||||
}
|
||||
}
|
||||
return fmt::format("{}", value);
|
||||
}
|
||||
case AV_OPT_TYPE_DOUBLE:
|
||||
case AV_OPT_TYPE_FLOAT: {
|
||||
return fmt::format("{}", option->default_val.dbl);
|
||||
}
|
||||
case AV_OPT_TYPE_RATIONAL: {
|
||||
const auto q = av_d2q(option->default_val.dbl, std::numeric_limits<int>::max());
|
||||
return fmt::format("{}/{}", q.num, q.den);
|
||||
}
|
||||
case AV_OPT_TYPE_PIXEL_FMT: {
|
||||
const char* name = av_get_pix_fmt_name(static_cast<AVPixelFormat>(option->default_val.i64));
|
||||
return ToStdString(name, "none");
|
||||
}
|
||||
case AV_OPT_TYPE_SAMPLE_FMT: {
|
||||
const char* name =
|
||||
av_get_sample_fmt_name(static_cast<AVSampleFormat>(option->default_val.i64));
|
||||
return ToStdString(name, "none");
|
||||
}
|
||||
case AV_OPT_TYPE_COLOR:
|
||||
case AV_OPT_TYPE_IMAGE_SIZE:
|
||||
case AV_OPT_TYPE_STRING:
|
||||
case AV_OPT_TYPE_DICT:
|
||||
case AV_OPT_TYPE_VIDEO_RATE: {
|
||||
return ToStdString(option->default_val.str);
|
||||
}
|
||||
case AV_OPT_TYPE_CHANNEL_LAYOUT: {
|
||||
return fmt::format("{:#x}", option->default_val.i64);
|
||||
}
|
||||
default:
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
void GetOptionListSingle(std::vector<OptionInfo>& out, const AVClass* av_class) {
|
||||
if (av_class == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
const AVOption* current = nullptr;
|
||||
std::unordered_map<std::string, std::vector<OptionInfo::NamedConstant>> named_constants_map;
|
||||
// First iteration: find and place all named constants
|
||||
while ((current = av_opt_next(&av_class, current))) {
|
||||
if (current->type != AV_OPT_TYPE_CONST || !current->unit) {
|
||||
continue;
|
||||
}
|
||||
named_constants_map[current->unit].push_back(
|
||||
{current->name, ToStdString(current->help), current->default_val.i64});
|
||||
}
|
||||
// Second iteration: find all options
|
||||
current = nullptr;
|
||||
while ((current = av_opt_next(&av_class, current))) {
|
||||
// Currently we cannot handle binary options
|
||||
if (current->type == AV_OPT_TYPE_CONST || current->type == AV_OPT_TYPE_BINARY) {
|
||||
continue;
|
||||
}
|
||||
std::vector<OptionInfo::NamedConstant> named_constants;
|
||||
if (current->unit && named_constants_map.count(current->unit)) {
|
||||
named_constants = named_constants_map.at(current->unit);
|
||||
}
|
||||
const auto default_value = FormatDefaultValue(current, named_constants);
|
||||
out.push_back({current->name, ToStdString(current->help), current->type, default_value,
|
||||
std::move(named_constants), current->min, current->max});
|
||||
}
|
||||
}
|
||||
|
||||
void GetOptionList(std::vector<OptionInfo>& out, const AVClass* av_class, bool search_children) {
|
||||
if (av_class == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
GetOptionListSingle(out, av_class);
|
||||
|
||||
if (!search_children) {
|
||||
return;
|
||||
}
|
||||
|
||||
const AVClass* child_class = nullptr;
|
||||
while ((child_class = av_opt_child_class_next(av_class, child_class))) {
|
||||
GetOptionListSingle(out, child_class);
|
||||
}
|
||||
}
|
||||
|
||||
std::vector<OptionInfo> GetOptionList(const AVClass* av_class, bool search_children) {
|
||||
std::vector<OptionInfo> out;
|
||||
GetOptionList(out, av_class, search_children);
|
||||
return out;
|
||||
}
|
||||
|
||||
std::vector<EncoderInfo> ListEncoders(AVMediaType type) {
    InitializeFFmpegLibraries();

    std::vector<EncoderInfo> out;

    const AVCodec* current = nullptr;
#if LIBAVCODEC_VERSION_INT < AV_VERSION_INT(58, 10, 100)
    while ((current = av_codec_next(current))) {
#else
    void* data = nullptr; // For libavcodec to save the iteration state
    while ((current = av_codec_iterate(&data))) {
#endif
        if (!av_codec_is_encoder(current) || current->type != type) {
            continue;
        }
        out.push_back({current->name, ToStdString(current->long_name), current->id,
                       GetOptionList(current->priv_class, true)});
    }
    return out;
}

std::vector<OptionInfo> GetEncoderGenericOptions() {
    return GetOptionList(avcodec_get_class(), false);
}

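As a usage illustration only (not part of this commit), a frontend could enumerate the available video encoders and their options through the helpers above; PrintVideoEncoders and the use of fmt::print are hypothetical stand-ins for whatever UI or logging code actually consumes the data:

#include <fmt/format.h>

void PrintVideoEncoders() {
    for (const auto& encoder : VideoDumper::ListEncoders(AVMEDIA_TYPE_VIDEO)) {
        fmt::print("{} ({})\n", encoder.name, encoder.long_name);
        for (const auto& option : encoder.options) {
            fmt::print("    {} = {}  [{}]\n", option.name, option.default_value,
                       option.description);
        }
    }
}
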
std::vector<FormatInfo> ListFormats() {
    InitializeFFmpegLibraries();

    std::vector<FormatInfo> out;

    const AVOutputFormat* current = nullptr;
#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
    while ((current = av_oformat_next(current))) {
#else
    void* data = nullptr; // For libavformat to save the iteration state
    while ((current = av_muxer_iterate(&data))) {
#endif
        std::vector<std::string> extensions;
        Common::SplitString(ToStdString(current->extensions), ',', extensions);

        std::set<AVCodecID> supported_video_codecs;
        std::set<AVCodecID> supported_audio_codecs;
        // Go through all codecs
        const AVCodecDescriptor* codec = nullptr;
        while ((codec = avcodec_descriptor_next(codec))) {
            if (avformat_query_codec(current, codec->id, FF_COMPLIANCE_NORMAL) == 1) {
                if (codec->type == AVMEDIA_TYPE_VIDEO) {
                    supported_video_codecs.emplace(codec->id);
                } else if (codec->type == AVMEDIA_TYPE_AUDIO) {
                    supported_audio_codecs.emplace(codec->id);
                }
            }
        }

        if (supported_video_codecs.empty() || supported_audio_codecs.empty()) {
            continue;
        }

        out.push_back({current->name, ToStdString(current->long_name), std::move(extensions),
                       std::move(supported_video_codecs), std::move(supported_audio_codecs),
                       GetOptionList(current->priv_class, true)});
    }
    return out;
}

std::vector<OptionInfo> GetFormatGenericOptions() {
    return GetOptionList(avformat_get_class(), false);
}

} // namespace VideoDumper

@ -9,6 +9,7 @@
|
|||
#include <limits>
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
#include <set>
|
||||
#include <thread>
|
||||
#include <vector>
|
||||
#include "common/common_types.h"
|
||||
|
@ -19,6 +20,7 @@
|
|||
extern "C" {
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavformat/avformat.h>
|
||||
#include <libavutil/opt.h>
|
||||
#include <libswresample/swresample.h>
|
||||
#include <libswscale/swscale.h>
|
||||
}
|
||||
|
@ -29,13 +31,15 @@ using VariableAudioFrame = std::vector<s16>;
|
|||
|
||||
void InitFFmpegLibraries();
|
||||
|
||||
class FFmpegMuxer;
|
||||
|
||||
/**
|
||||
* Wrapper around FFmpeg AVCodecContext + AVStream.
|
||||
* Rescales/Resamples, encodes and writes a frame.
|
||||
*/
|
||||
class FFmpegStream {
|
||||
public:
|
||||
bool Init(AVFormatContext* format_context);
|
||||
bool Init(FFmpegMuxer& muxer);
|
||||
void Free();
|
||||
void Flush();
|
||||
|
||||
|
@ -58,6 +62,7 @@ protected:
|
|||
};
|
||||
|
||||
AVFormatContext* format_context{};
|
||||
std::mutex* format_context_mutex{};
|
||||
std::unique_ptr<AVCodecContext, AVCodecContextDeleter> codec_context{};
|
||||
AVStream* stream{};
|
||||
};
|
||||
|
@ -70,8 +75,7 @@ class FFmpegVideoStream : public FFmpegStream {
|
|||
public:
|
||||
~FFmpegVideoStream();
|
||||
|
||||
bool Init(AVFormatContext* format_context, AVOutputFormat* output_format,
|
||||
const Layout::FramebufferLayout& layout);
|
||||
bool Init(FFmpegMuxer& muxer, const Layout::FramebufferLayout& layout);
|
||||
void Free();
|
||||
void ProcessFrame(VideoFrame& frame);
|
||||
|
||||
|
@ -96,15 +100,16 @@ private:
/**
 * An FFmpegStream used for audio data.
 * Resamples (converts), encodes and writes a frame.
 * Also temporarily stores resampled audio data until there is enough to form a full frame.
 */
|
||||
class FFmpegAudioStream : public FFmpegStream {
|
||||
public:
|
||||
~FFmpegAudioStream();
|
||||
|
||||
bool Init(AVFormatContext* format_context);
|
||||
bool Init(FFmpegMuxer& muxer);
|
||||
void Free();
|
||||
void ProcessFrame(VariableAudioFrame& channel0, VariableAudioFrame& channel1);
|
||||
std::size_t GetAudioFrameSize() const;
|
||||
void ProcessFrame(const VariableAudioFrame& channel0, const VariableAudioFrame& channel1);
|
||||
void Flush();
|
||||
|
||||
private:
|
||||
struct SwrContextDeleter {
|
||||
|
@ -113,12 +118,14 @@ private:
|
|||
}
|
||||
};
|
||||
|
||||
u64 sample_count{};
|
||||
u64 frame_size{};
|
||||
u64 frame_count{};
|
||||
|
||||
std::unique_ptr<AVFrame, AVFrameDeleter> audio_frame{};
|
||||
std::unique_ptr<SwrContext, SwrContextDeleter> swr_context{};
|
||||
|
||||
u8** resampled_data{};
|
||||
u64 offset{}; // Number of output samples that are currently in resampled_data.
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -129,14 +136,12 @@ class FFmpegMuxer {
|
|||
public:
|
||||
~FFmpegMuxer();
|
||||
|
||||
bool Init(const std::string& path, const std::string& format,
|
||||
const Layout::FramebufferLayout& layout);
|
||||
bool Init(const std::string& path, const Layout::FramebufferLayout& layout);
|
||||
void Free();
|
||||
void ProcessVideoFrame(VideoFrame& frame);
|
||||
void ProcessAudioFrame(VariableAudioFrame& channel0, VariableAudioFrame& channel1);
|
||||
void ProcessAudioFrame(const VariableAudioFrame& channel0, const VariableAudioFrame& channel1);
|
||||
void FlushVideo();
|
||||
void FlushAudio();
|
||||
std::size_t GetAudioFrameSize() const;
|
||||
void WriteTrailer();
|
||||
|
||||
private:
|
||||
|
@ -150,28 +155,28 @@ private:
|
|||
FFmpegAudioStream audio_stream{};
|
||||
FFmpegVideoStream video_stream{};
|
||||
std::unique_ptr<AVFormatContext, AVFormatContextDeleter> format_context{};
|
||||
std::mutex format_context_mutex;
|
||||
|
||||
friend class FFmpegStream;
|
||||
};
|
||||
|
||||
/**
|
||||
* FFmpeg video dumping backend.
|
||||
* This class implements a double buffer, and an audio queue to keep audio data
|
||||
* before enough data is received to form a frame.
|
||||
* This class implements a double buffer.
|
||||
*/
|
||||
class FFmpegBackend : public Backend {
|
||||
public:
|
||||
FFmpegBackend();
|
||||
~FFmpegBackend() override;
|
||||
bool StartDumping(const std::string& path, const std::string& format,
|
||||
const Layout::FramebufferLayout& layout) override;
|
||||
void AddVideoFrame(const VideoFrame& frame) override;
|
||||
void AddAudioFrame(const AudioCore::StereoFrame16& frame) override;
|
||||
bool StartDumping(const std::string& path, const Layout::FramebufferLayout& layout) override;
|
||||
void AddVideoFrame(VideoFrame frame) override;
|
||||
void AddAudioFrame(AudioCore::StereoFrame16 frame) override;
|
||||
void AddAudioSample(const std::array<s16, 2>& sample) override;
|
||||
void StopDumping() override;
|
||||
bool IsDumping() const override;
|
||||
Layout::FramebufferLayout GetLayout() const override;
|
||||
|
||||
private:
|
||||
void CheckAudioBuffer();
|
||||
void EndDumping();
|
||||
|
||||
std::atomic_bool is_dumping = false; ///< Whether the backend is currently dumping
|
||||
|
@ -184,13 +189,51 @@ private:
|
|||
Common::Event event1, event2;
|
||||
std::thread video_processing_thread;
|
||||
|
||||
/// An audio buffer used to temporarily hold audio data, before the size is big enough
|
||||
/// to be sent to the encoder as a frame
|
||||
std::array<VariableAudioFrame, 2> audio_buffers;
|
||||
std::array<Common::SPSCQueue<VariableAudioFrame>, 2> audio_frame_queues;
|
||||
std::thread audio_processing_thread;
|
||||
|
||||
Common::Event processing_ended;
|
||||
};
|
||||
|
||||
/// Struct describing encoder/muxer options
|
||||
struct OptionInfo {
|
||||
std::string name;
|
||||
std::string description;
|
||||
AVOptionType type;
|
||||
std::string default_value;
|
||||
struct NamedConstant {
|
||||
std::string name;
|
||||
std::string description;
|
||||
s64 value;
|
||||
};
|
||||
std::vector<NamedConstant> named_constants;
|
||||
|
||||
// If this is a scalar type
|
||||
double min;
|
||||
double max;
|
||||
};
|
||||
|
||||
/// Struct describing an encoder
|
||||
struct EncoderInfo {
|
||||
std::string name;
|
||||
std::string long_name;
|
||||
AVCodecID codec;
|
||||
std::vector<OptionInfo> options;
|
||||
};
|
||||
|
||||
/// Struct describing a format
|
||||
struct FormatInfo {
|
||||
std::string name;
|
||||
std::string long_name;
|
||||
std::vector<std::string> extensions;
|
||||
std::set<AVCodecID> supported_video_codecs;
|
||||
std::set<AVCodecID> supported_audio_codecs;
|
||||
std::vector<OptionInfo> options;
|
||||
};
|
||||
|
||||
std::vector<EncoderInfo> ListEncoders(AVMediaType type);
|
||||
std::vector<OptionInfo> GetEncoderGenericOptions();
|
||||
std::vector<FormatInfo> ListFormats();
|
||||
std::vector<OptionInfo> GetFormatGenericOptions();
|
||||
|
||||
} // namespace VideoDumper
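For orientation, the audio_buffers member of FFmpegBackend above accumulates samples until a full encoder frame is available. The following is a minimal sketch of how such a drain step could look, using the FFmpegMuxer members declared earlier; it is illustrative only and not the actual CheckAudioBuffer implementation:

// Sketch: once both channel buffers hold at least one encoder frame worth of
// samples, cut a frame from each channel and hand it to the muxer.
void DrainAudioBuffersSketch(std::array<VariableAudioFrame, 2>& audio_buffers,
                             FFmpegMuxer& muxer) {
    const std::size_t frame_size = muxer.GetAudioFrameSize();
    if (frame_size == 0) {
        return;
    }
    while (audio_buffers[0].size() >= frame_size && audio_buffers[1].size() >= frame_size) {
        const VariableAudioFrame ch0(audio_buffers[0].begin(),
                                     audio_buffers[0].begin() + frame_size);
        const VariableAudioFrame ch1(audio_buffers[1].begin(),
                                     audio_buffers[1].begin() + frame_size);
        muxer.ProcessAudioFrame(ch0, ch1);
        audio_buffers[0].erase(audio_buffers[0].begin(), audio_buffers[0].begin() + frame_size);
        audio_buffers[1].erase(audio_buffers[1].begin(), audio_buffers[1].begin() + frame_size);
    }
}
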
|
||||
|
|
|
@ -223,6 +223,7 @@ void Thread::ResumeFromWait() {
    case ThreadStatus::WaitArb:
    case ThreadStatus::WaitSleep:
    case ThreadStatus::WaitIPC:
    case ThreadStatus::Dormant:
        break;

    case ThreadStatus::Ready:

@ -120,15 +120,10 @@ void Module::Interface::GetSoftwareClosedFlag(Kernel::HLERequestContext& ctx) {
void CheckNew3DS(IPC::RequestBuilder& rb) {
    const bool is_new_3ds = Settings::values.is_new_3ds;

    if (is_new_3ds) {
        LOG_CRITICAL(Service_PTM, "The option 'is_new_3ds' is enabled as part of the 'System' "
                                  "settings. Citra does not fully support New 3DS emulation yet!");
    }

    rb.Push(RESULT_SUCCESS);
    rb.Push(is_new_3ds);

    LOG_WARNING(Service_PTM, "(STUBBED) called isNew3DS = 0x{:08x}", static_cast<u32>(is_new_3ds));
    LOG_DEBUG(Service_PTM, "called isNew3DS = 0x{:08x}", static_cast<u32>(is_new_3ds));
}

void Module::Interface::CheckNew3DS(Kernel::HLERequestContext& ctx) {

@ -13,7 +13,6 @@
#include "core/hle/service/mic_u.h"
#include "core/settings.h"
#include "video_core/renderer_base.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_manager.h"
#include "video_core/video_core.h"

namespace Settings {

@ -38,9 +37,7 @@ void Apply() {
    VideoCore::g_renderer_bg_color_update_requested = true;
    VideoCore::g_renderer_sampler_update_requested = true;
    VideoCore::g_renderer_shader_update_requested = true;

    OpenGL::TextureFilterManager::GetInstance().SetTextureFilter(values.texture_filter_name,
                                                                 values.texture_filter_factor);
    VideoCore::g_texture_filter_update_requested = true;

    auto& system = Core::System::GetInstance();
    if (system.IsPoweredOn()) {

@ -88,7 +85,6 @@ void LogSettings() {
    LogSetting("Renderer_FrameLimit", Settings::values.frame_limit);
    LogSetting("Renderer_PostProcessingShader", Settings::values.pp_shader_name);
    LogSetting("Renderer_FilterMode", Settings::values.filter_mode);
    LogSetting("Renderer_TextureFilterFactor", Settings::values.texture_filter_factor);
    LogSetting("Renderer_TextureFilterName", Settings::values.texture_filter_name);
    LogSetting("Stereoscopy_Render3d", static_cast<int>(Settings::values.render_3d));
    LogSetting("Stereoscopy_Factor3d", Settings::values.factor_3d);

@ -148,7 +148,6 @@ struct Values {
    u16 resolution_factor;
    bool use_frame_limit;
    u16 frame_limit;
    u16 texture_filter_factor;
    std::string texture_filter_name;

    LayoutOption layout_option;

@ -207,6 +206,18 @@ struct Values {
    std::string web_api_url;
    std::string citra_username;
    std::string citra_token;

    // Video Dumping
    std::string output_format;
    std::string format_options;

    std::string video_encoder;
    std::string video_encoder_options;
    u64 video_bitrate;

    std::string audio_encoder;
    std::string audio_encoder_options;
    u64 audio_bitrate;
} extern values;

// a special value for Values::region_value indicating that citra will automatically select a region

@ -472,6 +472,14 @@ SDLState::SDLState() {
    if (SDL_SetHint(SDL_HINT_JOYSTICK_ALLOW_BACKGROUND_EVENTS, "1") == SDL_FALSE) {
        LOG_ERROR(Input, "Failed to set hint for background events: {}", SDL_GetError());
    }
    // These hints are only defined in SDL 2.0.9 or higher
#if SDL_VERSION_ATLEAST(2, 0, 9)
    // This can be set back to 1 when the compatibility problems with the controllers are
    // solved. There are also hints to toggle the individual drivers.
    SDL_SetHint(SDL_HINT_JOYSTICK_HIDAPI, "0");
    // This hint should probably stay as "0" as long as the hidapi PS4 led issue isn't fixed
    SDL_SetHint(SDL_HINT_JOYSTICK_HIDAPI_PS4, "0");
#endif

    SDL_AddEventWatch(&SDLEventWatcher, this);

@ -53,7 +53,7 @@ public:
    mutable std::mutex ban_list_mutex; ///< Mutex for the ban lists

    RoomImpl()
        : random_gen(std::random_device()()), NintendoOUI{0x00, 0x1F, 0x32, 0x00, 0x00, 0x00} {}
        : NintendoOUI{0x00, 0x1F, 0x32, 0x00, 0x00, 0x00}, random_gen(std::random_device()()) {}

    /// Thread that receives and dispatches network packets
    std::unique_ptr<std::thread> room_thread;

@ -242,6 +242,13 @@ void RoomMember::RoomMemberImpl::MemberLoop() {
                    SetError(Error::LostConnection);
                }
                break;
            case ENET_EVENT_TYPE_NONE:
                break;
            case ENET_EVENT_TYPE_CONNECT:
                // The ENET_EVENT_TYPE_CONNECT event can not possibly happen here because we're
                // already connected
                ASSERT_MSG(false, "Received unexpected connect event while already connected");
                break;
            }
        }
        {

@ -23,6 +23,8 @@ add_library(video_core STATIC
|
|||
regs_texturing.h
|
||||
renderer_base.cpp
|
||||
renderer_base.h
|
||||
renderer_opengl/frame_dumper_opengl.cpp
|
||||
renderer_opengl/frame_dumper_opengl.h
|
||||
renderer_opengl/gl_rasterizer.cpp
|
||||
renderer_opengl/gl_rasterizer.h
|
||||
renderer_opengl/gl_rasterizer_cache.cpp
|
||||
|
@ -43,6 +45,8 @@ add_library(video_core STATIC
|
|||
renderer_opengl/gl_state.h
|
||||
renderer_opengl/gl_stream_buffer.cpp
|
||||
renderer_opengl/gl_stream_buffer.h
|
||||
renderer_opengl/gl_surface_params.cpp
|
||||
renderer_opengl/gl_surface_params.h
|
||||
renderer_opengl/gl_vars.cpp
|
||||
renderer_opengl/gl_vars.h
|
||||
renderer_opengl/pica_to_gl.h
|
||||
|
@ -54,11 +58,14 @@ add_library(video_core STATIC
|
|||
renderer_opengl/texture_filters/anime4k/anime4k_ultrafast.h
|
||||
renderer_opengl/texture_filters/bicubic/bicubic.cpp
|
||||
renderer_opengl/texture_filters/bicubic/bicubic.h
|
||||
renderer_opengl/texture_filters/texture_filter_interface.h
|
||||
renderer_opengl/texture_filters/texture_filter_manager.cpp
|
||||
renderer_opengl/texture_filters/texture_filter_manager.h
|
||||
renderer_opengl/texture_filters/texture_filter_base.h
|
||||
renderer_opengl/texture_filters/texture_filterer.cpp
|
||||
renderer_opengl/texture_filters/texture_filterer.h
|
||||
renderer_opengl/texture_filters/xbrz/xbrz_freescale.cpp
|
||||
renderer_opengl/texture_filters/xbrz/xbrz_freescale.h
|
||||
#temporary, move these back in alphabetical order before merging
|
||||
renderer_opengl/gl_format_reinterpreter.cpp
|
||||
renderer_opengl/gl_format_reinterpreter.h
|
||||
shader/debug_data.h
|
||||
shader/shader.cpp
|
||||
shader/shader.h
|
||||
|
|
|
@ -0,0 +1,98 @@
|
|||
// Copyright 2020 Citra Emulator Project
|
||||
// Licensed under GPLv2 or any later version
|
||||
// Refer to the license.txt file included.
|
||||
|
||||
#include <glad/glad.h>
|
||||
#include "core/frontend/emu_window.h"
|
||||
#include "core/frontend/scope_acquire_context.h"
|
||||
#include "video_core/renderer_opengl/frame_dumper_opengl.h"
|
||||
#include "video_core/renderer_opengl/renderer_opengl.h"
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
FrameDumperOpenGL::FrameDumperOpenGL(VideoDumper::Backend& video_dumper_,
|
||||
Frontend::EmuWindow& emu_window)
|
||||
: video_dumper(video_dumper_), context(emu_window.CreateSharedContext()) {}
|
||||
|
||||
FrameDumperOpenGL::~FrameDumperOpenGL() {
|
||||
if (present_thread.joinable())
|
||||
present_thread.join();
|
||||
}
|
||||
|
||||
bool FrameDumperOpenGL::IsDumping() const {
|
||||
return video_dumper.IsDumping();
|
||||
}
|
||||
|
||||
Layout::FramebufferLayout FrameDumperOpenGL::GetLayout() const {
|
||||
return video_dumper.GetLayout();
|
||||
}
|
||||
|
||||
void FrameDumperOpenGL::StartDumping() {
|
||||
if (present_thread.joinable())
|
||||
present_thread.join();
|
||||
|
||||
present_thread = std::thread(&FrameDumperOpenGL::PresentLoop, this);
|
||||
}
|
||||
|
||||
void FrameDumperOpenGL::StopDumping() {
|
||||
stop_requested.store(true, std::memory_order_relaxed);
|
||||
}
|
||||
|
||||
void FrameDumperOpenGL::PresentLoop() {
|
||||
Frontend::ScopeAcquireContext scope{*context};
|
||||
InitializeOpenGLObjects();
|
||||
|
||||
const auto& layout = GetLayout();
|
||||
while (!stop_requested.exchange(false)) {
|
||||
auto frame = mailbox->TryGetPresentFrame(200);
|
||||
if (!frame) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (frame->color_reloaded) {
|
||||
LOG_DEBUG(Render_OpenGL, "Reloading present frame");
|
||||
mailbox->ReloadPresentFrame(frame, layout.width, layout.height);
|
||||
}
|
||||
glWaitSync(frame->render_fence, 0, GL_TIMEOUT_IGNORED);
|
||||
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, frame->present.handle);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[current_pbo].handle);
|
||||
glReadPixels(0, 0, layout.width, layout.height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, 0);
|
||||
|
||||
// Insert fence for the main thread to block on
|
||||
frame->present_fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
glFlush();
|
||||
|
||||
// Bind the previous PBO and read the pixels
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[next_pbo].handle);
|
||||
GLubyte* pixels = static_cast<GLubyte*>(glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY));
|
||||
VideoDumper::VideoFrame frame_data{layout.width, layout.height, pixels};
|
||||
video_dumper.AddVideoFrame(std::move(frame_data));
|
||||
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
|
||||
current_pbo = (current_pbo + 1) % 2;
|
||||
next_pbo = (current_pbo + 1) % 2;
|
||||
}
|
||||
|
||||
CleanupOpenGLObjects();
|
||||
}
|
||||
|
||||
void FrameDumperOpenGL::InitializeOpenGLObjects() {
|
||||
const auto& layout = GetLayout();
|
||||
for (auto& buffer : pbos) {
|
||||
buffer.Create();
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, buffer.handle);
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, layout.width * layout.height * 4, nullptr,
|
||||
GL_STREAM_READ);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
}
|
||||
}
|
||||
|
||||
void FrameDumperOpenGL::CleanupOpenGLObjects() {
|
||||
for (auto& buffer : pbos) {
|
||||
buffer.Release();
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace OpenGL
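The present loop above relies on two pixel buffer objects used in ping-pong fashion: the glReadPixels for frame N is issued into one PBO while frame N-1 is mapped from the other, so the GPU-to-CPU transfer overlaps with encoding. A self-contained sketch of that pattern follows; it assumes an active OpenGL context with glad loaded, and the class and function names are illustrative rather than Citra API:

#include <array>
#include <cstddef>
#include <vector>
#include <glad/glad.h>

// Double-buffered PBO readback: frame N is read back asynchronously while the
// previous frame is mapped and copied out, hiding most of the transfer latency.
class PboReadbackSketch {
public:
    void Init(GLsizei width_, GLsizei height_) {
        width = width_;
        height = height_;
        glGenBuffers(2, pbos.data());
        for (GLuint pbo : pbos) {
            glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo);
            glBufferData(GL_PIXEL_PACK_BUFFER, static_cast<GLsizeiptr>(4) * width * height,
                         nullptr, GL_STREAM_READ);
        }
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
    }

    // Issues a readback of the currently bound read framebuffer and returns the
    // pixels captured by the previous call (empty on the first call).
    std::vector<GLubyte> Capture() {
        glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[current]);
        glReadPixels(0, 0, width, height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, nullptr);

        std::vector<GLubyte> out;
        if (have_previous) {
            glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[1 - current]);
            if (const auto* pixels = static_cast<const GLubyte*>(
                    glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY))) {
                out.assign(pixels, pixels + static_cast<std::size_t>(4) * width * height);
                glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
            }
        }
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

        have_previous = true;
        current = 1 - current;
        return out;
    }

private:
    std::array<GLuint, 2> pbos{};
    GLsizei width = 0;
    GLsizei height = 0;
    int current = 0;
    bool have_previous = false;
};
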
|
|
@ -0,0 +1,57 @@
|
|||
// Copyright 2020 Citra Emulator Project
|
||||
// Licensed under GPLv2 or any later version
|
||||
// Refer to the license.txt file included.
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <atomic>
|
||||
#include <memory>
|
||||
#include <thread>
|
||||
#include "core/dumping/backend.h"
|
||||
#include "core/frontend/framebuffer_layout.h"
|
||||
#include "video_core/renderer_opengl/gl_resource_manager.h"
|
||||
|
||||
namespace Frontend {
|
||||
class EmuWindow;
|
||||
class GraphicsContext;
|
||||
class TextureMailbox;
|
||||
} // namespace Frontend
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
class RendererOpenGL;
|
||||
|
||||
/**
|
||||
* This is the 'presentation' part in frame dumping.
|
||||
* Processes frames/textures sent to its mailbox, downloads the pixels and sends the data
|
||||
* to the video encoding backend.
|
||||
*/
|
||||
class FrameDumperOpenGL {
|
||||
public:
|
||||
explicit FrameDumperOpenGL(VideoDumper::Backend& video_dumper, Frontend::EmuWindow& emu_window);
|
||||
~FrameDumperOpenGL();
|
||||
|
||||
bool IsDumping() const;
|
||||
Layout::FramebufferLayout GetLayout() const;
|
||||
void StartDumping();
|
||||
void StopDumping();
|
||||
|
||||
std::unique_ptr<Frontend::TextureMailbox> mailbox;
|
||||
|
||||
private:
|
||||
void InitializeOpenGLObjects();
|
||||
void CleanupOpenGLObjects();
|
||||
void PresentLoop();
|
||||
|
||||
VideoDumper::Backend& video_dumper;
|
||||
std::unique_ptr<Frontend::GraphicsContext> context;
|
||||
std::thread present_thread;
|
||||
std::atomic_bool stop_requested{false};
|
||||
|
||||
// PBOs used to dump frames faster
|
||||
std::array<OGLBuffer, 2> pbos;
|
||||
GLuint current_pbo = 1;
|
||||
GLuint next_pbo = 0;
|
||||
};
|
||||
|
||||
} // namespace OpenGL
|
|
@ -0,0 +1,238 @@
|
|||
// Copyright 2020 Citra Emulator Project
|
||||
// Licensed under GPLv2 or any later version
|
||||
// Refer to the license.txt file included.
|
||||
|
||||
#include "common/assert.h"
|
||||
#include "common/scope_exit.h"
|
||||
#include "video_core/renderer_opengl/gl_format_reinterpreter.h"
|
||||
#include "video_core/renderer_opengl/gl_rasterizer_cache.h"
|
||||
#include "video_core/renderer_opengl/gl_state.h"
|
||||
#include "video_core/renderer_opengl/gl_vars.h"
|
||||
#include "video_core/renderer_opengl/texture_filters/texture_filterer.h"
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
using PixelFormat = SurfaceParams::PixelFormat;
|
||||
|
||||
class RGBA4toRGB5A1 final : public FormatReinterpreterBase {
|
||||
public:
|
||||
RGBA4toRGB5A1() {
|
||||
constexpr std::string_view vs_source = R"(
|
||||
out vec2 dst_coord;
|
||||
|
||||
uniform mediump ivec2 dst_size;
|
||||
|
||||
const vec2 vertices[4] =
|
||||
vec2[4](vec2(-1.0, -1.0), vec2(1.0, -1.0), vec2(-1.0, 1.0), vec2(1.0, 1.0));
|
||||
|
||||
void main() {
|
||||
gl_Position = vec4(vertices[gl_VertexID], 0.0, 1.0);
|
||||
dst_coord = (vertices[gl_VertexID] / 2.0 + 0.5) * vec2(dst_size);
|
||||
}
|
||||
)";
|
||||
|
||||
constexpr std::string_view fs_source = R"(
|
||||
in mediump vec2 dst_coord;
|
||||
|
||||
out lowp vec4 frag_color;
|
||||
|
||||
uniform lowp sampler2D source;
|
||||
uniform mediump ivec2 dst_size;
|
||||
uniform mediump ivec2 src_size;
|
||||
uniform mediump ivec2 src_offset;
|
||||
|
||||
void main() {
|
||||
mediump ivec2 tex_coord;
|
||||
if (src_size == dst_size) {
|
||||
tex_coord = ivec2(dst_coord);
|
||||
} else {
|
||||
highp int tex_index = int(dst_coord.y) * dst_size.x + int(dst_coord.x);
|
||||
mediump int y = tex_index / src_size.x;
|
||||
tex_coord = ivec2(tex_index - y * src_size.x, y);
|
||||
}
|
||||
tex_coord -= src_offset;
|
||||
|
||||
lowp ivec4 rgba4 = ivec4(texelFetch(source, tex_coord, 0) * (exp2(4.0) - 1.0));
|
||||
lowp ivec3 rgb5 =
|
||||
((rgba4.rgb << ivec3(1, 2, 3)) | (rgba4.gba >> ivec3(3, 2, 1))) & 0x1F;
|
||||
frag_color = vec4(vec3(rgb5) / (exp2(5.0) - 1.0), rgba4.a & 0x01);
|
||||
}
|
||||
)";
|
||||
|
||||
program.Create(vs_source.data(), fs_source.data());
|
||||
dst_size_loc = glGetUniformLocation(program.handle, "dst_size");
|
||||
src_size_loc = glGetUniformLocation(program.handle, "src_size");
|
||||
src_offset_loc = glGetUniformLocation(program.handle, "src_offset");
|
||||
vao.Create();
|
||||
}
|
||||
|
||||
void Reinterpret(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint read_fb_handle,
|
||||
GLuint dst_tex, const Common::Rectangle<u32>& dst_rect,
|
||||
GLuint draw_fb_handle) override {
|
||||
OpenGLState prev_state = OpenGLState::GetCurState();
|
||||
SCOPE_EXIT({ prev_state.Apply(); });
|
||||
|
||||
OpenGLState state;
|
||||
state.texture_units[0].texture_2d = src_tex;
|
||||
state.draw.draw_framebuffer = draw_fb_handle;
|
||||
state.draw.shader_program = program.handle;
|
||||
state.draw.vertex_array = vao.handle;
|
||||
state.viewport = {static_cast<GLint>(dst_rect.left), static_cast<GLint>(dst_rect.bottom),
|
||||
static_cast<GLsizei>(dst_rect.GetWidth()),
|
||||
static_cast<GLsizei>(dst_rect.GetHeight())};
|
||||
state.Apply();
|
||||
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst_tex,
|
||||
0);
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0,
|
||||
0);
|
||||
|
||||
glUniform2i(dst_size_loc, dst_rect.GetWidth(), dst_rect.GetHeight());
|
||||
glUniform2i(src_size_loc, src_rect.GetWidth(), src_rect.GetHeight());
|
||||
glUniform2i(src_offset_loc, src_rect.left, src_rect.bottom);
|
||||
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
|
||||
}
|
||||
|
||||
private:
|
||||
OGLProgram program;
|
||||
GLint dst_size_loc{-1}, src_size_loc{-1}, src_offset_loc{-1};
|
||||
OGLVertexArray vao;
|
||||
};
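For reference, the channel shifts in the fragment shader above do not convert values; they reinterpret the 16-bit texel's bit pattern under the RGB5A1 layout. A standalone CPU-side sketch of the same bit manipulation (illustrative only; it assumes the packed RGBA4 word stores R in the top nibble):

#include <array>
#include <cstdint>

// Reinterpret one packed RGBA4 texel (R bits 15-12, G 11-8, B 7-4, A 3-0) as
// RGB5A1 (R 15-11, G 10-6, B 5-1, A 0), mirroring the shader's per-channel shifts.
std::array<int, 4> ReinterpretRGBA4AsRGB5A1(std::uint16_t bits) {
    const int r4 = (bits >> 12) & 0xF;
    const int g4 = (bits >> 8) & 0xF;
    const int b4 = (bits >> 4) & 0xF;
    const int a4 = bits & 0xF;

    const int r5 = ((r4 << 1) | (g4 >> 3)) & 0x1F;
    const int g5 = ((g4 << 2) | (b4 >> 2)) & 0x1F;
    const int b5 = ((b4 << 3) | (a4 >> 1)) & 0x1F;
    const int a1 = a4 & 0x1;
    return {r5, g5, b5, a1};
}
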
|
||||
|
||||
class PixelBufferD24S8toABGR final : public FormatReinterpreterBase {
|
||||
public:
|
||||
PixelBufferD24S8toABGR() {
|
||||
attributeless_vao.Create();
|
||||
d24s8_abgr_buffer.Create();
|
||||
d24s8_abgr_buffer_size = 0;
|
||||
|
||||
constexpr std::string_view vs_source = R"(
|
||||
const vec2 vertices[4] = vec2[4](vec2(-1.0, -1.0), vec2(1.0, -1.0),
|
||||
vec2(-1.0, 1.0), vec2(1.0, 1.0));
|
||||
void main() {
|
||||
gl_Position = vec4(vertices[gl_VertexID], 0.0, 1.0);
|
||||
}
|
||||
)";
|
||||
|
||||
std::string fs_source = GLES ? fragment_shader_precision_OES : "";
|
||||
fs_source += R"(
|
||||
uniform samplerBuffer tbo;
|
||||
uniform vec2 tbo_size;
|
||||
uniform vec4 viewport;
|
||||
|
||||
out vec4 color;
|
||||
|
||||
void main() {
|
||||
vec2 tbo_coord = (gl_FragCoord.xy - viewport.xy) * tbo_size / viewport.zw;
|
||||
int tbo_offset = int(tbo_coord.y) * int(tbo_size.x) + int(tbo_coord.x);
|
||||
color = texelFetch(tbo, tbo_offset).rabg;
|
||||
}
|
||||
)";
|
||||
d24s8_abgr_shader.Create(vs_source.data(), fs_source.c_str());
|
||||
|
||||
OpenGLState state = OpenGLState::GetCurState();
|
||||
GLuint old_program = state.draw.shader_program;
|
||||
state.draw.shader_program = d24s8_abgr_shader.handle;
|
||||
state.Apply();
|
||||
|
||||
GLint tbo_u_id = glGetUniformLocation(d24s8_abgr_shader.handle, "tbo");
|
||||
ASSERT(tbo_u_id != -1);
|
||||
glUniform1i(tbo_u_id, 0);
|
||||
|
||||
state.draw.shader_program = old_program;
|
||||
state.Apply();
|
||||
|
||||
d24s8_abgr_tbo_size_u_id = glGetUniformLocation(d24s8_abgr_shader.handle, "tbo_size");
|
||||
ASSERT(d24s8_abgr_tbo_size_u_id != -1);
|
||||
d24s8_abgr_viewport_u_id = glGetUniformLocation(d24s8_abgr_shader.handle, "viewport");
|
||||
ASSERT(d24s8_abgr_viewport_u_id != -1);
|
||||
}
|
||||
|
||||
~PixelBufferD24S8toABGR() {}
|
||||
|
||||
void Reinterpret(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint read_fb_handle,
|
||||
GLuint dst_tex, const Common::Rectangle<u32>& dst_rect,
|
||||
GLuint draw_fb_handle) override {
|
||||
OpenGLState prev_state = OpenGLState::GetCurState();
|
||||
SCOPE_EXIT({ prev_state.Apply(); });
|
||||
|
||||
OpenGLState state;
|
||||
state.draw.read_framebuffer = read_fb_handle;
|
||||
state.draw.draw_framebuffer = draw_fb_handle;
|
||||
state.Apply();
|
||||
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, d24s8_abgr_buffer.handle);
|
||||
|
||||
GLsizeiptr target_pbo_size =
|
||||
static_cast<GLsizeiptr>(src_rect.GetWidth()) * src_rect.GetHeight() * 4;
|
||||
if (target_pbo_size > d24s8_abgr_buffer_size) {
|
||||
d24s8_abgr_buffer_size = target_pbo_size * 2;
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, d24s8_abgr_buffer_size, nullptr, GL_STREAM_COPY);
|
||||
}
|
||||
|
||||
glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D,
|
||||
src_tex, 0);
|
||||
glReadPixels(static_cast<GLint>(src_rect.left), static_cast<GLint>(src_rect.bottom),
|
||||
static_cast<GLsizei>(src_rect.GetWidth()),
|
||||
static_cast<GLsizei>(src_rect.GetHeight()), GL_DEPTH_STENCIL,
|
||||
GL_UNSIGNED_INT_24_8, 0);
|
||||
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
|
||||
// PBO now contains src_tex in RABG format
|
||||
state.draw.shader_program = d24s8_abgr_shader.handle;
|
||||
state.draw.vertex_array = attributeless_vao.handle;
|
||||
state.viewport.x = static_cast<GLint>(dst_rect.left);
|
||||
state.viewport.y = static_cast<GLint>(dst_rect.bottom);
|
||||
state.viewport.width = static_cast<GLsizei>(dst_rect.GetWidth());
|
||||
state.viewport.height = static_cast<GLsizei>(dst_rect.GetHeight());
|
||||
state.Apply();
|
||||
|
||||
OGLTexture tbo;
|
||||
tbo.Create();
|
||||
glActiveTexture(GL_TEXTURE0);
|
||||
glBindTexture(GL_TEXTURE_BUFFER, tbo.handle);
|
||||
glTexBuffer(GL_TEXTURE_BUFFER, GL_RGBA8, d24s8_abgr_buffer.handle);
|
||||
|
||||
glUniform2f(d24s8_abgr_tbo_size_u_id, static_cast<GLfloat>(src_rect.GetWidth()),
|
||||
static_cast<GLfloat>(src_rect.GetHeight()));
|
||||
glUniform4f(d24s8_abgr_viewport_u_id, static_cast<GLfloat>(state.viewport.x),
|
||||
static_cast<GLfloat>(state.viewport.y),
|
||||
static_cast<GLfloat>(state.viewport.width),
|
||||
static_cast<GLfloat>(state.viewport.height));
|
||||
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst_tex,
|
||||
0);
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0,
|
||||
0);
|
||||
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
glBindTexture(GL_TEXTURE_BUFFER, 0);
|
||||
}
|
||||
|
||||
private:
|
||||
OGLVertexArray attributeless_vao;
|
||||
OGLBuffer d24s8_abgr_buffer;
|
||||
GLsizeiptr d24s8_abgr_buffer_size;
|
||||
OGLProgram d24s8_abgr_shader;
|
||||
GLint d24s8_abgr_tbo_size_u_id;
|
||||
GLint d24s8_abgr_viewport_u_id;
|
||||
};
|
||||
|
||||
FormatReinterpreterOpenGL::FormatReinterpreterOpenGL() {
|
||||
reinterpreters.emplace(PixelFormatPair{PixelFormat::RGBA8, PixelFormat::D24S8},
|
||||
std::make_unique<PixelBufferD24S8toABGR>());
|
||||
reinterpreters.emplace(PixelFormatPair{PixelFormat::RGB5A1, PixelFormat::RGBA4},
|
||||
std::make_unique<RGBA4toRGB5A1>());
|
||||
}
|
||||
|
||||
FormatReinterpreterOpenGL::~FormatReinterpreterOpenGL() = default;
|
||||
|
||||
std::pair<FormatReinterpreterOpenGL::ReinterpreterMap::iterator,
|
||||
FormatReinterpreterOpenGL::ReinterpreterMap::iterator>
|
||||
FormatReinterpreterOpenGL::GetPossibleReinterpretations(PixelFormat dst_format) {
|
||||
return reinterpreters.equal_range(dst_format);
|
||||
}
|
||||
|
||||
} // namespace OpenGL
|
|
@ -0,0 +1,62 @@
|
|||
// Copyright 2020 Citra Emulator Project
|
||||
// Licensed under GPLv2 or any later version
|
||||
// Refer to the license.txt file included.
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <map>
|
||||
#include <type_traits>
|
||||
#include <glad/glad.h>
|
||||
#include "common/common_types.h"
|
||||
#include "common/math_util.h"
|
||||
#include "video_core/renderer_opengl/gl_resource_manager.h"
|
||||
#include "video_core/renderer_opengl/gl_surface_params.h"
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
class RasterizerCacheOpenGL;
|
||||
|
||||
struct PixelFormatPair {
|
||||
const SurfaceParams::PixelFormat dst_format, src_format;
|
||||
struct less {
|
||||
using is_transparent = void;
|
||||
constexpr bool operator()(OpenGL::PixelFormatPair lhs, OpenGL::PixelFormatPair rhs) const {
|
||||
return std::tie(lhs.dst_format, lhs.src_format) <
|
||||
std::tie(rhs.dst_format, rhs.src_format);
|
||||
}
|
||||
constexpr bool operator()(OpenGL::SurfaceParams::PixelFormat lhs,
|
||||
OpenGL::PixelFormatPair rhs) const {
|
||||
return lhs < rhs.dst_format;
|
||||
}
|
||||
constexpr bool operator()(OpenGL::PixelFormatPair lhs,
|
||||
OpenGL::SurfaceParams::PixelFormat rhs) const {
|
||||
return lhs.dst_format < rhs;
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
class FormatReinterpreterBase {
|
||||
public:
|
||||
virtual ~FormatReinterpreterBase() = default;
|
||||
|
||||
virtual void Reinterpret(GLuint src_tex, const Common::Rectangle<u32>& src_rect,
|
||||
GLuint read_fb_handle, GLuint dst_tex,
|
||||
const Common::Rectangle<u32>& dst_rect, GLuint draw_fb_handle) = 0;
|
||||
};
|
||||
|
||||
class FormatReinterpreterOpenGL : NonCopyable {
|
||||
using ReinterpreterMap =
|
||||
std::map<PixelFormatPair, std::unique_ptr<FormatReinterpreterBase>, PixelFormatPair::less>;
|
||||
|
||||
public:
|
||||
explicit FormatReinterpreterOpenGL();
|
||||
~FormatReinterpreterOpenGL();
|
||||
|
||||
std::pair<ReinterpreterMap::iterator, ReinterpreterMap::iterator> GetPossibleReinterpretations(
|
||||
SurfaceParams::PixelFormat dst_format);
|
||||
|
||||
private:
|
||||
ReinterpreterMap reinterpreters;
|
||||
};
|
||||
|
||||
} // namespace OpenGL
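The is_transparent alias on PixelFormatPair::less is what allows GetPossibleReinterpretations to call equal_range with a bare destination format instead of building a full PixelFormatPair key. A self-contained sketch of the same standard-library idiom, with generic names rather than the Citra types:

#include <cstdio>
#include <map>
#include <tuple>

struct Pair {
    int dst, src;
};

struct PairLess {
    using is_transparent = void; // enables heterogeneous lookup by dst alone
    bool operator()(Pair a, Pair b) const {
        return std::tie(a.dst, a.src) < std::tie(b.dst, b.src);
    }
    bool operator()(int dst, Pair b) const {
        return dst < b.dst;
    }
    bool operator()(Pair a, int dst) const {
        return a.dst < dst;
    }
};

int main() {
    const std::map<Pair, const char*, PairLess> reinterpreters{
        {{1, 5}, "reinterpreter A -> dst 1"},
        {{1, 7}, "reinterpreter B -> dst 1"},
        {{2, 3}, "reinterpreter C -> dst 2"},
    };
    // All reinterpreters producing destination format 1, looked up without a Pair key.
    const auto [begin, end] = reinterpreters.equal_range(1);
    for (auto it = begin; it != end; ++it) {
        std::puts(it->second);
    }
}
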
|
|
@ -5,6 +5,7 @@
|
|||
#include <algorithm>
|
||||
#include <array>
|
||||
#include <atomic>
|
||||
#include <bitset>
|
||||
#include <cstring>
|
||||
#include <iterator>
|
||||
#include <memory>
|
||||
|
@ -31,10 +32,11 @@
|
|||
#include "core/settings.h"
|
||||
#include "video_core/pica_state.h"
|
||||
#include "video_core/renderer_base.h"
|
||||
#include "video_core/renderer_opengl/gl_format_reinterpreter.h"
|
||||
#include "video_core/renderer_opengl/gl_rasterizer_cache.h"
|
||||
#include "video_core/renderer_opengl/gl_state.h"
|
||||
#include "video_core/renderer_opengl/gl_vars.h"
|
||||
#include "video_core/renderer_opengl/texture_filters/texture_filter_manager.h"
|
||||
#include "video_core/renderer_opengl/texture_filters/texture_filterer.h"
|
||||
#include "video_core/utils.h"
|
||||
#include "video_core/video_core.h"
|
||||
|
||||
|
@ -493,125 +495,6 @@ static bool FillSurface(const Surface& surface, const u8* fill_data,
|
|||
return true;
|
||||
}
|
||||
|
||||
SurfaceParams SurfaceParams::FromInterval(SurfaceInterval interval) const {
|
||||
SurfaceParams params = *this;
|
||||
const u32 tiled_size = is_tiled ? 8 : 1;
|
||||
const u32 stride_tiled_bytes = BytesInPixels(stride * tiled_size);
|
||||
PAddr aligned_start =
|
||||
addr + Common::AlignDown(boost::icl::first(interval) - addr, stride_tiled_bytes);
|
||||
PAddr aligned_end =
|
||||
addr + Common::AlignUp(boost::icl::last_next(interval) - addr, stride_tiled_bytes);
|
||||
|
||||
if (aligned_end - aligned_start > stride_tiled_bytes) {
|
||||
params.addr = aligned_start;
|
||||
params.height = (aligned_end - aligned_start) / BytesInPixels(stride);
|
||||
} else {
|
||||
// 1 row
|
||||
ASSERT(aligned_end - aligned_start == stride_tiled_bytes);
|
||||
const u32 tiled_alignment = BytesInPixels(is_tiled ? 8 * 8 : 1);
|
||||
aligned_start =
|
||||
addr + Common::AlignDown(boost::icl::first(interval) - addr, tiled_alignment);
|
||||
aligned_end =
|
||||
addr + Common::AlignUp(boost::icl::last_next(interval) - addr, tiled_alignment);
|
||||
params.addr = aligned_start;
|
||||
params.width = PixelsInBytes(aligned_end - aligned_start) / tiled_size;
|
||||
params.stride = params.width;
|
||||
params.height = tiled_size;
|
||||
}
|
||||
params.UpdateParams();
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
SurfaceInterval SurfaceParams::GetSubRectInterval(Common::Rectangle<u32> unscaled_rect) const {
|
||||
if (unscaled_rect.GetHeight() == 0 || unscaled_rect.GetWidth() == 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
if (is_tiled) {
|
||||
unscaled_rect.left = Common::AlignDown(unscaled_rect.left, 8) * 8;
|
||||
unscaled_rect.bottom = Common::AlignDown(unscaled_rect.bottom, 8) / 8;
|
||||
unscaled_rect.right = Common::AlignUp(unscaled_rect.right, 8) * 8;
|
||||
unscaled_rect.top = Common::AlignUp(unscaled_rect.top, 8) / 8;
|
||||
}
|
||||
|
||||
const u32 stride_tiled = !is_tiled ? stride : stride * 8;
|
||||
|
||||
const u32 pixel_offset =
|
||||
stride_tiled * (!is_tiled ? unscaled_rect.bottom : (height / 8) - unscaled_rect.top) +
|
||||
unscaled_rect.left;
|
||||
|
||||
const u32 pixels = (unscaled_rect.GetHeight() - 1) * stride_tiled + unscaled_rect.GetWidth();
|
||||
|
||||
return {addr + BytesInPixels(pixel_offset), addr + BytesInPixels(pixel_offset + pixels)};
|
||||
}
|
||||
|
||||
Common::Rectangle<u32> SurfaceParams::GetSubRect(const SurfaceParams& sub_surface) const {
|
||||
const u32 begin_pixel_index = PixelsInBytes(sub_surface.addr - addr);
|
||||
|
||||
if (is_tiled) {
|
||||
const int x0 = (begin_pixel_index % (stride * 8)) / 8;
|
||||
const int y0 = (begin_pixel_index / (stride * 8)) * 8;
|
||||
// Top to bottom
|
||||
return Common::Rectangle<u32>(x0, height - y0, x0 + sub_surface.width,
|
||||
height - (y0 + sub_surface.height));
|
||||
}
|
||||
|
||||
const int x0 = begin_pixel_index % stride;
|
||||
const int y0 = begin_pixel_index / stride;
|
||||
// Bottom to top
|
||||
return Common::Rectangle<u32>(x0, y0 + sub_surface.height, x0 + sub_surface.width, y0);
|
||||
}
|
||||
|
||||
Common::Rectangle<u32> SurfaceParams::GetScaledSubRect(const SurfaceParams& sub_surface) const {
|
||||
auto rect = GetSubRect(sub_surface);
|
||||
rect.left = rect.left * res_scale;
|
||||
rect.right = rect.right * res_scale;
|
||||
rect.top = rect.top * res_scale;
|
||||
rect.bottom = rect.bottom * res_scale;
|
||||
return rect;
|
||||
}
|
||||
|
||||
bool SurfaceParams::ExactMatch(const SurfaceParams& other_surface) const {
|
||||
return std::tie(other_surface.addr, other_surface.width, other_surface.height,
|
||||
other_surface.stride, other_surface.pixel_format, other_surface.is_tiled) ==
|
||||
std::tie(addr, width, height, stride, pixel_format, is_tiled) &&
|
||||
pixel_format != PixelFormat::Invalid;
|
||||
}
|
||||
|
||||
bool SurfaceParams::CanSubRect(const SurfaceParams& sub_surface) const {
|
||||
return sub_surface.addr >= addr && sub_surface.end <= end &&
|
||||
sub_surface.pixel_format == pixel_format && pixel_format != PixelFormat::Invalid &&
|
||||
sub_surface.is_tiled == is_tiled &&
|
||||
(sub_surface.addr - addr) % BytesInPixels(is_tiled ? 64 : 1) == 0 &&
|
||||
(sub_surface.stride == stride || sub_surface.height <= (is_tiled ? 8u : 1u)) &&
|
||||
GetSubRect(sub_surface).right <= stride;
|
||||
}
|
||||
|
||||
bool SurfaceParams::CanExpand(const SurfaceParams& expanded_surface) const {
|
||||
return pixel_format != PixelFormat::Invalid && pixel_format == expanded_surface.pixel_format &&
|
||||
addr <= expanded_surface.end && expanded_surface.addr <= end &&
|
||||
is_tiled == expanded_surface.is_tiled && stride == expanded_surface.stride &&
|
||||
(std::max(expanded_surface.addr, addr) - std::min(expanded_surface.addr, addr)) %
|
||||
BytesInPixels(stride * (is_tiled ? 8 : 1)) ==
|
||||
0;
|
||||
}
|
||||
|
||||
bool SurfaceParams::CanTexCopy(const SurfaceParams& texcopy_params) const {
|
||||
if (pixel_format == PixelFormat::Invalid || addr > texcopy_params.addr ||
|
||||
end < texcopy_params.end) {
|
||||
return false;
|
||||
}
|
||||
if (texcopy_params.width != texcopy_params.stride) {
|
||||
const u32 tile_stride = BytesInPixels(stride * (is_tiled ? 8 : 1));
|
||||
return (texcopy_params.addr - addr) % BytesInPixels(is_tiled ? 64 : 1) == 0 &&
|
||||
texcopy_params.width % BytesInPixels(is_tiled ? 64 : 1) == 0 &&
|
||||
(texcopy_params.height == 1 || texcopy_params.stride == tile_stride) &&
|
||||
((texcopy_params.addr - addr) % tile_stride) + texcopy_params.width <= tile_stride;
|
||||
}
|
||||
return FromInterval(texcopy_params.GetInterval()).GetInterval() == texcopy_params.GetInterval();
|
||||
}
|
||||
|
||||
bool CachedSurface::CanFill(const SurfaceParams& dest_surface,
|
||||
SurfaceInterval fill_interval) const {
|
||||
if (type == SurfaceType::Fill && IsRegionValid(fill_interval) &&
|
||||
|
@ -653,47 +536,6 @@ bool CachedSurface::CanCopy(const SurfaceParams& dest_surface,
|
|||
return false;
|
||||
}
|
||||
|
||||
SurfaceInterval SurfaceParams::GetCopyableInterval(const Surface& src_surface) const {
|
||||
SurfaceInterval result{};
|
||||
const auto valid_regions =
|
||||
SurfaceRegions(GetInterval() & src_surface->GetInterval()) - src_surface->invalid_regions;
|
||||
for (auto& valid_interval : valid_regions) {
|
||||
const SurfaceInterval aligned_interval{
|
||||
addr + Common::AlignUp(boost::icl::first(valid_interval) - addr,
|
||||
BytesInPixels(is_tiled ? 8 * 8 : 1)),
|
||||
addr + Common::AlignDown(boost::icl::last_next(valid_interval) - addr,
|
||||
BytesInPixels(is_tiled ? 8 * 8 : 1))};
|
||||
|
||||
if (BytesInPixels(is_tiled ? 8 * 8 : 1) > boost::icl::length(valid_interval) ||
|
||||
boost::icl::length(aligned_interval) == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Get the rectangle within aligned_interval
|
||||
const u32 stride_bytes = BytesInPixels(stride) * (is_tiled ? 8 : 1);
|
||||
SurfaceInterval rect_interval{
|
||||
addr + Common::AlignUp(boost::icl::first(aligned_interval) - addr, stride_bytes),
|
||||
addr + Common::AlignDown(boost::icl::last_next(aligned_interval) - addr, stride_bytes),
|
||||
};
|
||||
if (boost::icl::first(rect_interval) > boost::icl::last_next(rect_interval)) {
|
||||
// 1 row
|
||||
rect_interval = aligned_interval;
|
||||
} else if (boost::icl::length(rect_interval) == 0) {
|
||||
// 2 rows that do not make a rectangle, return the larger one
|
||||
const SurfaceInterval row1{boost::icl::first(aligned_interval),
|
||||
boost::icl::first(rect_interval)};
|
||||
const SurfaceInterval row2{boost::icl::first(rect_interval),
|
||||
boost::icl::last_next(aligned_interval)};
|
||||
rect_interval = (boost::icl::length(row1) > boost::icl::length(row2)) ? row1 : row2;
|
||||
}
|
||||
|
||||
if (boost::icl::length(rect_interval) > boost::icl::length(result)) {
|
||||
result = rect_interval;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
MICROPROFILE_DEFINE(OpenGL_CopySurface, "OpenGL", "CopySurface", MP_RGB(128, 192, 64));
|
||||
void RasterizerCacheOpenGL::CopySurface(const Surface& src_surface, const Surface& dst_surface,
|
||||
SurfaceInterval copy_interval) {
|
||||
|
@ -884,6 +726,16 @@ bool CachedSurface::LoadCustomTexture(u64 tex_hash, Core::CustomTexInfo& tex_inf
|
|||
}
|
||||
|
||||
void CachedSurface::DumpTexture(GLuint target_tex, u64 tex_hash) {
|
||||
// Make sure the texture size is a power of 2
|
||||
// If not, the surface is actually a framebuffer
|
||||
std::bitset<32> width_bits(width);
|
||||
std::bitset<32> height_bits(height);
|
||||
if (width_bits.count() != 1 || height_bits.count() != 1) {
|
||||
LOG_WARNING(Render_OpenGL, "Not dumping {:016X} because size isn't a power of 2 ({}x{})",
|
||||
tex_hash, width, height);
|
||||
return;
|
||||
}
|
||||
|
||||
// Dump texture to RGBA8 and encode as PNG
|
||||
const auto& image_interface = Core::System::GetInstance().GetImageInterface();
|
||||
auto& custom_tex_cache = Core::System::GetInstance().CustomTexCache();
|
||||
|
@ -945,10 +797,6 @@ void CachedSurface::UploadGLTexture(Common::Rectangle<u32> rect, GLuint read_fb_
|
|||
if (Settings::values.custom_textures)
|
||||
is_custom = LoadCustomTexture(tex_hash, custom_tex_info);
|
||||
|
||||
TextureFilterInterface* const texture_filter =
|
||||
is_custom ? nullptr : TextureFilterManager::GetInstance().GetTextureFilter();
|
||||
const u16 default_scale = texture_filter ? texture_filter->scale_factor : 1;
|
||||
|
||||
// Load data from memory to the surface
|
||||
GLint x0 = static_cast<GLint>(rect.left);
|
||||
GLint y0 = static_cast<GLint>(rect.bottom);
|
||||
|
@ -960,7 +808,7 @@ void CachedSurface::UploadGLTexture(Common::Rectangle<u32> rect, GLuint read_fb_
|
|||
// If not 1x scale, create 1x texture that we will blit from to replace texture subrect in
|
||||
// surface
|
||||
OGLTexture unscaled_tex;
|
||||
if (res_scale != default_scale) {
|
||||
if (res_scale != 1) {
|
||||
x0 = 0;
|
||||
y0 = 0;
|
||||
|
||||
|
@ -969,8 +817,7 @@ void CachedSurface::UploadGLTexture(Common::Rectangle<u32> rect, GLuint read_fb_
|
|||
AllocateSurfaceTexture(unscaled_tex.handle, GetFormatTuple(PixelFormat::RGBA8),
|
||||
custom_tex_info.width, custom_tex_info.height);
|
||||
} else {
|
||||
AllocateSurfaceTexture(unscaled_tex.handle, tuple, rect.GetWidth() * default_scale,
|
||||
rect.GetHeight() * default_scale);
|
||||
AllocateSurfaceTexture(unscaled_tex.handle, tuple, rect.GetWidth(), rect.GetHeight());
|
||||
}
|
||||
target_tex = unscaled_tex.handle;
|
||||
}
|
||||
|
@ -996,16 +843,6 @@ void CachedSurface::UploadGLTexture(Common::Rectangle<u32> rect, GLuint read_fb_
|
|||
glActiveTexture(GL_TEXTURE0);
|
||||
glTexSubImage2D(GL_TEXTURE_2D, 0, x0, y0, custom_tex_info.width, custom_tex_info.height,
|
||||
GL_RGBA, GL_UNSIGNED_BYTE, custom_tex_info.tex.data());
|
||||
} else if (texture_filter) {
|
||||
if (res_scale == default_scale) {
|
||||
AllocateSurfaceTexture(texture.handle, GetFormatTuple(pixel_format),
|
||||
rect.GetWidth() * default_scale,
|
||||
rect.GetHeight() * default_scale);
|
||||
cur_state.texture_units[0].texture_2d = texture.handle;
|
||||
cur_state.Apply();
|
||||
}
|
||||
texture_filter->scale(*this, {(u32)x0, (u32)y0, rect.GetWidth(), rect.GetHeight()},
|
||||
buffer_offset);
|
||||
} else {
|
||||
glPixelStorei(GL_UNPACK_ROW_LENGTH, static_cast<GLint>(stride));
|
||||
|
||||
|
@ -1016,13 +853,13 @@ void CachedSurface::UploadGLTexture(Common::Rectangle<u32> rect, GLuint read_fb_
|
|||
}
|
||||
|
||||
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
|
||||
if (Settings::values.dump_textures && !is_custom && !texture_filter)
|
||||
if (Settings::values.dump_textures && !is_custom)
|
||||
DumpTexture(target_tex, tex_hash);
|
||||
|
||||
cur_state.texture_units[0].texture_2d = old_tex;
|
||||
cur_state.Apply();
|
||||
|
||||
if (res_scale != default_scale) {
|
||||
if (res_scale != 1) {
|
||||
auto scaled_rect = rect;
|
||||
scaled_rect.left *= res_scale;
|
||||
scaled_rect.top *= res_scale;
|
||||
|
@ -1031,9 +868,12 @@ void CachedSurface::UploadGLTexture(Common::Rectangle<u32> rect, GLuint read_fb_
|
|||
auto from_rect =
|
||||
is_custom ? Common::Rectangle<u32>{0, custom_tex_info.height, custom_tex_info.width, 0}
|
||||
: Common::Rectangle<u32>{0, rect.GetHeight(), rect.GetWidth(), 0};
|
||||
if (!owner.texture_filterer->Filter(unscaled_tex.handle, from_rect, texture.handle,
|
||||
scaled_rect, type, read_fb_handle, draw_fb_handle)) {
|
||||
BlitTextures(unscaled_tex.handle, from_rect, texture.handle, scaled_rect, type,
|
||||
read_fb_handle, draw_fb_handle);
|
||||
}
|
||||
}
|
||||
|
||||
InvalidateAllWatcher();
|
||||
}
|
||||
|
@ -1221,53 +1061,13 @@ Surface FindMatch(const SurfaceCache& surface_cache, const SurfaceParams& params
|
|||
}
|
||||
|
||||
RasterizerCacheOpenGL::RasterizerCacheOpenGL() {
|
||||
resolution_scale_factor = VideoCore::GetResolutionScaleFactor();
|
||||
texture_filterer = std::make_unique<TextureFilterer>(Settings::values.texture_filter_name,
|
||||
resolution_scale_factor);
|
||||
format_reinterpreter = std::make_unique<FormatReinterpreterOpenGL>();
|
||||
|
||||
read_framebuffer.Create();
|
||||
draw_framebuffer.Create();
|
||||
|
||||
attributeless_vao.Create();
|
||||
|
||||
d24s8_abgr_buffer.Create();
|
||||
d24s8_abgr_buffer_size = 0;
|
||||
|
||||
std::string vs_source = R"(
|
||||
const vec2 vertices[4] = vec2[4](vec2(-1.0, -1.0), vec2(1.0, -1.0), vec2(-1.0, 1.0), vec2(1.0, 1.0));
|
||||
void main() {
|
||||
gl_Position = vec4(vertices[gl_VertexID], 0.0, 1.0);
|
||||
}
|
||||
)";
|
||||
|
||||
std::string fs_source = GLES ? fragment_shader_precision_OES : "";
|
||||
fs_source += R"(
|
||||
uniform samplerBuffer tbo;
|
||||
uniform vec2 tbo_size;
|
||||
uniform vec4 viewport;
|
||||
|
||||
out vec4 color;
|
||||
|
||||
void main() {
|
||||
vec2 tbo_coord = (gl_FragCoord.xy - viewport.xy) * tbo_size / viewport.zw;
|
||||
int tbo_offset = int(tbo_coord.y) * int(tbo_size.x) + int(tbo_coord.x);
|
||||
color = texelFetch(tbo, tbo_offset).rabg;
|
||||
}
|
||||
)";
|
||||
d24s8_abgr_shader.Create(vs_source.c_str(), fs_source.c_str());
|
||||
|
||||
OpenGLState state = OpenGLState::GetCurState();
|
||||
GLuint old_program = state.draw.shader_program;
|
||||
state.draw.shader_program = d24s8_abgr_shader.handle;
|
||||
state.Apply();
|
||||
|
||||
GLint tbo_u_id = glGetUniformLocation(d24s8_abgr_shader.handle, "tbo");
|
||||
ASSERT(tbo_u_id != -1);
|
||||
glUniform1i(tbo_u_id, 0);
|
||||
|
||||
state.draw.shader_program = old_program;
|
||||
state.Apply();
|
||||
|
||||
d24s8_abgr_tbo_size_u_id = glGetUniformLocation(d24s8_abgr_shader.handle, "tbo_size");
|
||||
ASSERT(d24s8_abgr_tbo_size_u_id != -1);
|
||||
d24s8_abgr_viewport_u_id = glGetUniformLocation(d24s8_abgr_shader.handle, "viewport");
|
||||
ASSERT(d24s8_abgr_viewport_u_id != -1);
|
||||
}
|
||||
|
||||
RasterizerCacheOpenGL::~RasterizerCacheOpenGL() {
|
||||
|
@ -1291,64 +1091,6 @@ bool RasterizerCacheOpenGL::BlitSurfaces(const Surface& src_surface,
|
|||
draw_framebuffer.handle);
|
||||
}
|
||||
|
||||
void RasterizerCacheOpenGL::ConvertD24S8toABGR(GLuint src_tex,
|
||||
const Common::Rectangle<u32>& src_rect,
|
||||
GLuint dst_tex,
|
||||
const Common::Rectangle<u32>& dst_rect) {
|
||||
OpenGLState prev_state = OpenGLState::GetCurState();
|
||||
SCOPE_EXIT({ prev_state.Apply(); });
|
||||
|
||||
OpenGLState state;
|
||||
state.draw.read_framebuffer = read_framebuffer.handle;
|
||||
state.draw.draw_framebuffer = draw_framebuffer.handle;
|
||||
state.Apply();
|
||||
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, d24s8_abgr_buffer.handle);
|
||||
|
||||
GLsizeiptr target_pbo_size = src_rect.GetWidth() * src_rect.GetHeight() * 4;
|
||||
if (target_pbo_size > d24s8_abgr_buffer_size) {
|
||||
d24s8_abgr_buffer_size = target_pbo_size * 2;
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, d24s8_abgr_buffer_size, nullptr, GL_STREAM_COPY);
|
||||
}
|
||||
|
||||
glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
|
||||
glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, src_tex,
|
||||
0);
|
||||
glReadPixels(static_cast<GLint>(src_rect.left), static_cast<GLint>(src_rect.bottom),
|
||||
static_cast<GLsizei>(src_rect.GetWidth()),
|
||||
static_cast<GLsizei>(src_rect.GetHeight()), GL_DEPTH_STENCIL, GL_UNSIGNED_INT_24_8,
|
||||
0);
|
||||
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
|
||||
// PBO now contains src_tex in RABG format
|
||||
state.draw.shader_program = d24s8_abgr_shader.handle;
|
||||
state.draw.vertex_array = attributeless_vao.handle;
|
||||
state.viewport.x = static_cast<GLint>(dst_rect.left);
|
||||
state.viewport.y = static_cast<GLint>(dst_rect.bottom);
|
||||
state.viewport.width = static_cast<GLsizei>(dst_rect.GetWidth());
|
||||
state.viewport.height = static_cast<GLsizei>(dst_rect.GetHeight());
|
||||
state.Apply();
|
||||
|
||||
OGLTexture tbo;
|
||||
tbo.Create();
|
||||
glActiveTexture(GL_TEXTURE0);
|
||||
glBindTexture(GL_TEXTURE_BUFFER, tbo.handle);
|
||||
glTexBuffer(GL_TEXTURE_BUFFER, GL_RGBA8, d24s8_abgr_buffer.handle);
|
||||
|
||||
glUniform2f(d24s8_abgr_tbo_size_u_id, static_cast<GLfloat>(src_rect.GetWidth()),
|
||||
static_cast<GLfloat>(src_rect.GetHeight()));
|
||||
glUniform4f(d24s8_abgr_viewport_u_id, static_cast<GLfloat>(state.viewport.x),
|
||||
static_cast<GLfloat>(state.viewport.y), static_cast<GLfloat>(state.viewport.width),
|
||||
static_cast<GLfloat>(state.viewport.height));
|
||||
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst_tex, 0);
|
||||
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0, 0);
|
||||
glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
|
||||
|
||||
glBindTexture(GL_TEXTURE_BUFFER, 0);
|
||||
}
|
||||
|
||||
Surface RasterizerCacheOpenGL::GetSurface(const SurfaceParams& params, ScaleMatch match_res_scale,
|
||||
bool load_if_create) {
|
||||
if (params.addr == 0 || params.height * params.width == 0) {
|
||||
|
@ -1495,11 +1237,7 @@ Surface RasterizerCacheOpenGL::GetTextureSurface(const Pica::Texture::TextureInf
|
|||
params.height = info.height;
|
||||
params.is_tiled = true;
|
||||
params.pixel_format = SurfaceParams::PixelFormatFromTextureFormat(info.format);
|
||||
TextureFilterInterface* filter{};
|
||||
|
||||
params.res_scale = (filter = TextureFilterManager::GetInstance().GetTextureFilter())
|
||||
? filter->scale_factor
|
||||
: 1;
|
||||
params.res_scale = texture_filterer->IsNull() ? 1 : resolution_scale_factor;
|
||||
params.UpdateParams();
|
||||
|
||||
u32 min_width = info.width >> max_level;
|
||||
|
@ -1552,7 +1290,7 @@ Surface RasterizerCacheOpenGL::GetTextureSurface(const Pica::Texture::TextureInf
|
|||
glTexImage2D(GL_TEXTURE_2D, level, format_tuple.internal_format, width >> level,
|
||||
height >> level, 0, format_tuple.format, format_tuple.type, nullptr);
|
||||
}
|
||||
if (surface->is_custom) {
|
||||
if (surface->is_custom || !texture_filterer->IsNull()) {
|
||||
// TODO: proper mipmap support for custom textures
|
||||
glGenerateMipmap(GL_TEXTURE_2D);
|
||||
}
|
||||
|
@ -1588,7 +1326,7 @@ Surface RasterizerCacheOpenGL::GetTextureSurface(const Pica::Texture::TextureInf
|
|||
}
|
||||
state.ResetTexture(level_surface->texture.handle);
|
||||
state.Apply();
|
||||
if (!surface->is_custom) {
|
||||
if (!surface->is_custom && texture_filterer->IsNull()) {
|
||||
glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
|
||||
level_surface->texture.handle, 0);
|
||||
glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT,
|
||||
|
@ -1712,10 +1450,9 @@ SurfaceSurfaceRect_Tuple RasterizerCacheOpenGL::GetFramebufferSurfaces(
|
|||
const auto& config = regs.framebuffer.framebuffer;
|
||||
|
||||
// update resolution_scale_factor and reset cache if changed
|
||||
static u16 resolution_scale_factor = VideoCore::GetResolutionScaleFactor();
|
||||
if (resolution_scale_factor != VideoCore::GetResolutionScaleFactor() ||
|
||||
TextureFilterManager::GetInstance().IsUpdated()) {
|
||||
TextureFilterManager::GetInstance().Reset();
|
||||
if ((resolution_scale_factor != VideoCore::GetResolutionScaleFactor()) |
|
||||
(VideoCore::g_texture_filter_update_requested.exchange(false) &&
|
||||
texture_filterer->Reset(Settings::values.texture_filter_name, resolution_scale_factor))) {
|
||||
resolution_scale_factor = VideoCore::GetResolutionScaleFactor();
|
||||
FlushAll();
|
||||
while (!surface_cache.empty())
|
||||
|
@ -1800,7 +1537,7 @@ SurfaceSurfaceRect_Tuple RasterizerCacheOpenGL::GetFramebufferSurfaces(
|
|||
}
|
||||
|
||||
Surface RasterizerCacheOpenGL::GetFillSurface(const GPU::Regs::MemoryFillConfig& config) {
|
||||
Surface new_surface = std::make_shared<CachedSurface>();
|
||||
Surface new_surface = std::make_shared<CachedSurface>(*this);
|
||||
|
||||
new_surface->addr = config.GetStartAddress();
|
||||
new_surface->end = config.GetEndAddress();
|
||||
|
@ -1881,9 +1618,15 @@ void RasterizerCacheOpenGL::ValidateSurface(const Surface& surface, PAddr addr,
|
|||
return;
|
||||
}
|
||||
|
||||
auto validate_regions = surface->invalid_regions & validate_interval;
|
||||
auto notify_validated = [&](SurfaceInterval interval) {
|
||||
surface->invalid_regions.erase(interval);
|
||||
validate_regions.erase(interval);
|
||||
};
|
||||
|
||||
while (true) {
|
||||
const auto it = surface->invalid_regions.find(validate_interval);
|
||||
if (it == surface->invalid_regions.end())
|
||||
const auto it = validate_regions.begin();
|
||||
if (it == validate_regions.end())
|
||||
break;
|
||||
|
||||
const auto interval = *it & validate_interval;
|
||||
|
@ -1895,27 +1638,27 @@ void RasterizerCacheOpenGL::ValidateSurface(const Surface& surface, PAddr addr,
|
|||
if (copy_surface != nullptr) {
|
||||
SurfaceInterval copy_interval = params.GetCopyableInterval(copy_surface);
|
||||
CopySurface(copy_surface, surface, copy_interval);
|
||||
surface->invalid_regions.erase(copy_interval);
|
||||
notify_validated(copy_interval);
|
||||
continue;
|
||||
}
|
||||
|
||||
// D24S8 to RGBA8
|
||||
if (surface->pixel_format == PixelFormat::RGBA8) {
|
||||
params.pixel_format = PixelFormat::D24S8;
|
||||
Surface reinterpret_surface =
|
||||
FindMatch<MatchFlags::Copy>(surface_cache, params, ScaleMatch::Ignore, interval);
|
||||
if (reinterpret_surface != nullptr) {
|
||||
ASSERT(reinterpret_surface->pixel_format == PixelFormat::D24S8);
|
||||
|
||||
SurfaceInterval convert_interval = params.GetCopyableInterval(reinterpret_surface);
|
||||
SurfaceParams convert_params = surface->FromInterval(convert_interval);
|
||||
auto src_rect = reinterpret_surface->GetScaledSubRect(convert_params);
|
||||
auto dest_rect = surface->GetScaledSubRect(convert_params);
|
||||
|
||||
ConvertD24S8toABGR(reinterpret_surface->texture.handle, src_rect,
|
||||
surface->texture.handle, dest_rect);
|
||||
|
||||
surface->invalid_regions.erase(convert_interval);
|
||||
// Try to find surface in cache with different format
|
||||
// that can be reinterpreted to the requested format.
|
||||
if (ValidateByReinterpretation(surface, params, interval)) {
|
||||
notify_validated(interval);
|
||||
continue;
|
||||
}
|
||||
// Could not find a matching reinterpreter, check if we need to implement a
|
||||
// reinterpreter
|
||||
if (NoUnimplementedReinterpretations(surface, params, interval) &&
|
||||
!IntervalHasInvalidPixelFormat(params, interval)) {
|
||||
// No surfaces were found in the cache that had a matching bit-width.
|
||||
// If the region was created entirely on the GPU,
|
||||
// assume it was a developer mistake and skip flushing.
|
||||
if (boost::icl::contains(dirty_regions, interval)) {
|
||||
LOG_DEBUG(Render_OpenGL, "Region created fully on GPU and reinterpretation is "
|
||||
"invalid. Skipping validation");
|
||||
validate_regions.erase(interval);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
@ -1925,10 +1668,103 @@ void RasterizerCacheOpenGL::ValidateSurface(const Surface& surface, PAddr addr,
|
|||
surface->LoadGLBuffer(params.addr, params.end);
|
||||
surface->UploadGLTexture(surface->GetSubRect(params), read_framebuffer.handle,
|
||||
draw_framebuffer.handle);
|
||||
surface->invalid_regions.erase(params.GetInterval());
|
||||
notify_validated(params.GetInterval());
|
||||
}
|
||||
}
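To make the bookkeeping above concrete, here is a minimal standalone sketch (simplified, assumed types rather than the cache's real members) of how a boost::icl interval set can track a surface's invalidated byte ranges, erase the parts that get validated, and report what still needs work inside a requested window:

// Illustrative sketch only, not Citra code.
#include <cstdint>
#include <iostream>
#include <boost/icl/interval_set.hpp>

using PAddr = std::uint32_t;
using Interval = boost::icl::discrete_interval<PAddr>;
using Regions = boost::icl::interval_set<PAddr>;

int main() {
    // Surface spans [0x1000, 0x2000); initially the whole range is invalid.
    Regions invalid_regions;
    invalid_regions += Interval::right_open(0x1000, 0x2000);

    // Validating a sub-range simply erases it from the set.
    invalid_regions -= Interval::right_open(0x1200, 0x1400);

    // What still needs validation within a requested window:
    const Regions to_validate = invalid_regions & Interval::right_open(0x1000, 0x1800);
    for (const auto& iv : to_validate) {
        std::cout << std::hex << iv.lower() << "-" << iv.upper() << '\n';
    }
    // Prints 1000-1200 and 1400-1800.
}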
|
||||
|
||||
bool RasterizerCacheOpenGL::NoUnimplementedReinterpretations(const Surface& surface,
|
||||
SurfaceParams& params,
|
||||
const SurfaceInterval& interval) {
|
||||
static constexpr std::array<PixelFormat, 17> all_formats{
|
||||
PixelFormat::RGBA8, PixelFormat::RGB8, PixelFormat::RGB5A1, PixelFormat::RGB565,
|
||||
PixelFormat::RGBA4, PixelFormat::IA8, PixelFormat::RG8, PixelFormat::I8,
|
||||
PixelFormat::A8, PixelFormat::IA4, PixelFormat::I4, PixelFormat::A4,
|
||||
PixelFormat::ETC1, PixelFormat::ETC1A4, PixelFormat::D16, PixelFormat::D24,
|
||||
PixelFormat::D24S8,
|
||||
};
|
||||
bool implemented = true;
|
||||
for (PixelFormat format : all_formats) {
|
||||
if (SurfaceParams::GetFormatBpp(format) == surface->GetFormatBpp()) {
|
||||
params.pixel_format = format;
|
||||
// This could potentially be expensive,
|
||||
// although experimentally it hasn't been too bad
|
||||
Surface test_surface =
|
||||
FindMatch<MatchFlags::Copy>(surface_cache, params, ScaleMatch::Ignore, interval);
|
||||
if (test_surface != nullptr) {
|
||||
LOG_WARNING(Render_OpenGL, "Missing pixel_format reinterpreter: {} -> {}",
|
||||
SurfaceParams::PixelFormatAsString(format),
|
||||
SurfaceParams::PixelFormatAsString(surface->pixel_format));
|
||||
implemented = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return implemented;
|
||||
}
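As a standalone illustration of the rule this function enforces (hypothetical enum and helper, not the real SurfaceParams API): a cached surface only counts as a reinterpretation candidate when its format has the same bits per pixel as the destination format, which is why D24S8 pairs with RGBA8 above:

// Illustrative sketch only.
#include <array>
#include <cstdio>

enum class Fmt { RGBA8, RGB8, RGB565, RGBA4, D16, D24, D24S8 };

constexpr unsigned Bpp(Fmt f) {
    switch (f) {
    case Fmt::RGBA8: case Fmt::D24S8: return 32;
    case Fmt::RGB8:  case Fmt::D24:   return 24;
    case Fmt::RGB565: case Fmt::RGBA4: case Fmt::D16: return 16;
    }
    return 0;
}

int main() {
    constexpr std::array<Fmt, 7> all{Fmt::RGBA8, Fmt::RGB8, Fmt::RGB565, Fmt::RGBA4,
                                     Fmt::D16, Fmt::D24, Fmt::D24S8};
    const Fmt dest = Fmt::RGBA8; // e.g. a color framebuffer being validated
    for (Fmt src : all) {
        if (src != dest && Bpp(src) == Bpp(dest)) {
            // D24S8 is the only 32-bit candidate here, matching the
            // D24S8 -> RGBA8 reinterpreter referenced in the surrounding code.
            std::printf("candidate format index: %d\n", static_cast<int>(src));
        }
    }
}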
|
||||
|
||||
bool RasterizerCacheOpenGL::IntervalHasInvalidPixelFormat(SurfaceParams& params,
|
||||
const SurfaceInterval& interval) {
|
||||
params.pixel_format = PixelFormat::Invalid;
|
||||
for (const auto& set : RangeFromInterval(surface_cache, interval))
|
||||
for (const auto& surface : set.second)
|
||||
if (surface->pixel_format == PixelFormat::Invalid) {
|
||||
LOG_WARNING(Render_OpenGL, "Surface found with invalid pixel format");
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool RasterizerCacheOpenGL::ValidateByReinterpretation(const Surface& surface,
|
||||
SurfaceParams& params,
|
||||
const SurfaceInterval& interval) {
|
||||
auto [cvt_begin, cvt_end] =
|
||||
format_reinterpreter->GetPossibleReinterpretations(surface->pixel_format);
|
||||
for (auto reinterpreter = cvt_begin; reinterpreter != cvt_end; ++reinterpreter) {
|
||||
PixelFormat format = reinterpreter->first.src_format;
|
||||
params.pixel_format = format;
|
||||
Surface reinterpret_surface =
|
||||
FindMatch<MatchFlags::Copy>(surface_cache, params, ScaleMatch::Ignore, interval);
|
||||
|
||||
if (reinterpret_surface != nullptr) {
|
||||
SurfaceInterval reinterpret_interval = params.GetCopyableInterval(reinterpret_surface);
|
||||
SurfaceParams reinterpret_params = surface->FromInterval(reinterpret_interval);
|
||||
auto src_rect = reinterpret_surface->GetScaledSubRect(reinterpret_params);
|
||||
auto dest_rect = surface->GetScaledSubRect(reinterpret_params);
|
||||
|
||||
if (!texture_filterer->IsNull() && reinterpret_surface->res_scale == 1 &&
|
||||
surface->res_scale == resolution_scale_factor) {
|
||||
// The destination surface is either a framebuffer, or a filtered texture.
|
||||
OGLTexture tmp_tex;
|
||||
tmp_tex.Create();
|
||||
// Create an intermediate surface to convert to before blitting to the
|
||||
// destination.
|
||||
Common::Rectangle<u32> tmp_rect{0, dest_rect.GetHeight() / resolution_scale_factor,
|
||||
dest_rect.GetWidth() / resolution_scale_factor, 0};
|
||||
AllocateSurfaceTexture(tmp_tex.handle,
|
||||
GetFormatTuple(reinterpreter->first.dst_format),
|
||||
tmp_rect.right, tmp_rect.top);
|
||||
reinterpreter->second->Reinterpret(reinterpret_surface->texture.handle, src_rect,
|
||||
read_framebuffer.handle, tmp_tex.handle,
|
||||
tmp_rect, draw_framebuffer.handle);
|
||||
SurfaceParams::SurfaceType type =
|
||||
SurfaceParams::GetFormatType(reinterpreter->first.dst_format);
|
||||
|
||||
if (!texture_filterer->Filter(tmp_tex.handle, tmp_rect, surface->texture.handle,
|
||||
dest_rect, type, read_framebuffer.handle,
|
||||
draw_framebuffer.handle)) {
|
||||
BlitTextures(tmp_tex.handle, tmp_rect, surface->texture.handle, dest_rect, type,
|
||||
read_framebuffer.handle, draw_framebuffer.handle);
|
||||
}
|
||||
} else {
|
||||
reinterpreter->second->Reinterpret(reinterpret_surface->texture.handle, src_rect,
|
||||
read_framebuffer.handle, surface->texture.handle,
|
||||
dest_rect, draw_framebuffer.handle);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void RasterizerCacheOpenGL::ClearAll(bool flush) {
|
||||
const auto flush_interval = PageMap::interval_type::right_open(0x0, 0xFFFFFFFF);
|
||||
// Force flush all surfaces from the cache
|
||||
|
@ -2053,7 +1889,7 @@ void RasterizerCacheOpenGL::InvalidateRegion(PAddr addr, u32 size, const Surface
|
|||
}
|
||||
|
||||
Surface RasterizerCacheOpenGL::CreateSurface(const SurfaceParams& params) {
|
||||
Surface surface = std::make_shared<CachedSurface>();
|
||||
Surface surface = std::make_shared<CachedSurface>(*this);
|
||||
static_cast<SurfaceParams&>(*surface) = params;
|
||||
|
||||
surface->texture.Create();
|
||||
|
|
|
@ -26,14 +26,16 @@
|
|||
#include "common/common_types.h"
|
||||
#include "common/math_util.h"
|
||||
#include "core/custom_tex_cache.h"
|
||||
#include "core/hw/gpu.h"
|
||||
#include "video_core/regs_framebuffer.h"
|
||||
#include "video_core/regs_texturing.h"
|
||||
#include "video_core/renderer_opengl/gl_resource_manager.h"
|
||||
#include "video_core/renderer_opengl/gl_surface_params.h"
|
||||
#include "video_core/texture/texture_decode.h"
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
class RasterizerCacheOpenGL;
|
||||
class TextureFilterer;
|
||||
class FormatReinterpreterOpenGL;
|
||||
|
||||
struct TextureCubeConfig {
|
||||
PAddr px;
|
||||
PAddr nx;
|
||||
|
@ -76,11 +78,8 @@ struct hash<OpenGL::TextureCubeConfig> {
|
|||
|
||||
namespace OpenGL {
|
||||
|
||||
struct CachedSurface;
|
||||
using Surface = std::shared_ptr<CachedSurface>;
|
||||
using SurfaceSet = std::set<Surface>;
|
||||
|
||||
using SurfaceInterval = boost::icl::right_open_interval<PAddr>;
|
||||
using SurfaceRegions = boost::icl::interval_set<PAddr, std::less, SurfaceInterval>;
|
||||
using SurfaceMap =
|
||||
boost::icl::interval_map<PAddr, Surface, boost::icl::partial_absorber, std::less,
|
||||
|
@ -104,212 +103,6 @@ enum class ScaleMatch {
|
|||
Ignore // accept every scaled res
|
||||
};
|
||||
|
||||
struct SurfaceParams {
|
||||
private:
|
||||
static constexpr std::array<unsigned int, 18> BPP_TABLE = {
|
||||
32, // RGBA8
|
||||
24, // RGB8
|
||||
16, // RGB5A1
|
||||
16, // RGB565
|
||||
16, // RGBA4
|
||||
16, // IA8
|
||||
16, // RG8
|
||||
8, // I8
|
||||
8, // A8
|
||||
8, // IA4
|
||||
4, // I4
|
||||
4, // A4
|
||||
4, // ETC1
|
||||
8, // ETC1A4
|
||||
16, // D16
|
||||
0,
|
||||
24, // D24
|
||||
32, // D24S8
|
||||
};
|
||||
|
||||
public:
|
||||
enum class PixelFormat {
|
||||
// First 5 formats are shared between textures and color buffers
|
||||
RGBA8 = 0,
|
||||
RGB8 = 1,
|
||||
RGB5A1 = 2,
|
||||
RGB565 = 3,
|
||||
RGBA4 = 4,
|
||||
|
||||
// Texture-only formats
|
||||
IA8 = 5,
|
||||
RG8 = 6,
|
||||
I8 = 7,
|
||||
A8 = 8,
|
||||
IA4 = 9,
|
||||
I4 = 10,
|
||||
A4 = 11,
|
||||
ETC1 = 12,
|
||||
ETC1A4 = 13,
|
||||
|
||||
// Depth buffer-only formats
|
||||
D16 = 14,
|
||||
// gap
|
||||
D24 = 16,
|
||||
D24S8 = 17,
|
||||
|
||||
Invalid = 255,
|
||||
};
|
||||
|
||||
enum class SurfaceType {
|
||||
Color = 0,
|
||||
Texture = 1,
|
||||
Depth = 2,
|
||||
DepthStencil = 3,
|
||||
Fill = 4,
|
||||
Invalid = 5
|
||||
};
|
||||
|
||||
static constexpr unsigned int GetFormatBpp(PixelFormat format) {
|
||||
const auto format_idx = static_cast<std::size_t>(format);
|
||||
DEBUG_ASSERT_MSG(format_idx < BPP_TABLE.size(), "Invalid pixel format {}", format_idx);
|
||||
return BPP_TABLE[format_idx];
|
||||
}
|
||||
|
||||
unsigned int GetFormatBpp() const {
|
||||
return GetFormatBpp(pixel_format);
|
||||
}
|
||||
|
||||
static PixelFormat PixelFormatFromTextureFormat(Pica::TexturingRegs::TextureFormat format) {
|
||||
return ((unsigned int)format < 14) ? (PixelFormat)format : PixelFormat::Invalid;
|
||||
}
|
||||
|
||||
static PixelFormat PixelFormatFromColorFormat(Pica::FramebufferRegs::ColorFormat format) {
|
||||
return ((unsigned int)format < 5) ? (PixelFormat)format : PixelFormat::Invalid;
|
||||
}
|
||||
|
||||
static PixelFormat PixelFormatFromDepthFormat(Pica::FramebufferRegs::DepthFormat format) {
|
||||
return ((unsigned int)format < 4) ? (PixelFormat)((unsigned int)format + 14)
|
||||
: PixelFormat::Invalid;
|
||||
}
|
||||
|
||||
static PixelFormat PixelFormatFromGPUPixelFormat(GPU::Regs::PixelFormat format) {
|
||||
switch (format) {
|
||||
// RGB565 and RGB5A1 are switched in PixelFormat compared to ColorFormat
|
||||
case GPU::Regs::PixelFormat::RGB565:
|
||||
return PixelFormat::RGB565;
|
||||
case GPU::Regs::PixelFormat::RGB5A1:
|
||||
return PixelFormat::RGB5A1;
|
||||
default:
|
||||
return ((unsigned int)format < 5) ? (PixelFormat)format : PixelFormat::Invalid;
|
||||
}
|
||||
}
|
||||
|
||||
static bool CheckFormatsBlittable(PixelFormat pixel_format_a, PixelFormat pixel_format_b) {
|
||||
SurfaceType a_type = GetFormatType(pixel_format_a);
|
||||
SurfaceType b_type = GetFormatType(pixel_format_b);
|
||||
|
||||
if ((a_type == SurfaceType::Color || a_type == SurfaceType::Texture) &&
|
||||
(b_type == SurfaceType::Color || b_type == SurfaceType::Texture)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (a_type == SurfaceType::Depth && b_type == SurfaceType::Depth) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (a_type == SurfaceType::DepthStencil && b_type == SurfaceType::DepthStencil) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
static constexpr SurfaceType GetFormatType(PixelFormat pixel_format) {
|
||||
if ((unsigned int)pixel_format < 5) {
|
||||
return SurfaceType::Color;
|
||||
}
|
||||
|
||||
if ((unsigned int)pixel_format < 14) {
|
||||
return SurfaceType::Texture;
|
||||
}
|
||||
|
||||
if (pixel_format == PixelFormat::D16 || pixel_format == PixelFormat::D24) {
|
||||
return SurfaceType::Depth;
|
||||
}
|
||||
|
||||
if (pixel_format == PixelFormat::D24S8) {
|
||||
return SurfaceType::DepthStencil;
|
||||
}
|
||||
|
||||
return SurfaceType::Invalid;
|
||||
}
|
||||
|
||||
/// Update the params "size", "end" and "type" from the already set "addr", "width", "height"
|
||||
/// and "pixel_format"
|
||||
void UpdateParams() {
|
||||
if (stride == 0) {
|
||||
stride = width;
|
||||
}
|
||||
type = GetFormatType(pixel_format);
|
||||
size = !is_tiled ? BytesInPixels(stride * (height - 1) + width)
|
||||
: BytesInPixels(stride * 8 * (height / 8 - 1) + width * 8);
|
||||
end = addr + size;
|
||||
}
|
||||
|
||||
SurfaceInterval GetInterval() const {
|
||||
return SurfaceInterval(addr, end);
|
||||
}
|
||||
|
||||
// Returns the outer rectangle containing "interval"
|
||||
SurfaceParams FromInterval(SurfaceInterval interval) const;
|
||||
|
||||
SurfaceInterval GetSubRectInterval(Common::Rectangle<u32> unscaled_rect) const;
|
||||
|
||||
// Returns the region of the biggest valid rectangle within the interval
|
||||
SurfaceInterval GetCopyableInterval(const Surface& src_surface) const;
|
||||
|
||||
u32 GetScaledWidth() const {
|
||||
return width * res_scale;
|
||||
}
|
||||
|
||||
u32 GetScaledHeight() const {
|
||||
return height * res_scale;
|
||||
}
|
||||
|
||||
Common::Rectangle<u32> GetRect() const {
|
||||
return {0, height, width, 0};
|
||||
}
|
||||
|
||||
Common::Rectangle<u32> GetScaledRect() const {
|
||||
return {0, GetScaledHeight(), GetScaledWidth(), 0};
|
||||
}
|
||||
|
||||
u32 PixelsInBytes(u32 size) const {
|
||||
return size * CHAR_BIT / GetFormatBpp(pixel_format);
|
||||
}
|
||||
|
||||
u32 BytesInPixels(u32 pixels) const {
|
||||
return pixels * GetFormatBpp(pixel_format) / CHAR_BIT;
|
||||
}
|
||||
|
||||
bool ExactMatch(const SurfaceParams& other_surface) const;
|
||||
bool CanSubRect(const SurfaceParams& sub_surface) const;
|
||||
bool CanExpand(const SurfaceParams& expanded_surface) const;
|
||||
bool CanTexCopy(const SurfaceParams& texcopy_params) const;
|
||||
|
||||
Common::Rectangle<u32> GetSubRect(const SurfaceParams& sub_surface) const;
|
||||
Common::Rectangle<u32> GetScaledSubRect(const SurfaceParams& sub_surface) const;
|
||||
|
||||
PAddr addr = 0;
|
||||
PAddr end = 0;
|
||||
u32 size = 0;
|
||||
|
||||
u32 width = 0;
|
||||
u32 height = 0;
|
||||
u32 stride = 0;
|
||||
u16 res_scale = 1;
|
||||
|
||||
bool is_tiled = false;
|
||||
PixelFormat pixel_format = PixelFormat::Invalid;
|
||||
SurfaceType type = SurfaceType::Invalid;
|
||||
};
|
||||
|
||||
/**
|
||||
* A watcher that notifies whether a cached surface has been changed. This is useful for caching
|
||||
* surface collection objects such as texture cubes and mipmap chains.
|
||||
|
@ -345,6 +138,8 @@ private:
|
|||
};
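A minimal sketch of the watcher pattern described above, using assumed names rather than the real SurfaceWatcher interface: consumers hold a shared_ptr, the watched surface holds weak_ptrs, and invalidation flags live watchers while pruning expired ones:

// Illustrative sketch only.
#include <cassert>
#include <list>
#include <memory>

struct Watcher {
    bool valid = false; // set when the cached object is rebuilt from the surface
};

struct Watched {
    std::list<std::weak_ptr<Watcher>> watchers;

    std::shared_ptr<Watcher> CreateWatcher() {
        auto watcher = std::make_shared<Watcher>();
        watchers.push_back(watcher);
        return watcher;
    }

    // Called when the underlying data changes: every live watcher is told that
    // whatever it cached (texture cube face, mipmap level, ...) is stale.
    void Invalidate() {
        for (auto it = watchers.begin(); it != watchers.end();) {
            if (auto watcher = it->lock()) {
                watcher->valid = false;
                ++it;
            } else {
                it = watchers.erase(it); // owner is gone, drop the expired entry
            }
        }
    }
};

int main() {
    Watched surface;
    auto watcher = surface.CreateWatcher();
    watcher->valid = true;   // consumer refreshed its copy
    surface.Invalidate();    // surface contents changed
    assert(!watcher->valid); // consumer must refresh again
}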
|
||||
|
||||
struct CachedSurface : SurfaceParams, std::enable_shared_from_this<CachedSurface> {
|
||||
CachedSurface(RasterizerCacheOpenGL& owner) : owner{owner} {}
|
||||
|
||||
bool CanFill(const SurfaceParams& dest_surface, SurfaceInterval fill_interval) const;
|
||||
bool CanCopy(const SurfaceParams& dest_surface, SurfaceInterval copy_interval) const;
|
||||
|
||||
|
@ -422,6 +217,7 @@ struct CachedSurface : SurfaceParams, std::enable_shared_from_this<CachedSurface
|
|||
}
|
||||
|
||||
private:
|
||||
RasterizerCacheOpenGL& owner;
|
||||
std::list<std::weak_ptr<SurfaceWatcher>> watchers;
|
||||
};
|
||||
|
||||
|
@ -445,9 +241,6 @@ public:
|
|||
bool BlitSurfaces(const Surface& src_surface, const Common::Rectangle<u32>& src_rect,
|
||||
const Surface& dst_surface, const Common::Rectangle<u32>& dst_rect);
|
||||
|
||||
void ConvertD24S8toABGR(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
|
||||
const Common::Rectangle<u32>& dst_rect);
|
||||
|
||||
/// Copy one surface's region to another
|
||||
void CopySurface(const Surface& src_surface, const Surface& dst_surface,
|
||||
SurfaceInterval copy_interval);
|
||||
|
@ -496,6 +289,18 @@ private:
|
|||
/// Update surface's texture for given region when necessary
|
||||
void ValidateSurface(const Surface& surface, PAddr addr, u32 size);
|
||||
|
||||
// Returns false if there is a surface in the cache at the interval with the same bit-width but no implemented reinterpreter for its format; returns true otherwise.
|
||||
bool NoUnimplementedReinterpretations(const OpenGL::Surface& surface,
|
||||
OpenGL::SurfaceParams& params,
|
||||
const OpenGL::SurfaceInterval& interval);
|
||||
|
||||
// Return true if a surface with an invalid pixel format exists at the interval
|
||||
bool IntervalHasInvalidPixelFormat(SurfaceParams& params, const SurfaceInterval& interval);
|
||||
|
||||
// Attempt to find a reinterpretable surface in the cache and use it to copy for validation
|
||||
bool ValidateByReinterpretation(const Surface& surface, SurfaceParams& params,
|
||||
const SurfaceInterval& interval);
|
||||
|
||||
/// Create a new surface
|
||||
Surface CreateSurface(const SurfaceParams& params);
|
||||
|
||||
|
@ -516,14 +321,13 @@ private:
|
|||
OGLFramebuffer read_framebuffer;
|
||||
OGLFramebuffer draw_framebuffer;
|
||||
|
||||
OGLVertexArray attributeless_vao;
|
||||
OGLBuffer d24s8_abgr_buffer;
|
||||
GLsizeiptr d24s8_abgr_buffer_size;
|
||||
OGLProgram d24s8_abgr_shader;
|
||||
GLint d24s8_abgr_tbo_size_u_id;
|
||||
GLint d24s8_abgr_viewport_u_id;
|
||||
u16 resolution_scale_factor;
|
||||
|
||||
std::unordered_map<TextureCubeConfig, CachedTextureCube> texture_cube_cache;
|
||||
|
||||
public:
|
||||
std::unique_ptr<TextureFilterer> texture_filterer;
|
||||
std::unique_ptr<FormatReinterpreterOpenGL> format_reinterpreter;
|
||||
};
|
||||
|
||||
struct FormatTuple {
|
||||
|
|
|
@ -38,13 +38,6 @@ constexpr GLuint ShadowTexturePZ = 5;
|
|||
constexpr GLuint ShadowTextureNZ = 6;
|
||||
} // namespace ImageUnits
|
||||
|
||||
struct Viewport {
|
||||
GLint x;
|
||||
GLint y;
|
||||
GLsizei width;
|
||||
GLsizei height;
|
||||
};
|
||||
|
||||
class OpenGLState {
|
||||
public:
|
||||
struct {
|
||||
|
@ -142,7 +135,12 @@ public:
|
|||
GLsizei height;
|
||||
} scissor;
|
||||
|
||||
Viewport viewport;
|
||||
struct {
|
||||
GLint x;
|
||||
GLint y;
|
||||
GLsizei width;
|
||||
GLsizei height;
|
||||
} viewport;
|
||||
|
||||
std::array<bool, 2> clip_distance; // GL_CLIP_DISTANCE
|
||||
|
||||
|
|
|
@ -0,0 +1,171 @@
|
|||
// Copyright 2020 Citra Emulator Project
|
||||
// Licensed under GPLv2 or any later version
|
||||
// Refer to the license.txt file included.
|
||||
|
||||
#include "common/alignment.h"
|
||||
#include "video_core/renderer_opengl/gl_rasterizer_cache.h"
|
||||
#include "video_core/renderer_opengl/gl_surface_params.h"
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
SurfaceParams SurfaceParams::FromInterval(SurfaceInterval interval) const {
|
||||
SurfaceParams params = *this;
|
||||
const u32 tiled_size = is_tiled ? 8 : 1;
|
||||
const u32 stride_tiled_bytes = BytesInPixels(stride * tiled_size);
|
||||
PAddr aligned_start =
|
||||
addr + Common::AlignDown(boost::icl::first(interval) - addr, stride_tiled_bytes);
|
||||
PAddr aligned_end =
|
||||
addr + Common::AlignUp(boost::icl::last_next(interval) - addr, stride_tiled_bytes);
|
||||
|
||||
if (aligned_end - aligned_start > stride_tiled_bytes) {
|
||||
params.addr = aligned_start;
|
||||
params.height = (aligned_end - aligned_start) / BytesInPixels(stride);
|
||||
} else {
|
||||
// 1 row
|
||||
ASSERT(aligned_end - aligned_start == stride_tiled_bytes);
|
||||
const u32 tiled_alignment = BytesInPixels(is_tiled ? 8 * 8 : 1);
|
||||
aligned_start =
|
||||
addr + Common::AlignDown(boost::icl::first(interval) - addr, tiled_alignment);
|
||||
aligned_end =
|
||||
addr + Common::AlignUp(boost::icl::last_next(interval) - addr, tiled_alignment);
|
||||
params.addr = aligned_start;
|
||||
params.width = PixelsInBytes(aligned_end - aligned_start) / tiled_size;
|
||||
params.stride = params.width;
|
||||
params.height = tiled_size;
|
||||
}
|
||||
params.UpdateParams();
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
SurfaceInterval SurfaceParams::GetSubRectInterval(Common::Rectangle<u32> unscaled_rect) const {
|
||||
if (unscaled_rect.GetHeight() == 0 || unscaled_rect.GetWidth() == 0) {
|
||||
return {};
|
||||
}
|
||||
|
||||
if (is_tiled) {
|
||||
unscaled_rect.left = Common::AlignDown(unscaled_rect.left, 8) * 8;
|
||||
unscaled_rect.bottom = Common::AlignDown(unscaled_rect.bottom, 8) / 8;
|
||||
unscaled_rect.right = Common::AlignUp(unscaled_rect.right, 8) * 8;
|
||||
unscaled_rect.top = Common::AlignUp(unscaled_rect.top, 8) / 8;
|
||||
}
|
||||
|
||||
const u32 stride_tiled = !is_tiled ? stride : stride * 8;
|
||||
|
||||
const u32 pixel_offset =
|
||||
stride_tiled * (!is_tiled ? unscaled_rect.bottom : (height / 8) - unscaled_rect.top) +
|
||||
unscaled_rect.left;
|
||||
|
||||
const u32 pixels = (unscaled_rect.GetHeight() - 1) * stride_tiled + unscaled_rect.GetWidth();
|
||||
|
||||
return {addr + BytesInPixels(pixel_offset), addr + BytesInPixels(pixel_offset + pixels)};
|
||||
}
|
||||
|
||||
SurfaceInterval SurfaceParams::GetCopyableInterval(const Surface& src_surface) const {
|
||||
SurfaceInterval result{};
|
||||
const auto valid_regions =
|
||||
SurfaceRegions(GetInterval() & src_surface->GetInterval()) - src_surface->invalid_regions;
|
||||
for (auto& valid_interval : valid_regions) {
|
||||
const SurfaceInterval aligned_interval{
|
||||
addr + Common::AlignUp(boost::icl::first(valid_interval) - addr,
|
||||
BytesInPixels(is_tiled ? 8 * 8 : 1)),
|
||||
addr + Common::AlignDown(boost::icl::last_next(valid_interval) - addr,
|
||||
BytesInPixels(is_tiled ? 8 * 8 : 1))};
|
||||
|
||||
if (BytesInPixels(is_tiled ? 8 * 8 : 1) > boost::icl::length(valid_interval) ||
|
||||
boost::icl::length(aligned_interval) == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Get the rectangle within aligned_interval
|
||||
const u32 stride_bytes = BytesInPixels(stride) * (is_tiled ? 8 : 1);
|
||||
SurfaceInterval rect_interval{
|
||||
addr + Common::AlignUp(boost::icl::first(aligned_interval) - addr, stride_bytes),
|
||||
addr + Common::AlignDown(boost::icl::last_next(aligned_interval) - addr, stride_bytes),
|
||||
};
|
||||
if (boost::icl::first(rect_interval) > boost::icl::last_next(rect_interval)) {
|
||||
// 1 row
|
||||
rect_interval = aligned_interval;
|
||||
} else if (boost::icl::length(rect_interval) == 0) {
|
||||
// 2 rows that do not make a rectangle, return the larger one
|
||||
const SurfaceInterval row1{boost::icl::first(aligned_interval),
|
||||
boost::icl::first(rect_interval)};
|
||||
const SurfaceInterval row2{boost::icl::first(rect_interval),
|
||||
boost::icl::last_next(aligned_interval)};
|
||||
rect_interval = (boost::icl::length(row1) > boost::icl::length(row2)) ? row1 : row2;
|
||||
}
|
||||
|
||||
if (boost::icl::length(rect_interval) > boost::icl::length(result)) {
|
||||
result = rect_interval;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
Common::Rectangle<u32> SurfaceParams::GetSubRect(const SurfaceParams& sub_surface) const {
|
||||
const u32 begin_pixel_index = PixelsInBytes(sub_surface.addr - addr);
|
||||
|
||||
if (is_tiled) {
|
||||
const int x0 = (begin_pixel_index % (stride * 8)) / 8;
|
||||
const int y0 = (begin_pixel_index / (stride * 8)) * 8;
|
||||
// Top to bottom
|
||||
return Common::Rectangle<u32>(x0, height - y0, x0 + sub_surface.width,
|
||||
height - (y0 + sub_surface.height));
|
||||
}
|
||||
|
||||
const int x0 = begin_pixel_index % stride;
|
||||
const int y0 = begin_pixel_index / stride;
|
||||
// Bottom to top
|
||||
return Common::Rectangle<u32>(x0, y0 + sub_surface.height, x0 + sub_surface.width, y0);
|
||||
}
|
||||
|
||||
Common::Rectangle<u32> SurfaceParams::GetScaledSubRect(const SurfaceParams& sub_surface) const {
|
||||
auto rect = GetSubRect(sub_surface);
|
||||
rect.left = rect.left * res_scale;
|
||||
rect.right = rect.right * res_scale;
|
||||
rect.top = rect.top * res_scale;
|
||||
rect.bottom = rect.bottom * res_scale;
|
||||
return rect;
|
||||
}
|
||||
|
||||
bool SurfaceParams::ExactMatch(const SurfaceParams& other_surface) const {
|
||||
return std::tie(other_surface.addr, other_surface.width, other_surface.height,
|
||||
other_surface.stride, other_surface.pixel_format, other_surface.is_tiled) ==
|
||||
std::tie(addr, width, height, stride, pixel_format, is_tiled) &&
|
||||
pixel_format != PixelFormat::Invalid;
|
||||
}
|
||||
|
||||
bool SurfaceParams::CanSubRect(const SurfaceParams& sub_surface) const {
|
||||
return sub_surface.addr >= addr && sub_surface.end <= end &&
|
||||
sub_surface.pixel_format == pixel_format && pixel_format != PixelFormat::Invalid &&
|
||||
sub_surface.is_tiled == is_tiled &&
|
||||
(sub_surface.addr - addr) % BytesInPixels(is_tiled ? 64 : 1) == 0 &&
|
||||
(sub_surface.stride == stride || sub_surface.height <= (is_tiled ? 8u : 1u)) &&
|
||||
GetSubRect(sub_surface).right <= stride;
|
||||
}
|
||||
|
||||
bool SurfaceParams::CanExpand(const SurfaceParams& expanded_surface) const {
|
||||
return pixel_format != PixelFormat::Invalid && pixel_format == expanded_surface.pixel_format &&
|
||||
addr <= expanded_surface.end && expanded_surface.addr <= end &&
|
||||
is_tiled == expanded_surface.is_tiled && stride == expanded_surface.stride &&
|
||||
(std::max(expanded_surface.addr, addr) - std::min(expanded_surface.addr, addr)) %
|
||||
BytesInPixels(stride * (is_tiled ? 8 : 1)) ==
|
||||
0;
|
||||
}
|
||||
|
||||
bool SurfaceParams::CanTexCopy(const SurfaceParams& texcopy_params) const {
|
||||
if (pixel_format == PixelFormat::Invalid || addr > texcopy_params.addr ||
|
||||
end < texcopy_params.end) {
|
||||
return false;
|
||||
}
|
||||
if (texcopy_params.width != texcopy_params.stride) {
|
||||
const u32 tile_stride = BytesInPixels(stride * (is_tiled ? 8 : 1));
|
||||
return (texcopy_params.addr - addr) % BytesInPixels(is_tiled ? 64 : 1) == 0 &&
|
||||
texcopy_params.width % BytesInPixels(is_tiled ? 64 : 1) == 0 &&
|
||||
(texcopy_params.height == 1 || texcopy_params.stride == tile_stride) &&
|
||||
((texcopy_params.addr - addr) % tile_stride) + texcopy_params.width <= tile_stride;
|
||||
}
|
||||
return FromInterval(texcopy_params.GetInterval()).GetInterval() == texcopy_params.GetInterval();
|
||||
}
|
||||
|
||||
} // namespace OpenGL
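As a worked example of the untiled GetSubRect() arithmetic defined above (standalone, with simplified types and a made-up address): the sub-surface's byte offset is turned into a pixel index, and the stride splits that index into the (x0, y0) origin inside the parent surface:

// Illustrative sketch only.
#include <cassert>
#include <cstdint>

int main() {
    const std::uint32_t bytes_per_pixel = 4; // e.g. RGBA8
    const std::uint32_t parent_addr = 0x18000000;
    const std::uint32_t stride = 256; // pixels per row of the parent surface

    // Sub-surface starting 3 rows plus 16 pixels into the parent, 64x32 pixels.
    const std::uint32_t sub_addr = parent_addr + (3 * stride + 16) * bytes_per_pixel;
    const std::uint32_t sub_width = 64, sub_height = 32;

    const std::uint32_t begin_pixel_index = (sub_addr - parent_addr) / bytes_per_pixel;
    const std::uint32_t x0 = begin_pixel_index % stride; // 16
    const std::uint32_t y0 = begin_pixel_index / stride; // 3

    assert(x0 == 16 && y0 == 3);
    // The resulting rectangle, in the bottom-to-top convention used above, is
    // {left = 16, top = 35, right = 80, bottom = 3}.
    assert(y0 + sub_height == 35 && x0 + sub_width == 80);
}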
|
|
@ -0,0 +1,270 @@
|
|||
// Copyright 2020 Citra Emulator Project
|
||||
// Licensed under GPLv2 or any later version
|
||||
// Refer to the license.txt file included.
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <array>
|
||||
#include <climits>
|
||||
#include <boost/icl/interval.hpp>
|
||||
#include "common/assert.h"
|
||||
#include "common/math_util.h"
|
||||
#include "core/hw/gpu.h"
|
||||
#include "video_core/regs_framebuffer.h"
|
||||
#include "video_core/regs_texturing.h"
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
struct CachedSurface;
|
||||
using Surface = std::shared_ptr<CachedSurface>;
|
||||
|
||||
using SurfaceInterval = boost::icl::right_open_interval<PAddr>;
|
||||
|
||||
struct SurfaceParams {
|
||||
private:
|
||||
static constexpr std::array<unsigned int, 18> BPP_TABLE = {
|
||||
32, // RGBA8
|
||||
24, // RGB8
|
||||
16, // RGB5A1
|
||||
16, // RGB565
|
||||
16, // RGBA4
|
||||
16, // IA8
|
||||
16, // RG8
|
||||
8, // I8
|
||||
8, // A8
|
||||
8, // IA4
|
||||
4, // I4
|
||||
4, // A4
|
||||
4, // ETC1
|
||||
8, // ETC1A4
|
||||
16, // D16
|
||||
0,
|
||||
24, // D24
|
||||
32, // D24S8
|
||||
};
|
||||
|
||||
public:
|
||||
enum class PixelFormat {
|
||||
// First 5 formats are shared between textures and color buffers
|
||||
RGBA8 = 0,
|
||||
RGB8 = 1,
|
||||
RGB5A1 = 2,
|
||||
RGB565 = 3,
|
||||
RGBA4 = 4,
|
||||
|
||||
// Texture-only formats
|
||||
IA8 = 5,
|
||||
RG8 = 6,
|
||||
I8 = 7,
|
||||
A8 = 8,
|
||||
IA4 = 9,
|
||||
I4 = 10,
|
||||
A4 = 11,
|
||||
ETC1 = 12,
|
||||
ETC1A4 = 13,
|
||||
|
||||
// Depth buffer-only formats
|
||||
D16 = 14,
|
||||
// gap
|
||||
D24 = 16,
|
||||
D24S8 = 17,
|
||||
|
||||
Invalid = 255,
|
||||
};
|
||||
|
||||
enum class SurfaceType {
|
||||
Color = 0,
|
||||
Texture = 1,
|
||||
Depth = 2,
|
||||
DepthStencil = 3,
|
||||
Fill = 4,
|
||||
Invalid = 5
|
||||
};
|
||||
|
||||
static constexpr unsigned int GetFormatBpp(PixelFormat format) {
|
||||
const auto format_idx = static_cast<std::size_t>(format);
|
||||
DEBUG_ASSERT_MSG(format_idx < BPP_TABLE.size(), "Invalid pixel format {}", format_idx);
|
||||
return BPP_TABLE[format_idx];
|
||||
}
|
||||
|
||||
unsigned int GetFormatBpp() const {
|
||||
return GetFormatBpp(pixel_format);
|
||||
}
|
||||
|
||||
static std::string_view PixelFormatAsString(PixelFormat format) {
|
||||
switch (format) {
|
||||
case PixelFormat::RGBA8:
|
||||
return "RGBA8";
|
||||
case PixelFormat::RGB8:
|
||||
return "RGB8";
|
||||
case PixelFormat::RGB5A1:
|
||||
return "RGB5A1";
|
||||
case PixelFormat::RGB565:
|
||||
return "RGB565";
|
||||
case PixelFormat::RGBA4:
|
||||
return "RGBA4";
|
||||
case PixelFormat::IA8:
|
||||
return "IA8";
|
||||
case PixelFormat::RG8:
|
||||
return "RG8";
|
||||
case PixelFormat::I8:
|
||||
return "I8";
|
||||
case PixelFormat::A8:
|
||||
return "A8";
|
||||
case PixelFormat::IA4:
|
||||
return "IA4";
|
||||
case PixelFormat::I4:
|
||||
return "I4";
|
||||
case PixelFormat::A4:
|
||||
return "A4";
|
||||
case PixelFormat::ETC1:
|
||||
return "ETC1";
|
||||
case PixelFormat::ETC1A4:
|
||||
return "ETC1A4";
|
||||
case PixelFormat::D16:
|
||||
return "D16";
|
||||
case PixelFormat::D24:
|
||||
return "D24";
|
||||
case PixelFormat::D24S8:
|
||||
return "D24S8";
|
||||
default:
|
||||
return "Not a real pixel format";
|
||||
}
|
||||
}
|
||||
|
||||
static PixelFormat PixelFormatFromTextureFormat(Pica::TexturingRegs::TextureFormat format) {
|
||||
return ((unsigned int)format < 14) ? (PixelFormat)format : PixelFormat::Invalid;
|
||||
}
|
||||
|
||||
static PixelFormat PixelFormatFromColorFormat(Pica::FramebufferRegs::ColorFormat format) {
|
||||
return ((unsigned int)format < 5) ? (PixelFormat)format : PixelFormat::Invalid;
|
||||
}
|
||||
|
||||
static PixelFormat PixelFormatFromDepthFormat(Pica::FramebufferRegs::DepthFormat format) {
|
||||
return ((unsigned int)format < 4) ? (PixelFormat)((unsigned int)format + 14)
|
||||
: PixelFormat::Invalid;
|
||||
}
|
||||
|
||||
static PixelFormat PixelFormatFromGPUPixelFormat(GPU::Regs::PixelFormat format) {
|
||||
switch (format) {
|
||||
// RGB565 and RGB5A1 are switched in PixelFormat compared to ColorFormat
|
||||
case GPU::Regs::PixelFormat::RGB565:
|
||||
return PixelFormat::RGB565;
|
||||
case GPU::Regs::PixelFormat::RGB5A1:
|
||||
return PixelFormat::RGB5A1;
|
||||
default:
|
||||
return ((unsigned int)format < 5) ? (PixelFormat)format : PixelFormat::Invalid;
|
||||
}
|
||||
}
|
||||
|
||||
static bool CheckFormatsBlittable(PixelFormat pixel_format_a, PixelFormat pixel_format_b) {
|
||||
SurfaceType a_type = GetFormatType(pixel_format_a);
|
||||
SurfaceType b_type = GetFormatType(pixel_format_b);
|
||||
|
||||
if ((a_type == SurfaceType::Color || a_type == SurfaceType::Texture) &&
|
||||
(b_type == SurfaceType::Color || b_type == SurfaceType::Texture)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (a_type == SurfaceType::Depth && b_type == SurfaceType::Depth) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (a_type == SurfaceType::DepthStencil && b_type == SurfaceType::DepthStencil) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
static constexpr SurfaceType GetFormatType(PixelFormat pixel_format) {
|
||||
if ((unsigned int)pixel_format < 5) {
|
||||
return SurfaceType::Color;
|
||||
}
|
||||
|
||||
if ((unsigned int)pixel_format < 14) {
|
||||
return SurfaceType::Texture;
|
||||
}
|
||||
|
||||
if (pixel_format == PixelFormat::D16 || pixel_format == PixelFormat::D24) {
|
||||
return SurfaceType::Depth;
|
||||
}
|
||||
|
||||
if (pixel_format == PixelFormat::D24S8) {
|
||||
return SurfaceType::DepthStencil;
|
||||
}
|
||||
|
||||
return SurfaceType::Invalid;
|
||||
}
|
||||
|
||||
/// Update the params "size", "end" and "type" from the already set "addr", "width", "height"
|
||||
/// and "pixel_format"
|
||||
void UpdateParams() {
|
||||
if (stride == 0) {
|
||||
stride = width;
|
||||
}
|
||||
type = GetFormatType(pixel_format);
|
||||
size = !is_tiled ? BytesInPixels(stride * (height - 1) + width)
|
||||
: BytesInPixels(stride * 8 * (height / 8 - 1) + width * 8);
|
||||
end = addr + size;
|
||||
}
|
||||
|
||||
SurfaceInterval GetInterval() const {
|
||||
return SurfaceInterval(addr, end);
|
||||
}
|
||||
|
||||
// Returns the outer rectangle containing "interval"
|
||||
SurfaceParams FromInterval(SurfaceInterval interval) const;
|
||||
|
||||
SurfaceInterval GetSubRectInterval(Common::Rectangle<u32> unscaled_rect) const;
|
||||
|
||||
// Returns the region of the biggest valid rectangle within the interval
|
||||
SurfaceInterval GetCopyableInterval(const Surface& src_surface) const;
|
||||
|
||||
u32 GetScaledWidth() const {
|
||||
return width * res_scale;
|
||||
}
|
||||
|
||||
u32 GetScaledHeight() const {
|
||||
return height * res_scale;
|
||||
}
|
||||
|
||||
Common::Rectangle<u32> GetRect() const {
|
||||
return {0, height, width, 0};
|
||||
}
|
||||
|
||||
Common::Rectangle<u32> GetScaledRect() const {
|
||||
return {0, GetScaledHeight(), GetScaledWidth(), 0};
|
||||
}
|
||||
|
||||
u32 PixelsInBytes(u32 size) const {
|
||||
return size * CHAR_BIT / GetFormatBpp(pixel_format);
|
||||
}
|
||||
|
||||
u32 BytesInPixels(u32 pixels) const {
|
||||
return pixels * GetFormatBpp(pixel_format) / CHAR_BIT;
|
||||
}
|
||||
|
||||
bool ExactMatch(const SurfaceParams& other_surface) const;
|
||||
bool CanSubRect(const SurfaceParams& sub_surface) const;
|
||||
bool CanExpand(const SurfaceParams& expanded_surface) const;
|
||||
bool CanTexCopy(const SurfaceParams& texcopy_params) const;
|
||||
|
||||
Common::Rectangle<u32> GetSubRect(const SurfaceParams& sub_surface) const;
|
||||
Common::Rectangle<u32> GetScaledSubRect(const SurfaceParams& sub_surface) const;
|
||||
|
||||
PAddr addr = 0;
|
||||
PAddr end = 0;
|
||||
u32 size = 0;
|
||||
|
||||
u32 width = 0;
|
||||
u32 height = 0;
|
||||
u32 stride = 0;
|
||||
u16 res_scale = 1;
|
||||
|
||||
bool is_tiled = false;
|
||||
PixelFormat pixel_format = PixelFormat::Invalid;
|
||||
SurfaceType type = SurfaceType::Invalid;
|
||||
};
|
||||
|
||||
} // namespace OpenGL
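A standalone worked example of the tiled-size formula in UpdateParams() above, assuming stride equals width: the arithmetic counts whole 8-pixel-tall tile rows plus the final row and converts pixels to bytes via the format's bits per pixel:

// Illustrative sketch only.
#include <cassert>
#include <cstdint>

int main() {
    // 32x32 RGB565 surface, tiled, stride == width.
    const std::uint32_t width = 32, height = 32, stride = 32;
    const std::uint32_t bpp = 16; // bits per pixel for RGB565

    const auto BytesInPixels = [&](std::uint32_t pixels) { return pixels * bpp / 8; };

    // Tiled branch of UpdateParams():
    const std::uint32_t size = BytesInPixels(stride * 8 * (height / 8 - 1) + width * 8);

    // 32*8*3 + 32*8 = 1024 pixels -> 2048 bytes, i.e. exactly 32*32 16-bit texels.
    assert(size == 2048);
    assert(size == width * height * bpp / 8);
}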
|
|
@ -32,23 +32,8 @@
|
|||
#include "video_core/renderer_opengl/gl_vars.h"
|
||||
#include "video_core/renderer_opengl/post_processing_opengl.h"
|
||||
#include "video_core/renderer_opengl/renderer_opengl.h"
|
||||
#include "video_core/renderer_opengl/texture_filters/texture_filter_manager.h"
|
||||
#include "video_core/video_core.h"
|
||||
|
||||
namespace Frontend {
|
||||
|
||||
struct Frame {
|
||||
u32 width{}; /// Width of the frame (to detect resize)
|
||||
u32 height{}; /// Height of the frame
|
||||
bool color_reloaded = false; /// Texture attachment was recreated (ie: resized)
|
||||
OpenGL::OGLRenderbuffer color{}; /// Buffer shared between the render/present FBO
|
||||
OpenGL::OGLFramebuffer render{}; /// FBO created on the render thread
|
||||
OpenGL::OGLFramebuffer present{}; /// FBO created on the present thread
|
||||
GLsync render_fence{}; /// Fence created on the render thread
|
||||
GLsync present_fence{}; /// Fence created on the presentation thread
|
||||
};
|
||||
} // namespace Frontend
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
// If the size of this is too small, it ends up creating a soft cap on FPS as the renderer will have
|
||||
|
@ -79,6 +64,7 @@ public:
|
|||
std::queue<Frontend::Frame*>().swap(free_queue);
|
||||
present_queue.clear();
|
||||
present_cv.notify_all();
|
||||
free_cv.notify_all();
|
||||
}
|
||||
|
||||
void ReloadPresentFrame(Frontend::Frame* frame, u32 height, u32 width) override {
|
||||
|
@ -89,7 +75,7 @@ public:
|
|||
glBindFramebuffer(GL_FRAMEBUFFER, frame->present.handle);
|
||||
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER,
|
||||
frame->color.handle);
|
||||
if (!glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE) {
|
||||
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
|
||||
LOG_CRITICAL(Render_OpenGL, "Failed to recreate present FBO!");
|
||||
}
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, previous_draw_fbo);
|
||||
|
@ -115,7 +101,7 @@ public:
|
|||
state.Apply();
|
||||
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER,
|
||||
frame->color.handle);
|
||||
if (!glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE) {
|
||||
if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
|
||||
LOG_CRITICAL(Render_OpenGL, "Failed to recreate render FBO!");
|
||||
}
|
||||
prev_state.Apply();
|
||||
|
@ -145,19 +131,12 @@ public:
|
|||
present_cv.notify_one();
|
||||
}
|
||||
|
||||
Frontend::Frame* TryGetPresentFrame(int timeout_ms) override {
|
||||
std::unique_lock<std::mutex> lock(swap_chain_lock);
|
||||
// wait for new entries in the present_queue
|
||||
present_cv.wait_for(lock, std::chrono::milliseconds(timeout_ms),
|
||||
[&] { return !present_queue.empty(); });
|
||||
if (present_queue.empty()) {
|
||||
// timed out waiting for a frame to draw so return the previous frame
|
||||
return previous_frame;
|
||||
}
|
||||
|
||||
// This is virtual as it is overridden in OGLVideoDumpingMailbox below.
|
||||
virtual void LoadPresentFrame() {
|
||||
// free the previous frame and add it back to the free queue
|
||||
if (previous_frame) {
|
||||
free_queue.push(previous_frame);
|
||||
free_cv.notify_one();
|
||||
}
|
||||
|
||||
// the newest entries are pushed to the front of the queue
|
||||
|
@ -169,8 +148,72 @@ public:
|
|||
}
|
||||
present_queue.clear();
|
||||
previous_frame = frame;
|
||||
}
|
||||
|
||||
Frontend::Frame* TryGetPresentFrame(int timeout_ms) override {
|
||||
std::unique_lock<std::mutex> lock(swap_chain_lock);
|
||||
// wait for new entries in the present_queue
|
||||
present_cv.wait_for(lock, std::chrono::milliseconds(timeout_ms),
|
||||
[&] { return !present_queue.empty(); });
|
||||
if (present_queue.empty()) {
|
||||
// timed out waiting for a frame to draw so return the previous frame
|
||||
return previous_frame;
|
||||
}
|
||||
|
||||
LoadPresentFrame();
|
||||
return previous_frame;
|
||||
}
|
||||
};
|
||||
|
||||
/// This mailbox is different in that it will never discard rendered frames
|
||||
class OGLVideoDumpingMailbox : public OGLTextureMailbox {
|
||||
public:
|
||||
Frontend::Frame* GetRenderFrame() override {
|
||||
std::unique_lock<std::mutex> lock(swap_chain_lock);
|
||||
|
||||
// If there are no free frames, we will wait until one shows up
|
||||
if (free_queue.empty()) {
|
||||
free_cv.wait(lock, [&] { return !free_queue.empty(); });
|
||||
}
|
||||
|
||||
if (free_queue.empty()) {
|
||||
LOG_CRITICAL(Render_OpenGL, "Could not get free frame");
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
Frontend::Frame* frame = free_queue.front();
|
||||
free_queue.pop();
|
||||
return frame;
|
||||
}
|
||||
|
||||
void LoadPresentFrame() override {
|
||||
// free the previous frame and add it back to the free queue
|
||||
if (previous_frame) {
|
||||
free_queue.push(previous_frame);
|
||||
free_cv.notify_one();
|
||||
}
|
||||
|
||||
Frontend::Frame* frame = present_queue.back();
|
||||
present_queue.pop_back();
|
||||
previous_frame = frame;
|
||||
|
||||
// Do not remove entries from the present_queue, as video dumping would require
|
||||
// that we preserve all frames
|
||||
}
|
||||
|
||||
Frontend::Frame* TryGetPresentFrame(int timeout_ms) override {
|
||||
std::unique_lock<std::mutex> lock(swap_chain_lock);
|
||||
// wait for new entries in the present_queue
|
||||
present_cv.wait_for(lock, std::chrono::milliseconds(timeout_ms),
|
||||
[&] { return !present_queue.empty(); });
|
||||
if (present_queue.empty()) {
|
||||
// timed out waiting for a frame
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
LoadPresentFrame();
|
||||
return previous_frame;
|
||||
}
|
||||
};
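The two mailbox classes above can be summarized with this self-contained sketch (illustrative names and types only, not the real Frontend interface): the render thread pulls frames from a free queue, the present thread blocks on a condition variable until a finished frame arrives, and the presenting variant recycles stale frames while the dumping variant hands over every frame in order:

// Illustrative sketch only.
#include <condition_variable>
#include <deque>
#include <mutex>
#include <queue>

struct Frame { int id = 0; };

class Mailbox {
public:
    Mailbox() {
        for (Frame& slot : storage)
            free_queue.push(&slot);
    }

    Frame* GetRenderFrame() {
        std::unique_lock lock{mutex};
        free_cv.wait(lock, [&] { return !free_queue.empty(); });
        Frame* frame = free_queue.front();
        free_queue.pop();
        return frame;
    }

    void ReleaseRenderFrame(Frame* frame) {
        std::unique_lock lock{mutex};
        present_queue.push_back(frame);
        present_cv.notify_one();
    }

    // keep_all == false mimics the presenting mailbox: only the newest frame is
    // shown and older ones are recycled. keep_all == true mimics the dumping
    // mailbox, which must hand over every rendered frame in order.
    Frame* GetPresentFrame(bool keep_all) {
        std::unique_lock lock{mutex};
        present_cv.wait(lock, [&] { return !present_queue.empty(); });
        Frame* frame;
        if (keep_all) {
            frame = present_queue.front();
            present_queue.pop_front();
        } else {
            frame = present_queue.back();
            present_queue.pop_back();
            for (Frame* stale : present_queue) // recycle frames nobody will show
                free_queue.push(stale);
            present_queue.clear();
        }
        if (previous_frame)
            free_queue.push(previous_frame); // previous frame is done presenting
        previous_frame = frame;
        free_cv.notify_one();
        return frame;
    }

private:
    Frame storage[3];
    std::mutex mutex;
    std::condition_variable free_cv, present_cv;
    std::queue<Frame*> free_queue;
    std::deque<Frame*> present_queue;
    Frame* previous_frame = nullptr;
};

int main() {
    Mailbox mailbox;
    Frame* frame = mailbox.GetRenderFrame();
    mailbox.ReleaseRenderFrame(frame);
    Frame* shown = mailbox.GetPresentFrame(/*keep_all=*/false);
    (void)shown;
}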
|
||||
|
||||
static const char vertex_shader[] = R"(
|
||||
|
@ -279,21 +322,35 @@ struct ScreenRectVertex {
|
|||
*
|
||||
* The projection part of the matrix is trivial, hence these operations are represented
|
||||
* by a 3x2 matrix.
|
||||
*
|
||||
* @param flipped Whether the frame should be flipped upside down.
|
||||
*/
|
||||
static std::array<GLfloat, 3 * 2> MakeOrthographicMatrix(const float width, const float height) {
|
||||
static std::array<GLfloat, 3 * 2> MakeOrthographicMatrix(const float width, const float height,
|
||||
bool flipped) {
|
||||
|
||||
std::array<GLfloat, 3 * 2> matrix; // Laid out in column-major order
|
||||
|
||||
// Last matrix row is implicitly assumed to be [0, 0, 1].
|
||||
if (flipped) {
|
||||
// clang-format off
|
||||
matrix[0] = 2.f / width; matrix[2] = 0.f; matrix[4] = -1.f;
|
||||
matrix[1] = 0.f; matrix[3] = 2.f / height; matrix[5] = -1.f;
|
||||
// clang-format on
|
||||
} else {
|
||||
// clang-format off
|
||||
matrix[0] = 2.f / width; matrix[2] = 0.f; matrix[4] = -1.f;
|
||||
matrix[1] = 0.f; matrix[3] = -2.f / height; matrix[5] = 1.f;
|
||||
// Last matrix row is implicitly assumed to be [0, 0, 1].
|
||||
// clang-format on
|
||||
}
|
||||
|
||||
return matrix;
|
||||
}
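A standalone worked example of the 3x2 matrix built above: window coordinates in [0, width] x [0, height] map to normalized device coordinates, and the flipped variant negates the y mapping so the image comes out upside down (power-of-two sizes keep the float asserts exact):

// Illustrative sketch only.
#include <array>
#include <cassert>

using Mat3x2 = std::array<float, 6>; // column-major, last row implicitly [0, 0, 1]

Mat3x2 MakeOrtho(float width, float height, bool flipped) {
    if (flipped)
        return {2.f / width, 0.f, 0.f, 2.f / height, -1.f, -1.f};
    return {2.f / width, 0.f, 0.f, -2.f / height, -1.f, 1.f};
}

std::array<float, 2> Apply(const Mat3x2& m, float x, float y) {
    return {m[0] * x + m[2] * y + m[4], m[1] * x + m[3] * y + m[5]};
}

int main() {
    const Mat3x2 normal = MakeOrtho(512.f, 256.f, false);
    const auto top_left = Apply(normal, 0.f, 0.f);
    const auto bottom_right = Apply(normal, 512.f, 256.f);
    assert(top_left[0] == -1.f && top_left[1] == 1.f);         // (0, 0) -> (-1, +1)
    assert(bottom_right[0] == 1.f && bottom_right[1] == -1.f); // (w, h) -> (+1, -1)

    const Mat3x2 flipped = MakeOrtho(512.f, 256.f, true);
    const auto flipped_top_left = Apply(flipped, 0.f, 0.f);
    assert(flipped_top_left[0] == -1.f && flipped_top_left[1] == -1.f); // image upside down
}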
|
||||
|
||||
RendererOpenGL::RendererOpenGL(Frontend::EmuWindow& window) : RendererBase{window} {
|
||||
RendererOpenGL::RendererOpenGL(Frontend::EmuWindow& window)
|
||||
: RendererBase{window}, frame_dumper(Core::System::GetInstance().VideoDumper(), window) {
|
||||
|
||||
window.mailbox = std::make_unique<OGLTextureMailbox>();
|
||||
frame_dumper.mailbox = std::make_unique<OGLVideoDumpingMailbox>();
|
||||
}
|
||||
|
||||
RendererOpenGL::~RendererOpenGL() = default;
|
||||
|
@ -311,56 +368,14 @@ void RendererOpenGL::SwapBuffers() {
|
|||
|
||||
RenderScreenshot();
|
||||
|
||||
RenderVideoDumping();
|
||||
|
||||
const auto& layout = render_window.GetFramebufferLayout();
|
||||
RenderToMailbox(layout, render_window.mailbox, false);
|
||||
|
||||
Frontend::Frame* frame;
|
||||
{
|
||||
MICROPROFILE_SCOPE(OpenGL_WaitPresent);
|
||||
|
||||
frame = render_window.mailbox->GetRenderFrame();
|
||||
|
||||
// Clean up sync objects before drawing
|
||||
|
||||
// INTEL driver workaround. We can't delete the previous render sync object until we are
|
||||
// sure that the presentation is done
|
||||
if (frame->present_fence) {
|
||||
glClientWaitSync(frame->present_fence, 0, GL_TIMEOUT_IGNORED);
|
||||
if (frame_dumper.IsDumping()) {
|
||||
RenderToMailbox(frame_dumper.GetLayout(), frame_dumper.mailbox, true);
|
||||
}
|
||||
|
||||
// delete the draw fence if the frame wasn't presented
|
||||
if (frame->render_fence) {
|
||||
glDeleteSync(frame->render_fence);
|
||||
frame->render_fence = 0;
|
||||
}
|
||||
|
||||
// wait for the presentation to be done
|
||||
if (frame->present_fence) {
|
||||
glWaitSync(frame->present_fence, 0, GL_TIMEOUT_IGNORED);
|
||||
glDeleteSync(frame->present_fence);
|
||||
frame->present_fence = 0;
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
MICROPROFILE_SCOPE(OpenGL_RenderFrame);
|
||||
// Recreate the frame if the size of the window has changed
|
||||
if (layout.width != frame->width || layout.height != frame->height) {
|
||||
LOG_DEBUG(Render_OpenGL, "Reloading render frame");
|
||||
render_window.mailbox->ReloadRenderFrame(frame, layout.width, layout.height);
|
||||
}
|
||||
|
||||
GLuint render_texture = frame->color.handle;
|
||||
state.draw.draw_framebuffer = frame->render.handle;
|
||||
state.Apply();
|
||||
DrawScreens(layout);
|
||||
// Create a fence for the frontend to wait on and swap this frame to OffTex
|
||||
frame->render_fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
glFlush();
|
||||
render_window.mailbox->ReleaseRenderFrame(frame);
|
||||
m_current_frame++;
|
||||
}
|
||||
|
||||
Core::System::GetInstance().perf_stats->EndSystemFrame();
|
||||
|
||||
|
@ -396,7 +411,7 @@ void RendererOpenGL::RenderScreenshot() {
|
|||
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER,
|
||||
renderbuffer);
|
||||
|
||||
DrawScreens(layout);
|
||||
DrawScreens(layout, false);
|
||||
|
||||
glReadPixels(0, 0, layout.width, layout.height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV,
|
||||
VideoCore::g_screenshot_bits);
|
||||
|
@ -449,33 +464,54 @@ void RendererOpenGL::PrepareRendertarget() {
|
|||
}
|
||||
}
|
||||
|
||||
void RendererOpenGL::RenderVideoDumping() {
|
||||
if (cleanup_video_dumping.exchange(false)) {
|
||||
ReleaseVideoDumpingGLObjects();
|
||||
void RendererOpenGL::RenderToMailbox(const Layout::FramebufferLayout& layout,
|
||||
std::unique_ptr<Frontend::TextureMailbox>& mailbox,
|
||||
bool flipped) {
|
||||
|
||||
Frontend::Frame* frame;
|
||||
{
|
||||
MICROPROFILE_SCOPE(OpenGL_WaitPresent);
|
||||
|
||||
frame = mailbox->GetRenderFrame();
|
||||
|
||||
// Clean up sync objects before drawing
|
||||
|
||||
// INTEL driver workaround. We can't delete the previous render sync object until we are
|
||||
// sure that the presentation is done
|
||||
if (frame->present_fence) {
|
||||
glClientWaitSync(frame->present_fence, 0, GL_TIMEOUT_IGNORED);
|
||||
}
|
||||
|
||||
if (Core::System::GetInstance().VideoDumper().IsDumping()) {
|
||||
if (prepare_video_dumping.exchange(false)) {
|
||||
InitVideoDumpingGLObjects();
|
||||
// delete the draw fence if the frame wasn't presented
|
||||
if (frame->render_fence) {
|
||||
glDeleteSync(frame->render_fence);
|
||||
frame->render_fence = 0;
|
||||
}
|
||||
|
||||
const auto& layout = Core::System::GetInstance().VideoDumper().GetLayout();
|
||||
glBindFramebuffer(GL_READ_FRAMEBUFFER, frame_dumping_framebuffer.handle);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, frame_dumping_framebuffer.handle);
|
||||
DrawScreens(layout);
|
||||
// wait for the presentation to be done
|
||||
if (frame->present_fence) {
|
||||
glWaitSync(frame->present_fence, 0, GL_TIMEOUT_IGNORED);
|
||||
glDeleteSync(frame->present_fence);
|
||||
frame->present_fence = 0;
|
||||
}
|
||||
}
|
||||
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, frame_dumping_pbos[current_pbo].handle);
|
||||
glReadPixels(0, 0, layout.width, layout.height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, 0);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, frame_dumping_pbos[next_pbo].handle);
|
||||
{
|
||||
MICROPROFILE_SCOPE(OpenGL_RenderFrame);
|
||||
// Recreate the frame if the size of the window has changed
|
||||
if (layout.width != frame->width || layout.height != frame->height) {
|
||||
LOG_DEBUG(Render_OpenGL, "Reloading render frame");
|
||||
mailbox->ReloadRenderFrame(frame, layout.width, layout.height);
|
||||
}
|
||||
|
||||
GLubyte* pixels = static_cast<GLubyte*>(glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY));
|
||||
VideoDumper::VideoFrame frame_data{layout.width, layout.height, pixels};
|
||||
Core::System::GetInstance().VideoDumper().AddVideoFrame(frame_data);
|
||||
|
||||
glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
current_pbo = (current_pbo + 1) % 2;
|
||||
next_pbo = (current_pbo + 1) % 2;
|
||||
GLuint render_texture = frame->color.handle;
|
||||
state.draw.draw_framebuffer = frame->render.handle;
|
||||
state.Apply();
|
||||
DrawScreens(layout, flipped);
|
||||
// Create a fence for the frontend to wait on and swap this frame to OffTex
|
||||
frame->render_fence = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
|
||||
glFlush();
|
||||
mailbox->ReleaseRenderFrame(frame);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -886,7 +922,7 @@ void RendererOpenGL::DrawSingleScreenStereo(const ScreenInfo& screen_info_l,
|
|||
/**
|
||||
* Draws the emulated screens to the emulator window.
|
||||
*/
|
||||
void RendererOpenGL::DrawScreens(const Layout::FramebufferLayout& layout) {
|
||||
void RendererOpenGL::DrawScreens(const Layout::FramebufferLayout& layout, bool flipped) {
|
||||
if (VideoCore::g_renderer_bg_color_update_requested.exchange(false)) {
|
||||
// Update background color before drawing
|
||||
glClearColor(Settings::values.bg_red, Settings::values.bg_green, Settings::values.bg_blue,
|
||||
|
@ -913,7 +949,7 @@ void RendererOpenGL::DrawScreens(const Layout::FramebufferLayout& layout) {
|
|||
|
||||
// Set projection matrix
|
||||
std::array<GLfloat, 3 * 2> ortho_matrix =
|
||||
MakeOrthographicMatrix((float)layout.width, (float)layout.height);
|
||||
MakeOrthographicMatrix((float)layout.width, (float)layout.height, flipped);
|
||||
glUniformMatrix3x2fv(uniform_modelview_matrix, 1, GL_FALSE, ortho_matrix.data());
|
||||
|
||||
// Bind texture in Texture Unit 0
|
||||
|
@ -1052,41 +1088,11 @@ void RendererOpenGL::TryPresent(int timeout_ms) {
|
|||
void RendererOpenGL::UpdateFramerate() {}
|
||||
|
||||
void RendererOpenGL::PrepareVideoDumping() {
|
||||
prepare_video_dumping = true;
|
||||
frame_dumper.StartDumping();
|
||||
}
|
||||
|
||||
void RendererOpenGL::CleanupVideoDumping() {
|
||||
cleanup_video_dumping = true;
|
||||
}
|
||||
|
||||
void RendererOpenGL::InitVideoDumpingGLObjects() {
|
||||
const auto& layout = Core::System::GetInstance().VideoDumper().GetLayout();
|
||||
|
||||
frame_dumping_framebuffer.Create();
|
||||
glGenRenderbuffers(1, &frame_dumping_renderbuffer);
|
||||
glBindRenderbuffer(GL_RENDERBUFFER, frame_dumping_renderbuffer);
|
||||
glRenderbufferStorage(GL_RENDERBUFFER, GL_RGB8, layout.width, layout.height);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, frame_dumping_framebuffer.handle);
|
||||
glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER,
|
||||
frame_dumping_renderbuffer);
|
||||
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
|
||||
|
||||
for (auto& buffer : frame_dumping_pbos) {
|
||||
buffer.Create();
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, buffer.handle);
|
||||
glBufferData(GL_PIXEL_PACK_BUFFER, layout.width * layout.height * 4, nullptr,
|
||||
GL_STREAM_READ);
|
||||
glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
|
||||
}
|
||||
}
|
||||
|
||||
void RendererOpenGL::ReleaseVideoDumpingGLObjects() {
|
||||
frame_dumping_framebuffer.Release();
|
||||
glDeleteRenderbuffers(1, &frame_dumping_renderbuffer);
|
||||
|
||||
for (auto& buffer : frame_dumping_pbos) {
|
||||
buffer.Release();
|
||||
}
|
||||
frame_dumper.StopDumping();
|
||||
}
|
||||
|
||||
static const char* GetSource(GLenum source) {
|
||||
|
@ -1179,14 +1185,10 @@ VideoCore::ResultStatus RendererOpenGL::Init() {
|
|||
|
||||
RefreshRasterizerSetting();
|
||||
|
||||
TextureFilterManager::GetInstance().Reset();
|
||||
|
||||
return VideoCore::ResultStatus::Success;
|
||||
}
|
||||
|
||||
/// Shutdown the renderer
|
||||
void RendererOpenGL::ShutDown() {
|
||||
TextureFilterManager::GetInstance().Destroy();
|
||||
}
|
||||
void RendererOpenGL::ShutDown() {}
|
||||
|
||||
} // namespace OpenGL
|
||||
|
|
|
@ -10,6 +10,7 @@
|
|||
#include "common/math_util.h"
|
||||
#include "core/hw/gpu.h"
|
||||
#include "video_core/renderer_base.h"
|
||||
#include "video_core/renderer_opengl/frame_dumper_opengl.h"
|
||||
#include "video_core/renderer_opengl/gl_resource_manager.h"
|
||||
#include "video_core/renderer_opengl/gl_state.h"
|
||||
|
||||
|
@ -17,6 +18,20 @@ namespace Layout {
|
|||
struct FramebufferLayout;
|
||||
}
|
||||
|
||||
namespace Frontend {
|
||||
|
||||
struct Frame {
|
||||
u32 width{}; /// Width of the frame (to detect resize)
|
||||
u32 height{}; /// Height of the frame
|
||||
bool color_reloaded = false; /// Texture attachment was recreated (ie: resized)
|
||||
OpenGL::OGLRenderbuffer color{}; /// Buffer shared between the render/present FBO
|
||||
OpenGL::OGLFramebuffer render{}; /// FBO created on the render thread
|
||||
OpenGL::OGLFramebuffer present{}; /// FBO created on the present thread
|
||||
GLsync render_fence{}; /// Fence created on the render thread
|
||||
GLsync present_fence{}; /// Fence created on the presentation thread
|
||||
};
|
||||
} // namespace Frontend
|
||||
|
||||
namespace OpenGL {
|
||||
|
||||
/// Structure used for storing information about the textures for each 3DS screen
|
||||
|
@ -72,10 +87,11 @@ private:
|
|||
void ReloadShader();
|
||||
void PrepareRendertarget();
|
||||
void RenderScreenshot();
|
||||
void RenderVideoDumping();
|
||||
void RenderToMailbox(const Layout::FramebufferLayout& layout,
|
||||
std::unique_ptr<Frontend::TextureMailbox>& mailbox, bool flipped);
|
||||
void ConfigureFramebufferTexture(TextureInfo& texture,
|
||||
const GPU::Regs::FramebufferConfig& framebuffer);
|
||||
void DrawScreens(const Layout::FramebufferLayout& layout);
|
||||
void DrawScreens(const Layout::FramebufferLayout& layout, bool flipped);
|
||||
void DrawSingleScreenRotated(const ScreenInfo& screen_info, float x, float y, float w, float h);
|
||||
void DrawSingleScreen(const ScreenInfo& screen_info, float x, float y, float w, float h);
|
||||
void DrawSingleScreenStereoRotated(const ScreenInfo& screen_info_l,
|
||||
|
@ -91,9 +107,6 @@ private:
|
|||
// Fills active OpenGL texture with the given RGB color.
|
||||
void LoadColorToActiveGLTexture(u8 color_r, u8 color_g, u8 color_b, const TextureInfo& texture);
|
||||
|
||||
void InitVideoDumpingGLObjects();
|
||||
void ReleaseVideoDumpingGLObjects();
|
||||
|
||||
OpenGLState state;
|
||||
|
||||
// OpenGL object IDs
|
||||
|
@ -120,19 +133,7 @@ private:
|
|||
GLuint attrib_position;
|
||||
GLuint attrib_tex_coord;
|
||||
|
||||
// Frame dumping
|
||||
OGLFramebuffer frame_dumping_framebuffer;
|
||||
GLuint frame_dumping_renderbuffer;
|
||||
|
||||
// Whether prepare/cleanup video dumping has been requested.
|
||||
// They will be executed on next frame.
|
||||
std::atomic_bool prepare_video_dumping = false;
|
||||
std::atomic_bool cleanup_video_dumping = false;
|
||||
|
||||
// PBOs used to dump frames faster
|
||||
std::array<OGLBuffer, 2> frame_dumping_pbos;
|
||||
GLuint current_pbo = 1;
|
||||
GLuint next_pbo = 0;
|
||||
FrameDumperOpenGL frame_dumper;
|
||||
};
|
||||
|
||||
} // namespace OpenGL
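The frame-dumping members removed above relied on double-buffered pixel buffer objects; this sketch shows the general technique (it assumes a current desktop OpenGL context, uses glad here purely as an example loader, and the names are illustrative rather than FrameDumperOpenGL's real API):

// Illustrative sketch only; error handling omitted.
#include <array>
#include <cstdint>
#include <vector>
#include <glad/glad.h>

struct FrameReadback {
    std::array<GLuint, 2> pbos{};
    GLuint current = 0;

    void Init(std::uint32_t width, std::uint32_t height) {
        glGenBuffers(2, pbos.data());
        for (GLuint pbo : pbos) {
            glBindBuffer(GL_PIXEL_PACK_BUFFER, pbo);
            glBufferData(GL_PIXEL_PACK_BUFFER, width * height * 4, nullptr, GL_STREAM_READ);
        }
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
    }

    // Kick off an asynchronous read into one PBO and drain the *other* one, so the
    // CPU consumes frame N-1 while the GPU is still writing frame N. On the very
    // first call the drained buffer simply contains zeroes.
    void ReadFrame(std::uint32_t width, std::uint32_t height, std::vector<std::uint8_t>& out) {
        glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[current]);
        glReadPixels(0, 0, width, height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, nullptr);

        const GLuint previous = (current + 1) % 2;
        glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[previous]);
        const auto* pixels =
            static_cast<const std::uint8_t*>(glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY));
        out.assign(pixels, pixels + width * height * 4);
        glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
        glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
        current = previous;
    }
};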
|
||||
|
|
|
@@ -42,18 +42,17 @@

namespace OpenGL {

Anime4kUltrafast::Anime4kUltrafast(u16 scale_factor) : TextureFilterInterface(scale_factor) {
Anime4kUltrafast::Anime4kUltrafast(u16 scale_factor) : TextureFilterBase(scale_factor) {
    const OpenGLState cur_state = OpenGLState::GetCurState();
    const auto setup_temp_tex = [this, scale_factor](TempTex& texture, GLint internal_format,
                                                     GLint format) {
    const auto setup_temp_tex = [this](TempTex& texture, GLint internal_format, GLint format) {
        texture.fbo.Create();
        texture.tex.Create();
        state.draw.draw_framebuffer = texture.fbo.handle;
        state.Apply();
        glActiveTexture(GL_TEXTURE0);
        glBindTexture(GL_TEXTURE_RECTANGLE, texture.tex.handle);
        glTexImage2D(GL_TEXTURE_RECTANGLE, 0, internal_format, 1024 * scale_factor,
                     1024 * scale_factor, 0, format, GL_HALF_FLOAT, nullptr);
        glTexImage2D(GL_TEXTURE_RECTANGLE, 0, internal_format, 1024 * internal_scale_factor,
                     1024 * internal_scale_factor, 0, format, GL_HALF_FLOAT, nullptr);
        glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_RECTANGLE,
                               texture.tex.handle, 0);
    };

@@ -61,7 +60,6 @@ Anime4kUltrafast::Anime4kUltrafast(u16 scale_factor) : TextureFilterInterface(sc
    setup_temp_tex(XY, GL_RG16F, GL_RG);

    vao.Create();
    out_fbo.Create();

    for (std::size_t idx = 0; idx < samplers.size(); ++idx) {
        samplers[idx].Create();

@@ -86,30 +84,26 @@ Anime4kUltrafast::Anime4kUltrafast(u16 scale_factor) : TextureFilterInterface(sc
    state.draw.shader_program = refine_program.handle;
    state.Apply();
    glUniform1i(glGetUniformLocation(refine_program.handle, "LUMAD"), 1);
    glUniform1f(glGetUniformLocation(refine_program.handle, "final_scale"),
                static_cast<GLfloat>(internal_scale_factor) / scale_factor);

    cur_state.Apply();
}

void Anime4kUltrafast::scale(CachedSurface& surface, const Common::Rectangle<u32>& rect,
                             std::size_t buffer_offset) {
void Anime4kUltrafast::Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect,
                              GLuint dst_tex, const Common::Rectangle<u32>& dst_rect,
                              GLuint read_fb_handle, GLuint draw_fb_handle) {
    const OpenGLState cur_state = OpenGLState::GetCurState();

    OGLTexture src_tex;
    src_tex.Create();

    state.viewport = RectToViewport(rect);

    state.texture_units[0].texture_2d = src_tex.handle;
    state.viewport = {static_cast<GLint>(src_rect.left * internal_scale_factor),
                      static_cast<GLint>(src_rect.bottom * internal_scale_factor),
                      static_cast<GLsizei>(src_rect.GetWidth() * internal_scale_factor),
                      static_cast<GLsizei>(src_rect.GetHeight() * internal_scale_factor)};
    state.texture_units[0].texture_2d = src_tex;
    state.draw.draw_framebuffer = XY.fbo.handle;
    state.draw.shader_program = gradient_x_program.handle;
    state.Apply();

    const FormatTuple tuple = GetFormatTuple(surface.pixel_format);
    glPixelStorei(GL_UNPACK_ROW_LENGTH, static_cast<GLint>(surface.stride));
    glActiveTexture(GL_TEXTURE0);
    glTexImage2D(GL_TEXTURE_2D, 0, tuple.internal_format, rect.GetWidth(), rect.GetHeight(), 0,
                 tuple.format, tuple.type, &surface.gl_buffer[buffer_offset]);

    glActiveTexture(GL_TEXTURE1);
    glBindTexture(GL_TEXTURE_RECTANGLE, LUMAD.tex.handle);
    glActiveTexture(GL_TEXTURE2);

@@ -124,14 +118,17 @@ void Anime4kUltrafast::scale(CachedSurface& surface, const Common::Rectangle<u32
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    // refine pass
    state.draw.draw_framebuffer = out_fbo.handle;
    state.viewport = {static_cast<GLint>(dst_rect.left), static_cast<GLint>(dst_rect.bottom),
                      static_cast<GLsizei>(dst_rect.GetWidth()),
                      static_cast<GLsizei>(dst_rect.GetHeight())};
    state.draw.draw_framebuffer = draw_fb_handle;
    state.draw.shader_program = refine_program.handle;
    state.Apply();
    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
                           cur_state.texture_units[0].texture_2d, 0);

    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst_tex, 0);
    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0, 0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);

    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
    cur_state.Apply();
}
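A quick worked example of the final_scale uniform set in the constructor above (the concrete values are illustrative, not from the commit):

// Anime4K renders its gradient/luma passes at a fixed internal 2x scale, while the refine
// pass writes directly into dst_rect at the requested scale factor.
constexpr u8 internal_scale_factor = 2;  // fixed by the filter
constexpr u16 scale_factor = 4;          // example user-requested scale
const GLfloat final_scale =
    static_cast<GLfloat>(internal_scale_factor) / scale_factor; // = 0.5f
// refine.frag multiplies gl_FragCoord.xy (in 4x output space) by final_scale so the LUMAD
// lookup lands back in the 2x intermediate texture's coordinate space.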
@@ -6,29 +6,25 @@

#include "video_core/renderer_opengl/gl_resource_manager.h"
#include "video_core/renderer_opengl/gl_state.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_interface.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_base.h"

namespace OpenGL {

class Anime4kUltrafast : public TextureFilterInterface {
class Anime4kUltrafast : public TextureFilterBase {
public:
    static TextureFilterInfo GetInfo() {
        TextureFilterInfo info;
        info.name = "Anime4K Ultrafast";
        info.clamp_scale = {2, 2};
        info.constructor = std::make_unique<Anime4kUltrafast, u16>;
        return info;
    }
    static constexpr std::string_view NAME = "Anime4K Ultrafast";

    Anime4kUltrafast(u16 scale_factor);
    void scale(CachedSurface& surface, const Common::Rectangle<u32>& rect,
               std::size_t buffer_offset) override;
    explicit Anime4kUltrafast(u16 scale_factor);
    void Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                const Common::Rectangle<u32>& dst_rect, GLuint read_fb_handle,
                GLuint draw_fb_handle) override;

private:
    static constexpr u8 internal_scale_factor = 2;

    OpenGLState state{};

    OGLVertexArray vao;
    OGLFramebuffer out_fbo;

    struct TempTex {
        OGLTexture tex;
@@ -8,6 +8,8 @@ uniform sampler2D HOOKED;
uniform sampler2DRect LUMAD;
uniform sampler2DRect LUMAG;

uniform float final_scale;

const float LINE_DETECT_THRESHOLD = 0.4;
const float STRENGTH = 0.6;

@@ -24,7 +26,7 @@ vec4 getAverage(vec4 cc, vec4 a, vec4 b, vec4 c) {

#define GetRGBAL(offset) \
    RGBAL(textureOffset(HOOKED, tex_coord, offset), \
          texture(LUMAD, clamp(gl_FragCoord.xy + offset, vec2(0.0), input_max)).x)
          texture(LUMAD, clamp((gl_FragCoord.xy + offset) * final_scale, vec2(0.0), input_max)).x)

float min3v(float a, float b, float c) {
    return min(min(a, b), c);
@@ -10,45 +10,36 @@

namespace OpenGL {

Bicubic::Bicubic(u16 scale_factor) : TextureFilterInterface(scale_factor) {
Bicubic::Bicubic(u16 scale_factor) : TextureFilterBase(scale_factor) {
    program.Create(tex_coord_vert.data(), bicubic_frag.data());
    vao.Create();
    draw_fbo.Create();
    src_sampler.Create();

    state.draw.shader_program = program.handle;
    state.draw.vertex_array = vao.handle;
    state.draw.shader_program = program.handle;
    state.draw.draw_framebuffer = draw_fbo.handle;
    state.texture_units[0].sampler = src_sampler.handle;

    glSamplerParameteri(src_sampler.handle, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glSamplerParameteri(src_sampler.handle, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glSamplerParameteri(src_sampler.handle, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glSamplerParameteri(src_sampler.handle, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
} // namespace OpenGL

void Bicubic::scale(CachedSurface& surface, const Common::Rectangle<u32>& rect,
                    std::size_t buffer_offset) {
void Bicubic::Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                     const Common::Rectangle<u32>& dst_rect, GLuint read_fb_handle,
                     GLuint draw_fb_handle) {
    const OpenGLState cur_state = OpenGLState::GetCurState();

    OGLTexture src_tex;
    src_tex.Create();
    state.texture_units[0].texture_2d = src_tex.handle;

    state.viewport = RectToViewport(rect);
    state.texture_units[0].texture_2d = src_tex;
    state.draw.draw_framebuffer = draw_fb_handle;
    state.viewport = {static_cast<GLint>(dst_rect.left), static_cast<GLint>(dst_rect.bottom),
                      static_cast<GLsizei>(dst_rect.GetWidth()),
                      static_cast<GLsizei>(dst_rect.GetHeight())};
    state.Apply();

    const FormatTuple tuple = GetFormatTuple(surface.pixel_format);
    glPixelStorei(GL_UNPACK_ROW_LENGTH, static_cast<GLint>(surface.stride));
    glActiveTexture(GL_TEXTURE0);
    glTexImage2D(GL_TEXTURE_2D, 0, tuple.internal_format, rect.GetWidth(), rect.GetHeight(), 0,
                 tuple.format, tuple.type, &surface.gl_buffer[buffer_offset]);

    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
                           cur_state.texture_units[0].texture_2d, 0);
    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst_tex, 0);
    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0, 0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);

    cur_state.Apply();
}
@@ -6,27 +6,24 @@

#include "video_core/renderer_opengl/gl_resource_manager.h"
#include "video_core/renderer_opengl/gl_state.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_interface.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_base.h"

namespace OpenGL {
class Bicubic : public TextureFilterInterface {
public:
    static TextureFilterInfo GetInfo() {
        TextureFilterInfo info;
        info.name = "Bicubic";
        info.constructor = std::make_unique<Bicubic, u16>;
        return info;
    }

    Bicubic(u16 scale_factor);
    void scale(CachedSurface& surface, const Common::Rectangle<u32>& rect,
               std::size_t buffer_offset) override;
class Bicubic : public TextureFilterBase {
public:
    static constexpr std::string_view NAME = "Bicubic";

    explicit Bicubic(u16 scale_factor);
    void Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                const Common::Rectangle<u32>& dst_rect, GLuint read_fb_handle,
                GLuint draw_fb_handle) override;

private:
    OpenGLState state{};
    OGLProgram program{};
    OGLVertexArray vao{};
    OGLFramebuffer draw_fbo{};
    OGLSampler src_sampler{};
};

} // namespace OpenGL
@@ -0,0 +1,26 @@
// Copyright 2020 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include "common/common_types.h"
#include "common/math_util.h"
#include "video_core/renderer_opengl/gl_surface_params.h"

namespace OpenGL {

class TextureFilterBase {
    friend class TextureFilterer;
    virtual void Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                        const Common::Rectangle<u32>& dst_rect, GLuint read_fb_handle,
                        GLuint draw_fb_handle) = 0;

public:
    explicit TextureFilterBase(u16 scale_factor) : scale_factor{scale_factor} {};
    virtual ~TextureFilterBase() = default;

    const u16 scale_factor{};
};

} // namespace OpenGL
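To illustrate the contract of this base class, a hypothetical filter that simply blits with nearest-neighbour sampling could look like the sketch below (the NearestSketch name and the blit-based body are assumptions; the real filters in this commit run shader passes instead):

#include <string_view>
#include <glad/glad.h>
#include "common/common_types.h"
#include "common/math_util.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_base.h"

namespace OpenGL {

class NearestSketch : public TextureFilterBase {
public:
    static constexpr std::string_view NAME = "Nearest (sketch)";

    explicit NearestSketch(u16 scale_factor) : TextureFilterBase(scale_factor) {}

    void Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                const Common::Rectangle<u32>& dst_rect, GLuint read_fb_handle,
                GLuint draw_fb_handle) override {
        // Attach the caller-provided textures to the scratch framebuffers...
        glBindFramebuffer(GL_READ_FRAMEBUFFER, read_fb_handle);
        glFramebufferTexture2D(GL_READ_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, src_tex, 0);
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, draw_fb_handle);
        glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst_tex, 0);
        // ...then scale src_rect into dst_rect with a plain blit instead of a shader pass.
        glBlitFramebuffer(src_rect.left, src_rect.bottom, src_rect.right, src_rect.top,
                          dst_rect.left, dst_rect.bottom, dst_rect.right, dst_rect.top,
                          GL_COLOR_BUFFER_BIT, GL_NEAREST);
    }
};

} // namespace OpenGL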
@@ -1,38 +0,0 @@
// Copyright 2019 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <functional>
#include <string_view>
#include "common/common_types.h"
#include "common/math_util.h"

namespace OpenGL {

struct CachedSurface;
struct Viewport;

class TextureFilterInterface {
public:
    const u16 scale_factor{};
    TextureFilterInterface(u16 scale_factor) : scale_factor{scale_factor} {}
    virtual void scale(CachedSurface& surface, const Common::Rectangle<u32>& rect,
                       std::size_t buffer_offset) = 0;
    virtual ~TextureFilterInterface() = default;

protected:
    Viewport RectToViewport(const Common::Rectangle<u32>& rect);
};

// every texture filter should have a static GetInfo function
struct TextureFilterInfo {
    std::string_view name;
    struct {
        u16 min, max;
    } clamp_scale{1, 10};
    std::function<std::unique_ptr<TextureFilterInterface>(u16 scale_factor)> constructor;
};

} // namespace OpenGL
@@ -1,89 +0,0 @@
// Copyright 2019 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include "common/logging/log.h"
#include "video_core/renderer_opengl/gl_state.h"
#include "video_core/renderer_opengl/texture_filters/anime4k/anime4k_ultrafast.h"
#include "video_core/renderer_opengl/texture_filters/bicubic/bicubic.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_manager.h"
#include "video_core/renderer_opengl/texture_filters/xbrz/xbrz_freescale.h"

namespace OpenGL {

Viewport TextureFilterInterface::RectToViewport(const Common::Rectangle<u32>& rect) {
    return {
        static_cast<GLint>(rect.left) * scale_factor,
        static_cast<GLint>(rect.top) * scale_factor,
        static_cast<GLsizei>(rect.GetWidth()) * scale_factor,
        static_cast<GLsizei>(rect.GetHeight()) * scale_factor,
    };
}

namespace {
template <typename T>
std::pair<std::string_view, TextureFilterInfo> FilterMapPair() {
    return {T::GetInfo().name, T::GetInfo()};
};

struct NoFilter {
    static TextureFilterInfo GetInfo() {
        TextureFilterInfo info;
        info.name = TextureFilterManager::NONE;
        info.clamp_scale = {1, 1};
        info.constructor = [](u16) { return nullptr; };
        return info;
    }
};
} // namespace

const std::map<std::string_view, TextureFilterInfo, TextureFilterManager::FilterNameComp>&
TextureFilterManager::TextureFilterMap() {
    static const std::map<std::string_view, TextureFilterInfo, FilterNameComp> filter_map{
        FilterMapPair<NoFilter>(),
        FilterMapPair<Anime4kUltrafast>(),
        FilterMapPair<Bicubic>(),
        FilterMapPair<XbrzFreescale>(),
    };
    return filter_map;
}

void TextureFilterManager::SetTextureFilter(std::string filter_name, u16 new_scale_factor) {
    if (name == filter_name && scale_factor == new_scale_factor)
        return;
    std::lock_guard<std::mutex> lock{mutex};
    name = std::move(filter_name);
    scale_factor = new_scale_factor;
    updated = true;
}

TextureFilterInterface* TextureFilterManager::GetTextureFilter() const {
    return filter.get();
}

bool TextureFilterManager::IsUpdated() const {
    return updated;
}

void TextureFilterManager::Reset() {
    std::lock_guard<std::mutex> lock{mutex};
    updated = false;
    auto iter = TextureFilterMap().find(name);
    if (iter == TextureFilterMap().end()) {
        LOG_ERROR(Render_OpenGL, "Invalid texture filter: {}", name);
        filter = nullptr;
        return;
    }

    const auto& filter_info = iter->second;

    u16 clamped_scale =
        std::clamp(scale_factor, filter_info.clamp_scale.min, filter_info.clamp_scale.max);
    if (clamped_scale != scale_factor)
        LOG_ERROR(Render_OpenGL, "Invalid scale factor {} for texture filter {}, clamped to {}",
                  scale_factor, filter_info.name, clamped_scale);

    filter = filter_info.constructor(clamped_scale);
}

} // namespace OpenGL
@@ -1,55 +0,0 @@
// Copyright 2019 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <atomic>
#include <map>
#include <memory>
#include <mutex>
#include <string_view>
#include <tuple>
#include "video_core/renderer_opengl/texture_filters/texture_filter_interface.h"

namespace OpenGL {

class TextureFilterManager {
public:
    static constexpr std::string_view NONE = "none";
    struct FilterNameComp {
        bool operator()(const std::string_view a, const std::string_view b) const {
            bool na = a == NONE;
            bool nb = b == NONE;
            if (na | nb)
                return na & !nb;
            return a < b;
        }
    };
    // function ensures map is initialized before use
    static const std::map<std::string_view, TextureFilterInfo, FilterNameComp>& TextureFilterMap();

    static TextureFilterManager& GetInstance() {
        static TextureFilterManager singleton;
        return singleton;
    }

    void Destroy() {
        filter.reset();
    }
    void SetTextureFilter(std::string filter_name, u16 new_scale_factor);
    TextureFilterInterface* GetTextureFilter() const;
    // returns true if filter has been changed and a cache reset is needed
    bool IsUpdated() const;
    void Reset();

private:
    std::atomic<bool> updated{false};
    std::mutex mutex;
    std::string name{"none"};
    u16 scale_factor{1};

    std::unique_ptr<TextureFilterInterface> filter;
};

} // namespace OpenGL
@@ -0,0 +1,86 @@
// Copyright 2020 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <algorithm>
#include <functional>
#include <unordered_map>
#include "common/logging/log.h"
#include "video_core/renderer_opengl/texture_filters/anime4k/anime4k_ultrafast.h"
#include "video_core/renderer_opengl/texture_filters/bicubic/bicubic.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_base.h"
#include "video_core/renderer_opengl/texture_filters/texture_filterer.h"
#include "video_core/renderer_opengl/texture_filters/xbrz/xbrz_freescale.h"

namespace OpenGL {

namespace {

using TextureFilterContructor = std::function<std::unique_ptr<TextureFilterBase>(u16)>;

template <typename T>
std::pair<std::string_view, TextureFilterContructor> FilterMapPair() {
    return {T::NAME, std::make_unique<T, u16>};
};

static const std::unordered_map<std::string_view, TextureFilterContructor> filter_map{
    {TextureFilterer::NONE, [](u16) { return nullptr; }},
    FilterMapPair<Anime4kUltrafast>(),
    FilterMapPair<Bicubic>(),
    FilterMapPair<XbrzFreescale>(),
};

} // namespace

TextureFilterer::TextureFilterer(std::string_view filter_name, u16 scale_factor) {
    Reset(filter_name, scale_factor);
}

bool TextureFilterer::Reset(std::string_view new_filter_name, u16 new_scale_factor) {
    if (filter_name == new_filter_name && (IsNull() || filter->scale_factor == new_scale_factor))
        return false;

    auto iter = filter_map.find(new_filter_name);
    if (iter == filter_map.end()) {
        LOG_ERROR(Render_OpenGL, "Invalid texture filter: {}", new_filter_name);
        filter = nullptr;
        return true;
    }

    filter_name = iter->first;
    filter = iter->second(new_scale_factor);
    return true;
}

bool TextureFilterer::IsNull() const {
    return !filter;
}

bool TextureFilterer::Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                             const Common::Rectangle<u32>& dst_rect,
                             SurfaceParams::SurfaceType type, GLuint read_fb_handle,
                             GLuint draw_fb_handle) {
    // depth / stencil texture filtering is not supported for now
    if (IsNull() ||
        (type != SurfaceParams::SurfaceType::Color && type != SurfaceParams::SurfaceType::Texture))
        return false;
    filter->Filter(src_tex, src_rect, dst_tex, dst_rect, read_fb_handle, draw_fb_handle);
    return true;
}

std::vector<std::string_view> TextureFilterer::GetFilterNames() {
    std::vector<std::string_view> ret;
    std::transform(filter_map.begin(), filter_map.end(), std::back_inserter(ret),
                   [](auto pair) { return pair.first; });
    std::sort(ret.begin(), ret.end(), [](std::string_view lhs, std::string_view rhs) {
        // sort lexicographically with none at the top
        bool lhs_is_none{lhs == NONE};
        bool rhs_is_none{rhs == NONE};
        if (lhs_is_none || rhs_is_none)
            return lhs_is_none && !rhs_is_none;
        return lhs < rhs;
    });
    return ret;
}

} // namespace OpenGL
@@ -0,0 +1,39 @@
// Copyright 2020 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <memory>
#include <string_view>
#include <vector>
#include <glad/glad.h>
#include "common/common_types.h"
#include "common/math_util.h"
#include "video_core/renderer_opengl/gl_surface_params.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_base.h"

namespace OpenGL {

class TextureFilterer {
public:
    static constexpr std::string_view NONE = "none";

    explicit TextureFilterer(std::string_view filter_name, u16 scale_factor);
    // returns true if the filter actually changed
    bool Reset(std::string_view new_filter_name, u16 new_scale_factor);
    // returns true if there is no active filter
    bool IsNull() const;
    // returns true if the texture was able to be filtered
    bool Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                const Common::Rectangle<u32>& dst_rect, SurfaceParams::SurfaceType type,
                GLuint read_fb_handle, GLuint draw_fb_handle);

    static std::vector<std::string_view> GetFilterNames();

private:
    std::string_view filter_name = NONE;
    std::unique_ptr<TextureFilterBase> filter;
};

} // namespace OpenGL
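A hedged usage sketch of this interface, as a caller such as the rasterizer cache might drive it (the handles, the hard-coded filter choice, and the fallback comments are assumptions, not code from this commit):

#include <glad/glad.h>
#include "common/math_util.h"
#include "video_core/renderer_opengl/gl_surface_params.h"
#include "video_core/renderer_opengl/texture_filters/texture_filterer.h"

void FilterSurfaceSketch(GLuint src_tex, GLuint dst_tex, const Common::Rectangle<u32>& src_rect,
                         const Common::Rectangle<u32>& dst_rect, GLuint read_fb, GLuint draw_fb) {
    static OpenGL::TextureFilterer filterer("Bicubic", /*scale_factor=*/2);

    // Reset() returns true only when the filter or scale actually changed; that is the caller's
    // cue to invalidate any surfaces already filtered with the old settings.
    if (filterer.Reset("Bicubic", 2)) {
        // flush cached filtered textures here
    }

    // Filter() returns false for the "none" filter and for depth/stencil surfaces,
    // in which case the caller falls back to its normal unfiltered blit/upload path.
    if (!filterer.Filter(src_tex, src_rect, dst_tex, dst_rect,
                         OpenGL::SurfaceParams::SurfaceType::Color, read_fb, draw_fb)) {
        // unfiltered path
    }
}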
@@ -48,12 +48,11 @@

namespace OpenGL {

XbrzFreescale::XbrzFreescale(u16 scale_factor) : TextureFilterInterface(scale_factor) {
XbrzFreescale::XbrzFreescale(u16 scale_factor) : TextureFilterBase(scale_factor) {
    const OpenGLState cur_state = OpenGLState::GetCurState();

    program.Create(xbrz_freescale_vert.data(), xbrz_freescale_frag.data());
    vao.Create();
    draw_fbo.Create();
    src_sampler.Create();

    state.draw.shader_program = program.handle;

@@ -68,31 +67,24 @@ XbrzFreescale::XbrzFreescale(u16 scale_factor) : TextureFilterInterface(scale_fa
    cur_state.Apply();
    state.draw.vertex_array = vao.handle;
    state.draw.shader_program = program.handle;
    state.draw.draw_framebuffer = draw_fbo.handle;
    state.texture_units[0].sampler = src_sampler.handle;
}

void XbrzFreescale::scale(CachedSurface& surface, const Common::Rectangle<u32>& rect,
                          std::size_t buffer_offset) {
void XbrzFreescale::Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                           const Common::Rectangle<u32>& dst_rect, GLuint read_fb_handle,
                           GLuint draw_fb_handle) {
    const OpenGLState cur_state = OpenGLState::GetCurState();

    OGLTexture src_tex;
    src_tex.Create();
    state.texture_units[0].texture_2d = src_tex.handle;

    state.viewport = RectToViewport(rect);
    state.texture_units[0].texture_2d = src_tex;
    state.draw.draw_framebuffer = draw_fb_handle;
    state.viewport = {static_cast<GLint>(dst_rect.left), static_cast<GLint>(dst_rect.bottom),
                      static_cast<GLsizei>(dst_rect.GetWidth()),
                      static_cast<GLsizei>(dst_rect.GetHeight())};
    state.Apply();

    const FormatTuple tuple = GetFormatTuple(surface.pixel_format);
    glPixelStorei(GL_UNPACK_ROW_LENGTH, static_cast<GLint>(surface.stride));
    glActiveTexture(GL_TEXTURE0);
    glTexImage2D(GL_TEXTURE_2D, 0, tuple.internal_format, rect.GetWidth(), rect.GetHeight(), 0,
                 tuple.format, tuple.type, &surface.gl_buffer[buffer_offset]);

    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D,
                           cur_state.texture_units[0].texture_2d, 0);
    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, dst_tex, 0);
    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0, 0);
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);

    cur_state.Apply();
}
@@ -6,28 +6,23 @@

#include "video_core/renderer_opengl/gl_resource_manager.h"
#include "video_core/renderer_opengl/gl_state.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_interface.h"
#include "video_core/renderer_opengl/texture_filters/texture_filter_base.h"

namespace OpenGL {

class XbrzFreescale : public TextureFilterInterface {
class XbrzFreescale : public TextureFilterBase {
public:
    static TextureFilterInfo GetInfo() {
        TextureFilterInfo info;
        info.name = "xBRZ freescale";
        info.constructor = std::make_unique<XbrzFreescale, u16>;
        return info;
    }
    static constexpr std::string_view NAME = "xBRZ freescale";

    XbrzFreescale(u16 scale_factor);
    void scale(CachedSurface& surface, const Common::Rectangle<u32>& rect,
               std::size_t buffer_offset) override;
    explicit XbrzFreescale(u16 scale_factor);
    void Filter(GLuint src_tex, const Common::Rectangle<u32>& src_rect, GLuint dst_tex,
                const Common::Rectangle<u32>& dst_rect, GLuint read_fb_handle,
                GLuint draw_fb_handle) override;

private:
    OpenGLState state{};
    OGLProgram program{};
    OGLVertexArray vao{};
    OGLFramebuffer draw_fbo{};
    OGLSampler src_sampler{};
};
} // namespace OpenGL
@@ -28,6 +28,7 @@ std::atomic<bool> g_use_disk_shader_cache;
std::atomic<bool> g_renderer_bg_color_update_requested;
std::atomic<bool> g_renderer_sampler_update_requested;
std::atomic<bool> g_renderer_shader_update_requested;
std::atomic<bool> g_texture_filter_update_requested;
// Screenshot
std::atomic<bool> g_renderer_screenshot_requested;
void* g_screenshot_bits;
@@ -36,6 +36,7 @@ extern std::atomic<bool> g_use_disk_shader_cache;
extern std::atomic<bool> g_renderer_bg_color_update_requested;
extern std::atomic<bool> g_renderer_sampler_update_requested;
extern std::atomic<bool> g_renderer_shader_update_requested;
extern std::atomic<bool> g_texture_filter_update_requested;
// Screenshot
extern std::atomic<bool> g_renderer_screenshot_requested;
extern void* g_screenshot_bits;