yuzu-emu/yuzu-mainline

Merge pull request #1246 from degasus/instanced_rendering

gl_rasterizer: Use baseInstance instead of moving the buffer points.
bunnei 2018-09-08 04:49:24 -04:00 committed by GitHub
commit 9cd79c25ed
3 changed files with 29 additions and 21 deletions
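
In effect, the old path emulated instanced vertex fetch by hand: it forced a binding divisor of 1 and advanced each vertex buffer's start address by stride * (current_instance / divisor) before every draw, so every instance re-uploaded and re-bound its buffers. The new path leaves the buffers where they are, passes the real divisor to glVertexBindingDivisor, and hands the instance index to the driver through the ARB_base_instance draw calls. Below is a minimal standalone sketch of that pattern; the function and variable names (DrawInstanceSketch, binding_index, current_instance, and so on) are illustrative, not yuzu's own.

    // Sketch: draw one instance using baseInstance instead of shifting buffer pointers.
    // Assumes a GL 4.2+ context (or ARB_base_instance) loaded through glad.
    #include <glad/glad.h>

    void DrawInstanceSketch(GLuint vao, GLuint vbo, GLintptr offset, GLsizei stride,
                            GLuint binding_index, GLuint divisor, GLint first, GLsizei count,
                            GLuint current_instance) {
        glBindVertexArray(vao);

        // Bind the vertex buffer at its real offset; no per-instance pointer arithmetic.
        glBindVertexBuffer(binding_index, vbo, offset, stride);

        // Pass the hardware divisor straight through instead of clamping it to 1.
        glVertexBindingDivisor(binding_index, divisor);

        if (current_instance > 0) {
            // Draw a single instance, telling GL its index so that instanced attributes
            // (those with a non-zero binding divisor) fetch from the correct slice.
            glDrawArraysInstancedBaseInstance(GL_TRIANGLES, first, count, 1, current_instance);
        } else {
            glDrawArrays(GL_TRIANGLES, first, count);
        }
    }

Drawing one instance at a time with the base instance set, rather than issuing a single instanced draw, mirrors what the rasterizer does in the diff below, presumably because other state may change between per-instance draws. Note that baseInstance offsets divisor-driven attribute fetches but does not change gl_InstanceID in shaders.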


@@ -151,11 +151,6 @@ void RasterizerOpenGL::SetupVertexArrays() {
         Tegra::GPUVAddr start = vertex_array.StartAddress();
         const Tegra::GPUVAddr end = regs.vertex_array_limit[index].LimitAddress();
 
-        if (regs.instanced_arrays.IsInstancingEnabled(index) && vertex_array.divisor != 0) {
-            start += static_cast<Tegra::GPUVAddr>(vertex_array.stride) *
-                     (gpu.state.current_instance / vertex_array.divisor);
-        }
-
         ASSERT(end > start);
         const u64 size = end - start + 1;
         const GLintptr vertex_buffer_offset = buffer_cache.UploadMemory(start, size);
@@ -165,10 +160,8 @@ void RasterizerOpenGL::SetupVertexArrays() {
                                    vertex_array.stride);
 
         if (regs.instanced_arrays.IsInstancingEnabled(index) && vertex_array.divisor != 0) {
-            // Tell OpenGL that this is an instanced vertex buffer to prevent accessing different
-            // indexes on each vertex. We do the instance indexing manually by incrementing the
-            // start address of the vertex buffer.
-            glVertexBindingDivisor(index, 1);
+            // Enable vertex buffer instancing with the specified divisor.
+            glVertexBindingDivisor(index, vertex_array.divisor);
         } else {
             // Disable the vertex buffer instancing.
             glVertexBindingDivisor(index, 0);
@@ -432,7 +425,8 @@ void RasterizerOpenGL::DrawArrays() {
         return;
 
     MICROPROFILE_SCOPE(OpenGL_Drawing);
-    const auto& regs = Core::System::GetInstance().GPU().Maxwell3D().regs;
+    const auto& gpu = Core::System::GetInstance().GPU().Maxwell3D();
+    const auto& regs = gpu.regs;
 
     ScopeAcquireGLContext acquire_context{emu_window};
@@ -497,12 +491,27 @@ void RasterizerOpenGL::DrawArrays() {
         index_buffer_offset += static_cast<GLintptr>(regs.index_array.first) *
                                static_cast<GLintptr>(regs.index_array.FormatSizeInBytes());
 
-        glDrawElementsBaseVertex(primitive_mode, regs.index_array.count,
-                                 MaxwellToGL::IndexFormat(regs.index_array.format),
-                                 reinterpret_cast<const void*>(index_buffer_offset), base_vertex);
+        if (gpu.state.current_instance > 0) {
+            glDrawElementsInstancedBaseVertexBaseInstance(
+                primitive_mode, regs.index_array.count,
+                MaxwellToGL::IndexFormat(regs.index_array.format),
+                reinterpret_cast<const void*>(index_buffer_offset), 1, base_vertex,
+                gpu.state.current_instance);
+        } else {
+            glDrawElementsBaseVertex(primitive_mode, regs.index_array.count,
+                                     MaxwellToGL::IndexFormat(regs.index_array.format),
+                                     reinterpret_cast<const void*>(index_buffer_offset),
+                                     base_vertex);
+        }
     } else {
-        glDrawArrays(primitive_mode, regs.vertex_buffer.first, regs.vertex_buffer.count);
+        if (gpu.state.current_instance > 0) {
+            glDrawArraysInstancedBaseInstance(primitive_mode, regs.vertex_buffer.first,
+                                              regs.vertex_buffer.count, 1,
+                                              gpu.state.current_instance);
+        } else {
+            glDrawArrays(primitive_mode, regs.vertex_buffer.first, regs.vertex_buffer.count);
+        }
     }
 
     // Disable scissor test
     state.scissor.enabled = false;
@@ -518,13 +527,9 @@ void RasterizerOpenGL::DrawArrays() {
 
 void RasterizerOpenGL::NotifyMaxwellRegisterChanged(u32 method) {}
 
-void RasterizerOpenGL::FlushAll() {
-    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
-}
+void RasterizerOpenGL::FlushAll() {}
 
-void RasterizerOpenGL::FlushRegion(VAddr addr, u64 size) {
-    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
-}
+void RasterizerOpenGL::FlushRegion(VAddr addr, u64 size) {}
 
 void RasterizerOpenGL::InvalidateRegion(VAddr addr, u64 size) {
     MICROPROFILE_SCOPE(OpenGL_CacheManagement);
@@ -534,7 +539,6 @@ void RasterizerOpenGL::InvalidateRegion(VAddr addr, u64 size) {
 }
 
 void RasterizerOpenGL::FlushAndInvalidateRegion(VAddr addr, u64 size) {
-    MICROPROFILE_SCOPE(OpenGL_CacheManagement);
     InvalidateRegion(addr, size);
 }


@@ -444,6 +444,8 @@ QStringList GMainWindow::GetUnsupportedGLExtensions() {
         unsupported_ext.append("ARB_vertex_type_10f_11f_11f_rev");
     if (!GLAD_GL_ARB_texture_mirror_clamp_to_edge)
         unsupported_ext.append("ARB_texture_mirror_clamp_to_edge");
+    if (!GLAD_GL_ARB_base_instance)
+        unsupported_ext.append("ARB_base_instance");
 
     // Extensions required to support some texture formats.
     if (!GLAD_GL_EXT_texture_compression_s3tc)


@@ -91,6 +91,8 @@ bool EmuWindow_SDL2::SupportsRequiredGLExtensions() {
         unsupported_ext.push_back("ARB_vertex_type_10f_11f_11f_rev");
     if (!GLAD_GL_ARB_texture_mirror_clamp_to_edge)
         unsupported_ext.push_back("ARB_texture_mirror_clamp_to_edge");
+    if (!GLAD_GL_ARB_base_instance)
+        unsupported_ext.push_back("ARB_base_instance");
 
     // Extensions required to support some texture formats.
     if (!GLAD_GL_EXT_texture_compression_s3tc)