renderer_vulkan: Optimize descriptor binding (#7142)

For each draw, Citra currently rebinds every descriptor-set slot, including sets that are already bound. Instead, it should bind only the descriptor sets that have changed or whose buffer offsets have changed. This also allows the `vkCmdBindDescriptorSets` call to be skipped entirely when nothing has changed between draw calls.
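The heart of the change is the dirty-range compaction in the diff below: the dirty descriptor-set slots are collapsed into one contiguous range so a single bind call covers them. Here is a minimal, self-contained sketch of that bit trick with illustrative names (not Citra's code; in the diff it operates on `set_dirty`, a bitset over `NUM_RASTERIZER_SETS` slots):

```cpp
#include <bit>
#include <bitset>
#include <cstdint>
#include <cstdio>

int main() {
    // Pretend only slots 1 and 2 changed since the last draw.
    std::bitset<4> set_dirty;
    set_dirty[1] = true;
    set_dirty[2] = true;

    if (set_dirty.any() && !set_dirty.all()) {
        const std::uint64_t mask = set_dirty.to_ulong();
        // First dirty slot = number of trailing zero bits.
        const std::uint32_t first = static_cast<std::uint32_t>(std::countr_zero(mask));
        // One past the last dirty slot = 64 minus the leading zero bits.
        const std::uint32_t last = 64u - static_cast<std::uint32_t>(std::countl_zero(mask));
        // Prints "rebind 2 set(s) starting at slot 1".
        std::printf("rebind %u set(s) starting at slot %u\n", last - first, first);
    }
}
```

Because `vkCmdBindDescriptorSets` binds a contiguous range starting at `firstSet`, a clean slot sitting between two dirty ones gets rebound anyway; the compaction can only trim clean slots off both ends of the range.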
Author: Wunk (committed by GitHub)
Date: 2023-11-12 14:17:38 -08:00
parent 5118798c30
commit 312068eebf
1 changed file with 53 additions and 11 deletions


```diff
@@ -205,19 +205,55 @@ bool PipelineCache::BindPipeline(const PipelineInfo& info, bool wait_built) {
         return false;
     }
 
-    for (u32 i = 0; i < NUM_RASTERIZER_SETS; i++) {
-        if (!set_dirty[i]) {
-            continue;
-        }
-        bound_descriptor_sets[i] = descriptor_set_providers[i].Acquire(update_data[i]);
-        set_dirty[i] = false;
-    }
+    u32 new_descriptors_start = 0;
+    std::span<vk::DescriptorSet> new_descriptors_span{};
+    std::span<u32> new_offsets_span{};
+
+    // Ensure all the descriptor sets are set at least once at the beginning.
+    if (scheduler.IsStateDirty(StateFlags::DescriptorSets)) {
+        set_dirty.set();
+    }
+
+    if (set_dirty.any()) {
+        for (u32 i = 0; i < NUM_RASTERIZER_SETS; i++) {
+            if (!set_dirty.test(i)) {
+                continue;
+            }
+            bound_descriptor_sets[i] = descriptor_set_providers[i].Acquire(update_data[i]);
+        }
+        new_descriptors_span = bound_descriptor_sets;
+
+        // Only send new offsets if the buffer descriptor-set changed.
+        if (set_dirty.test(0)) {
+            new_offsets_span = offsets;
+        }
+
+        // Try to compact the number of updated descriptor-set slots to the ones that have
+        // actually changed.
+        if (!set_dirty.all()) {
+            const u64 dirty_mask = set_dirty.to_ulong();
+            new_descriptors_start = static_cast<u32>(std::countr_zero(dirty_mask));
+            const u32 new_descriptors_end = 64u - static_cast<u32>(std::countl_zero(dirty_mask));
+            const u32 new_descriptors_size = new_descriptors_end - new_descriptors_start;
+            new_descriptors_span =
+                new_descriptors_span.subspan(new_descriptors_start, new_descriptors_size);
+        }
+
+        set_dirty.reset();
+    }
+
+    boost::container::static_vector<vk::DescriptorSet, NUM_RASTERIZER_SETS> new_descriptors(
+        new_descriptors_span.begin(), new_descriptors_span.end());
+    boost::container::static_vector<u32, NUM_DYNAMIC_OFFSETS> new_offsets(new_offsets_span.begin(),
+                                                                          new_offsets_span.end());
 
     const bool is_dirty = scheduler.IsStateDirty(StateFlags::Pipeline);
     const bool pipeline_dirty = (current_pipeline != pipeline) || is_dirty;
     scheduler.Record([this, is_dirty, pipeline_dirty, pipeline,
                       current_dynamic = current_info.dynamic, dynamic = info.dynamic,
-                      descriptor_sets = bound_descriptor_sets, offsets = offsets,
+                      new_descriptors_start, descriptor_sets = std::move(new_descriptors),
+                      offsets = std::move(new_offsets),
                       current_rasterization = current_info.rasterization,
                       current_depth_stencil = current_info.depth_stencil,
                       rasterization = info.rasterization,
```
```diff
@@ -318,13 +354,16 @@ bool PipelineCache::BindPipeline(const PipelineInfo& info, bool wait_built) {
         }
         cmdbuf.bindPipeline(vk::PipelineBindPoint::eGraphics, pipeline->Handle());
         }
 
-        cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, *pipeline_layout, 0,
-                                  descriptor_sets, offsets);
+        if (descriptor_sets.size()) {
+            cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, *pipeline_layout,
+                                      new_descriptors_start, descriptor_sets, offsets);
+        }
     });
 
     current_info = info;
     current_pipeline = pipeline;
-    scheduler.MarkStateNonDirty(StateFlags::Pipeline);
+    scheduler.MarkStateNonDirty(StateFlags::Pipeline | StateFlags::DescriptorSets);
 
     return true;
 }
```
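Binding only a sub-range is safe because of Vulkan's pipeline-layout compatibility rule: binding a set at index N leaves lower-numbered sets undisturbed as long as the layout is compatible. A fragment illustrating two consecutive draws in vulkan-hpp terms (assumes `cmdbuf`, `layout`, `all_sets`, `texture_set`, and `offsets` are valid handles and spans; illustrative, not Citra's code):

```cpp
// Draw 1: everything dirty, bind all sets starting at slot 0 with dynamic offsets.
cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, layout, 0, all_sets, offsets);
cmdbuf.draw(3, 1, 0, 0);

// Draw 2: only the texture set (slot 1) changed; rebinding at firstSet = 1
// leaves the buffer set in slot 0 bound, and no dynamic offsets are resent.
cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, layout, 1, texture_set, {});
cmdbuf.draw(3, 1, 0, 0);
```

Skipping the call entirely when the compacted span is empty is what removes redundant `vkCmdBindDescriptorSets` calls between draws.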
```diff
@@ -497,7 +536,10 @@ void PipelineCache::BindTexelBuffer(u32 binding, vk::BufferView buffer_view) {
 }
 
 void PipelineCache::SetBufferOffset(u32 binding, size_t offset) {
-    offsets[binding] = static_cast<u32>(offset);
+    if (offsets[binding] != static_cast<u32>(offset)) {
+        offsets[binding] = static_cast<u32>(offset);
+        set_dirty[0] = true;
+    }
 }
 
 bool PipelineCache::IsCacheValid(std::span<const u8> data) const {
```
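The `SetBufferOffset` guard ties into this: dynamic offsets are only applied when their set is bound, so a changed offset must dirty slot 0 (the buffer descriptor set) even though the set handle itself is unchanged, while an unchanged offset now leaves the slot clean and the bind elided. A hedged usage sketch (`stream_buffer.Upload` and the surrounding names are hypothetical, not Citra's API):

```cpp
// Each draw's uniform data is suballocated from one large stream buffer, so the
// descriptor set stays identical between draws and only the dynamic offset moves.
const u32 ubo_offset = stream_buffer.Upload(uniform_block); // hypothetical helper
pipeline_cache.SetBufferOffset(0, ubo_offset); // dirties slot 0 only if the offset actually moved
```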