yuzu-emu / yuzu-mainline
shader: Implement CAL inlining function calls

ReinUsesLisp 2021-03-14 03:41:05 -03:00 committed by ameerj
parent b9f7bf4472
commit 71f96fa636
24 changed files with 286 additions and 330 deletions
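
In outline, the commit removes the per-function IR container (IR::Function) so that an IR::Program owns a single flat block list, and it teaches the control-flow graph to end a block at every CAL so the structured control-flow pass can inline the callee at the call site. A condensed sketch of the data shapes involved, assembled from the struct definitions in the diff below (not verbatim commit code; member ordering and omitted fields are approximate):

namespace Shader::IR {
struct Program {
    BlockList blocks;            // was boost::container::small_vector<Function, 1> functions
    BlockList post_order_blocks; // post order of 'blocks', consumed by the optimization passes
    Info info;
};
} // namespace Shader::IR

namespace Shader::Maxwell::Flow {
using FunctionId = size_t;

enum class EndClass { Branch, Call, Exit, Return };

struct Block {
    Location begin;
    Location end;
    EndClass end_class;
    Stack stack;
    IR::Condition cond;
    union {
        Block* branch_true;       // EndClass::Branch
        FunctionId function_call; // EndClass::Call: index of the callee in the CFG's function list
    };
    union {
        Block* branch_false;      // EndClass::Branch
        Block* return_block;      // EndClass::Call: block that resumes after the inlined call
    };
};
} // namespace Shader::Maxwell::Flow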

View File

@ -27,8 +27,6 @@ add_library(shader_recompiler STATIC
frontend/ir/condition.h
frontend/ir/flow_test.cpp
frontend/ir/flow_test.h
frontend/ir/function.cpp
frontend/ir/function.h
frontend/ir/ir_emitter.cpp
frontend/ir/ir_emitter.h
frontend/ir/microinstruction.cpp
@ -43,8 +41,6 @@ add_library(shader_recompiler STATIC
frontend/ir/program.cpp
frontend/ir/program.h
frontend/ir/reg.h
frontend/ir/structured_control_flow.cpp
frontend/ir/structured_control_flow.h
frontend/ir/type.cpp
frontend/ir/type.h
frontend/ir/value.cpp
@ -60,6 +56,8 @@ add_library(shader_recompiler STATIC
frontend/maxwell/opcodes.h
frontend/maxwell/program.cpp
frontend/maxwell/program.h
frontend/maxwell/structured_control_flow.cpp
frontend/maxwell/structured_control_flow.h
frontend/maxwell/translate/impl/bitfield_extract.cpp
frontend/maxwell/translate/impl/bitfield_insert.cpp
frontend/maxwell/translate/impl/common_encoding.h

View File

@ -262,11 +262,9 @@ void EmitContext::DefineTextures(const Info& info, u32& binding) {
}
void EmitContext::DefineLabels(IR::Program& program) {
for (const IR::Function& function : program.functions) {
for (IR::Block* const block : function.blocks) {
for (IR::Block* const block : program.blocks) {
block->SetDefinition(OpLabel());
}
}
}
} // namespace Shader::Backend::SPIRV

View File

@ -10,7 +10,6 @@
#include "shader_recompiler/backend/spirv/emit_spirv.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/function.h"
#include "shader_recompiler/frontend/ir/microinstruction.h"
#include "shader_recompiler/frontend/ir/program.h"
@ -199,18 +198,14 @@ Id PhiArgDef(EmitContext& ctx, IR::Inst* inst, size_t index) {
std::vector<u32> EmitSPIRV(const Profile& profile, Environment& env, IR::Program& program) {
EmitContext ctx{profile, program};
const Id void_function{ctx.TypeFunction(ctx.void_id)};
// FIXME: Forward declare functions (needs sirit support)
Id func{};
for (IR::Function& function : program.functions) {
func = ctx.OpFunction(ctx.void_id, spv::FunctionControlMask::MaskNone, void_function);
for (IR::Block* const block : function.blocks) {
const Id func{ctx.OpFunction(ctx.void_id, spv::FunctionControlMask::MaskNone, void_function)};
for (IR::Block* const block : program.blocks) {
ctx.AddLabel(block->Definition<Id>());
for (IR::Inst& inst : block->Instructions()) {
EmitInst(ctx, &inst);
}
}
ctx.OpFunctionEnd();
}
boost::container::small_vector<Id, 32> interfaces;
const Info& info{program.info};
if (info.uses_workgroup_id) {

View File

@ -1,5 +0,0 @@
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.
#include "shader_recompiler/frontend/ir/function.h"

View File

@ -1,18 +0,0 @@
// Copyright 2021 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.
#pragma once
#include <boost/container/small_vector.hpp>
#include "shader_recompiler/frontend/ir/basic_block.h"
namespace Shader::IR {
struct Function {
BlockList blocks;
BlockList post_order_blocks;
};
} // namespace Shader::IR

View File

@ -9,7 +9,8 @@
#include <fmt/format.h>
#include "shader_recompiler/frontend/ir/function.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/microinstruction.h"
#include "shader_recompiler/frontend/ir/program.h"
namespace Shader::IR {
@ -19,19 +20,14 @@ std::string DumpProgram(const Program& program) {
std::map<const IR::Inst*, size_t> inst_to_index;
std::map<const IR::Block*, size_t> block_to_index;
for (const IR::Function& function : program.functions) {
for (const IR::Block* const block : function.blocks) {
for (const IR::Block* const block : program.blocks) {
block_to_index.emplace(block, index);
++index;
}
}
std::string ret;
for (const IR::Function& function : program.functions) {
ret += fmt::format("Function\n");
for (const auto& block : function.blocks) {
for (const auto& block : program.blocks) {
ret += IR::DumpBlock(*block, block_to_index, inst_to_index, index) + '\n';
}
}
return ret;
}

View File

@ -8,13 +8,14 @@
#include <boost/container/small_vector.hpp>
#include "shader_recompiler/frontend/ir/function.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/shader_info.h"
namespace Shader::IR {
struct Program {
boost::container::small_vector<Function, 1> functions;
BlockList blocks;
BlockList post_order_blocks;
Info info;
};

View File

@ -31,13 +31,12 @@ struct Compare {
return lhs.begin < rhs.begin;
}
};
} // Anonymous namespace
static u32 BranchOffset(Location pc, Instruction inst) {
u32 BranchOffset(Location pc, Instruction inst) {
return pc.Offset() + inst.branch.Offset() + 8;
}
static void Split(Block* old_block, Block* new_block, Location pc) {
void Split(Block* old_block, Block* new_block, Location pc) {
if (pc <= old_block->begin || pc >= old_block->end) {
throw InvalidArgument("Invalid address to split={}", pc);
}
@ -49,21 +48,19 @@ static void Split(Block* old_block, Block* new_block, Location pc) {
.cond{old_block->cond},
.branch_true{old_block->branch_true},
.branch_false{old_block->branch_false},
.ir{nullptr},
};
*old_block = Block{
.begin{old_block->begin},
.end{pc},
.end_class{EndClass::Branch},
.stack{std::move(old_block->stack)},
.cond{IR::Condition{true}},
.cond{true},
.branch_true{new_block},
.branch_false{nullptr},
.ir{nullptr},
};
}
static Token OpcodeToken(Opcode opcode) {
Token OpcodeToken(Opcode opcode) {
switch (opcode) {
case Opcode::PBK:
case Opcode::BRK:
@ -89,7 +86,7 @@ static Token OpcodeToken(Opcode opcode) {
}
}
static bool IsAbsoluteJump(Opcode opcode) {
bool IsAbsoluteJump(Opcode opcode) {
switch (opcode) {
case Opcode::JCAL:
case Opcode::JMP:
@ -100,7 +97,7 @@ static bool IsAbsoluteJump(Opcode opcode) {
}
}
static bool HasFlowTest(Opcode opcode) {
bool HasFlowTest(Opcode opcode) {
switch (opcode) {
case Opcode::BRA:
case Opcode::BRX:
@ -121,13 +118,14 @@ static bool HasFlowTest(Opcode opcode) {
}
}
static std::string NameOf(const Block& block) {
std::string NameOf(const Block& block) {
if (block.begin.IsVirtual()) {
return fmt::format("\"Virtual {}\"", block.begin);
} else {
return fmt::format("\"{}\"", block.begin);
}
}
} // Anonymous namespace
void Stack::Push(Token token, Location target) {
entries.push_back({
@ -166,26 +164,24 @@ bool Block::Contains(Location pc) const noexcept {
return pc >= begin && pc < end;
}
Function::Function(Location start_address)
Function::Function(ObjectPool<Block>& block_pool, Location start_address)
: entrypoint{start_address}, labels{{
.address{start_address},
.block{nullptr},
.block{block_pool.Create(Block{
.begin{start_address},
.end{start_address},
.end_class{EndClass::Branch},
.stack{},
.cond{true},
.branch_true{nullptr},
.branch_false{nullptr},
})},
.stack{},
}} {}
CFG::CFG(Environment& env_, ObjectPool<Block>& block_pool_, Location start_address)
: env{env_}, block_pool{block_pool_} {
functions.emplace_back(start_address);
functions.back().labels.back().block = block_pool.Create(Block{
.begin{start_address},
.end{start_address},
.end_class{EndClass::Branch},
.stack{},
.cond{IR::Condition{true}},
.branch_true{nullptr},
.branch_false{nullptr},
.ir{nullptr},
});
functions.emplace_back(block_pool, start_address);
for (FunctionId function_id = 0; function_id < functions.size(); ++function_id) {
while (!functions[function_id].labels.empty()) {
Function& function{functions[function_id]};
@ -308,11 +304,17 @@ CFG::AnalysisState CFG::AnalyzeInst(Block* block, FunctionId function_id, Locati
const Location cal_pc{is_absolute ? inst.branch.Absolute() : BranchOffset(pc, inst)};
// Technically CAL pushes into PRET, but that's implicit in the function call for us
// Insert the function into the list if it doesn't exist
if (std::ranges::find(functions, cal_pc, &Function::entrypoint) == functions.end()) {
functions.emplace_back(cal_pc);
const auto it{std::ranges::find(functions, cal_pc, &Function::entrypoint)};
const bool exists{it != functions.end()};
const FunctionId call_id{exists ? std::distance(functions.begin(), it) : functions.size()};
if (!exists) {
functions.emplace_back(block_pool, cal_pc);
}
// Handle CAL like a regular instruction
break;
block->end_class = EndClass::Call;
block->function_call = call_id;
block->return_block = AddLabel(block, block->stack, pc + 1, function_id);
block->end = pc;
return AnalysisState::Branch;
}
default:
break;
@ -348,7 +350,6 @@ void CFG::AnalyzeCondInst(Block* block, FunctionId function_id, Location pc,
.cond{cond},
.branch_true{conditional_block},
.branch_false{nullptr},
.ir{nullptr},
};
// Save the contents of the visited block in the conditional block
*conditional_block = std::move(*block);
@ -401,16 +402,6 @@ void CFG::AnalyzeBRX(Block*, Location, Instruction, bool is_absolute) {
throw NotImplementedException("{}", is_absolute ? "JMX" : "BRX");
}
void CFG::AnalyzeCAL(Location pc, Instruction inst, bool is_absolute) {
const Location cal_pc{is_absolute ? inst.branch.Absolute() : BranchOffset(pc, inst)};
// Technically CAL pushes into PRET, but that's implicit in the function call for us
// Insert the function to the function list if it doesn't exist
const auto it{std::ranges::find(functions, cal_pc, &Function::entrypoint)};
if (it == functions.end()) {
functions.emplace_back(cal_pc);
}
}
CFG::AnalysisState CFG::AnalyzeEXIT(Block* block, FunctionId function_id, Location pc,
Instruction inst) {
const IR::FlowTest flow_test{inst.branch.flow_test};
@ -455,10 +446,9 @@ Block* CFG::AddLabel(Block* block, Stack stack, Location pc, FunctionId function
.end{pc},
.end_class{EndClass::Branch},
.stack{stack},
.cond{IR::Condition{true}},
.cond{true},
.branch_true{nullptr},
.branch_false{nullptr},
.ir{nullptr},
})};
function.labels.push_back(Label{
.address{pc},
@ -495,6 +485,14 @@ std::string CFG::Dot() const {
add_branch(block.branch_false, false);
}
break;
case EndClass::Call:
dot += fmt::format("\t\t{}->N{};\n", name, node_uid);
dot += fmt::format("\t\tN{}->{};\n", node_uid, NameOf(*block.return_block));
dot += fmt::format("\t\tN{} [label=\"Call {}\"][shape=square][style=stripped];\n",
node_uid, block.function_call);
dot += '\n';
++node_uid;
break;
case EndClass::Exit:
dot += fmt::format("\t\t{}->N{};\n", name, node_uid);
dot += fmt::format("\t\tN{} [label=\"Exit\"][shape=square][style=stripped];\n",
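
For reference, the new CAL handling in AnalyzeInst, condensed from the hunk above with comments added (a paraphrase, not the verbatim change):

// Find the callee by entry point, or queue a new Function for analysis.
const auto it{std::ranges::find(functions, cal_pc, &Function::entrypoint)};
const bool exists{it != functions.end()};
const FunctionId call_id{exists ? static_cast<FunctionId>(std::distance(functions.begin(), it))
                                : functions.size()};
if (!exists) {
    functions.emplace_back(block_pool, cal_pc);
}
// End the current block at the CAL: record which function is called and where execution
// resumes (pc + 1), instead of treating CAL as a plain instruction as before.
block->end_class = EndClass::Call;
block->function_call = call_id;
block->return_block = AddLabel(block, block->stack, pc + 1, function_id);
block->end = pc;
return AnalysisState::Branch;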

View File

@ -20,16 +20,13 @@
#include "shader_recompiler/frontend/maxwell/opcodes.h"
#include "shader_recompiler/object_pool.h"
namespace Shader::IR {
class Block;
}
namespace Shader::Maxwell::Flow {
using FunctionId = size_t;
enum class EndClass {
Branch,
Call,
Exit,
Return,
};
@ -75,9 +72,14 @@ struct Block : boost::intrusive::set_base_hook<
EndClass end_class;
Stack stack;
IR::Condition cond;
union {
Block* branch_true;
FunctionId function_call;
};
union {
Block* branch_false;
IR::Block* ir;
Block* return_block;
};
};
struct Label {
@ -87,7 +89,7 @@ struct Label {
};
struct Function {
Function(Location start_address);
explicit Function(ObjectPool<Block>& block_pool, Location start_address);
Location entrypoint;
boost::container::small_vector<Label, 16> labels;
@ -137,7 +139,6 @@ private:
void AnalyzeBRA(Block* block, FunctionId function_id, Location pc, Instruction inst,
bool is_absolute);
void AnalyzeBRX(Block* block, Location pc, Instruction inst, bool is_absolute);
void AnalyzeCAL(Location pc, Instruction inst, bool is_absolute);
AnalysisState AnalyzeEXIT(Block* block, FunctionId function_id, Location pc, Instruction inst);
/// Return the branch target block id

View File

@ -8,67 +8,44 @@
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/post_order.h"
#include "shader_recompiler/frontend/ir/structured_control_flow.h"
#include "shader_recompiler/frontend/maxwell/program.h"
#include "shader_recompiler/frontend/maxwell/structured_control_flow.h"
#include "shader_recompiler/frontend/maxwell/translate/translate.h"
#include "shader_recompiler/ir_opt/passes.h"
namespace Shader::Maxwell {
namespace {
IR::BlockList TranslateCode(ObjectPool<IR::Inst>& inst_pool, ObjectPool<IR::Block>& block_pool,
Environment& env, Flow::Function& cfg_function) {
const size_t num_blocks{cfg_function.blocks.size()};
std::vector<IR::Block*> blocks(cfg_function.blocks.size());
std::ranges::for_each(cfg_function.blocks, [&, i = size_t{0}](auto& cfg_block) mutable {
const u32 begin{cfg_block.begin.Offset()};
const u32 end{cfg_block.end.Offset()};
blocks[i] = block_pool.Create(inst_pool, begin, end);
cfg_block.ir = blocks[i];
++i;
});
std::ranges::for_each(cfg_function.blocks, [&, i = size_t{0}](auto& cfg_block) mutable {
IR::Block* const block{blocks[i]};
++i;
if (cfg_block.end_class != Flow::EndClass::Branch) {
block->SetReturn();
} else if (cfg_block.cond == IR::Condition{true}) {
block->SetBranch(cfg_block.branch_true->ir);
} else if (cfg_block.cond == IR::Condition{false}) {
block->SetBranch(cfg_block.branch_false->ir);
} else {
block->SetBranches(cfg_block.cond, cfg_block.branch_true->ir,
cfg_block.branch_false->ir);
static void RemoveUnreachableBlocks(IR::Program& program) {
// Some blocks might be unreachable if a function call exists unconditionally
// If this happens the number of blocks and post order blocks will mismatch
if (program.blocks.size() == program.post_order_blocks.size()) {
return;
}
const IR::BlockList& post_order{program.post_order_blocks};
std::erase_if(program.blocks, [&](IR::Block* block) {
return std::ranges::find(post_order, block) == post_order.end();
});
return IR::VisitAST(inst_pool, block_pool, blocks,
[&](IR::Block* block) { Translate(env, block); });
}
} // Anonymous namespace
IR::Program TranslateProgram(ObjectPool<IR::Inst>& inst_pool, ObjectPool<IR::Block>& block_pool,
Environment& env, Flow::CFG& cfg) {
IR::Program program;
auto& functions{program.functions};
functions.reserve(cfg.Functions().size());
for (Flow::Function& cfg_function : cfg.Functions()) {
functions.push_back(IR::Function{
.blocks{TranslateCode(inst_pool, block_pool, env, cfg_function)},
.post_order_blocks{},
});
}
program.blocks = VisitAST(inst_pool, block_pool, env, cfg);
program.post_order_blocks = PostOrder(program.blocks);
RemoveUnreachableBlocks(program);
// Replace instructions before the SSA rewrite
Optimization::LowerFp16ToFp32(program);
for (IR::Function& function : functions) {
function.post_order_blocks = PostOrder(function.blocks);
Optimization::SsaRewritePass(function.post_order_blocks);
}
Optimization::SsaRewritePass(program);
Optimization::GlobalMemoryToStorageBufferPass(program);
Optimization::TexturePass(env, program);
for (IR::Function& function : functions) {
Optimization::PostOrderInvoke(Optimization::ConstantPropagationPass, function);
Optimization::PostOrderInvoke(Optimization::DeadCodeEliminationPass, function);
Optimization::IdentityRemovalPass(function);
Optimization::VerificationPass(function);
}
Optimization::ConstantPropagationPass(program);
Optimization::DeadCodeEliminationPass(program);
Optimization::IdentityRemovalPass(program);
Optimization::VerificationPass(program);
Optimization::CollectShaderInfoPass(program);
return program;
}
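
The RemoveUnreachableBlocks step above uses the post order list as a reachability check. A minimal sketch of the idea, assuming PostOrder only visits blocks reachable from the entry block (which is why a size mismatch signals dead blocks):

// Per the comment above, an unconditional function call can leave some blocks unreachable.
// Such blocks never appear in the post order traversal, so anything missing from
// post_order_blocks is dead and can be erased from the linear block list.
if (program.blocks.size() != program.post_order_blocks.size()) {
    std::erase_if(program.blocks, [&](IR::Block* block) {
        return std::ranges::find(program.post_order_blocks, block) ==
               program.post_order_blocks.end();
    });
}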

View File

@ -14,11 +14,14 @@
#include <boost/intrusive/list.hpp>
#include "shader_recompiler/environment.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/ir_emitter.h"
#include "shader_recompiler/frontend/maxwell/structured_control_flow.h"
#include "shader_recompiler/frontend/maxwell/translate/translate.h"
#include "shader_recompiler/object_pool.h"
namespace Shader::IR {
namespace Shader::Maxwell {
namespace {
struct Statement;
@ -79,7 +82,7 @@ struct Variable {};
#pragma warning(disable : 26495) // Always initialize a member variable, expected in Statement
#endif
struct Statement : ListBaseHook {
Statement(Block* code_, Statement* up_) : code{code_}, up{up_}, type{StatementType::Code} {}
Statement(IR::Block* code_, Statement* up_) : code{code_}, up{up_}, type{StatementType::Code} {}
Statement(Goto, Statement* cond_, Node label_, Statement* up_)
: label{label_}, cond{cond_}, up{up_}, type{StatementType::Goto} {}
Statement(Label, u32 id_, Statement* up_) : id{id_}, up{up_}, type{StatementType::Label} {}
@ -91,7 +94,7 @@ struct Statement : ListBaseHook {
: cond{cond_}, up{up_}, type{StatementType::Break} {}
Statement(Return) : type{StatementType::Return} {}
Statement(FunctionTag) : children{}, type{StatementType::Function} {}
Statement(Identity, Condition cond_) : guest_cond{cond_}, type{StatementType::Identity} {}
Statement(Identity, IR::Condition cond_) : guest_cond{cond_}, type{StatementType::Identity} {}
Statement(Not, Statement* op_) : op{op_}, type{StatementType::Not} {}
Statement(Or, Statement* op_a_, Statement* op_b_)
: op_a{op_a_}, op_b{op_b_}, type{StatementType::Or} {}
@ -106,10 +109,10 @@ struct Statement : ListBaseHook {
}
union {
Block* code;
IR::Block* code;
Node label;
Tree children;
Condition guest_cond;
IR::Condition guest_cond;
Statement* op;
Statement* op_a;
};
@ -269,9 +272,10 @@ bool SearchNode(const Tree& tree, ConstNode stmt, size_t& offset) {
class GotoPass {
public:
explicit GotoPass(std::span<Block* const> blocks, ObjectPool<Statement>& stmt_pool)
: pool{stmt_pool} {
std::vector gotos{BuildUnorderedTreeGetGotos(blocks)};
explicit GotoPass(Flow::CFG& cfg, ObjectPool<IR::Inst>& inst_pool_,
ObjectPool<IR::Block>& block_pool_, ObjectPool<Statement>& stmt_pool)
: inst_pool{inst_pool_}, block_pool{block_pool_}, pool{stmt_pool} {
std::vector gotos{BuildTree(cfg)};
for (const Node& goto_stmt : gotos | std::views::reverse) {
RemoveGoto(goto_stmt);
}
@ -316,6 +320,7 @@ private:
}
}
// TODO: Remove this
{
Node it{goto_stmt};
bool sibling{false};
do {
@ -329,6 +334,7 @@ private:
if (!sibling) {
throw LogicError("Not siblings");
}
}
// goto_stmt and label_stmt are guaranteed to be siblings, eliminate
if (std::next(goto_stmt) == label_stmt) {
// Simply eliminate the goto if the label is next to it
@ -342,63 +348,84 @@ private:
}
}
std::vector<Node> BuildUnorderedTreeGetGotos(std::span<Block* const> blocks) {
// Assume all blocks have two branches
std::vector<Node> BuildTree(Flow::CFG& cfg) {
u32 label_id{0};
std::vector<Node> gotos;
gotos.reserve(blocks.size() * 2);
const std::unordered_map labels_map{BuildLabels(blocks)};
Tree& root{root_stmt.children};
auto insert_point{root.begin()};
// Skip all goto variables zero-initialization
std::advance(insert_point, labels_map.size());
for (Block* const block : blocks) {
// Skip label
++insert_point;
// Skip set variable
++insert_point;
root.insert(insert_point, *pool.Create(block, &root_stmt));
if (block->IsTerminationBlock()) {
root.insert(insert_point, *pool.Create(Return{}));
continue;
}
const Condition cond{block->BranchCondition()};
Statement* const true_cond{pool.Create(Identity{}, Condition{true})};
if (cond == Condition{true} || cond == Condition{false}) {
const bool is_true{cond == Condition{true}};
const Block* const branch{is_true ? block->TrueBranch() : block->FalseBranch()};
const Node label{labels_map.at(branch)};
Statement* const goto_stmt{pool.Create(Goto{}, true_cond, label, &root_stmt)};
gotos.push_back(root.insert(insert_point, *goto_stmt));
} else {
Statement* const ident_cond{pool.Create(Identity{}, cond)};
const Node true_label{labels_map.at(block->TrueBranch())};
const Node false_label{labels_map.at(block->FalseBranch())};
Statement* goto_true{pool.Create(Goto{}, ident_cond, true_label, &root_stmt)};
Statement* goto_false{pool.Create(Goto{}, true_cond, false_label, &root_stmt)};
gotos.push_back(root.insert(insert_point, *goto_true));
gotos.push_back(root.insert(insert_point, *goto_false));
}
}
Flow::Function& first_function{cfg.Functions().front()};
BuildTree(cfg, first_function, label_id, gotos, root_stmt.children.end(), std::nullopt);
return gotos;
}
std::unordered_map<const Block*, Node> BuildLabels(std::span<Block* const> blocks) {
// TODO: Consider storing labels intrusively inside the block
std::unordered_map<const Block*, Node> labels_map;
void BuildTree(Flow::CFG& cfg, Flow::Function& function, u32& label_id,
std::vector<Node>& gotos, Node function_insert_point,
std::optional<Node> return_label) {
Statement* const false_stmt{pool.Create(Identity{}, IR::Condition{false})};
Tree& root{root_stmt.children};
u32 label_id{0};
for (const Block* const block : blocks) {
std::unordered_map<Flow::Block*, Node> local_labels;
local_labels.reserve(function.blocks.size());
for (Flow::Block& block : function.blocks) {
Statement* const label{pool.Create(Label{}, label_id, &root_stmt)};
labels_map.emplace(block, root.insert(root.end(), *label));
Statement* const false_stmt{pool.Create(Identity{}, Condition{false})};
root.push_back(*pool.Create(SetVariable{}, label_id, false_stmt, &root_stmt));
root.push_front(*pool.Create(SetVariable{}, label_id, false_stmt, &root_stmt));
const Node label_it{root.insert(function_insert_point, *label)};
local_labels.emplace(&block, label_it);
++label_id;
}
return labels_map;
for (Flow::Block& block : function.blocks) {
const Node label{local_labels.at(&block)};
// Insertion point
const Node ip{std::next(label)};
// Reset goto variables before the first block and after its respective label
const auto make_reset_variable{[&]() -> Statement& {
return *pool.Create(SetVariable{}, label->id, false_stmt, &root_stmt);
}};
root.push_front(make_reset_variable());
root.insert(ip, make_reset_variable());
const u32 begin_offset{block.begin.Offset()};
const u32 end_offset{block.end.Offset()};
IR::Block* const ir_block{block_pool.Create(inst_pool, begin_offset, end_offset)};
root.insert(ip, *pool.Create(ir_block, &root_stmt));
switch (block.end_class) {
case Flow::EndClass::Branch: {
Statement* const always_cond{pool.Create(Identity{}, IR::Condition{true})};
if (block.cond == IR::Condition{true}) {
const Node true_label{local_labels.at(block.branch_true)};
gotos.push_back(
root.insert(ip, *pool.Create(Goto{}, always_cond, true_label, &root_stmt)));
} else if (block.cond == IR::Condition{false}) {
const Node false_label{local_labels.at(block.branch_false)};
gotos.push_back(root.insert(
ip, *pool.Create(Goto{}, always_cond, false_label, &root_stmt)));
} else {
const Node true_label{local_labels.at(block.branch_true)};
const Node false_label{local_labels.at(block.branch_false)};
Statement* const true_cond{pool.Create(Identity{}, block.cond)};
gotos.push_back(
root.insert(ip, *pool.Create(Goto{}, true_cond, true_label, &root_stmt)));
gotos.push_back(root.insert(
ip, *pool.Create(Goto{}, always_cond, false_label, &root_stmt)));
}
break;
}
case Flow::EndClass::Call: {
Flow::Function& call{cfg.Functions()[block.function_call]};
const Node call_return_label{local_labels.at(block.return_block)};
BuildTree(cfg, call, label_id, gotos, ip, call_return_label);
break;
}
case Flow::EndClass::Exit:
root.insert(ip, *pool.Create(Return{}));
break;
case Flow::EndClass::Return: {
Statement* const always_cond{pool.Create(Identity{}, block.cond)};
auto goto_stmt{pool.Create(Goto{}, always_cond, return_label.value(), &root_stmt)};
gotos.push_back(root.insert(ip, *goto_stmt));
break;
}
}
}
}
void UpdateTreeUp(Statement* tree) {
@ -556,11 +583,13 @@ private:
return offset;
}
ObjectPool<IR::Inst>& inst_pool;
ObjectPool<IR::Block>& block_pool;
ObjectPool<Statement>& pool;
Statement root_stmt{FunctionTag{}};
};
Block* TryFindForwardBlock(const Statement& stmt) {
IR::Block* TryFindForwardBlock(const Statement& stmt) {
const Tree& tree{stmt.up->children};
const ConstNode end{tree.cend()};
ConstNode forward_node{std::next(Tree::s_iterator_to(stmt))};
@ -573,12 +602,12 @@ Block* TryFindForwardBlock(const Statement& stmt) {
return nullptr;
}
[[nodiscard]] U1 VisitExpr(IREmitter& ir, const Statement& stmt) {
[[nodiscard]] IR::U1 VisitExpr(IR::IREmitter& ir, const Statement& stmt) {
switch (stmt.type) {
case StatementType::Identity:
return ir.Condition(stmt.guest_cond);
case StatementType::Not:
return ir.LogicalNot(U1{VisitExpr(ir, *stmt.op)});
return ir.LogicalNot(IR::U1{VisitExpr(ir, *stmt.op)});
case StatementType::Or:
return ir.LogicalOr(VisitExpr(ir, *stmt.op_a), VisitExpr(ir, *stmt.op_b));
case StatementType::Variable:
@ -590,18 +619,18 @@ Block* TryFindForwardBlock(const Statement& stmt) {
class TranslatePass {
public:
TranslatePass(ObjectPool<Inst>& inst_pool_, ObjectPool<Block>& block_pool_,
ObjectPool<Statement>& stmt_pool_, Statement& root_stmt,
const std::function<void(IR::Block*)>& func_, BlockList& block_list_)
: stmt_pool{stmt_pool_}, inst_pool{inst_pool_}, block_pool{block_pool_}, func{func_},
TranslatePass(ObjectPool<IR::Inst>& inst_pool_, ObjectPool<IR::Block>& block_pool_,
ObjectPool<Statement>& stmt_pool_, Environment& env_, Statement& root_stmt,
IR::BlockList& block_list_)
: stmt_pool{stmt_pool_}, inst_pool{inst_pool_}, block_pool{block_pool_}, env{env_},
block_list{block_list_} {
Visit(root_stmt, nullptr, nullptr);
}
private:
void Visit(Statement& parent, Block* continue_block, Block* break_block) {
void Visit(Statement& parent, IR::Block* continue_block, IR::Block* break_block) {
Tree& tree{parent.children};
Block* current_block{nullptr};
IR::Block* current_block{nullptr};
for (auto it = tree.begin(); it != tree.end(); ++it) {
Statement& stmt{*it};
@ -611,11 +640,10 @@ private:
break;
case StatementType::Code: {
if (current_block && current_block != stmt.code) {
IREmitter ir{*current_block};
ir.Branch(stmt.code);
IR::IREmitter{*current_block}.Branch(stmt.code);
}
current_block = stmt.code;
func(stmt.code);
Translate(env, stmt.code);
block_list.push_back(stmt.code);
break;
}
@ -623,7 +651,7 @@ private:
if (!current_block) {
current_block = MergeBlock(parent, stmt);
}
IREmitter ir{*current_block};
IR::IREmitter ir{*current_block};
ir.SetGotoVariable(stmt.id, VisitExpr(ir, *stmt.op));
break;
}
@ -632,16 +660,16 @@ private:
current_block = block_pool.Create(inst_pool);
block_list.push_back(current_block);
}
Block* const merge_block{MergeBlock(parent, stmt)};
IR::Block* const merge_block{MergeBlock(parent, stmt)};
// Visit children
const size_t first_block_index{block_list.size()};
Visit(stmt, merge_block, break_block);
// Implement if header block
Block* const first_if_block{block_list.at(first_block_index)};
IREmitter ir{*current_block};
const U1 cond{VisitExpr(ir, *stmt.cond)};
IR::Block* const first_if_block{block_list.at(first_block_index)};
IR::IREmitter ir{*current_block};
const IR::U1 cond{VisitExpr(ir, *stmt.cond)};
ir.SelectionMerge(merge_block);
ir.BranchConditional(cond, first_if_block, merge_block);
@ -649,14 +677,14 @@ private:
break;
}
case StatementType::Loop: {
Block* const loop_header_block{block_pool.Create(inst_pool)};
IR::Block* const loop_header_block{block_pool.Create(inst_pool)};
if (current_block) {
IREmitter{*current_block}.Branch(loop_header_block);
IR::IREmitter{*current_block}.Branch(loop_header_block);
}
block_list.push_back(loop_header_block);
Block* const new_continue_block{block_pool.Create(inst_pool)};
Block* const merge_block{MergeBlock(parent, stmt)};
IR::Block* const new_continue_block{block_pool.Create(inst_pool)};
IR::Block* const merge_block{MergeBlock(parent, stmt)};
// Visit children
const size_t first_block_index{block_list.size()};
@ -666,14 +694,14 @@ private:
block_list.push_back(new_continue_block);
// Implement loop header block
Block* const first_loop_block{block_list.at(first_block_index)};
IREmitter ir{*loop_header_block};
IR::Block* const first_loop_block{block_list.at(first_block_index)};
IR::IREmitter ir{*loop_header_block};
ir.LoopMerge(merge_block, new_continue_block);
ir.Branch(first_loop_block);
// Implement continue block
IREmitter continue_ir{*new_continue_block};
const U1 continue_cond{VisitExpr(continue_ir, *stmt.cond)};
IR::IREmitter continue_ir{*new_continue_block};
const IR::U1 continue_cond{VisitExpr(continue_ir, *stmt.cond)};
continue_ir.BranchConditional(continue_cond, ir.block, merge_block);
current_block = merge_block;
@ -684,9 +712,9 @@ private:
current_block = block_pool.Create(inst_pool);
block_list.push_back(current_block);
}
Block* const skip_block{MergeBlock(parent, stmt)};
IR::Block* const skip_block{MergeBlock(parent, stmt)};
IREmitter ir{*current_block};
IR::IREmitter ir{*current_block};
ir.BranchConditional(VisitExpr(ir, *stmt.cond), break_block, skip_block);
current_block = skip_block;
@ -697,7 +725,7 @@ private:
current_block = block_pool.Create(inst_pool);
block_list.push_back(current_block);
}
IREmitter{*current_block}.Return();
IR::IREmitter{*current_block}.Return();
current_block = nullptr;
break;
}
@ -706,39 +734,37 @@ private:
}
}
if (current_block && continue_block) {
IREmitter ir{*current_block};
ir.Branch(continue_block);
IR::IREmitter{*current_block}.Branch(continue_block);
}
}
Block* MergeBlock(Statement& parent, Statement& stmt) {
if (Block* const block{TryFindForwardBlock(stmt)}) {
IR::Block* MergeBlock(Statement& parent, Statement& stmt) {
if (IR::Block* const block{TryFindForwardBlock(stmt)}) {
return block;
}
// Create a merge block we can visit later
Block* const block{block_pool.Create(inst_pool)};
IR::Block* const block{block_pool.Create(inst_pool)};
Statement* const merge_stmt{stmt_pool.Create(block, &parent)};
parent.children.insert(std::next(Tree::s_iterator_to(stmt)), *merge_stmt);
return block;
}
ObjectPool<Statement>& stmt_pool;
ObjectPool<Inst>& inst_pool;
ObjectPool<Block>& block_pool;
const std::function<void(IR::Block*)>& func;
BlockList& block_list;
ObjectPool<IR::Inst>& inst_pool;
ObjectPool<IR::Block>& block_pool;
Environment& env;
IR::BlockList& block_list;
};
} // Anonymous namespace
BlockList VisitAST(ObjectPool<Inst>& inst_pool, ObjectPool<Block>& block_pool,
std::span<Block* const> unordered_blocks,
const std::function<void(Block*)>& func) {
IR::BlockList VisitAST(ObjectPool<IR::Inst>& inst_pool, ObjectPool<IR::Block>& block_pool,
Environment& env, Flow::CFG& cfg) {
ObjectPool<Statement> stmt_pool{64};
GotoPass goto_pass{unordered_blocks, stmt_pool};
BlockList block_list;
TranslatePass translate_pass{inst_pool, block_pool, stmt_pool, goto_pass.RootStatement(),
func, block_list};
GotoPass goto_pass{cfg, inst_pool, block_pool, stmt_pool};
Statement& root{goto_pass.RootStatement()};
IR::BlockList block_list;
TranslatePass{inst_pool, block_pool, stmt_pool, env, root, block_list};
return block_list;
}
} // namespace Shader::IR
} // namespace Shader::Maxwell
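
The inlining itself happens in BuildTree, sketched below with explanatory comments (condensed from the switch in the hunk above; variable names follow the diff where possible):

switch (block.end_class) {
// ...
case Flow::EndClass::Call: {
    // Inline the callee: recurse into its block list so its labels, code and gotos are
    // emitted at the call site, handing down the caller's resume label as return_label.
    Flow::Function& call{cfg.Functions()[block.function_call]};
    const Node call_return_label{local_labels.at(block.return_block)};
    BuildTree(cfg, call, label_id, gotos, ip, call_return_label);
    break;
}
case Flow::EndClass::Return: {
    // Inside an inlined callee, a return becomes a goto back to the caller's resume label.
    Statement* const cond{pool.Create(Identity{}, block.cond)};
    gotos.push_back(root.insert(ip, *pool.Create(Goto{}, cond, return_label.value(), &root_stmt)));
    break;
}
// ...
}
// The outermost BuildTree call (the shader entry point) passes std::nullopt for return_label;
// EndClass::Exit is what terminates the program with a Return statement.

With calls expanded here, the instruction translator's CAL handler becomes a no-op, as a later hunk shows.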

View File

@ -9,14 +9,16 @@
#include <boost/intrusive/list.hpp>
#include "shader_recompiler/environment.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/microinstruction.h"
#include "shader_recompiler/frontend/maxwell/control_flow.h"
#include "shader_recompiler/object_pool.h"
namespace Shader::IR {
namespace Shader::Maxwell {
[[nodiscard]] BlockList VisitAST(ObjectPool<Inst>& inst_pool, ObjectPool<Block>& block_pool,
std::span<Block* const> unordered_blocks,
const std::function<void(Block*)>& func);
[[nodiscard]] IR::BlockList VisitAST(ObjectPool<IR::Inst>& inst_pool,
ObjectPool<IR::Block>& block_pool, Environment& env,
Flow::CFG& cfg);
} // namespace Shader::IR
} // namespace Shader::Maxwell

View File

@ -62,7 +62,7 @@ public:
void BRA(u64 insn);
void BRK(u64 insn);
void BRX(u64 insn);
void CAL(u64 insn);
void CAL();
void CCTL(u64 insn);
void CCTLL(u64 insn);
void CONT(u64 insn);

View File

@ -65,8 +65,8 @@ void TranslatorVisitor::BRX(u64) {
ThrowNotImplemented(Opcode::BRX);
}
void TranslatorVisitor::CAL(u64) {
ThrowNotImplemented(Opcode::CAL);
void TranslatorVisitor::CAL() {
// CAL is a no-op
}
void TranslatorVisitor::CCTL(u64) {

View File

@ -296,13 +296,11 @@ void Visit(Info& info, IR::Inst& inst) {
void CollectShaderInfoPass(IR::Program& program) {
Info& info{program.info};
for (IR::Function& function : program.functions) {
for (IR::Block* const block : function.post_order_blocks) {
for (IR::Block* const block : program.post_order_blocks) {
for (IR::Inst& inst : block->Instructions()) {
Visit(info, inst);
}
}
}
}
} // namespace Shader::Optimization

View File

@ -371,9 +371,11 @@ void ConstantPropagation(IR::Block& block, IR::Inst& inst) {
}
} // Anonymous namespace
void ConstantPropagationPass(IR::Block& block) {
for (IR::Inst& inst : block) {
ConstantPropagation(block, inst);
void ConstantPropagationPass(IR::Program& program) {
for (IR::Block* const block : program.post_order_blocks) {
for (IR::Inst& inst : block->Instructions()) {
ConstantPropagation(*block, inst);
}
}
}

View File

@ -10,14 +10,16 @@
namespace Shader::Optimization {
void DeadCodeEliminationPass(IR::Block& block) {
void DeadCodeEliminationPass(IR::Program& program) {
// We iterate over the instructions in reverse order.
// This is because removing an instruction reduces the number of uses for earlier instructions.
for (IR::Inst& inst : block | std::views::reverse) {
for (IR::Block* const block : program.post_order_blocks) {
for (IR::Inst& inst : block->Instructions() | std::views::reverse) {
if (!inst.HasUses() && !inst.MayHaveSideEffects()) {
inst.Invalidate();
}
}
}
}
} // namespace Shader::Optimization

View File

@ -351,8 +351,7 @@ void GlobalMemoryToStorageBufferPass(IR::Program& program) {
StorageBufferSet storage_buffers;
StorageInstVector to_replace;
for (IR::Function& function : program.functions) {
for (IR::Block* const block : function.post_order_blocks) {
for (IR::Block* const block : program.post_order_blocks) {
for (IR::Inst& inst : block->Instructions()) {
if (!IsGlobalMemory(inst)) {
continue;
@ -360,7 +359,6 @@ void GlobalMemoryToStorageBufferPass(IR::Program& program) {
CollectStorageBuffers(*block, inst, storage_buffers, to_replace);
}
}
}
Info& info{program.info};
u32 storage_index{};
for (const StorageBufferAddr& storage_buffer : storage_buffers) {

View File

@ -10,10 +10,10 @@
namespace Shader::Optimization {
void IdentityRemovalPass(IR::Function& function) {
void IdentityRemovalPass(IR::Program& program) {
std::vector<IR::Inst*> to_invalidate;
for (IR::Block* const block : function.blocks) {
for (IR::Block* const block : program.blocks) {
for (auto inst = block->begin(); inst != block->end();) {
const size_t num_args{inst->NumArgs()};
for (size_t i = 0; i < num_args; ++i) {

View File

@ -77,13 +77,11 @@ IR::Opcode Replace(IR::Opcode op) {
} // Anonymous namespace
void LowerFp16ToFp32(IR::Program& program) {
for (IR::Function& function : program.functions) {
for (IR::Block* const block : function.blocks) {
for (IR::Block* const block : program.blocks) {
for (IR::Inst& inst : block->Instructions()) {
inst.ReplaceOpcode(Replace(inst.Opcode()));
}
}
}
}
} // namespace Shader::Optimization

View File

@ -8,26 +8,18 @@
#include "shader_recompiler/environment.h"
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/function.h"
#include "shader_recompiler/frontend/ir/program.h"
namespace Shader::Optimization {
template <typename Func>
void PostOrderInvoke(Func&& func, IR::Function& function) {
for (const auto& block : function.post_order_blocks) {
func(*block);
}
}
void CollectShaderInfoPass(IR::Program& program);
void ConstantPropagationPass(IR::Block& block);
void DeadCodeEliminationPass(IR::Block& block);
void ConstantPropagationPass(IR::Program& program);
void DeadCodeEliminationPass(IR::Program& program);
void GlobalMemoryToStorageBufferPass(IR::Program& program);
void IdentityRemovalPass(IR::Function& function);
void IdentityRemovalPass(IR::Program& program);
void LowerFp16ToFp32(IR::Program& program);
void SsaRewritePass(std::span<IR::Block* const> post_order_blocks);
void SsaRewritePass(IR::Program& program);
void TexturePass(Environment& env, IR::Program& program);
void VerificationPass(const IR::Function& function);
void VerificationPass(const IR::Program& program);
} // namespace Shader::Optimization

View File

@ -23,7 +23,6 @@
#include <boost/container/flat_set.hpp>
#include "shader_recompiler/frontend/ir/basic_block.h"
#include "shader_recompiler/frontend/ir/function.h"
#include "shader_recompiler/frontend/ir/microinstruction.h"
#include "shader_recompiler/frontend/ir/opcodes.h"
#include "shader_recompiler/frontend/ir/pred.h"
@ -262,9 +261,9 @@ void VisitBlock(Pass& pass, IR::Block* block) {
}
} // Anonymous namespace
void SsaRewritePass(std::span<IR::Block* const> post_order_blocks) {
void SsaRewritePass(IR::Program& program) {
Pass pass;
for (IR::Block* const block : post_order_blocks | std::views::reverse) {
for (IR::Block* const block : program.post_order_blocks | std::views::reverse) {
VisitBlock(pass, block);
}
}

View File

@ -164,8 +164,7 @@ private:
void TexturePass(Environment& env, IR::Program& program) {
TextureInstVector to_replace;
for (IR::Function& function : program.functions) {
for (IR::Block* const block : function.post_order_blocks) {
for (IR::Block* const block : program.post_order_blocks) {
for (IR::Inst& inst : block->Instructions()) {
if (!IsTextureInstruction(inst)) {
continue;
@ -173,7 +172,6 @@ void TexturePass(Environment& env, IR::Program& program) {
to_replace.push_back(MakeInst(env, block, inst));
}
}
}
// Sort instructions to visit textures by constant buffer index, then by offset
std::ranges::sort(to_replace, [](const auto& lhs, const auto& rhs) {
return lhs.cbuf.offset < rhs.cbuf.offset;

View File

@ -11,8 +11,8 @@
namespace Shader::Optimization {
static void ValidateTypes(const IR::Function& function) {
for (const auto& block : function.blocks) {
static void ValidateTypes(const IR::Program& program) {
for (const auto& block : program.blocks) {
for (const IR::Inst& inst : *block) {
if (inst.Opcode() == IR::Opcode::Phi) {
// Skip validation on phi nodes
@ -30,9 +30,9 @@ static void ValidateTypes(const IR::Function& function) {
}
}
static void ValidateUses(const IR::Function& function) {
static void ValidateUses(const IR::Program& program) {
std::map<IR::Inst*, int> actual_uses;
for (const auto& block : function.blocks) {
for (const auto& block : program.blocks) {
for (const IR::Inst& inst : *block) {
const size_t num_args{inst.NumArgs()};
for (size_t i = 0; i < num_args; ++i) {
@ -45,14 +45,14 @@ static void ValidateUses(const IR::Function& function) {
}
for (const auto [inst, uses] : actual_uses) {
if (inst->UseCount() != uses) {
throw LogicError("Invalid uses in block:" /*, IR::DumpFunction(function)*/);
throw LogicError("Invalid uses in block: {}", IR::DumpProgram(program));
}
}
}
void VerificationPass(const IR::Function& function) {
ValidateTypes(function);
ValidateUses(function);
void VerificationPass(const IR::Program& program) {
ValidateTypes(program);
ValidateUses(program);
}
} // namespace Shader::Optimization