// SPDX-FileCopyrightText: Copyright 2018 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#pragma once

#include <atomic>
#include <limits>
#include <map>
#include <memory>
#include <mutex>
#include <optional>
#include <type_traits>
#include <utility>
#include <vector>

#include <boost/container/small_vector.hpp>

#include "common/common_types.h"
#include "common/multi_level_page_table.h"
#include "common/range_map.h"
#include "common/scratch_buffer.h"
#include "common/virtual_buffer.h"
#include "core/memory.h"
#include "video_core/cache_types.h"
#include "video_core/host1x/gpu_device_memory_manager.h"
#include "video_core/pte_kind.h"

namespace VideoCore {
class RasterizerInterface;
} // namespace VideoCore

namespace VideoCommon {
class InvalidationAccumulator;
} // namespace VideoCommon

namespace Core {
class System;
} // namespace Core

namespace Tegra {

class MemoryManager final {
public:
    explicit MemoryManager(Core::System& system_, u64 address_space_bits_ = 40,
                           u64 big_page_bits_ = 16, u64 page_bits_ = 12);
    explicit MemoryManager(Core::System& system_, MaxwellDeviceMemoryManager& memory_,
                           u64 address_space_bits_ = 40, u64 big_page_bits_ = 16,
                           u64 page_bits_ = 12);
    ~MemoryManager();

    size_t GetID() const {
        return unique_identifier;
    }

    /// Binds a renderer to the memory manager.
    void BindRasterizer(VideoCore::RasterizerInterface* rasterizer);

    /// Translates a GPU virtual address to a device address, if it is mapped.
    [[nodiscard]] std::optional<DAddr> GpuToCpuAddress(GPUVAddr addr) const;

    /// Ranged variant of the translation above.
    [[nodiscard]] std::optional<DAddr> GpuToCpuAddress(GPUVAddr addr, std::size_t size) const;
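
    // Usage sketch: translation returns std::nullopt for unmapped addresses, so
    // callers are expected to check the optional ('gpu_memory', 'gpu_addr' and
    // 'UseDeviceAddress' are hypothetical caller-side names):
    //
    //     if (const std::optional<DAddr> dev_addr = gpu_memory.GpuToCpuAddress(gpu_addr)) {
    //         UseDeviceAddress(*dev_addr);
    //     }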

    template <typename T>
    [[nodiscard]] T Read(GPUVAddr addr) const;

    template <typename T>
    void Write(GPUVAddr addr, T data);
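
    // Usage sketch: typed accesses go through the same GPU-to-device translation
    // as the block operations ('gpu_memory' and 'gpu_addr' are hypothetical):
    //
    //     const u32 value = gpu_memory.Read<u32>(gpu_addr);
    //     gpu_memory.Write<u32>(gpu_addr, value | 1u);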

    [[nodiscard]] u8* GetPointer(GPUVAddr addr);
    [[nodiscard]] const u8* GetPointer(GPUVAddr addr) const;

    template <typename T>
    [[nodiscard]] T* GetPointer(GPUVAddr addr) {
        const auto address{GpuToCpuAddress(addr)};
        if (!address) {
            return {};
        }
        return memory.GetPointer<T>(*address);
    }

    template <typename T>
    [[nodiscard]] const T* GetPointer(GPUVAddr addr) const {
        const auto address{GpuToCpuAddress(addr)};
        if (!address) {
            return {};
        }
        return memory.GetPointer<T>(*address);
    }

    /**
     * ReadBlock and WriteBlock are full read and write operations over virtual
     * GPU memory. It is important to use these when GPU memory may not be continuous
     * in the host memory counterpart. Note: these functions cause host GPU memory
     * flushes and invalidations, respectively.
     */
    void ReadBlock(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size,
                   VideoCommon::CacheType which = VideoCommon::CacheType::All) const;
    void WriteBlock(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size,
                    VideoCommon::CacheType which = VideoCommon::CacheType::All);
    void CopyBlock(GPUVAddr gpu_dest_addr, GPUVAddr gpu_src_addr, std::size_t size,
                   VideoCommon::CacheType which = VideoCommon::CacheType::All);
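
    // Usage sketch: reading a possibly non-continuous GPU range into a host
    // staging buffer ('gpu_memory', 'gpu_src_addr' and 'size' are hypothetical):
    //
    //     std::vector<u8> staging(size);
    //     gpu_memory.ReadBlock(gpu_src_addr, staging.data(), staging.size());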

    /**
     * ReadBlockUnsafe and WriteBlockUnsafe are special versions of ReadBlock and
     * WriteBlock, respectively. In these versions, no flushing or invalidation is
     * performed, so their performance is similar to a memcpy. These functions can
     * be used instead of their safe counterparts in either of these two scenarios:
     * - Memory which is sure to never be represented in the host GPU.
     * - Memory managed by a cache manager. Example: texture flushing should use
     *   WriteBlockUnsafe instead of WriteBlock, since it should not invalidate the
     *   texture being flushed.
     */
    void ReadBlockUnsafe(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size) const;
    void WriteBlockUnsafe(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size);
    void WriteBlockCached(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size);
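
    // Usage sketch: a texture cache writing back a flushed texture without
    // invalidating it again ('gpu_memory', 'texture_gpu_addr' and
    // 'flushed_pixels' are hypothetical):
    //
    //     gpu_memory.WriteBlockUnsafe(texture_gpu_addr, flushed_pixels.data(),
    //                                 flushed_pixels.size());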

    /**
     * Checks if a GPU region can be simply read with a pointer.
     */
    [[nodiscard]] bool IsGranularRange(GPUVAddr gpu_addr, std::size_t size) const;
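
    // Usage sketch: when a range is granular, it can be accessed in place
    // through GetPointer instead of being copied out ('gpu_memory', 'gpu_addr'
    // and 'size' are hypothetical):
    //
    //     if (gpu_memory.IsGranularRange(gpu_addr, size)) {
    //         const u8* data = gpu_memory.GetPointer(gpu_addr); // zero-copy path
    //     } else {
    //         // fall back to ReadBlock into a staging buffer
    //     }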

    /**
     * Checks if a GPU region is mapped by a single range of device addresses.
     */
    [[nodiscard]] bool IsContinuousRange(GPUVAddr gpu_addr, std::size_t size) const;

    /**
     * Checks if a GPU region is mapped entirely.
     */
    [[nodiscard]] bool IsFullyMappedRange(GPUVAddr gpu_addr, std::size_t size) const;

    /**
     * Returns a vector with all the subranges of device addresses mapped beneath.
     * If the region is continuous, a single pair will be returned. If it's unmapped,
     * an empty vector will be returned.
     */
    boost::container::small_vector<std::pair<GPUVAddr, std::size_t>, 32> GetSubmappedRange(
        GPUVAddr gpu_addr, std::size_t size) const;
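
    // Usage sketch: walking the mapped pieces of a sparse range ('gpu_memory',
    // 'gpu_addr', 'size' and 'ProcessPiece' are hypothetical):
    //
    //     for (const auto& [piece_addr, piece_size] :
    //          gpu_memory.GetSubmappedRange(gpu_addr, size)) {
    //         ProcessPiece(piece_addr, piece_size);
    //     }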

    GPUVAddr Map(GPUVAddr gpu_addr, DAddr dev_addr, std::size_t size,
                 PTEKind kind = PTEKind::INVALID, bool is_big_pages = true);
    GPUVAddr MapSparse(GPUVAddr gpu_addr, std::size_t size, bool is_big_pages = true);
    void Unmap(GPUVAddr gpu_addr, std::size_t size);
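
    // Usage sketch: backing a GPU virtual range with device memory and tearing
    // it down again ('gpu_memory', 'gpu_addr', 'dev_addr' and 'size' are
    // hypothetical):
    //
    //     gpu_memory.Map(gpu_addr, dev_addr, size);
    //     // ... use the mapping ...
    //     gpu_memory.Unmap(gpu_addr, size);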

    void FlushRegion(GPUVAddr gpu_addr, size_t size,
                     VideoCommon::CacheType which = VideoCommon::CacheType::All) const;

    void InvalidateRegion(GPUVAddr gpu_addr, size_t size,
                          VideoCommon::CacheType which = VideoCommon::CacheType::All) const;

    bool IsMemoryDirty(GPUVAddr gpu_addr, size_t size,
                       VideoCommon::CacheType which = VideoCommon::CacheType::All) const;

    size_t MaxContinuousRange(GPUVAddr gpu_addr, size_t size) const;

    bool IsWithinGPUAddressRange(GPUVAddr gpu_addr) const {
        return gpu_addr < address_space_size;
    }

    PTEKind GetPageKind(GPUVAddr gpu_addr) const;

    size_t GetMemoryLayoutSize(GPUVAddr gpu_addr,
                               size_t max_size = std::numeric_limits<size_t>::max()) const;

    void FlushCaching();

    const u8* GetSpan(const GPUVAddr src_addr, const std::size_t size) const;
    u8* GetSpan(const GPUVAddr src_addr, const std::size_t size);

private:
    template <bool is_big_pages, typename FuncMapped, typename FuncReserved, typename FuncUnmapped>
    inline void MemoryOperation(GPUVAddr gpu_src_addr, std::size_t size, FuncMapped&& func_mapped,
                                FuncReserved&& func_reserved, FuncUnmapped&& func_unmapped) const;

    template <bool is_safe>
    void ReadBlockImpl(GPUVAddr gpu_src_addr, void* dest_buffer, std::size_t size,
                       VideoCommon::CacheType which) const;

    template <bool is_safe>
    void WriteBlockImpl(GPUVAddr gpu_dest_addr, const void* src_buffer, std::size_t size,
                        VideoCommon::CacheType which);

    template <bool is_big_page>
    [[nodiscard]] std::size_t PageEntryIndex(GPUVAddr gpu_addr) const {
        if constexpr (is_big_page) {
            return (gpu_addr >> big_page_bits) & big_page_table_mask;
        } else {
            return (gpu_addr >> page_bits) & page_table_mask;
        }
    }
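
    // Worked example: with the default page_bits = 12 (4 KiB small pages), a
    // hypothetical gpu_addr of 0x12345678 yields page number
    // 0x12345678 >> 12 = 0x12345, which page_table_mask then wraps into the
    // bounds of the page table.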

    inline bool IsBigPageContinuous(size_t big_page_index) const;
    inline void SetBigPageContinuous(size_t big_page_index, bool value);

    template <bool is_gpu_address>
    void GetSubmappedRangeImpl(
        GPUVAddr gpu_addr, std::size_t size,
        boost::container::small_vector<
            std::pair<std::conditional_t<is_gpu_address, GPUVAddr, DAddr>, std::size_t>, 32>&
            result) const;

    Core::System& system;
    MaxwellDeviceMemoryManager& memory;

    const u64 address_space_bits;
    const u64 page_bits;
    u64 address_space_size;
    u64 page_size;
    u64 page_mask;
    u64 page_table_mask;
    static constexpr u64 cpu_page_bits{12};

    const u64 big_page_bits;
    u64 big_page_size;
    u64 big_page_mask;
    u64 big_page_table_mask;

    VideoCore::RasterizerInterface* rasterizer = nullptr;

    enum class EntryType : u64 {
        Free = 0,
        Reserved = 1,
        Mapped = 2,
    };

    std::vector<u64> entries;
    std::vector<u64> big_entries;

    template <EntryType entry_type>
    GPUVAddr PageTableOp(GPUVAddr gpu_addr, [[maybe_unused]] DAddr dev_addr, size_t size,
                         PTEKind kind);

    template <EntryType entry_type>
    GPUVAddr BigPageTableOp(GPUVAddr gpu_addr, [[maybe_unused]] DAddr dev_addr, size_t size,
                            PTEKind kind);

    template <bool is_big_page>
    inline EntryType GetEntry(size_t position) const;

    template <bool is_big_page>
    inline void SetEntry(size_t position, EntryType entry);

    Common::MultiLevelPageTable<u32> page_table;
    Common::RangeMap<GPUVAddr, PTEKind> kind_map;
    Common::VirtualBuffer<u32> big_page_table_dev;

    std::vector<u64> big_page_continuous;
    boost::container::small_vector<std::pair<DAddr, std::size_t>, 32> page_stash{};
    boost::container::small_vector<std::pair<DAddr, std::size_t>, 32> page_stash2{};

    mutable std::mutex guard;

    static constexpr size_t continuous_bits = 64;

    const size_t unique_identifier;
    std::unique_ptr<VideoCommon::InvalidationAccumulator> accumulator;

    static std::atomic<size_t> unique_identifier_generator;

    Common::ScratchBuffer<u8> tmp_buffer;
};

} // namespace Tegra