// Copyright 2015 Citra Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#include <array>
#include <cstring>
#include <boost/serialization/array.hpp>
#include <boost/serialization/binary_object.hpp>
#include "audio_core/dsp_interface.h"
#include "common/archives.h"
#include "common/assert.h"
#include "common/common_types.h"
#include "common/logging/log.h"
#include "common/swap.h"
#include "core/arm/arm_interface.h"
#include "core/core.h"
#include "core/global.h"
#include "core/hle/kernel/memory.h"
#include "core/hle/kernel/process.h"
#include "core/hle/lock.h"
#include "core/memory.h"
#include "core/settings.h"
#include "video_core/renderer_base.h"
#include "video_core/video_core.h"

SERIALIZE_EXPORT_IMPL(Memory::MemorySystem::BackingMemImpl<Memory::Region::FCRAM>)
SERIALIZE_EXPORT_IMPL(Memory::MemorySystem::BackingMemImpl<Memory::Region::VRAM>)
SERIALIZE_EXPORT_IMPL(Memory::MemorySystem::BackingMemImpl<Memory::Region::DSP>)
SERIALIZE_EXPORT_IMPL(Memory::MemorySystem::BackingMemImpl<Memory::Region::N3DS>)

namespace Memory {

void PageTable::Clear() {
    pointers.raw.fill(nullptr);
    pointers.refs.fill(MemoryRef());
    attributes.fill(PageType::Unmapped);
}

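/// Tracks, for each page of the rasterizer-visible regions (VRAM and the two linear heaps),
/// whether that page is currently backed by the rasterizer cache. Addresses outside these
/// regions are ignored by Mark and always report as uncached.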
class RasterizerCacheMarker {
public:
    void Mark(VAddr addr, bool cached) {
        bool* p = At(addr);
        if (p)
            *p = cached;
    }

    bool IsCached(VAddr addr) {
        bool* p = At(addr);
        if (p)
            return *p;
        return false;
    }

private:
    bool* At(VAddr addr) {
        if (addr >= VRAM_VADDR && addr < VRAM_VADDR_END) {
            return &vram[(addr - VRAM_VADDR) / PAGE_SIZE];
        }
        if (addr >= LINEAR_HEAP_VADDR && addr < LINEAR_HEAP_VADDR_END) {
            return &linear_heap[(addr - LINEAR_HEAP_VADDR) / PAGE_SIZE];
        }
        if (addr >= NEW_LINEAR_HEAP_VADDR && addr < NEW_LINEAR_HEAP_VADDR_END) {
            return &new_linear_heap[(addr - NEW_LINEAR_HEAP_VADDR) / PAGE_SIZE];
        }
        return nullptr;
    }

    std::array<bool, VRAM_SIZE / PAGE_SIZE> vram{};
    std::array<bool, LINEAR_HEAP_SIZE / PAGE_SIZE> linear_heap{};
    std::array<bool, NEW_LINEAR_HEAP_SIZE / PAGE_SIZE> new_linear_heap{};

    static_assert(sizeof(bool) == 1);
    friend class boost::serialization::access;
    template <typename Archive>
    void serialize(Archive& ar, const unsigned int file_version) {
        ar& vram;
        ar& linear_heap;
        ar& new_linear_heap;
    }
};

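/// Owns the emulated memory backing stores (FCRAM, VRAM, N3DS extra RAM), the registered page
/// tables, and the rasterizer cache marker. The DSP memory is owned by AudioCore and only
/// referenced here.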
class MemorySystem::Impl {
public:
    // Visual Studio would try to allocate these at compile time if they were std::array, which
    // would exceed the compiler's memory limit.
    std::unique_ptr<u8[]> fcram = std::make_unique<u8[]>(Memory::FCRAM_N3DS_SIZE);
    std::unique_ptr<u8[]> vram = std::make_unique<u8[]>(Memory::VRAM_SIZE);
    std::unique_ptr<u8[]> n3ds_extra_ram = std::make_unique<u8[]>(Memory::N3DS_EXTRA_RAM_SIZE);

    std::shared_ptr<PageTable> current_page_table = nullptr;
    RasterizerCacheMarker cache_marker;
    std::vector<std::shared_ptr<PageTable>> page_table_list;

    AudioCore::DspInterface* dsp = nullptr;

    std::shared_ptr<BackingMem> fcram_mem;
    std::shared_ptr<BackingMem> vram_mem;
    std::shared_ptr<BackingMem> n3ds_extra_ram_mem;
    std::shared_ptr<BackingMem> dsp_mem;

    Impl();

    const u8* GetPtr(Region r) const {
        switch (r) {
        case Region::VRAM:
            return vram.get();
        case Region::DSP:
            return dsp->GetDspMemory().data();
        case Region::FCRAM:
            return fcram.get();
        case Region::N3DS:
            return n3ds_extra_ram.get();
        default:
            UNREACHABLE();
        }
    }

    u8* GetPtr(Region r) {
        switch (r) {
        case Region::VRAM:
            return vram.get();
        case Region::DSP:
            return dsp->GetDspMemory().data();
        case Region::FCRAM:
            return fcram.get();
        case Region::N3DS:
            return n3ds_extra_ram.get();
        default:
            UNREACHABLE();
        }
    }

    u32 GetSize(Region r) const {
        switch (r) {
        case Region::VRAM:
            return VRAM_SIZE;
        case Region::DSP:
            return DSP_RAM_SIZE;
        case Region::FCRAM:
            return FCRAM_N3DS_SIZE;
        case Region::N3DS:
            return N3DS_EXTRA_RAM_SIZE;
        default:
            UNREACHABLE();
        }
    }

private:
    friend class boost::serialization::access;
    template <class Archive>
    void serialize(Archive& ar, const unsigned int file_version) {
        bool save_n3ds_ram = Settings::values.is_new_3ds;
        ar& save_n3ds_ram;
        ar& boost::serialization::make_binary_object(vram.get(), Memory::VRAM_SIZE);
        ar& boost::serialization::make_binary_object(
            fcram.get(), save_n3ds_ram ? Memory::FCRAM_N3DS_SIZE : Memory::FCRAM_SIZE);
        ar& boost::serialization::make_binary_object(
            n3ds_extra_ram.get(), save_n3ds_ram ? Memory::N3DS_EXTRA_RAM_SIZE : 0);
        ar& cache_marker;
        ar& page_table_list;
        // dsp is set from Core::System at startup
        ar& current_page_table;
        ar& fcram_mem;
        ar& vram_mem;
        ar& n3ds_extra_ram_mem;
        ar& dsp_mem;
    }
};

// We use this rather than BufferMem because we don't want new objects to be allocated when
// deserializing. This avoids unnecessary memory thrashing.
template <Region R>
class MemorySystem::BackingMemImpl : public BackingMem {
public:
    BackingMemImpl() : impl(*Core::Global<Core::System>().Memory().impl) {}
    explicit BackingMemImpl(MemorySystem::Impl& impl_) : impl(impl_) {}
    u8* GetPtr() override {
        return impl.GetPtr(R);
    }
    const u8* GetPtr() const override {
        return impl.GetPtr(R);
    }
    std::size_t GetSize() const override {
        return impl.GetSize(R);
    }

private:
    MemorySystem::Impl& impl;

    template <class Archive>
    void serialize(Archive& ar, const unsigned int) {
        ar& boost::serialization::base_object<BackingMem>(*this);
    }
    friend class boost::serialization::access;
};

MemorySystem::Impl::Impl()
    : fcram_mem(std::make_shared<BackingMemImpl<Region::FCRAM>>(*this)),
      vram_mem(std::make_shared<BackingMemImpl<Region::VRAM>>(*this)),
      n3ds_extra_ram_mem(std::make_shared<BackingMemImpl<Region::N3DS>>(*this)),
      dsp_mem(std::make_shared<BackingMemImpl<Region::DSP>>(*this)) {}

MemorySystem::MemorySystem() : impl(std::make_unique<Impl>()) {}
MemorySystem::~MemorySystem() = default;

template <class Archive>
void MemorySystem::serialize(Archive& ar, const unsigned int file_version) {
    ar&* impl.get();
}

SERIALIZE_IMPL(MemorySystem)

void MemorySystem::SetCurrentPageTable(std::shared_ptr<PageTable> page_table) {
    impl->current_page_table = page_table;
}

std::shared_ptr<PageTable> MemorySystem::GetCurrentPageTable() const {
    return impl->current_page_table;
}

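/// Maps `size` pages of `memory` into `page_table`, starting at page index `base` (both are in
/// pages, not bytes), and tags each page with `type`. The affected virtual range is flushed from
/// the rasterizer first, and pages that are currently rasterizer-cached are re-marked as such.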
void MemorySystem::MapPages(PageTable& page_table, u32 base, u32 size, MemoryRef memory,
                            PageType type) {
    LOG_DEBUG(HW_Memory, "Mapping {} onto {:08X}-{:08X}", (void*)memory.GetPtr(), base * PAGE_SIZE,
              (base + size) * PAGE_SIZE);

    RasterizerFlushVirtualRegion(base << PAGE_BITS, size * PAGE_SIZE,
                                 FlushMode::FlushAndInvalidate);

    u32 end = base + size;
    while (base != end) {
        ASSERT_MSG(base < PAGE_TABLE_NUM_ENTRIES, "out of range mapping at {:08X}", base);

        page_table.attributes[base] = type;
        page_table.pointers[base] = memory;

        // If the memory to map is already rasterizer-cached, mark the page
        if (type == PageType::Memory && impl->cache_marker.IsCached(base * PAGE_SIZE)) {
            page_table.attributes[base] = PageType::RasterizerCachedMemory;
            page_table.pointers[base] = nullptr;
        }

        base += 1;
        if (memory != nullptr && memory.GetSize() > PAGE_SIZE)
            memory += PAGE_SIZE;
    }
}

void MemorySystem::MapMemoryRegion(PageTable& page_table, VAddr base, u32 size, MemoryRef target) {
    ASSERT_MSG((size & PAGE_MASK) == 0, "non-page aligned size: {:08X}", size);
    ASSERT_MSG((base & PAGE_MASK) == 0, "non-page aligned base: {:08X}", base);
    MapPages(page_table, base / PAGE_SIZE, size / PAGE_SIZE, target, PageType::Memory);
}

void MemorySystem::MapIoRegion(PageTable& page_table, VAddr base, u32 size,
                               MMIORegionPointer mmio_handler) {
    ASSERT_MSG((size & PAGE_MASK) == 0, "non-page aligned size: {:08X}", size);
    ASSERT_MSG((base & PAGE_MASK) == 0, "non-page aligned base: {:08X}", base);
    MapPages(page_table, base / PAGE_SIZE, size / PAGE_SIZE, nullptr, PageType::Special);

    page_table.special_regions.emplace_back(SpecialRegion{base, size, mmio_handler});
}

void MemorySystem::UnmapRegion(PageTable& page_table, VAddr base, u32 size) {
    ASSERT_MSG((size & PAGE_MASK) == 0, "non-page aligned size: {:08X}", size);
    ASSERT_MSG((base & PAGE_MASK) == 0, "non-page aligned base: {:08X}", base);
    MapPages(page_table, base / PAGE_SIZE, size / PAGE_SIZE, nullptr, PageType::Unmapped);
}

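/// Translates a virtual address inside one of the linearly-mapped regions (linear heap, new
/// linear heap, or VRAM) into a MemoryRef into the corresponding backing memory. Must only be
/// called with addresses that fall inside one of these regions.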
MemoryRef MemorySystem::GetPointerForRasterizerCache(VAddr addr) const {
    if (addr >= LINEAR_HEAP_VADDR && addr < LINEAR_HEAP_VADDR_END) {
        return {impl->fcram_mem, addr - LINEAR_HEAP_VADDR};
    }
    if (addr >= NEW_LINEAR_HEAP_VADDR && addr < NEW_LINEAR_HEAP_VADDR_END) {
        return {impl->fcram_mem, addr - NEW_LINEAR_HEAP_VADDR};
    }
    if (addr >= VRAM_VADDR && addr < VRAM_VADDR_END) {
        return {impl->vram_mem, addr - VRAM_VADDR};
    }
    UNREACHABLE();
}

void MemorySystem::RegisterPageTable(std::shared_ptr<PageTable> page_table) {
    impl->page_table_list.push_back(page_table);
}

void MemorySystem::UnregisterPageTable(std::shared_ptr<PageTable> page_table) {
    auto it = std::find(impl->page_table_list.begin(), impl->page_table_list.end(), page_table);
    if (it != impl->page_table_list.end()) {
        impl->page_table_list.erase(it);
    }
}

/**
 * This function should only be called for virtual addresses with attribute `PageType::Special`.
 */
static MMIORegionPointer GetMMIOHandler(const PageTable& page_table, VAddr vaddr) {
    for (const auto& region : page_table.special_regions) {
        if (vaddr >= region.base && vaddr < (region.base + region.size)) {
            return region.handler;
        }
    }
    ASSERT_MSG(false, "Mapped IO page without a handler @ {:08X}", vaddr);
    return nullptr; // Should never happen
}

template <typename T>
T ReadMMIO(MMIORegionPointer mmio_handler, VAddr addr);

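/// Reads a value of type T from guest virtual address `vaddr` through the current page table.
/// Directly-mapped pages take the fast memcpy path; rasterizer-cached pages are flushed before
/// reading, MMIO pages are forwarded to their handler, and unmapped reads log an error and
/// return 0.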
template <typename T>
T MemorySystem::Read(const VAddr vaddr) {
    const u8* page_pointer = impl->current_page_table->pointers[vaddr >> PAGE_BITS];
    if (page_pointer) {
        // NOTE: Avoid adding any extra logic to this fast-path block
        T value;
        std::memcpy(&value, &page_pointer[vaddr & PAGE_MASK], sizeof(T));
        return value;
    }

    PageType type = impl->current_page_table->attributes[vaddr >> PAGE_BITS];
    switch (type) {
    case PageType::Unmapped:
        LOG_ERROR(HW_Memory, "unmapped Read{} @ 0x{:08X} at PC 0x{:08X}", sizeof(T) * 8, vaddr,
                  Core::GetRunningCore().GetPC());
        return 0;
    case PageType::Memory:
        ASSERT_MSG(false, "Mapped memory page without a pointer @ {:08X}", vaddr);
        break;
    case PageType::RasterizerCachedMemory: {
        RasterizerFlushVirtualRegion(vaddr, sizeof(T), FlushMode::Flush);

        T value;
        std::memcpy(&value, GetPointerForRasterizerCache(vaddr), sizeof(T));
        return value;
    }
    case PageType::Special:
        return ReadMMIO<T>(GetMMIOHandler(*impl->current_page_table, vaddr), vaddr);
    default:
        UNREACHABLE();
    }
}

template <typename T>
void WriteMMIO(MMIORegionPointer mmio_handler, VAddr addr, const T data);

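/// Writes a value of type T to guest virtual address `vaddr` through the current page table.
/// Directly-mapped pages take the fast memcpy path; rasterizer-cached pages are invalidated
/// before writing, MMIO pages are forwarded to their handler, and unmapped writes log an error
/// and are dropped.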
template <typename T>
void MemorySystem::Write(const VAddr vaddr, const T data) {
    u8* page_pointer = impl->current_page_table->pointers[vaddr >> PAGE_BITS];
    if (page_pointer) {
        // NOTE: Avoid adding any extra logic to this fast-path block
        std::memcpy(&page_pointer[vaddr & PAGE_MASK], &data, sizeof(T));
        return;
    }

    PageType type = impl->current_page_table->attributes[vaddr >> PAGE_BITS];
    switch (type) {
    case PageType::Unmapped:
        LOG_ERROR(HW_Memory, "unmapped Write{} 0x{:08X} @ 0x{:08X} at PC 0x{:08X}",
                  sizeof(data) * 8, (u32)data, vaddr, Core::GetRunningCore().GetPC());
        return;
    case PageType::Memory:
        ASSERT_MSG(false, "Mapped memory page without a pointer @ {:08X}", vaddr);
        break;
    case PageType::RasterizerCachedMemory: {
        RasterizerFlushVirtualRegion(vaddr, sizeof(T), FlushMode::Invalidate);
        std::memcpy(GetPointerForRasterizerCache(vaddr), &data, sizeof(T));
        break;
    }
    case PageType::Special:
        WriteMMIO<T>(GetMMIOHandler(*impl->current_page_table, vaddr), vaddr, data);
        break;
    default:
        UNREACHABLE();
    }
}

bool IsValidVirtualAddress(const Kernel::Process& process, const VAddr vaddr) {
    auto& page_table = *process.vm_manager.page_table;

    auto page_pointer = page_table.pointers[vaddr >> PAGE_BITS];
    if (page_pointer)
        return true;

    if (page_table.attributes[vaddr >> PAGE_BITS] == PageType::RasterizerCachedMemory)
        return true;

    if (page_table.attributes[vaddr >> PAGE_BITS] != PageType::Special)
        return false;

    MMIORegionPointer mmio_region = GetMMIOHandler(page_table, vaddr);
    if (mmio_region) {
        return mmio_region->IsValidAddress(vaddr);
    }

    return false;
}

bool MemorySystem::IsValidPhysicalAddress(const PAddr paddr) const {
    return GetPhysicalPointer(paddr) != nullptr;
}

u8* MemorySystem::GetPointer(const VAddr vaddr) {
    u8* page_pointer = impl->current_page_table->pointers[vaddr >> PAGE_BITS];
    if (page_pointer) {
        return page_pointer + (vaddr & PAGE_MASK);
    }

    if (impl->current_page_table->attributes[vaddr >> PAGE_BITS] ==
        PageType::RasterizerCachedMemory) {
        return GetPointerForRasterizerCache(vaddr);
    }

    LOG_ERROR(HW_Memory, "unknown GetPointer @ 0x{:08x} at PC 0x{:08X}", vaddr,
              Core::GetRunningCore().GetPC());
    return nullptr;
}

const u8* MemorySystem::GetPointer(const VAddr vaddr) const {
    const u8* page_pointer = impl->current_page_table->pointers[vaddr >> PAGE_BITS];
    if (page_pointer) {
        return page_pointer + (vaddr & PAGE_MASK);
    }

    if (impl->current_page_table->attributes[vaddr >> PAGE_BITS] ==
        PageType::RasterizerCachedMemory) {
        return GetPointerForRasterizerCache(vaddr);
    }

    LOG_ERROR(HW_Memory, "unknown GetPointer @ 0x{:08x}", vaddr);
    return nullptr;
}

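/// Reads a NUL-terminated string from guest memory, copying at most `max_length` bytes; the
/// terminator is not included in the result. Example (hypothetical call site):
///   const std::string name = memory.ReadCString(name_vaddr, 255);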
std::string MemorySystem::ReadCString(VAddr vaddr, std::size_t max_length) {
    std::string string;
    string.reserve(max_length);
    for (std::size_t i = 0; i < max_length; ++i) {
        char c = Read8(vaddr);
        if (c == '\0')
            break;
        string.push_back(c);
        ++vaddr;
    }
    string.shrink_to_fit();
    return string;
}

u8* MemorySystem::GetPhysicalPointer(PAddr address) {
    return GetPhysicalRef(address);
}

const u8* MemorySystem::GetPhysicalPointer(PAddr address) const {
    return GetPhysicalRef(address);
}

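/// Resolves a physical address to a MemoryRef into the matching backing region (VRAM, DSP RAM,
/// FCRAM, or N3DS extra RAM). Returns a null reference if the address does not belong to any
/// region or lies past the end of the region's backing buffer.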
MemoryRef MemorySystem::GetPhysicalRef(PAddr address) const {
    struct MemoryArea {
        PAddr paddr_base;
        u32 size;
    };

    static constexpr MemoryArea memory_areas[] = {
        {VRAM_PADDR, VRAM_SIZE},
        {DSP_RAM_PADDR, DSP_RAM_SIZE},
        {FCRAM_PADDR, FCRAM_N3DS_SIZE},
        {N3DS_EXTRA_RAM_PADDR, N3DS_EXTRA_RAM_SIZE},
    };

    const auto area =
        std::find_if(std::begin(memory_areas), std::end(memory_areas), [&](const auto& area) {
            // Note: the region end check is inclusive because the user can pass in an address that
            // represents an open right bound
            return address >= area.paddr_base && address <= area.paddr_base + area.size;
        });

    if (area == std::end(memory_areas)) {
        LOG_ERROR(HW_Memory, "unknown GetPhysicalPointer @ 0x{:08X} at PC 0x{:08X}", address,
                  Core::GetRunningCore().GetPC());
        return nullptr;
    }

    u32 offset_into_region = address - area->paddr_base;

    std::shared_ptr<BackingMem> target_mem = nullptr;
    switch (area->paddr_base) {
    case VRAM_PADDR:
        target_mem = impl->vram_mem;
        break;
    case DSP_RAM_PADDR:
        target_mem = impl->dsp_mem;
        break;
    case FCRAM_PADDR:
        target_mem = impl->fcram_mem;
        break;
    case N3DS_EXTRA_RAM_PADDR:
        target_mem = impl->n3ds_extra_ram_mem;
        break;
    default:
        UNREACHABLE();
    }
    if (offset_into_region >= target_mem->GetSize()) {
        return {nullptr};
    }

    return {target_mem, offset_into_region};
}

/// For a rasterizer-accessible PAddr, returns a list of all possible VAddrs it may be mapped to.
static std::vector<VAddr> PhysicalToVirtualAddressForRasterizer(PAddr addr) {
    if (addr >= VRAM_PADDR && addr < VRAM_PADDR_END) {
        return {addr - VRAM_PADDR + VRAM_VADDR};
    }
    if (addr >= FCRAM_PADDR && addr < FCRAM_PADDR_END) {
        return {addr - FCRAM_PADDR + LINEAR_HEAP_VADDR, addr - FCRAM_PADDR + NEW_LINEAR_HEAP_VADDR};
    }
    if (addr >= FCRAM_PADDR_END && addr < FCRAM_N3DS_PADDR_END) {
        return {addr - FCRAM_PADDR + NEW_LINEAR_HEAP_VADDR};
    }
    // While the physical <-> virtual mapping is 1:1 for the regions supported by the cache,
    // some games (like Pokemon Super Mystery Dungeon) will try to use textures that go beyond
    // the end address of VRAM, causing the Virtual->Physical translation to fail when flushing
    // parts of the texture.
    LOG_ERROR(HW_Memory,
              "Trying to use invalid physical address for rasterizer: {:08X} at PC 0x{:08X}", addr,
              Core::GetRunningCore().GetPC());
    return {};
}

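/// Marks or unmarks the given physical range as rasterizer-cached in every registered page
/// table, switching the affected pages between PageType::Memory and
/// PageType::RasterizerCachedMemory and fixing up their host pointers accordingly.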
void MemorySystem::RasterizerMarkRegionCached(PAddr start, u32 size, bool cached) {
    if (start == 0) {
        return;
    }

    u32 num_pages = ((start + size - 1) >> PAGE_BITS) - (start >> PAGE_BITS) + 1;
    PAddr paddr = start;

    for (unsigned i = 0; i < num_pages; ++i, paddr += PAGE_SIZE) {
        for (VAddr vaddr : PhysicalToVirtualAddressForRasterizer(paddr)) {
            impl->cache_marker.Mark(vaddr, cached);
            for (auto page_table : impl->page_table_list) {
                PageType& page_type = page_table->attributes[vaddr >> PAGE_BITS];

                if (cached) {
                    // Switch page type to cached if now cached
                    switch (page_type) {
                    case PageType::Unmapped:
                        // It is not necessary for a process to have this region mapped into its
                        // address space, for example, a system module need not have a VRAM mapping.
                        break;
                    case PageType::Memory:
                        page_type = PageType::RasterizerCachedMemory;
                        page_table->pointers[vaddr >> PAGE_BITS] = nullptr;
                        break;
                    default:
                        UNREACHABLE();
                    }
                } else {
                    // Switch page type to uncached if now uncached
                    switch (page_type) {
                    case PageType::Unmapped:
                        // It is not necessary for a process to have this region mapped into its
                        // address space, for example, a system module need not have a VRAM mapping.
                        break;
                    case PageType::RasterizerCachedMemory: {
                        page_type = PageType::Memory;
                        page_table->pointers[vaddr >> PAGE_BITS] =
                            GetPointerForRasterizerCache(vaddr & ~PAGE_MASK);
                        break;
                    }
                    default:
                        UNREACHABLE();
                    }
                }
            }
        }
    }
}

void RasterizerFlushRegion(PAddr start, u32 size) {
    if (VideoCore::g_renderer == nullptr) {
        return;
    }

    VideoCore::g_renderer->Rasterizer()->FlushRegion(start, size);
}

void RasterizerInvalidateRegion(PAddr start, u32 size) {
    if (VideoCore::g_renderer == nullptr) {
        return;
    }

    VideoCore::g_renderer->Rasterizer()->InvalidateRegion(start, size);
}

void RasterizerFlushAndInvalidateRegion(PAddr start, u32 size) {
    // Since pages are unmapped on shutdown after video core is shutdown, the renderer may be
    // null here
    if (VideoCore::g_renderer == nullptr) {
        return;
    }

    VideoCore::g_renderer->Rasterizer()->FlushAndInvalidateRegion(start, size);
}

void RasterizerClearAll(bool flush) {
    // Since pages are unmapped on shutdown after video core is shutdown, the renderer may be
    // null here
    if (VideoCore::g_renderer == nullptr) {
        return;
    }

    VideoCore::g_renderer->Rasterizer()->ClearAll(flush);
}

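/// Flushes/invalidates the parts of the given virtual range that overlap the linear heap, new
/// linear heap, and VRAM regions, translating each overlap to a physical range before handing
/// it to the rasterizer with the requested flush mode.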
void RasterizerFlushVirtualRegion(VAddr start, u32 size, FlushMode mode) {
    // Since pages are unmapped on shutdown after video core is shutdown, the renderer may be
    // null here
    if (VideoCore::g_renderer == nullptr) {
        return;
    }

    VAddr end = start + size;

    auto CheckRegion = [&](VAddr region_start, VAddr region_end, PAddr paddr_region_start) {
        if (start >= region_end || end <= region_start) {
            // No overlap with region
            return;
        }

        VAddr overlap_start = std::max(start, region_start);
        VAddr overlap_end = std::min(end, region_end);
        PAddr physical_start = paddr_region_start + (overlap_start - region_start);
        u32 overlap_size = overlap_end - overlap_start;

        auto* rasterizer = VideoCore::g_renderer->Rasterizer();
        switch (mode) {
        case FlushMode::Flush:
            rasterizer->FlushRegion(physical_start, overlap_size);
            break;
        case FlushMode::Invalidate:
            rasterizer->InvalidateRegion(physical_start, overlap_size);
            break;
        case FlushMode::FlushAndInvalidate:
            rasterizer->FlushAndInvalidateRegion(physical_start, overlap_size);
            break;
        }
    };

    CheckRegion(LINEAR_HEAP_VADDR, LINEAR_HEAP_VADDR_END, FCRAM_PADDR);
    CheckRegion(NEW_LINEAR_HEAP_VADDR, NEW_LINEAR_HEAP_VADDR_END, FCRAM_PADDR);
    CheckRegion(VRAM_VADDR, VRAM_VADDR_END, VRAM_PADDR);
}

u8 MemorySystem::Read8(const VAddr addr) {
    return Read<u8>(addr);
}

u16 MemorySystem::Read16(const VAddr addr) {
    return Read<u16_le>(addr);
}

u32 MemorySystem::Read32(const VAddr addr) {
    return Read<u32_le>(addr);
}

u64 MemorySystem::Read64(const VAddr addr) {
    return Read<u64_le>(addr);
}

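/// Copies `size` bytes from the process' virtual address space into `dest_buffer`, one page at
/// a time. Unmapped pages read back as zeros, MMIO pages go through their handler, and
/// rasterizer-cached pages are flushed before being copied.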
void MemorySystem::ReadBlock(const Kernel::Process& process, const VAddr src_addr,
                             void* dest_buffer, const std::size_t size) {
    auto& page_table = *process.vm_manager.page_table;

    std::size_t remaining_size = size;
    std::size_t page_index = src_addr >> PAGE_BITS;
    std::size_t page_offset = src_addr & PAGE_MASK;

    while (remaining_size > 0) {
        const std::size_t copy_amount = std::min(PAGE_SIZE - page_offset, remaining_size);
        const VAddr current_vaddr = static_cast<VAddr>((page_index << PAGE_BITS) + page_offset);

        switch (page_table.attributes[page_index]) {
        case PageType::Unmapped: {
            LOG_ERROR(HW_Memory,
                      "unmapped ReadBlock @ 0x{:08X} (start address = 0x{:08X}, size = {}) at PC "
                      "0x{:08X}",
                      current_vaddr, src_addr, size, Core::GetRunningCore().GetPC());
            std::memset(dest_buffer, 0, copy_amount);
            break;
        }
        case PageType::Memory: {
            DEBUG_ASSERT(page_table.pointers[page_index]);

            const u8* src_ptr = page_table.pointers[page_index] + page_offset;
            std::memcpy(dest_buffer, src_ptr, copy_amount);
            break;
        }
        case PageType::Special: {
            MMIORegionPointer handler = GetMMIOHandler(page_table, current_vaddr);
            DEBUG_ASSERT(handler);
            handler->ReadBlock(current_vaddr, dest_buffer, copy_amount);
            break;
        }
        case PageType::RasterizerCachedMemory: {
            RasterizerFlushVirtualRegion(current_vaddr, static_cast<u32>(copy_amount),
                                         FlushMode::Flush);
            std::memcpy(dest_buffer, GetPointerForRasterizerCache(current_vaddr), copy_amount);
            break;
        }
        default:
            UNREACHABLE();
        }

        page_index++;
        page_offset = 0;
        dest_buffer = static_cast<u8*>(dest_buffer) + copy_amount;
        remaining_size -= copy_amount;
    }
}

void MemorySystem::Write8(const VAddr addr, const u8 data) {
    Write<u8>(addr, data);
}

void MemorySystem::Write16(const VAddr addr, const u16 data) {
    Write<u16_le>(addr, data);
}

void MemorySystem::Write32(const VAddr addr, const u32 data) {
    Write<u32_le>(addr, data);
}

void MemorySystem::Write64(const VAddr addr, const u64 data) {
    Write<u64_le>(addr, data);
}

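/// Copies `size` bytes from `src_buffer` into the process' virtual address space, one page at a
/// time. Writes to unmapped pages are logged and dropped, MMIO pages go through their handler,
/// and rasterizer-cached pages are invalidated before being written.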
void MemorySystem::WriteBlock(const Kernel::Process& process, const VAddr dest_addr,
                              const void* src_buffer, const std::size_t size) {
    auto& page_table = *process.vm_manager.page_table;
    std::size_t remaining_size = size;
    std::size_t page_index = dest_addr >> PAGE_BITS;
    std::size_t page_offset = dest_addr & PAGE_MASK;

    while (remaining_size > 0) {
        const std::size_t copy_amount = std::min(PAGE_SIZE - page_offset, remaining_size);
        const VAddr current_vaddr = static_cast<VAddr>((page_index << PAGE_BITS) + page_offset);

        switch (page_table.attributes[page_index]) {
        case PageType::Unmapped: {
            LOG_ERROR(HW_Memory,
                      "unmapped WriteBlock @ 0x{:08X} (start address = 0x{:08X}, size = {}) at PC "
                      "0x{:08X}",
                      current_vaddr, dest_addr, size, Core::GetRunningCore().GetPC());
            break;
        }
        case PageType::Memory: {
            DEBUG_ASSERT(page_table.pointers[page_index]);

            u8* dest_ptr = page_table.pointers[page_index] + page_offset;
            std::memcpy(dest_ptr, src_buffer, copy_amount);
            break;
        }
        case PageType::Special: {
            MMIORegionPointer handler = GetMMIOHandler(page_table, current_vaddr);
            DEBUG_ASSERT(handler);
            handler->WriteBlock(current_vaddr, src_buffer, copy_amount);
            break;
        }
        case PageType::RasterizerCachedMemory: {
            RasterizerFlushVirtualRegion(current_vaddr, static_cast<u32>(copy_amount),
                                         FlushMode::Invalidate);
            std::memcpy(GetPointerForRasterizerCache(current_vaddr), src_buffer, copy_amount);
            break;
        }
        default:
            UNREACHABLE();
        }

        page_index++;
        page_offset = 0;
        src_buffer = static_cast<const u8*>(src_buffer) + copy_amount;
        remaining_size -= copy_amount;
    }
}

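/// Zero-fills `size` bytes of the process' virtual address space, page by page, using the same
/// per-page handling as WriteBlock.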
void MemorySystem::ZeroBlock(const Kernel::Process& process, const VAddr dest_addr,
                             const std::size_t size) {
    auto& page_table = *process.vm_manager.page_table;
    std::size_t remaining_size = size;
    std::size_t page_index = dest_addr >> PAGE_BITS;
    std::size_t page_offset = dest_addr & PAGE_MASK;

    static const std::array<u8, PAGE_SIZE> zeros = {};

    while (remaining_size > 0) {
        const std::size_t copy_amount = std::min(PAGE_SIZE - page_offset, remaining_size);
        const VAddr current_vaddr = static_cast<VAddr>((page_index << PAGE_BITS) + page_offset);

        switch (page_table.attributes[page_index]) {
        case PageType::Unmapped: {
            LOG_ERROR(HW_Memory,
                      "unmapped ZeroBlock @ 0x{:08X} (start address = 0x{:08X}, size = {}) at PC "
                      "0x{:08X}",
                      current_vaddr, dest_addr, size, Core::GetRunningCore().GetPC());
            break;
        }
        case PageType::Memory: {
            DEBUG_ASSERT(page_table.pointers[page_index]);

            u8* dest_ptr = page_table.pointers[page_index] + page_offset;
            std::memset(dest_ptr, 0, copy_amount);
            break;
        }
        case PageType::Special: {
            MMIORegionPointer handler = GetMMIOHandler(page_table, current_vaddr);
            DEBUG_ASSERT(handler);
            handler->WriteBlock(current_vaddr, zeros.data(), copy_amount);
            break;
        }
        case PageType::RasterizerCachedMemory: {
            RasterizerFlushVirtualRegion(current_vaddr, static_cast<u32>(copy_amount),
                                         FlushMode::Invalidate);
            std::memset(GetPointerForRasterizerCache(current_vaddr), 0, copy_amount);
            break;
        }
        default:
            UNREACHABLE();
        }

        page_index++;
        page_offset = 0;
        remaining_size -= copy_amount;
    }
}

void MemorySystem::CopyBlock(const Kernel::Process& process, VAddr dest_addr, VAddr src_addr,
                             const std::size_t size) {
    CopyBlock(process, process, dest_addr, src_addr, size);
}

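/// Copies `size` bytes from `src_addr` in `src_process` to `dest_addr` in `dest_process`,
/// reading the source page by page and forwarding each chunk through WriteBlock. Unmapped
/// source pages zero-fill the corresponding destination range.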
void MemorySystem::CopyBlock(const Kernel::Process& dest_process,
                             const Kernel::Process& src_process, VAddr dest_addr, VAddr src_addr,
                             std::size_t size) {
    auto& page_table = *src_process.vm_manager.page_table;
    std::size_t remaining_size = size;
    std::size_t page_index = src_addr >> PAGE_BITS;
    std::size_t page_offset = src_addr & PAGE_MASK;

    while (remaining_size > 0) {
        const std::size_t copy_amount = std::min(PAGE_SIZE - page_offset, remaining_size);
        const VAddr current_vaddr = static_cast<VAddr>((page_index << PAGE_BITS) + page_offset);

        switch (page_table.attributes[page_index]) {
        case PageType::Unmapped: {
            LOG_ERROR(HW_Memory,
                      "unmapped CopyBlock @ 0x{:08X} (start address = 0x{:08X}, size = {}) at PC "
                      "0x{:08X}",
                      current_vaddr, src_addr, size, Core::GetRunningCore().GetPC());
            ZeroBlock(dest_process, dest_addr, copy_amount);
            break;
        }
        case PageType::Memory: {
            DEBUG_ASSERT(page_table.pointers[page_index]);
            const u8* src_ptr = page_table.pointers[page_index] + page_offset;
            WriteBlock(dest_process, dest_addr, src_ptr, copy_amount);
            break;
        }
        case PageType::Special: {
            MMIORegionPointer handler = GetMMIOHandler(page_table, current_vaddr);
            DEBUG_ASSERT(handler);
            std::vector<u8> buffer(copy_amount);
            handler->ReadBlock(current_vaddr, buffer.data(), buffer.size());
            WriteBlock(dest_process, dest_addr, buffer.data(), buffer.size());
            break;
        }
        case PageType::RasterizerCachedMemory: {
            RasterizerFlushVirtualRegion(current_vaddr, static_cast<u32>(copy_amount),
                                         FlushMode::Flush);
            WriteBlock(dest_process, dest_addr, GetPointerForRasterizerCache(current_vaddr),
                       copy_amount);
            break;
        }
        default:
            UNREACHABLE();
        }

        page_index++;
        page_offset = 0;
        dest_addr += static_cast<VAddr>(copy_amount);
        src_addr += static_cast<VAddr>(copy_amount);
        remaining_size -= copy_amount;
    }
}

template <>
u8 ReadMMIO<u8>(MMIORegionPointer mmio_handler, VAddr addr) {
    return mmio_handler->Read8(addr);
}

template <>
u16 ReadMMIO<u16>(MMIORegionPointer mmio_handler, VAddr addr) {
    return mmio_handler->Read16(addr);
}

template <>
u32 ReadMMIO<u32>(MMIORegionPointer mmio_handler, VAddr addr) {
    return mmio_handler->Read32(addr);
}

template <>
u64 ReadMMIO<u64>(MMIORegionPointer mmio_handler, VAddr addr) {
    return mmio_handler->Read64(addr);
}

template <>
void WriteMMIO<u8>(MMIORegionPointer mmio_handler, VAddr addr, const u8 data) {
    mmio_handler->Write8(addr, data);
}

template <>
void WriteMMIO<u16>(MMIORegionPointer mmio_handler, VAddr addr, const u16 data) {
    mmio_handler->Write16(addr, data);
}

template <>
void WriteMMIO<u32>(MMIORegionPointer mmio_handler, VAddr addr, const u32 data) {
    mmio_handler->Write32(addr, data);
}

template <>
void WriteMMIO<u64>(MMIORegionPointer mmio_handler, VAddr addr, const u64 data) {
    mmio_handler->Write64(addr, data);
}

u32 MemorySystem::GetFCRAMOffset(const u8* pointer) const {
    ASSERT(pointer >= impl->fcram.get() && pointer <= impl->fcram.get() + Memory::FCRAM_N3DS_SIZE);
    return static_cast<u32>(pointer - impl->fcram.get());
}

u8* MemorySystem::GetFCRAMPointer(std::size_t offset) {
    ASSERT(offset <= Memory::FCRAM_N3DS_SIZE);
    return impl->fcram.get() + offset;
}

const u8* MemorySystem::GetFCRAMPointer(std::size_t offset) const {
    ASSERT(offset <= Memory::FCRAM_N3DS_SIZE);
    return impl->fcram.get() + offset;
}

MemoryRef MemorySystem::GetFCRAMRef(std::size_t offset) const {
    ASSERT(offset <= Memory::FCRAM_N3DS_SIZE);
    return MemoryRef(impl->fcram_mem, offset);
}

void MemorySystem::SetDSP(AudioCore::DspInterface& dsp) {
    impl->dsp = &dsp;
}

} // namespace Memory