Refactor the JIT to be object-oriented (#1879)

* Move TinyVector to a new file
  - So it's less sensitive to #include ordering

* Forgot to include assert.h

* Refactor ARMJIT_Memory into an object

* Oops, forgot a declaration

* Refactor ARMJIT to be contained in an object

* Remove an unused function declaration

* Add a missing #include

* Remove a now-unused global

* Use ARMJIT_Memory's own memory access functions

* Fix some omissions in the ARM JIT

* Move libandroid to be a member of ARMJIT_Memory instead of a global

* Default-initialize most fields in ARMJIT_Compiler.h (sketched below)

* Define NOOP_IF_NO_JIT (sketched below)

* Finish refactoring the JIT to be object-oriented
Author: Jesse Talavera-Greenberg, 2023-11-18 10:40:54 -05:00 (committed by GitHub)
Parent: f2d7a29015
Commit: 544fefa27f
28 changed files with 894 additions and 889 deletions
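Two of the commit-message steps above are easiest to see in isolation. Below is a minimal sketch of what "default-initialize most fields" and "define NOOP_IF_NO_JIT" plausibly look like; the field names and the exact macro body are illustrative assumptions, not code taken from this commit:

```cpp
#include <cstddef>

// "Default-initialize most fields": C++11 default member initializers
// keep the defaults visible at the declaration site and shrink the
// constructor's init list. Field names here are illustrative.
class Compiler
{
public:
    explicit Compiler(int num) : Num(num) {}

private:
    int Num;                              // set per instance
    std::ptrdiff_t OtherCodeRegion = 0;   // default-initialized in the header
    bool Exit = false;
    bool CPSRDirty = false;
};

// "Define NOOP_IF_NO_JIT": one plausible shape. When the build has no
// JIT, each JIT entry point gets an empty inline body, so callers need
// no #ifdef of their own; otherwise it expands to a plain declaration.
#ifdef JIT_ENABLED
#define NOOP_IF_NO_JIT ;   // declaration only; the real body lives in a .cpp
#else
#define NOOP_IF_NO_JIT {}  // empty inline body; the call compiles to nothing
#endif

class ARMJIT
{
public:
    void ResetBlockCache() NOOP_IF_NO_JIT
};

int main()
{
    ARMJIT jit;
    jit.ResetBlockCache();   // a no-op when built without JIT_ENABLED
}
```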

src/ARMJIT_A64/ARMJIT_Compiler.cpp

```diff
@@ -20,6 +20,7 @@
 #include "../ARMJIT_Internal.h"
 #include "../ARMInterpreter.h"
+#include "../ARMJIT.h"

 #if defined(__SWITCH__)
 #include <switch.h>
@@ -219,7 +220,7 @@ void Compiler::PopRegs(bool saveHiRegs, bool saveRegsToBeChanged)
     }
 }

-Compiler::Compiler()
+Compiler::Compiler(ARMJIT& jit) : Arm64Gen::ARM64XEmitter(), JIT(jit)
 {
 #ifdef __SWITCH__
     JitRWBase = aligned_alloc(0x1000, JitMemSize);
@@ -704,12 +705,12 @@ JitBlockEntry Compiler::CompileBlock(ARM* cpu, bool thumb, FetchedInstr instrs[]
     if (JitMemMainSize - GetCodeOffset() < 1024 * 16)
     {
         Log(LogLevel::Debug, "JIT near memory full, resetting...\n");
-        ResetBlockCache();
+        JIT.ResetBlockCache();
     }
     if ((JitMemMainSize + JitMemSecondarySize) - OtherCodeRegion < 1024 * 8)
     {
         Log(LogLevel::Debug, "JIT far memory full, resetting...\n");
-        ResetBlockCache();
+        JIT.ResetBlockCache();
     }

     JitBlockEntry res = (JitBlockEntry)GetRXPtr();
@@ -722,7 +723,7 @@ JitBlockEntry Compiler::CompileBlock(ARM* cpu, bool thumb, FetchedInstr instrs[]
     CPSRDirty = false;

     if (hasMemInstr)
-        MOVP2R(RMemBase, Num == 0 ? ARMJIT_Memory::FastMem9Start : ARMJIT_Memory::FastMem7Start);
+        MOVP2R(RMemBase, Num == 0 ? JIT.Memory.FastMem9Start : JIT.Memory.FastMem7Start);

     for (int i = 0; i < instrsCount; i++)
     {
```

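The pattern in this file, reduced to a self-contained sketch (type names are simplified stand-ins; the real classes carry far more state): the compiler now receives the owning ARMJIT at construction and calls through it, instead of calling free functions that acted on globals. Here `main()` stands in for whatever owns the JIT in melonDS.

```cpp
#include <cstdio>

class ARMJIT;   // the compiler only needs a reference, so a forward
                // declaration suffices here

class Compiler
{
public:
    // Constructed against a specific JIT instance, not process globals.
    explicit Compiler(ARMJIT& jit) : JIT(jit) {}

    void CompileBlock();   // defined below, once ARMJIT is complete

private:
    ARMJIT& JIT;
};

class ARMJIT
{
public:
    ARMJIT() : JITCompiler(*this) {}

    void ResetBlockCache() { std::puts("block cache reset"); }

    Compiler JITCompiler;
};

void Compiler::CompileBlock()
{
    // Was: ResetBlockCache();   -- a free function over global state.
    JIT.ResetBlockCache();       // Now: routed through the owning object.
}

int main()
{
    ARMJIT jit;
    jit.JITCompiler.CompileBlock();
}
```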
src/ARMJIT_A64/ARMJIT_Compiler.h

```diff
@@ -20,7 +20,6 @@
 #define ARMJIT_A64_COMPILER_H

 #include "../ARM.h"
-#include "../ARMJIT.h"
 #include "../dolphin/Arm64Emitter.h"
@@ -31,7 +30,7 @@
 namespace ARMJIT
 {
+class ARMJIT;

 const Arm64Gen::ARM64Reg RMemBase = Arm64Gen::X26;
 const Arm64Gen::ARM64Reg RCPSR = Arm64Gen::W27;
 const Arm64Gen::ARM64Reg RCycles = Arm64Gen::W28;
@@ -97,7 +96,7 @@ class Compiler : public Arm64Gen::ARM64XEmitter
 public:
     typedef void (Compiler::*CompileFunc)();

-    Compiler();
+    Compiler(ARMJIT& jit);
     ~Compiler();

     void PushRegs(bool saveHiRegs, bool saveRegsToBeChanged, bool allowUnload = true);
@@ -243,6 +242,7 @@ public:
         OtherCodeRegion = offset;
     }

+    ARMJIT& JIT;
     ptrdiff_t OtherCodeRegion;

     bool Exit;
```
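Note the include swap above: the header drops `#include "../ARMJIT.h"` and forward-declares the class instead. That is enough because the header only names ARMJIT as a reference member and a constructor parameter, neither of which requires the complete type; it is the usual way to cut a header dependency. A compilable sketch of the idea, collapsed into one file with simplified names:

```cpp
// Header side: an incomplete type is fine for declaring references,
// pointers, and function parameters.
class ARMJIT;   // forward declaration instead of #include "../ARMJIT.h"

class Compiler
{
public:
    explicit Compiler(ARMJIT& jit) : JIT(jit) {}
    ARMJIT& JIT;   // reference member: no complete type required
};

// Source side: only code that actually uses ARMJIT's members needs its
// definition, so the #include moves from the header into the .cpp files.
class ARMJIT
{
public:
    Compiler Comp{*this};
};

int main()
{
    ARMJIT jit;
    return &jit.Comp.JIT == &jit ? 0 : 1;   // the wiring holds
}
```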

src/ARMJIT_A64/ARMJIT_LoadStore.cpp

```diff
@@ -62,9 +62,9 @@ u8* Compiler::RewriteMemAccess(u8* pc)
 bool Compiler::Comp_MemLoadLiteral(int size, bool signExtend, int rd, u32 addr)
 {
-    u32 localAddr = LocaliseCodeAddress(Num, addr);
+    u32 localAddr = JIT.LocaliseCodeAddress(Num, addr);

-    int invalidLiteralIdx = InvalidLiterals.Find(localAddr);
+    int invalidLiteralIdx = JIT.InvalidLiterals.Find(localAddr);
     if (invalidLiteralIdx != -1)
     {
         return false;
@@ -111,7 +111,7 @@ void Compiler::Comp_MemAccess(int rd, int rn, Op2 offset, int size, int flags)
     if (size == 16)
         addressMask = ~1;

-    if (ARMJIT::LiteralOptimizations && rn == 15 && rd != 15 && offset.IsImm && !(flags & (memop_Post|memop_Store|memop_Writeback)))
+    if (JIT.LiteralOptimizations && rn == 15 && rd != 15 && offset.IsImm && !(flags & (memop_Post|memop_Store|memop_Writeback)))
     {
         u32 addr = R15 + offset.Imm * ((flags & memop_SubtractOffset) ? -1 : 1);
@@ -146,7 +146,7 @@ void Compiler::Comp_MemAccess(int rd, int rn, Op2 offset, int size, int flags)
         MOV(W0, rnMapped);
     }

-    bool addrIsStatic = ARMJIT::LiteralOptimizations
+    bool addrIsStatic = JIT.LiteralOptimizations
         && RegCache.IsLiteral(rn) && offset.IsImm && !(flags & (memop_Writeback|memop_Post));
     u32 staticAddress;
     if (addrIsStatic)
@@ -185,10 +185,10 @@ void Compiler::Comp_MemAccess(int rd, int rn, Op2 offset, int size, int flags)
         MOV(rnMapped, W0);

     u32 expectedTarget = Num == 0
-        ? ARMJIT_Memory::ClassifyAddress9(addrIsStatic ? staticAddress : CurInstr.DataRegion)
-        : ARMJIT_Memory::ClassifyAddress7(addrIsStatic ? staticAddress : CurInstr.DataRegion);
+        ? JIT.Memory.ClassifyAddress9(addrIsStatic ? staticAddress : CurInstr.DataRegion)
+        : JIT.Memory.ClassifyAddress7(addrIsStatic ? staticAddress : CurInstr.DataRegion);

-    if (ARMJIT::FastMemory && ((!Thumb && CurInstr.Cond() != 0xE) || ARMJIT_Memory::IsFastmemCompatible(expectedTarget)))
+    if (JIT.FastMemory && ((!Thumb && CurInstr.Cond() != 0xE) || JIT.Memory.IsFastmemCompatible(expectedTarget)))
     {
         ptrdiff_t memopStart = GetCodeOffset();
         LoadStorePatch patch;
@@ -225,7 +225,7 @@ void Compiler::Comp_MemAccess(int rd, int rn, Op2 offset, int size, int flags)
     {
         void* func = NULL;
         if (addrIsStatic)
-            func = ARMJIT_Memory::GetFuncForAddr(CurCPU, staticAddress, flags & memop_Store, size);
+            func = JIT.Memory.GetFuncForAddr(CurCPU, staticAddress, flags & memop_Store, size);

         PushRegs(false, false);
@@ -452,7 +452,7 @@ void Compiler::T_Comp_LoadPCRel()
     u32 offset = ((CurInstr.Instr & 0xFF) << 2);
     u32 addr = (R15 & ~0x2) + offset;

-    if (!ARMJIT::LiteralOptimizations || !Comp_MemLoadLiteral(32, false, CurInstr.T_Reg(8), addr))
+    if (!JIT.LiteralOptimizations || !Comp_MemLoadLiteral(32, false, CurInstr.T_Reg(8), addr))
         Comp_MemAccess(CurInstr.T_Reg(8), 15, Op2(offset), 32, 0);
 }
@@ -494,11 +494,11 @@ s32 Compiler::Comp_MemAccessBlock(int rn, BitSet16 regs, bool store, bool preinc
     Comp_AddCycles_CDI();

     int expectedTarget = Num == 0
-        ? ARMJIT_Memory::ClassifyAddress9(CurInstr.DataRegion)
-        : ARMJIT_Memory::ClassifyAddress7(CurInstr.DataRegion);
+        ? JIT.Memory.ClassifyAddress9(CurInstr.DataRegion)
+        : JIT.Memory.ClassifyAddress7(CurInstr.DataRegion);

-    bool compileFastPath = ARMJIT::FastMemory
-        && store && !usermode && (CurInstr.Cond() < 0xE || ARMJIT_Memory::IsFastmemCompatible(expectedTarget));
+    bool compileFastPath = JIT.FastMemory
+        && store && !usermode && (CurInstr.Cond() < 0xE || JIT.Memory.IsFastmemCompatible(expectedTarget));

     {
         s32 offset = decrement
```
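The same transformation repeats throughout this file: namespace-level calls (`ARMJIT_Memory::ClassifyAddress9`, the `ARMJIT::FastMemory` flag) become calls through the injected `JIT` reference and its `Memory` member. A reduced, runnable sketch of the resulting shape, with stand-in types, trivial method bodies, and a hypothetical `CanUseFastPath` helper that condenses the checks above:

```cpp
#include <cstdint>

// Simplified stand-ins for the real melonDS types.
class ARMJIT_Memory
{
public:
    // Were namespace-level functions; now instance methods.
    int ClassifyAddress9(std::uint32_t addr) const { return int(addr >> 24); }
    int ClassifyAddress7(std::uint32_t addr) const { return int(addr >> 24); }
    bool IsFastmemCompatible(int region) const { return region != 0; }
};

class ARMJIT
{
public:
    bool FastMemory = true;   // was a global flag
    ARMJIT_Memory Memory;     // the JIT owns its memory mapper
};

class Compiler
{
public:
    explicit Compiler(ARMJIT& jit) : JIT(jit) {}

    // Hypothetical helper condensing the expectedTarget/compileFastPath
    // checks from Comp_MemAccessBlock above.
    bool CanUseFastPath(int num, std::uint32_t dataRegion) const
    {
        int expectedTarget = num == 0
            ? JIT.Memory.ClassifyAddress9(dataRegion)
            : JIT.Memory.ClassifyAddress7(dataRegion);

        // Was: ARMJIT::FastMemory && ARMJIT_Memory::IsFastmemCompatible(...)
        return JIT.FastMemory && JIT.Memory.IsFastmemCompatible(expectedTarget);
    }

private:
    ARMJIT& JIT;
};

int main()
{
    ARMJIT jit;
    Compiler comp(jit);
    return comp.CanUseFastPath(0, 0x02000000u) ? 0 : 1;
}
```

Routing these checks through an instance rather than globals is the usual payoff of this kind of de-globalization: JIT settings and fast-mem layout stop being process-wide state.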