fix for fastmem when pc is used as immediate

and (hopefully) make SIGSEGV handler work for aarch64 linux
RSDuck
2020-07-08 23:47:24 +02:00
parent 3786660099
commit 8f5d8d1e12
3 changed files with 18 additions and 4 deletions

@@ -127,7 +127,6 @@ static LONG ExceptionHandler(EXCEPTION_POINTERS* exceptionInfo)
         return EXCEPTION_CONTINUE_EXECUTION;
     }
-    printf("miauz\n");
     return EXCEPTION_CONTINUE_SEARCH;
 }
@@ -142,13 +141,22 @@ static void SigsegvHandler(int sig, siginfo_t* info, void* rawContext)
     ARMJIT_Memory::FaultDescription desc;
     u8* curArea = (u8*)(NDS::CurCPU == 0 ? ARMJIT_Memory::FastMem9Start : ARMJIT_Memory::FastMem7Start);
+#ifdef __x86_64__
     desc.EmulatedFaultAddr = (u8*)info->si_addr - curArea;
     desc.FaultPC = context->uc_mcontext.gregs[REG_RIP];
+#else
+    desc.EmulatedFaultAddr = (u8*)info->fault_address - curArea;
+    desc.FaultPC = context->uc_mcontext.pc;
+#endif
     s32 offset = 0;
     if (ARMJIT_Memory::FaultHandler(&desc, offset))
     {
+#ifdef __x86_64__
         context->uc_mcontext.gregs[REG_RIP] += offset;
+#else
+        context->uc_mcontext.pc += offset;
+#endif
         return;
     }
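
The branch added above relies on the Linux mcontext layout: on x86_64 the program counter lives in uc_mcontext.gregs[REG_RIP], while on aarch64 it is the dedicated uc_mcontext.pc field. Below is a minimal, self-contained sketch of that pattern, not the emulator's actual handler; the names SegvSketch and patchOffset are invented, patchOffset stands in for whatever the JIT's fault lookup would return, and si_addr is used here as the portable faulting-address field (the commit itself reads info->fault_address on the aarch64 path).

#include <signal.h>
#include <ucontext.h>

static void SegvSketch(int sig, siginfo_t* info, void* rawContext)
{
    (void)sig;
    ucontext_t* context = (ucontext_t*)rawContext;

    void* faultAddr = info->si_addr; // faulting data address (portable field)
    (void)faultAddr;
    long patchOffset = 0;            // placeholder for the JIT's patch lookup

#ifdef __x86_64__
    // x86_64 Linux keeps the PC in the general register array.
    // (REG_RIP needs _GNU_SOURCE on glibc; g++ defines it by default.)
    context->uc_mcontext.gregs[REG_RIP] += patchOffset;
#elif defined(__aarch64__)
    // aarch64 Linux exposes the PC as a dedicated mcontext field.
    context->uc_mcontext.pc += patchOffset;
#endif
}

int main()
{
    // SA_SIGINFO makes the kernel pass siginfo_t and the ucontext to the handler.
    struct sigaction sa {};
    sa.sa_sigaction = SegvSketch;
    sa.sa_flags = SA_SIGINFO;
    sigaction(SIGSEGV, &sa, nullptr);
    return 0;
}
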
@@ -355,8 +363,8 @@ void SetCodeProtection(int region, u32 offset, bool protect)
     {
         Mapping& mapping = Mappings[region][i];
-        // if (offset < mapping.LocalOffset || offset >= mapping.LocalOffset + mapping.Size)
-        // continue;
+        if (offset < mapping.LocalOffset || offset >= mapping.LocalOffset + mapping.Size)
+            continue;
         u32 effectiveAddr = mapping.Addr + (offset - mapping.LocalOffset);
         if (mapping.Num == 0
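
Re-enabling the commented-out range check means SetCodeProtection only reprotects mappings whose window actually contains the changed offset. The snippet below is a rough stand-alone illustration of that guard, using an invented MappingSketch type and ReprotectSketch function rather than the emulator's Mapping struct:

#include <cstdint>
#include <cstdio>
#include <vector>

// Invented stand-in for the emulator's mapping entries.
struct MappingSketch
{
    uint32_t Addr;        // guest address the mapping starts at
    uint32_t LocalOffset; // offset of the mapping inside the backing region
    uint32_t Size;        // length of the mapped window
};

static void ReprotectSketch(const std::vector<MappingSketch>& mappings, uint32_t offset)
{
    for (const MappingSketch& m : mappings)
    {
        // Skip mappings whose window does not contain the changed offset;
        // without this guard every mapping of the region would be touched.
        if (offset < m.LocalOffset || offset >= m.LocalOffset + m.Size)
            continue;

        uint32_t effectiveAddr = m.Addr + (offset - m.LocalOffset);
        std::printf("reprotect page containing 0x%08x\n", effectiveAddr);
    }
}

int main()
{
    // Only the second mapping covers offset 0x4123, so only it is reprotected.
    ReprotectSketch({{0x02000000, 0x0000, 0x4000}, {0x02004000, 0x4000, 0x4000}}, 0x4123);
    return 0;
}
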

@@ -179,7 +179,7 @@ public:
     Gen::OpArg MapReg(int reg)
     {
-        if (reg == 15 && RegCache.Mapping[reg] == Gen::INVALID_REG)
+        if (reg == 15 && !(RegCache.LoadedRegs & (1 << 15)))
             return Gen::Imm32(R15);
         assert(RegCache.Mapping[reg] != Gen::INVALID_REG);
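
This MapReg change is the fastmem fix from the commit title: r15 is folded to the known constant R15 only when its value has not actually been loaded into a host register, which is what the LoadedRegs bitmask tracks, instead of keying on whether a mapping slot happens to be assigned. A stripped-down sketch of that check follows; TinyRegCache and UsePCAsImmediate are made-up names, not melonDS's real RegCache API.

#include <cstdint>
#include <cstdio>

// Illustrative only: bit n of LoadedRegs says whether guest register n
// currently lives in a host register; HostReg says which one.
struct TinyRegCache
{
    uint32_t LoadedRegs = 0;
    int      HostReg[16] = {};

    // The PC (r15) has a statically known value while an instruction is being
    // compiled, so as long as it has not been loaded into a host register it
    // can be emitted as an immediate operand.
    bool UsePCAsImmediate(int reg) const
    {
        return reg == 15 && !(LoadedRegs & (1u << 15));
    }
};

int main()
{
    TinyRegCache cache;
    cache.HostReg[15] = 3;                           // a slot may be assigned...
    std::printf("%d\n", cache.UsePCAsImmediate(15)); // ...but r15 is still an immediate: prints 1
    cache.LoadedRegs |= 1u << 15;                    // once loaded, the host register must be used
    std::printf("%d\n", cache.UsePCAsImmediate(15)); // prints 0
    return 0;
}
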

@@ -183,6 +183,12 @@ void Compiler::Comp_MemAccess(int rd, int rn, const Op2& op2, int size, int flag
     if (Config::JIT_FastMemory && ((!Thumb && CurInstr.Cond() != 0xE) || ARMJIT_Memory::IsFastmemCompatible(expectedTarget)))
     {
+        if (rdMapped.IsImm())
+        {
+            MOV(32, R(RSCRATCH4), rdMapped);
+            rdMapped = R(RSCRATCH4);
+        }
         u8* memopStart = GetWritableCodePtr();
         LoadStorePatch patch;
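
The new guard in Comp_MemAccess covers the other half of the fix: when MapReg folded the PC to an immediate, the value is first moved into the scratch register RSCRATCH4 so the fastmem load/store is emitted with a real register operand. The code below is a rough stand-alone illustration of that idea only; Operand, EnsureInRegister, emitMovImmToReg and SCRATCH_REG are invented stand-ins, not the x64 emitter's API.

#include <cstdint>
#include <cstdio>

// Invented operand type: either an immediate value or a host register index.
struct Operand
{
    bool     isImm;
    uint32_t imm; // valid when isImm
    int      reg; // valid when !isImm
};

constexpr int SCRATCH_REG = 7; // stand-in for the JIT's scratch register

// Stand-in for the emitter call (the real code emits MOV(32, R(RSCRATCH4), rdMapped)).
static void emitMovImmToReg(int reg, uint32_t imm)
{
    std::printf("mov r%d, #0x%08x\n", reg, imm);
}

// The emitted fastmem access expects its guest-register operand in a host
// register, so an operand that is still an immediate (e.g. the PC folded to a
// constant) is copied into the scratch register first.
static Operand EnsureInRegister(Operand op)
{
    if (op.isImm)
    {
        emitMovImmToReg(SCRATCH_REG, op.imm);
        op = Operand{false, 0, SCRATCH_REG};
    }
    return op;
}

int main()
{
    Operand pcAsImm{true, 0x02000008, 0}; // PC folded to a constant by the register lookup
    Operand usable = EnsureInRegister(pcAsImm);
    std::printf("memory op uses r%d\n", usable.reg);
    return 0;
}
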