diff --git a/Source/Core/Core/PowerPC/Jit64/Jit.cpp b/Source/Core/Core/PowerPC/Jit64/Jit.cpp
index e5aa885263..9fe8e1430c 100644
--- a/Source/Core/Core/PowerPC/Jit64/Jit.cpp
+++ b/Source/Core/Core/PowerPC/Jit64/Jit.cpp
@@ -949,53 +949,58 @@ bool Jit64::DoJit(u32 em_address, JitBlock* b, u32 nextPC)
       js.isLastInstruction = true;
     }
 
-    // Gather pipe writes using a non-immediate address are discovered by profiling.
-    bool gatherPipeIntCheck = js.fifoWriteAddresses.find(op.address) != js.fifoWriteAddresses.end();
-
-    // Gather pipe writes using an immediate address are explicitly tracked.
-    if (jo.optimizeGatherPipe &&
-        (js.fifoBytesSinceCheck >= GPFifo::GATHER_PIPE_SIZE || js.mustCheckFifo))
+    if (i != 0)
     {
-      js.fifoBytesSinceCheck = 0;
-      js.mustCheckFifo = false;
-      BitSet32 registersInUse = CallerSavedRegistersInUse();
-      ABI_PushRegistersAndAdjustStack(registersInUse, 0);
-      ABI_CallFunctionP(GPFifo::FastCheckGatherPipe, &m_system.GetGPFifo());
-      ABI_PopRegistersAndAdjustStack(registersInUse, 0);
-      gatherPipeIntCheck = true;
-    }
-
-    // Gather pipe writes can generate an exception; add an exception check.
-    // TODO: This doesn't really match hardware; the CP interrupt is
-    // asynchronous.
-    if (gatherPipeIntCheck)
-    {
-      TEST(32, PPCSTATE(Exceptions), Imm32(EXCEPTION_EXTERNAL_INT));
-      FixupBranch extException = J_CC(CC_NZ, Jump::Near);
-
-      SwitchToFarCode();
-      SetJumpTarget(extException);
-      TEST(32, PPCSTATE(msr), Imm32(0x0008000));
-      FixupBranch noExtIntEnable = J_CC(CC_Z, Jump::Near);
-      MOV(64, R(RSCRATCH), ImmPtr(&m_system.GetProcessorInterface().m_interrupt_cause));
-      TEST(32, MatR(RSCRATCH),
-           Imm32(ProcessorInterface::INT_CAUSE_CP | ProcessorInterface::INT_CAUSE_PE_TOKEN |
-                 ProcessorInterface::INT_CAUSE_PE_FINISH));
-      FixupBranch noCPInt = J_CC(CC_Z, Jump::Near);
+      // Gather pipe writes using a non-immediate address are discovered by profiling.
+      const u32 prev_address = m_code_buffer[i - 1].address;
+      bool gatherPipeIntCheck =
+          js.fifoWriteAddresses.find(prev_address) != js.fifoWriteAddresses.end();
 
+      // Gather pipe writes using an immediate address are explicitly tracked.
+      if (jo.optimizeGatherPipe &&
+          (js.fifoBytesSinceCheck >= GPFifo::GATHER_PIPE_SIZE || js.mustCheckFifo))
       {
-        RCForkGuard gpr_guard = gpr.Fork();
-        RCForkGuard fpr_guard = fpr.Fork();
-
-        gpr.Flush();
-        fpr.Flush();
-
-        MOV(32, PPCSTATE(pc), Imm32(op.address));
-        WriteExternalExceptionExit();
+        js.fifoBytesSinceCheck = 0;
+        js.mustCheckFifo = false;
+        BitSet32 registersInUse = CallerSavedRegistersInUse();
+        ABI_PushRegistersAndAdjustStack(registersInUse, 0);
+        ABI_CallFunctionP(GPFifo::FastCheckGatherPipe, &m_system.GetGPFifo());
+        ABI_PopRegistersAndAdjustStack(registersInUse, 0);
+        gatherPipeIntCheck = true;
+      }
+
+      // Gather pipe writes can generate an exception; add an exception check.
+      // TODO: This doesn't really match hardware; the CP interrupt is
+      // asynchronous.
+      if (gatherPipeIntCheck)
+      {
+        TEST(32, PPCSTATE(Exceptions), Imm32(EXCEPTION_EXTERNAL_INT));
+        FixupBranch extException = J_CC(CC_NZ, Jump::Near);
+
+        SwitchToFarCode();
+        SetJumpTarget(extException);
+        TEST(32, PPCSTATE(msr), Imm32(0x0008000));
+        FixupBranch noExtIntEnable = J_CC(CC_Z, Jump::Near);
+        MOV(64, R(RSCRATCH), ImmPtr(&m_system.GetProcessorInterface().m_interrupt_cause));
+        TEST(32, MatR(RSCRATCH),
+             Imm32(ProcessorInterface::INT_CAUSE_CP | ProcessorInterface::INT_CAUSE_PE_TOKEN |
+                   ProcessorInterface::INT_CAUSE_PE_FINISH));
+        FixupBranch noCPInt = J_CC(CC_Z, Jump::Near);
+
+        {
+          RCForkGuard gpr_guard = gpr.Fork();
+          RCForkGuard fpr_guard = fpr.Fork();
+
+          gpr.Flush();
+          fpr.Flush();
+
+          MOV(32, PPCSTATE(pc), Imm32(op.address));
+          WriteExternalExceptionExit();
+        }
+        SwitchToNearCode();
+        SetJumpTarget(noCPInt);
+        SetJumpTarget(noExtIntEnable);
       }
-      SwitchToNearCode();
-      SetJumpTarget(noCPInt);
-      SetJumpTarget(noExtIntEnable);
     }
 
     if (HandleFunctionHooking(op.address))
diff --git a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
index 236a0d939f..55c6b75231 100644
--- a/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/Jit.cpp
@@ -1047,90 +1047,96 @@ bool JitArm64::DoJit(u32 em_address, JitBlock* b, u32 nextPC)
       fpr_used[op.fregOut] = true;
     fpr.UpdateLastUsed(fpr_used);
 
-    // Gather pipe writes using a non-immediate address are discovered by profiling.
-    bool gatherPipeIntCheck = js.fifoWriteAddresses.find(op.address) != js.fifoWriteAddresses.end();
-
-    if (jo.optimizeGatherPipe &&
-        (js.fifoBytesSinceCheck >= GPFifo::GATHER_PIPE_SIZE || js.mustCheckFifo))
+    if (i != 0)
     {
-      js.fifoBytesSinceCheck = 0;
-      js.mustCheckFifo = false;
+      // Gather pipe writes using a non-immediate address are discovered by profiling.
+      const u32 prev_address = m_code_buffer[i - 1].address;
+      bool gatherPipeIntCheck =
+          js.fifoWriteAddresses.find(prev_address) != js.fifoWriteAddresses.end();
 
-      gpr.Lock(ARM64Reg::W30);
-      BitSet32 regs_in_use = gpr.GetCallerSavedUsed();
-      BitSet32 fprs_in_use = fpr.GetCallerSavedUsed();
-      regs_in_use[DecodeReg(ARM64Reg::W30)] = 0;
+      if (jo.optimizeGatherPipe &&
+          (js.fifoBytesSinceCheck >= GPFifo::GATHER_PIPE_SIZE || js.mustCheckFifo))
+      {
+        js.fifoBytesSinceCheck = 0;
+        js.mustCheckFifo = false;
 
-      ABI_PushRegisters(regs_in_use);
-      m_float_emit.ABI_PushRegisters(fprs_in_use, ARM64Reg::X30);
-      MOVP2R(ARM64Reg::X8, &GPFifo::FastCheckGatherPipe);
-      MOVP2R(ARM64Reg::X0, &m_system.GetGPFifo());
-      BLR(ARM64Reg::X8);
-      m_float_emit.ABI_PopRegisters(fprs_in_use, ARM64Reg::X30);
-      ABI_PopRegisters(regs_in_use);
+        gpr.Lock(ARM64Reg::W30);
+        BitSet32 regs_in_use = gpr.GetCallerSavedUsed();
+        BitSet32 fprs_in_use = fpr.GetCallerSavedUsed();
+        regs_in_use[DecodeReg(ARM64Reg::W30)] = 0;
 
-      // Inline exception check
-      LDR(IndexType::Unsigned, ARM64Reg::W30, PPC_REG, PPCSTATE_OFF(Exceptions));
-      FixupBranch no_ext_exception = TBZ(ARM64Reg::W30, MathUtil::IntLog2(EXCEPTION_EXTERNAL_INT));
-      FixupBranch exception = B();
-      SwitchToFarCode();
-      const u8* done_here = GetCodePtr();
-      FixupBranch exit = B();
-      SetJumpTarget(exception);
-      LDR(IndexType::Unsigned, ARM64Reg::W30, PPC_REG, PPCSTATE_OFF(msr));
-      TBZ(ARM64Reg::W30, 15, done_here);  // MSR.EE
-      LDR(IndexType::Unsigned, ARM64Reg::W30, ARM64Reg::X30,
-          MOVPage2R(ARM64Reg::X30, &m_system.GetProcessorInterface().m_interrupt_cause));
-      constexpr u32 cause_mask = ProcessorInterface::INT_CAUSE_CP |
-                                 ProcessorInterface::INT_CAUSE_PE_TOKEN |
-                                 ProcessorInterface::INT_CAUSE_PE_FINISH;
-      TST(ARM64Reg::W30, LogicalImm(cause_mask, 32));
-      B(CC_EQ, done_here);
+        ABI_PushRegisters(regs_in_use);
+        m_float_emit.ABI_PushRegisters(fprs_in_use, ARM64Reg::X30);
+        MOVP2R(ARM64Reg::X8, &GPFifo::FastCheckGatherPipe);
+        MOVP2R(ARM64Reg::X0, &m_system.GetGPFifo());
+        BLR(ARM64Reg::X8);
+        m_float_emit.ABI_PopRegisters(fprs_in_use, ARM64Reg::X30);
+        ABI_PopRegisters(regs_in_use);
 
-      gpr.Flush(FlushMode::MaintainState, ARM64Reg::W30);
-      fpr.Flush(FlushMode::MaintainState, ARM64Reg::INVALID_REG);
-      WriteExceptionExit(js.compilerPC, true, true);
-      SwitchToNearCode();
-      SetJumpTarget(no_ext_exception);
-      SetJumpTarget(exit);
-      gpr.Unlock(ARM64Reg::W30);
+        // Inline exception check
+        LDR(IndexType::Unsigned, ARM64Reg::W30, PPC_REG, PPCSTATE_OFF(Exceptions));
+        FixupBranch no_ext_exception =
+            TBZ(ARM64Reg::W30, MathUtil::IntLog2(EXCEPTION_EXTERNAL_INT));
+        FixupBranch exception = B();
+        SwitchToFarCode();
+        const u8* done_here = GetCodePtr();
+        FixupBranch exit = B();
+        SetJumpTarget(exception);
+        LDR(IndexType::Unsigned, ARM64Reg::W30, PPC_REG, PPCSTATE_OFF(msr));
+        TBZ(ARM64Reg::W30, 15, done_here);  // MSR.EE
+        LDR(IndexType::Unsigned, ARM64Reg::W30, ARM64Reg::X30,
+            MOVPage2R(ARM64Reg::X30, &m_system.GetProcessorInterface().m_interrupt_cause));
+        constexpr u32 cause_mask = ProcessorInterface::INT_CAUSE_CP |
+                                   ProcessorInterface::INT_CAUSE_PE_TOKEN |
+                                   ProcessorInterface::INT_CAUSE_PE_FINISH;
+        TST(ARM64Reg::W30, LogicalImm(cause_mask, 32));
+        B(CC_EQ, done_here);
 
-      // So we don't check exceptions twice
-      gatherPipeIntCheck = false;
-    }
-    // Gather pipe writes can generate an exception; add an exception check.
-    // TODO: This doesn't really match hardware; the CP interrupt is
-    // asynchronous.
-    if (jo.optimizeGatherPipe && gatherPipeIntCheck)
-    {
-      ARM64Reg WA = gpr.GetReg();
-      ARM64Reg XA = EncodeRegTo64(WA);
+        gpr.Flush(FlushMode::MaintainState, ARM64Reg::W30);
+        fpr.Flush(FlushMode::MaintainState, ARM64Reg::INVALID_REG);
+        WriteExceptionExit(js.compilerPC, true, true);
+        SwitchToNearCode();
+        SetJumpTarget(no_ext_exception);
+        SetJumpTarget(exit);
+        gpr.Unlock(ARM64Reg::W30);
 
-      LDR(IndexType::Unsigned, WA, PPC_REG, PPCSTATE_OFF(Exceptions));
-      FixupBranch no_ext_exception = TBZ(WA, MathUtil::IntLog2(EXCEPTION_EXTERNAL_INT));
-      FixupBranch exception = B();
-      SwitchToFarCode();
-      const u8* done_here = GetCodePtr();
-      FixupBranch exit = B();
-      SetJumpTarget(exception);
-      LDR(IndexType::Unsigned, WA, PPC_REG, PPCSTATE_OFF(msr));
-      TBZ(WA, 15, done_here);  // MSR.EE
-      LDR(IndexType::Unsigned, WA, XA,
-          MOVPage2R(XA, &m_system.GetProcessorInterface().m_interrupt_cause));
-      constexpr u32 cause_mask = ProcessorInterface::INT_CAUSE_CP |
-                                 ProcessorInterface::INT_CAUSE_PE_TOKEN |
-                                 ProcessorInterface::INT_CAUSE_PE_FINISH;
-      TST(WA, LogicalImm(cause_mask, 32));
-      B(CC_EQ, done_here);
+        // So we don't check exceptions twice
+        gatherPipeIntCheck = false;
+      }
+      // Gather pipe writes can generate an exception; add an exception check.
+      // TODO: This doesn't really match hardware; the CP interrupt is
+      // asynchronous.
+      if (jo.optimizeGatherPipe && gatherPipeIntCheck)
+      {
+        ARM64Reg WA = gpr.GetReg();
+        ARM64Reg XA = EncodeRegTo64(WA);
 
-      gpr.Flush(FlushMode::MaintainState, WA);
-      fpr.Flush(FlushMode::MaintainState, ARM64Reg::INVALID_REG);
-      WriteExceptionExit(js.compilerPC, true, true);
-      SwitchToNearCode();
-      SetJumpTarget(no_ext_exception);
-      SetJumpTarget(exit);
+        LDR(IndexType::Unsigned, WA, PPC_REG, PPCSTATE_OFF(Exceptions));
+        FixupBranch no_ext_exception = TBZ(WA, MathUtil::IntLog2(EXCEPTION_EXTERNAL_INT));
+        FixupBranch exception = B();
+        SwitchToFarCode();
+        const u8* done_here = GetCodePtr();
+        FixupBranch exit = B();
+        SetJumpTarget(exception);
+        LDR(IndexType::Unsigned, WA, PPC_REG, PPCSTATE_OFF(msr));
+        TBZ(WA, 15, done_here);  // MSR.EE
+        LDR(IndexType::Unsigned, WA, XA,
+            MOVPage2R(XA, &m_system.GetProcessorInterface().m_interrupt_cause));
+        constexpr u32 cause_mask = ProcessorInterface::INT_CAUSE_CP |
+                                   ProcessorInterface::INT_CAUSE_PE_TOKEN |
+                                   ProcessorInterface::INT_CAUSE_PE_FINISH;
+        TST(WA, LogicalImm(cause_mask, 32));
+        B(CC_EQ, done_here);
 
-      gpr.Unlock(WA);
+        gpr.Flush(FlushMode::MaintainState, WA);
+        fpr.Flush(FlushMode::MaintainState, ARM64Reg::INVALID_REG);
+        WriteExceptionExit(js.compilerPC, true, true);
+        SwitchToNearCode();
+        SetJumpTarget(no_ext_exception);
+        SetJumpTarget(exit);
+
+        gpr.Unlock(WA);
+      }
     }
 
     if (HandleFunctionHooking(op.address))