diff --git a/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStore.cpp b/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStore.cpp
index 8e5cb47940..ebcb8142b7 100644
--- a/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStore.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/JitArm64_LoadStore.cpp
@@ -181,7 +181,8 @@ void JitArm64::SafeStoreFromReg(s32 dest, u32 value, s32 regOffset, u32 flags, s
   if (!jo.fastmem)
     gpr.Lock(ARM64Reg::W0);
 
-  ARM64Reg RS = gpr.R(value);
+  // Don't materialize zero.
+  ARM64Reg RS = gpr.IsImm(value, 0) ? ARM64Reg::WZR : gpr.R(value);
 
   ARM64Reg reg_dest = ARM64Reg::INVALID_REG;
   ARM64Reg reg_off = ARM64Reg::INVALID_REG;
diff --git a/Source/Core/Core/PowerPC/JitArm64/Jit_Util.cpp b/Source/Core/Core/PowerPC/JitArm64/Jit_Util.cpp
index 4beb74ff1b..19274d2793 100644
--- a/Source/Core/Core/PowerPC/JitArm64/Jit_Util.cpp
+++ b/Source/Core/Core/PowerPC/JitArm64/Jit_Util.cpp
@@ -257,6 +257,12 @@ void ByteswapAfterLoad(ARM64XEmitter* emit, ARM64FloatEmitter* float_emit, ARM64
 ARM64Reg ByteswapBeforeStore(ARM64XEmitter* emit, ARM64FloatEmitter* float_emit, ARM64Reg tmp_reg,
                              ARM64Reg src_reg, u32 flags, bool want_reversed)
 {
+  // Byteswapping zero is still zero.
+  // We'd typically expect a writable register to be passed in, but recognize
+  // WZR for optimization purposes.
+  if ((flags & BackPatchInfo::FLAG_FLOAT) == 0 && src_reg == ARM64Reg::WZR)
+    return ARM64Reg::WZR;
+
   ARM64Reg dst_reg = src_reg;
 
   if (want_reversed == !(flags & BackPatchInfo::FLAG_REVERSE))
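
Taken together, the two hunks let an integer store of a known-zero guest value read straight from AArch64's zero register: SafeStoreFromReg substitutes WZR instead of materializing 0 into a host register, and ByteswapBeforeStore (for non-float stores only, per the FLAG_FLOAT check) passes WZR through unchanged because reversing the bytes of zero still yields zero. The following is a minimal standalone C++17 sketch of that reasoning, not Dolphin's emitter code; the names Bswap32, PickStoreSource, and kZeroReg are hypothetical stand-ins.

// Standalone sketch of the "known zero" store shortcut, assuming hypothetical
// names; it only illustrates the two facts the patch relies on.
#include <cassert>
#include <cstdint>
#include <optional>

// Portable stand-in for the REV instruction the JIT would otherwise emit.
constexpr uint32_t Bswap32(uint32_t v)
{
  return (v >> 24) | ((v >> 8) & 0x0000FF00u) | ((v << 8) & 0x00FF0000u) | (v << 24);
}

// AArch64's WZR always reads as zero, so storing a known immediate zero needs
// neither a host register allocation nor a "mov wN, #0".
constexpr int kZeroReg = 31;  // hypothetical encoding standing in for WZR

int PickStoreSource(std::optional<uint32_t> known_imm, int allocated_reg)
{
  if (known_imm == 0u)
    return kZeroReg;  // read the zero straight from the zero register
  return allocated_reg;
}

int main()
{
  // The byteswap-before-store step is a no-op for a zero source.
  static_assert(Bswap32(0) == 0, "byteswapping zero is still zero");
  assert(PickStoreSource(0u, /*allocated_reg=*/1) == kZeroReg);
  assert(PickStoreSource(std::nullopt, /*allocated_reg=*/1) == 1);
  return 0;
}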