diff --git a/Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp b/Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp
index 04036e11ab..53bae65dd4 100644
--- a/Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp
+++ b/Source/Core/Core/PowerPC/Jit64Common/EmuCodeBlock.cpp
@@ -372,7 +372,8 @@ void EmuCodeBlock::SafeLoadToReg(X64Reg reg_value, const Gen::OpArg& opAddress,
   FixupBranch exit;
 
   const bool dr_set = (flags & SAFE_LOADSTORE_DR_ON) || m_jit.m_ppc_state.msr.DR;
-  const bool fast_check_address = !slowmem && dr_set && m_jit.jo.fastmem_arena;
+  const bool fast_check_address =
+      !slowmem && dr_set && m_jit.jo.fastmem_arena && !m_jit.m_ppc_state.m_enable_dcache;
   if (fast_check_address)
   {
     FixupBranch slow = CheckIfSafeAddress(R(reg_value), reg_addr, registersInUse);
@@ -541,7 +542,8 @@ void EmuCodeBlock::SafeWriteRegToReg(OpArg reg_value, X64Reg reg_addr, int acces
   FixupBranch exit;
 
   const bool dr_set = (flags & SAFE_LOADSTORE_DR_ON) || m_jit.m_ppc_state.msr.DR;
-  const bool fast_check_address = !slowmem && dr_set && m_jit.jo.fastmem_arena;
+  const bool fast_check_address =
+      !slowmem && dr_set && m_jit.jo.fastmem_arena && !m_jit.m_ppc_state.m_enable_dcache;
   if (fast_check_address)
   {
     FixupBranch slow = CheckIfSafeAddress(reg_value, reg_addr, registersInUse);
diff --git a/Source/Core/Core/PowerPC/MMU.cpp b/Source/Core/Core/PowerPC/MMU.cpp
index 3526ec1522..dcb1b75654 100644
--- a/Source/Core/Core/PowerPC/MMU.cpp
+++ b/Source/Core/Core/PowerPC/MMU.cpp
@@ -914,6 +914,9 @@ bool MMU::IsOptimizableRAMAddress(const u32 address) const
   if (!m_ppc_state.msr.DR)
     return false;
 
+  if (m_ppc_state.m_enable_dcache)
+    return false;
+
   // TODO: This API needs to take an access size
   //
   // We store whether an access can be optimized to an unchecked access
@@ -1211,6 +1214,9 @@ u32 MMU::IsOptimizableMMIOAccess(u32 address, u32 access_size) const
   if (!m_ppc_state.msr.DR)
     return 0;
 
+  if (m_ppc_state.m_enable_dcache)
+    return 0;
+
   // Translate address
   // If we also optimize for TLB mappings, we'd have to clear the
   // JitCache on each TLB invalidation.
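
Note on the pattern above: every site that decides fast-path eligibility now also requires that data cache emulation is off, since a direct fastmem access would bypass the emulated cache and could observe stale or divergent memory. The following is a minimal standalone sketch of that guard, not Dolphin's actual code; the surrounding structs and the CanUseFastPath helper are hypothetical, with field names mirroring the patch.

    // Sketch of the eligibility check the patch extends. Assumes simplified
    // stand-ins for Dolphin's PowerPCState and JitOptions.
    struct PowerPCState
    {
      bool msr_dr = true;           // MSR.DR: data address translation enabled
      bool m_enable_dcache = false; // true when the data cache is emulated
    };

    struct JitOptions
    {
      bool fastmem_arena = true; // host arena mapped for direct guest access
    };

    // Mirrors the patched condition in SafeLoadToReg/SafeWriteRegToReg:
    // the fast (unchecked) path needs fastmem available, translation on,
    // no slowmem override, and dcache emulation disabled.
    static bool CanUseFastPath(const PowerPCState& ppc, const JitOptions& jo, bool slowmem)
    {
      const bool dr_set = ppc.msr_dr;
      return !slowmem && dr_set && jo.fastmem_arena && !ppc.m_enable_dcache;
    }

The two MMU.cpp hunks apply the same idea on the translation side: IsOptimizableRAMAddress and IsOptimizableMMIOAccess report nothing as optimizable while m_enable_dcache is set, so the JIT falls back to the checked slow path for every access.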