diff --git a/Source/Core/Common/x64Emitter.cpp b/Source/Core/Common/x64Emitter.cpp
index 23c3d2647e..89d5ddf564 100644
--- a/Source/Core/Common/x64Emitter.cpp
+++ b/Source/Core/Common/x64Emitter.cpp
@@ -6,6 +6,7 @@
 
 #include "Common/Common.h"
 #include "Common/CPUDetect.h"
+#include "Common/Log.h"
 #include "Common/x64Emitter.h"
 
 namespace Gen
@@ -516,8 +517,9 @@ void XEmitter::RET() {Write8(0xC3);}
 void XEmitter::RET_FAST() {Write8(0xF3); Write8(0xC3);} //two-byte return (rep ret) - recommended by AMD optimization manual for the case of jumping to a ret
 
 // The first sign of decadence: optimized NOPs.
-void XEmitter::NOP(int size)
+void XEmitter::NOP(size_t size)
 {
+	_dbg_assert_(DYNA_REC, (int)size > 0);
 	while (true)
 	{
 		switch (size)
diff --git a/Source/Core/Common/x64Emitter.h b/Source/Core/Common/x64Emitter.h
index 528a4495d8..35c45f6c20 100644
--- a/Source/Core/Common/x64Emitter.h
+++ b/Source/Core/Common/x64Emitter.h
@@ -290,7 +290,7 @@ public:
 	void INT3();
 
 	// Do nothing
-	void NOP(int count = 1);
+	void NOP(size_t count = 1);
 
 	// Save energy in wait-loops on P4 only. Probably not too useful.
 	void PAUSE();
diff --git a/Source/Core/Core/PowerPC/JitCommon/JitBackpatch.cpp b/Source/Core/Core/PowerPC/JitCommon/JitBackpatch.cpp
index 7f557734e4..91eb304b79 100644
--- a/Source/Core/Core/PowerPC/JitCommon/JitBackpatch.cpp
+++ b/Source/Core/Core/PowerPC/JitCommon/JitBackpatch.cpp
@@ -187,7 +187,7 @@ const u8 *Jitx86Base::BackPatch(u8 *codePtr, u32 emAddress, void *ctx_void)
 		return nullptr;
 	}
 
-	if (info.byteSwap && info.instructionSize < 5)
+	if (info.byteSwap && info.instructionSize < BACKPATCH_SIZE)
 	{
 		PanicAlert("BackPatch: MOVBE is too small");
 		return nullptr;
@@ -217,7 +217,11 @@ const u8 *Jitx86Base::BackPatch(u8 *codePtr, u32 emAddress, void *ctx_void)
 
 		const u8 *trampoline = trampolines.GetReadTrampoline(info, registersInUse);
 		emitter.CALL((void *)trampoline);
-		emitter.NOP((int)info.instructionSize + bswapNopCount - 5);
+		int padding = info.instructionSize + bswapNopCount - BACKPATCH_SIZE;
+		if (padding > 0)
+		{
+			emitter.NOP(padding);
+		}
 		return codePtr;
 	}
 	else
@@ -258,11 +262,14 @@ const u8 *Jitx86Base::BackPatch(u8 *codePtr, u32 emAddress, void *ctx_void)
 		XEmitter emitter(start);
 		const u8 *trampoline = trampolines.GetWriteTrampoline(info, registersInUse);
 		emitter.CALL((void *)trampoline);
-		emitter.NOP((int)(codePtr + info.instructionSize - emitter.GetCodePtr()));
+		int padding = codePtr + info.instructionSize - emitter.GetCodePtr();
+		if (padding > 0)
+		{
+			emitter.NOP(padding);
+		}
 		return start;
 	}
 #else
 	return 0;
 #endif
 }
-
diff --git a/Source/Core/Core/PowerPC/JitCommon/JitBackpatch.h b/Source/Core/Core/PowerPC/JitCommon/JitBackpatch.h
index 8f285fe88c..dd5ad8628b 100644
--- a/Source/Core/Core/PowerPC/JitCommon/JitBackpatch.h
+++ b/Source/Core/Core/PowerPC/JitCommon/JitBackpatch.h
@@ -8,6 +8,9 @@
 #include "Common/x64Analyzer.h"
 #include "Common/x64Emitter.h"
 
+// We need at least this many bytes for backpatching.
+const int BACKPATCH_SIZE = 5;
+
 // meh.
 #if defined(_WIN32)
 #include <windows.h>
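
Note on the padding guards: both BackPatch hunks overwrite a faulting memory access with a 5-byte CALL rel32 (BACKPATCH_SIZE) to a trampoline, then NOP-fill whatever remains of the original instruction's footprint. If the original access (plus any MOVBE byte-swap NOPs) is shorter than the CALL, the old expression went negative; with NOP() now taking size_t, that negative would wrap to a huge count, hence the "if (padding > 0)" guards and the new assert in NOP(). A minimal standalone sketch of the clamped computation follows; NopPadding and kBackpatchSize are illustrative names, not identifiers from the patch.

#include <cstddef>

// Mirrors BACKPATCH_SIZE from the patch: the length in bytes of the
// CALL rel32 instruction that replaces the faulting memory access.
constexpr int kBackpatchSize = 5;

// Hypothetical helper: how many single-byte NOPs must follow the CALL so
// the patched sequence occupies exactly the space of the original
// instruction plus any byte-swap NOPs. Clamped to zero, like the patch's
// "if (padding > 0)" guard, so a short instruction can never produce a
// negative count that size_t conversion would turn into a huge NOP run.
int NopPadding(size_t instructionSize, int bswapNopCount)
{
	int padding = static_cast<int>(instructionSize) + bswapNopCount - kBackpatchSize;
	return padding > 0 ? padding : 0;
}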