diff --git a/Source/Core/Core/PowerPC/Jit64/Jit.h b/Source/Core/Core/PowerPC/Jit64/Jit.h
index c1237463d8..358e7a8ea6 100644
--- a/Source/Core/Core/PowerPC/Jit64/Jit.h
+++ b/Source/Core/Core/PowerPC/Jit64/Jit.h
@@ -96,7 +96,7 @@ public:
   void GenerateConstantOverflow(bool overflow);
   void GenerateConstantOverflow(s64 val);
-  void GenerateOverflow();
+  void GenerateOverflow(Gen::CCFlags cond = Gen::CCFlags::CC_NO);
   void FinalizeCarryOverflow(bool oe, bool inv = false);
   void FinalizeCarry(Gen::CCFlags cond);
   void FinalizeCarry(bool ca);
diff --git a/Source/Core/Core/PowerPC/Jit64/Jit_Integer.cpp b/Source/Core/Core/PowerPC/Jit64/Jit_Integer.cpp
index 35df5a06a5..5c6f2cc17d 100644
--- a/Source/Core/Core/PowerPC/Jit64/Jit_Integer.cpp
+++ b/Source/Core/Core/PowerPC/Jit64/Jit_Integer.cpp
@@ -42,9 +42,9 @@ void Jit64::GenerateConstantOverflow(bool overflow)
 }
 
 // We could do overflow branchlessly, but unlike carry it seems to be quite a bit rarer.
-void Jit64::GenerateOverflow()
+void Jit64::GenerateOverflow(Gen::CCFlags cond)
 {
-  FixupBranch jno = J_CC(CC_NO);
+  FixupBranch jno = J_CC(cond);
   // XER[OV/SO] = 1
   MOV(8, PPCSTATE(xer_so_ov), Imm8(XER_OV_MASK | XER_SO_MASK));
   FixupBranch exit = J();