diff --git a/Core/MIPS/ARM64/Arm64Asm.cpp b/Core/MIPS/ARM64/Arm64Asm.cpp
index 1abf148d7c..755e35f57c 100644
--- a/Core/MIPS/ARM64/Arm64Asm.cpp
+++ b/Core/MIPS/ARM64/Arm64Asm.cpp
@@ -247,6 +247,9 @@ void Arm64Jit::GenerateFixedCode(const JitOptions &jo) {
 	}
 
 	LDR(INDEX_UNSIGNED, SCRATCH1, CTXREG, offsetof(MIPSState, pc));
+#ifdef MASKED_PSP_MEMORY
+	ANDI2R(SCRATCH1, SCRATCH1, 0x3FFFFFFF);
+#endif
 	LDR(SCRATCH1, MEMBASEREG, SCRATCH1_64);
 	LSR(SCRATCH2, SCRATCH1, 24); // or UBFX(SCRATCH2, SCRATCH1, 24, 8)
 	ANDI2R(SCRATCH1, SCRATCH1, 0x00FFFFFF);
diff --git a/Core/MIPS/ARM64/Arm64CompALU.cpp b/Core/MIPS/ARM64/Arm64CompALU.cpp
index aa3ffb577e..f97ba0dee7 100644
--- a/Core/MIPS/ARM64/Arm64CompALU.cpp
+++ b/Core/MIPS/ARM64/Arm64CompALU.cpp
@@ -43,7 +43,7 @@ using namespace MIPSAnalyst;
 
 // All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
 // Currently known non working ones should have DISABLE.
-// #define CONDITIONAL_DISABLE { Comp_Generic(op); return; }
+//#define CONDITIONAL_DISABLE(flag) { Comp_Generic(op); return; }
 #define CONDITIONAL_DISABLE(flag) if (jo.Disabled(JitDisable::flag)) { Comp_Generic(op); return; }
 #define DISABLE { Comp_Generic(op); return; }
 
diff --git a/Core/MIPS/ARM64/Arm64CompFPU.cpp b/Core/MIPS/ARM64/Arm64CompFPU.cpp
index 98121615b1..efe6887f23 100644
--- a/Core/MIPS/ARM64/Arm64CompFPU.cpp
+++ b/Core/MIPS/ARM64/Arm64CompFPU.cpp
@@ -51,7 +51,7 @@
 
 // All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
 // Currently known non working ones should have DISABLE.
-// #define CONDITIONAL_DISABLE { Comp_Generic(op); return; }
+// #define CONDITIONAL_DISABLE(flag) { Comp_Generic(op); return; }
 #define CONDITIONAL_DISABLE(flag) if (jo.Disabled(JitDisable::flag)) { Comp_Generic(op); return; }
 #define DISABLE { Comp_Generic(op); return; }
 
@@ -102,7 +102,11 @@ void Arm64Jit::Comp_FPULS(MIPSOpcode op)
 		fpr.SpillLock(ft);
 		fpr.MapReg(ft, MAP_NOINIT | MAP_DIRTY);
 		if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+			u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
 			u32 addr = offset + gpr.GetImm(rs);
+#endif
 			gpr.SetRegImm(SCRATCH1, addr);
 		} else {
 			gpr.MapReg(rs);
@@ -129,7 +133,11 @@ void Arm64Jit::Comp_FPULS(MIPSOpcode op)
 
 		fpr.MapReg(ft);
 		if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+			u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
 			u32 addr = offset + gpr.GetImm(rs);
+#endif
 			gpr.SetRegImm(SCRATCH1, addr);
 		} else {
 			gpr.MapReg(rs);
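
The two Comp_FPULS hunks above show the pattern this patch applies everywhere the base register holds a known immediate: the effective address is folded at compile time, and under MASKED_PSP_MEMORY it is also clamped to the PSP's 30-bit address space, so the kernel (0x80000000) and uncached (0x40000000) mirror bits can never push an access outside the mapped guest region. A minimal host-side sketch of that rule, assuming a hypothetical EffectiveAddress helper (not PPSSPP API):

    #include <cstdint>
    #include <cstdio>

    // Stand-in for the constant-folded address computation: rsValue is the
    // known immediate in the base register, offset the signed 16-bit
    // displacement from the instruction word.
    static uint32_t EffectiveAddress(uint32_t rsValue, int16_t offset) {
        uint32_t addr = rsValue + static_cast<uint32_t>(offset);  // sign-extended add
    #ifdef MASKED_PSP_MEMORY
        addr &= 0x3FFFFFFF;  // fold the 0x40000000/0x80000000 mirrors onto one range
    #endif
        return addr;
    }

    int main() {
        // 0x44000000 is VRAM seen through the uncached mirror; with masking
        // enabled this prints 0x04000010, the plain VRAM address.
        printf("0x%08X\n", EffectiveAddress(0x44000000u, 0x10));
        return 0;
    }
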
diff --git a/Core/MIPS/ARM64/Arm64CompLoadStore.cpp b/Core/MIPS/ARM64/Arm64CompLoadStore.cpp
index e322d4a9a7..a4d647d98b 100644
--- a/Core/MIPS/ARM64/Arm64CompLoadStore.cpp
+++ b/Core/MIPS/ARM64/Arm64CompLoadStore.cpp
@@ -41,7 +41,7 @@
 
 // All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
 // Currently known non working ones should have DISABLE.
-// #define CONDITIONAL_DISABLE { Comp_Generic(op); return; }
+//#define CONDITIONAL_DISABLE(flag) { Comp_Generic(op); return; }
 #define CONDITIONAL_DISABLE(flag) if (jo.Disabled(JitDisable::flag)) { Comp_Generic(op); return; }
 #define DISABLE { Comp_Generic(op); return; }
 
@@ -56,6 +56,9 @@ namespace MIPSComp {
 		} else {
 			MOV(SCRATCH1, gpr.R(rs));
 		}
+#ifdef MASKED_PSP_MEMORY
+		ANDI2R(SCRATCH1, SCRATCH1, 0x3FFFFFFF);
+#endif
 	}
 
 	std::vector<FixupBranch> Arm64Jit::SetScratch1ForSafeAddress(MIPSGPReg rs, s16 offset, ARM64Reg tempReg) {
@@ -135,12 +138,17 @@ namespace MIPSComp {
 		std::vector<FixupBranch> skips;
 
 		if (gpr.IsImm(rs) && Memory::IsValidAddress(iaddr)) {
+#ifdef MASKED_PSP_MEMORY
+			u32 addr = iaddr & 0x3FFFFFFF;
+#else
+			u32 addr = iaddr;
+#endif
 			// Need to initialize since this only loads part of the register.
 			// But rs no longer matters (even if rs == rt) since we have the address.
 			gpr.MapReg(rt, load ? MAP_DIRTY : 0);
-			gpr.SetRegImm(SCRATCH1, iaddr & ~3);
+			gpr.SetRegImm(SCRATCH1, addr & ~3);
 
-			u8 shift = (iaddr & 3) * 8;
+			u8 shift = (addr & 3) * 8;
 
 			switch (o) {
 			case 34: // lwl
@@ -347,7 +355,12 @@ namespace MIPSComp {
 		}
 
 		if (gpr.IsImm(rs) && Memory::IsValidAddress(iaddr)) {
-			if (offset == 0) {
+#ifdef MASKED_PSP_MEMORY
+			u32 addr = iaddr & 0x3FFFFFFF;
+#else
+			u32 addr = iaddr;
+#endif
+			if (addr == iaddr && offset == 0) {
 				// It was already safe. Let's shove it into a reg and use it directly.
 				if (targetReg == INVALID_REG) {
 					load ? gpr.MapDirtyIn(rt, rs) : gpr.MapInIn(rt, rs);
@@ -360,7 +373,7 @@ namespace MIPSComp {
 				gpr.MapReg(rt, load ? MAP_NOINIT : 0);
 				targetReg = gpr.R(rt);
 			}
-			gpr.SetRegImm(SCRATCH1, iaddr);
+			gpr.SetRegImm(SCRATCH1, addr);
 			addrReg = SCRATCH1;
 		}
 	} else {
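
The lwl/lwr hunk above applies the same mask before splitting the address into an aligned word pointer (addr & ~3) and a byte shift ((addr & 3) * 8). For reference, the merge the JIT stages for the little-endian lwl case looks like this in plain C++; the function name and flat mem buffer are illustrative, not PPSSPP API:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    // Little-endian lwl, staged the way the constant path above does it:
    // fetch the aligned word (SCRATCH1 = addr & ~3), then merge it into rt
    // with a shift derived from the low two address bits.
    static uint32_t LoadWordLeftLE(const uint8_t *mem, uint32_t addr, uint32_t rt) {
    #ifdef MASKED_PSP_MEMORY
        addr &= 0x3FFFFFFF;  // same clamp the patch adds before the split
    #endif
        uint32_t aligned;
        std::memcpy(&aligned, mem + (addr & ~3u), 4);  // SetRegImm(SCRATCH1, addr & ~3)
        uint32_t shift = (addr & 3u) * 8;              // u8 shift = (addr & 3) * 8
        // Memory fills the high bytes of rt; the low bytes keep their old value.
        return (aligned << (24 - shift)) | (rt & (0x00FFFFFFu >> shift));
    }

    int main() {
        uint8_t ram[4] = {0x11, 0x22, 0x33, 0x44};
        // addr & 3 == 1: two memory bytes land in the top half, printing 0x2211CCDD.
        printf("0x%08X\n", LoadWordLeftLE(ram, 1, 0xAABBCCDDu));
        return 0;
    }
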
diff --git a/Core/MIPS/ARM64/Arm64CompVFPU.cpp b/Core/MIPS/ARM64/Arm64CompVFPU.cpp
index cf67f4d84f..22b618aaea 100644
--- a/Core/MIPS/ARM64/Arm64CompVFPU.cpp
+++ b/Core/MIPS/ARM64/Arm64CompVFPU.cpp
@@ -37,7 +37,7 @@
 
 // All functions should have CONDITIONAL_DISABLE, so we can narrow things down to a file quickly.
 // Currently known non working ones should have DISABLE.
-// #define CONDITIONAL_DISABLE { fpr.ReleaseSpillLocksAndDiscardTemps(); Comp_Generic(op); return; }
+// #define CONDITIONAL_DISABLE(flag) { fpr.ReleaseSpillLocksAndDiscardTemps(); Comp_Generic(op); return; }
 #define CONDITIONAL_DISABLE(flag) if (jo.Disabled(JitDisable::flag)) { Comp_Generic(op); return; }
 #define DISABLE { fpr.ReleaseSpillLocksAndDiscardTemps(); Comp_Generic(op); return; }
 
@@ -222,7 +222,11 @@ namespace MIPSComp {
 			// CC might be set by slow path below, so load regs first.
 			fpr.MapRegV(vt, MAP_DIRTY | MAP_NOINIT);
 			if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+				u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
 				u32 addr = offset + gpr.GetImm(rs);
+#endif
 				gpr.SetRegImm(SCRATCH1, addr);
 			} else {
 				gpr.MapReg(rs);
@@ -251,7 +255,11 @@ namespace MIPSComp {
 			// CC might be set by slow path below, so load regs first.
 			fpr.MapRegV(vt);
 			if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+				u32 addr = (offset + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
 				u32 addr = offset + gpr.GetImm(rs);
+#endif
 				gpr.SetRegImm(SCRATCH1, addr);
 			} else {
 				gpr.MapReg(rs);
@@ -293,7 +301,11 @@ namespace MIPSComp {
 
 			fpr.MapRegsAndSpillLockV(vregs, V_Quad, MAP_DIRTY | MAP_NOINIT);
 			if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+				u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
 				u32 addr = imm + gpr.GetImm(rs);
+#endif
 				gpr.SetRegImm(SCRATCH1_64, addr + (uintptr_t)Memory::base);
 			} else {
 				gpr.MapReg(rs);
@@ -326,7 +338,11 @@ namespace MIPSComp {
 
 			fpr.MapRegsAndSpillLockV(vregs, V_Quad, 0);
 			if (gpr.IsImm(rs)) {
+#ifdef MASKED_PSP_MEMORY
+				u32 addr = (imm + gpr.GetImm(rs)) & 0x3FFFFFFF;
+#else
 				u32 addr = imm + gpr.GetImm(rs);
+#endif
 				gpr.SetRegImm(SCRATCH1_64, addr + (uintptr_t)Memory::base);
 			} else {
 				gpr.MapReg(rs);
diff --git a/Core/MIPS/ARM64/Arm64RegCache.cpp b/Core/MIPS/ARM64/Arm64RegCache.cpp
index dd2c83c302..b089efa309 100644
--- a/Core/MIPS/ARM64/Arm64RegCache.cpp
+++ b/Core/MIPS/ARM64/Arm64RegCache.cpp
@@ -440,6 +440,9 @@ Arm64Gen::ARM64Reg Arm64RegCache::MapRegAsPointer(MIPSGPReg reg) {
 	if (!jo_->enablePointerify) {
 		// Convert to a pointer by adding the base and clearing off the top bits.
 		// If SP, we can probably avoid the top bit clear, let's play with that later.
+#ifdef MASKED_PSP_MEMORY
+		emit_->ANDI2R(EncodeRegTo64(a), EncodeRegTo64(a), 0x3FFFFFFF);
+#endif
 		emit_->ADD(EncodeRegTo64(a), EncodeRegTo64(a), MEMBASEREG);
 		mr[reg].loc = ML_ARMREG_AS_PTR;
 	} else if (!ar[a].pointerified) {
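
The Arm64RegCache.cpp hunk covers the last way a guest address becomes a host pointer: a value already live in a host register gets the membase added directly. Masking before the add keeps even kernel and uncached mirror addresses inside the region actually reserved for guest RAM, which matters on MASKED_PSP_MEMORY builds where the full 32-bit guest space cannot be mapped. In host terms the emitted instruction pair computes roughly the following (GuestToHost is a made-up name; membase stands for the pointer held in MEMBASEREG):

    #include <cstdint>

    // What the MapRegAsPointer sequence computes, expressed host-side.
    static uint8_t *GuestToHost(uint8_t *membase, uint32_t guestAddr) {
    #ifdef MASKED_PSP_MEMORY
        guestAddr &= 0x3FFFFFFF;     // emit_->ANDI2R(reg, reg, 0x3FFFFFFF)
    #endif
        return membase + guestAddr;  // emit_->ADD(reg, reg, MEMBASEREG)
    }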