irjit: Fix safe handling of memory addresses with the kernel bit (0x80000000) set.

This commit is contained in:
Unknown W. Brackets 2023-09-24 10:16:10 -07:00
parent e5df318990
commit 7d0f2e43b6
3 changed files with 39 additions and 17 deletions

View file

@ -80,7 +80,12 @@ Arm64JitBackend::LoadStoreArg Arm64JitBackend::PrepareSrc1Address(IRInst inst) {
// If it's about to be clobbered, don't waste time pointerifying. Use displacement.
bool clobbersSrc1 = !readsFromSrc1 && regs_.IsGPRClobbered(inst.src1);
int32_t imm = (int32_t)inst.constant;
int64_t imm = (int32_t)inst.constant;
// It can't be this negative, must be a constant address with the top bit set.
if ((imm & 0xC0000000) == 0x80000000) {
imm = (uint64_t)(uint32_t)inst.constant;
}
LoadStoreArg addrArg;
if (inst.src1 == MIPS_REG_ZERO) {
// The constant gets applied later.
@ -100,7 +105,7 @@ Arm64JitBackend::LoadStoreArg Arm64JitBackend::PrepareSrc1Address(IRInst inst) {
// Since we can't modify src1, let's just use a temp reg while copying.
if (!addrArg.useRegisterOffset) {
ADDI2R(SCRATCH1, regs_.MapGPR(inst.src1), (s64)imm, SCRATCH2);
ADDI2R(SCRATCH1, regs_.MapGPR(inst.src1), imm, SCRATCH2);
#ifdef MASKED_PSP_MEMORY
ANDI2R(SCRATCH1, SCRATCH1, Memory::MEMVIEW32_MASK, SCRATCH2);
#endif
@ -114,7 +119,7 @@ Arm64JitBackend::LoadStoreArg Arm64JitBackend::PrepareSrc1Address(IRInst inst) {
// The offset gets set later.
addrArg.base = regs_.MapGPRAsPointer(inst.src1);
} else {
ADDI2R(SCRATCH1, regs_.MapGPR(inst.src1), (s64)imm, SCRATCH2);
ADDI2R(SCRATCH1, regs_.MapGPR(inst.src1), imm, SCRATCH2);
#ifdef MASKED_PSP_MEMORY
ANDI2R(SCRATCH1, SCRATCH1, Memory::MEMVIEW32_MASK, SCRATCH2);
#endif
@ -137,15 +142,15 @@ Arm64JitBackend::LoadStoreArg Arm64JitBackend::PrepareSrc1Address(IRInst inst) {
int scale = IROpToByteWidth(inst.op);
if (imm > 0 && (imm & (scale - 1)) == 0 && imm <= 0xFFF * scale) {
// Okay great, use the LDR/STR form.
addrArg.immOffset = imm;
addrArg.immOffset = (int)imm;
addrArg.useUnscaled = false;
} else if (imm >= -256 && imm < 256) {
// An unscaled offset (LDUR/STUR) should work fine for this range.
addrArg.immOffset = imm;
addrArg.immOffset = (int)imm;
addrArg.useUnscaled = true;
} else {
// No luck, we'll need to load into a register.
MOVI2R(SCRATCH1, (s64)imm);
MOVI2R(SCRATCH1, imm);
addrArg.regOffset = SCRATCH1;
addrArg.useRegisterOffset = true;
addrArg.signExtendRegOffset = true;

View file

@ -59,8 +59,19 @@ int32_t RiscVJitBackend::AdjustForAddressOffset(RiscVGen::RiscVReg *reg, int32_t
if (constant > 0)
constant &= Memory::MEMVIEW32_MASK;
#endif
LI(SCRATCH2, constant);
ADD(SCRATCH1, *reg, SCRATCH2);
// It can't be this negative, must be a constant with top bit set.
if ((constant & 0xC0000000) == 0x80000000) {
if (cpu_info.RiscV_Zba) {
LI(SCRATCH2, constant);
ADD_UW(SCRATCH1, SCRATCH2, *reg);
} else {
LI(SCRATCH2, (uint32_t)constant);
ADD(SCRATCH1, *reg, SCRATCH2);
}
} else {
LI(SCRATCH2, constant);
ADD(SCRATCH1, *reg, SCRATCH2);
}
*reg = SCRATCH1;
return 0;
}

View file

@ -45,35 +45,41 @@ Gen::OpArg X64JitBackend::PrepareSrc1Address(IRInst inst) {
// If it's about to be clobbered, don't waste time pointerifying. Use displacement.
bool clobbersSrc1 = !readsFromSrc1 && regs_.IsGPRClobbered(inst.src1);
int32_t disp = (int32_t)inst.constant;
// It can't be this negative, must be a constant address with the top bit set.
if ((disp & 0xC0000000) == 0x80000000) {
disp = inst.constant & 0x7FFFFFFF;
}
#ifdef MASKED_PSP_MEMORY
if (inst.constant > 0)
inst.constant &= Memory::MEMVIEW32_MASK;
if (disp > 0)
disp &= Memory::MEMVIEW32_MASK;
#endif
OpArg addrArg;
if (inst.src1 == MIPS_REG_ZERO) {
#ifdef MASKED_PSP_MEMORY
inst.constant &= Memory::MEMVIEW32_MASK;
disp &= Memory::MEMVIEW32_MASK;
#endif
#if PPSSPP_ARCH(AMD64)
addrArg = MDisp(MEMBASEREG, inst.constant & 0x7FFFFFFF);
addrArg = MDisp(MEMBASEREG, disp & 0x7FFFFFFF);
#else
addrArg = M(Memory::base + inst.constant);
addrArg = M(Memory::base + disp);
#endif
} else if ((jo.cachePointers || src1IsPointer) && !readsFromSrc1 && (!clobbersSrc1 || src1IsPointer)) {
X64Reg src1 = regs_.MapGPRAsPointer(inst.src1);
addrArg = MDisp(src1, (int)inst.constant);
addrArg = MDisp(src1, disp);
} else {
regs_.MapGPR(inst.src1);
#ifdef MASKED_PSP_MEMORY
LEA(PTRBITS, SCRATCH1, MDisp(regs_.RX(inst.src1), (int)inst.constant));
LEA(PTRBITS, SCRATCH1, MDisp(regs_.RX(inst.src1), disp));
AND(PTRBITS, R(SCRATCH1), Imm32(Memory::MEMVIEW32_MASK));
addrArg = MDisp(SCRATCH1, (intptr_t)Memory::base);
#else
#if PPSSPP_ARCH(AMD64)
addrArg = MComplex(MEMBASEREG, regs_.RX(inst.src1), SCALE_1, (int)inst.constant);
addrArg = MComplex(MEMBASEREG, regs_.RX(inst.src1), SCALE_1, disp);
#else
addrArg = MDisp(regs_.RX(inst.src1), Memory::base + inst.constant);
addrArg = MDisp(regs_.RX(inst.src1), Memory::base + disp);
#endif
#endif
}