mirror of
https://github.com/RKX1209/nsemu.git
synced 2024-06-23 14:43:16 -04:00
Add support for data processing with 1 src operation
This commit is contained in:
parent
806ac7dabf
commit
8331633c7f
|
@ -570,6 +570,38 @@ static void DisasCondSel(uint32_t insn, DisasCallback *cb) {
|
|||
cb->CondMovReg (cond, rd, rn, rm);
|
||||
}
|
||||
|
||||
static void DisasDataProc1src(uint32_t insn, DisasCallback *cb) {
|
||||
if (extract32(insn, 29, 1) || extract32(insn, 16, 5)) {
|
||||
UnallocatedOp (insn);
|
||||
return;
|
||||
}
|
||||
unsigned int sf = extract32(insn, 31, 1);
|
||||
unsigned int opcode = extract32(insn, 10, 6);
|
||||
unsigned int rn = extract32(insn, 5, 5);
|
||||
unsigned int rd = extract32(insn, 0, 5);
|
||||
|
||||
switch (opcode) {
|
||||
case 0: /* RBIT */
|
||||
cb->RevBit (rd, rn, sf);
|
||||
break;
|
||||
case 1: /* REV16 */
|
||||
cb->RevByte16 (rd, rn, sf);
|
||||
break;
|
||||
case 2: /* REV32 */
|
||||
cb->RevByte32 (rd, rn, sf);
|
||||
break;
|
||||
case 3: /* REV64 */
|
||||
cb->RevByte64 (rd, rn, sf);
|
||||
break;
|
||||
case 4: /* CLZ */
|
||||
cb->CntLeadZero (rd, rn, sf);
|
||||
break;
|
||||
case 5: /* CLS */
|
||||
cb->CntLeadSign (rd, rn, sf);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
static void DisasDataProcReg(uint32_t insn, DisasCallback *cb) {
|
||||
switch (extract32(insn, 24, 5)) {
|
||||
case 0x0a: /* Logical (shifted register) */
|
||||
|
@ -598,7 +630,7 @@ static void DisasDataProcReg(uint32_t insn, DisasCallback *cb) {
|
|||
break;
|
||||
case 0x6: /* Data-processing */
|
||||
if (insn & (1 << 30)) { /* (1 source) */
|
||||
|
||||
DisasDataProc1src (insn, cb);
|
||||
} else { /* (2 source) */
|
||||
|
||||
}
|
||||
|
|
|
@ -115,6 +115,20 @@ static uint64_t RotateRight(uint64_t val, uint64_t rot) {
|
|||
return left | (val >> rot);
|
||||
}
|
||||
|
||||
/* Reverse the order of all 64 bits of x (bit 0 <-> bit 63, etc.). */
static uint64_t ReverseBit64(uint64_t x) {
	/* Classic swap network, finest granularity first: swap adjacent
	 * bits, then 2-bit pairs, then nibbles, and finish with a whole
	 * byte swap. Each stage commutes with the others, so the overall
	 * result is a full bit reversal. */
	x = ((x >> 1) & 0x5555555555555555ull)
	  | ((x & 0x5555555555555555ull) << 1);
	x = ((x >> 2) & 0x3333333333333333ull)
	  | ((x & 0x3333333333333333ull) << 2);
	x = ((x >> 4) & 0x0f0f0f0f0f0f0f0full)
	  | ((x & 0x0f0f0f0f0f0f0f0full) << 4);
	return byte_swap64_uint(x);
}
|
||||
|
||||
static uint64_t ALCalc(uint64_t arg1, uint64_t arg2, OpType op) {
|
||||
if (op == AL_TYPE_ADD)
|
||||
return arg1 + arg2;
|
||||
|
@ -297,6 +311,61 @@ void IntprCallback::UExtractI64(unsigned int rd_idx, unsigned int rn_idx, unsign
|
|||
/* TODO: */
|
||||
}
|
||||
|
||||
/* Reverse bit order */
|
||||
void IntprCallback::RevBit(unsigned int rd_idx, unsigned int rn_idx, bool bit64) {
|
||||
if (bit64)
|
||||
X(rd_idx) = ReverseBit64 (X(rn_idx));
|
||||
else
|
||||
W(rd_idx) = ReverseBit64 (W(rn_idx));
|
||||
}
|
||||
/* Reverse byte order per 16bit */
|
||||
void IntprCallback::RevByte16(unsigned int rd_idx, unsigned int rn_idx, bool bit64) {
|
||||
if (bit64)
|
||||
X(rd_idx) = byte_swap16_uint (X(rn_idx));
|
||||
else
|
||||
W(rd_idx) = byte_swap16_uint (W(rn_idx));
|
||||
}
|
||||
/* Reverse byte order per 32bit */
|
||||
void IntprCallback::RevByte32(unsigned int rd_idx, unsigned int rn_idx, bool bit64) {
|
||||
if (bit64)
|
||||
X(rd_idx) = byte_swap32_uint (X(rn_idx));
|
||||
else
|
||||
W(rd_idx) = byte_swap32_uint (W(rn_idx));
|
||||
}
|
||||
/* Reverse byte order per 64bit */
|
||||
void IntprCallback::RevByte64(unsigned int rd_idx, unsigned int rn_idx, bool bit64) {
|
||||
X(rd_idx) = byte_swap64_uint (X(rn_idx));
|
||||
}
|
||||
|
||||
/* Count leading zeros of the low 32 bits of val; returns 32 for 0. */
static inline unsigned int Clz32(uint64_t val) {
	/* __builtin_clz() takes an unsigned int and is undefined for 0.
	 * The old "val ?" guard tested all 64 bits, so a value with a
	 * nonzero high word but zero low word reached __builtin_clz(0).
	 * Truncate first, then guard on the truncated value. */
	uint32_t v = (uint32_t) val;
	return v ? (unsigned int) __builtin_clz(v) : 32;
}
|
||||
/* Count leading zeros of a 64-bit value; returns 64 for 0. */
static inline unsigned int Clz64(uint64_t val) {
	/* __builtin_clzll(0) is undefined, so handle zero up front. */
	if (val == 0)
		return 64;
	return (unsigned int) __builtin_clzll(val);
}
|
||||
|
||||
/* Count leading sign bits (excluding the sign bit itself) of the low
 * 32 bits of val — the A64 CLS semantics, so CLS32(0) == 31. */
static inline unsigned int Cls32(uint64_t val) {
	/* The old "val ? ... : 32" guard was wrong: the architectural
	 * result for 0 is 31, which __builtin_clrsb(0) already returns,
	 * so no guard is needed. Truncate to the 32-bit operand
	 * explicitly before the int-typed builtin sees it. */
	return (unsigned int) __builtin_clrsb((int32_t) (uint32_t) val);
}
|
||||
/* Count leading sign bits (excluding the sign bit itself) of a 64-bit
 * value — the A64 CLS semantics, so Cls64(0) == 63. */
static inline unsigned int Cls64(uint64_t val) {
	/* __builtin_clrsbll(0) is well-defined and returns 63, which is
	 * the architecturally correct result; the old guard returned 64. */
	return (unsigned int) __builtin_clrsbll((int64_t) val);
}
|
||||
|
||||
/* Count Leading Zeros */
|
||||
void IntprCallback::CntLeadZero(unsigned int rd_idx, unsigned int rn_idx, bool bit64) {
|
||||
if (bit64)
|
||||
X(rd_idx) = Clz64 (X(rn_idx));
|
||||
else
|
||||
W(rd_idx) = Clz32 (W(rn_idx));
|
||||
}
|
||||
/* Count Leading Signed bits */
|
||||
void IntprCallback::CntLeadSign(unsigned int rd_idx, unsigned int rn_idx, bool bit64) {
|
||||
if (bit64)
|
||||
X(rd_idx) = Cls64 (X(rn_idx));
|
||||
else
|
||||
W(rd_idx) = Cls32 (W(rn_idx));
|
||||
}
|
||||
|
||||
/* Conditional compare... with Immediate value */
|
||||
void IntprCallback::CondCmpI64(unsigned int rn_idx, unsigned int imm, unsigned int nzcv, unsigned int cond, unsigned int op, bool bit64) {
|
||||
if (CondHold (cond)) {
|
||||
|
|
|
@ -45,6 +45,19 @@ virtual void ExtendReg(unsigned int rd_idx, unsigned int rn_idx, unsigned int ex
|
|||
virtual void SExtractI64(unsigned int rd_idx, unsigned int rn_idx, unsigned int pos, unsigned int len, bool bit64) = 0;
|
||||
virtual void UExtractI64(unsigned int rd_idx, unsigned int rn_idx, unsigned int pos, unsigned int len, bool bit64) = 0;
|
||||
|
||||
/* Reverse bit order */
|
||||
virtual void RevBit(unsigned int rd_idx, unsigned int rn_idx, bool bit64) = 0;
|
||||
/* Reverse byte order per 16bit */
|
||||
virtual void RevByte16(unsigned int rd_idx, unsigned int rn_idx, bool bit64) = 0;
|
||||
/* Reverse byte order per 32bit */
|
||||
virtual void RevByte32(unsigned int rd_idx, unsigned int rn_idx, bool bit64) = 0;
|
||||
/* Reverse byte order per 64bit */
|
||||
virtual void RevByte64(unsigned int rd_idx, unsigned int rn_idx, bool bit64) = 0;
|
||||
/* Count Leading Zeros */
|
||||
virtual void CntLeadZero(unsigned int rd_idx, unsigned int rn_idx, bool bit64) = 0;
|
||||
/* Count Leading Signed bits */
|
||||
virtual void CntLeadSign(unsigned int rd_idx, unsigned int rn_idx, bool bit64) = 0;
|
||||
|
||||
/* Conditional compare... with Immediate value */
|
||||
virtual void CondCmpI64(unsigned int rn_idx, unsigned int imm, unsigned int nzcv, unsigned int cond, unsigned int op, bool bit64) = 0;
|
||||
|
||||
|
|
|
@ -45,6 +45,19 @@ void ExtendReg(unsigned int rd_idx, unsigned int rn_idx, unsigned int extend_typ
|
|||
void SExtractI64(unsigned int rd_idx, unsigned int rn_idx, unsigned int pos, unsigned int len, bool bit64);
|
||||
void UExtractI64(unsigned int rd_idx, unsigned int rn_idx, unsigned int pos, unsigned int len, bool bit64);
|
||||
|
||||
/* Reverse bit order */
|
||||
void RevBit(unsigned int rd_idx, unsigned int rn_idx, bool bit64);
|
||||
/* Reverse byte order per 16bit */
|
||||
void RevByte16(unsigned int rd_idx, unsigned int rn_idx, bool bit64);
|
||||
/* Reverse byte order per 32bit */
|
||||
void RevByte32(unsigned int rd_idx, unsigned int rn_idx, bool bit64);
|
||||
/* Reverse byte order per 64bit */
|
||||
void RevByte64(unsigned int rd_idx, unsigned int rn_idx, bool bit64);
|
||||
/* Count Leading Zeros */
|
||||
void CntLeadZero(unsigned int rd_idx, unsigned int rn_idx, bool bit64);
|
||||
/* Count Leading Signed bits */
|
||||
void CntLeadSign(unsigned int rd_idx, unsigned int rn_idx, bool bit64);
|
||||
|
||||
/* Conditional compare... with Immediate value */
|
||||
void CondCmpI64(unsigned int rn_idx, unsigned int imm, unsigned int nzcv, unsigned int cond, unsigned int op, bool bit64);
|
||||
/* Conditional compare... between registers */
|
||||
|
|
|
@ -47,6 +47,11 @@ inline void bindump(uint8_t *ptr, size_t size) {
|
|||
}
|
||||
}
|
||||
|
||||
/* Swap the two bytes of a 16-bit value (endianness flip). */
inline uint16_t byte_swap16_uint(uint16_t b) {
	/* The cast back to uint16_t discards the bits that b << 8 pushes
	 * above bit 15 after integer promotion. */
	return (uint16_t) ((b << 8) | (b >> 8));
}
|
||||
|
||||
/* Reverse the byte order of a 32-bit value (endianness flip). */
inline uint32_t byte_swap32_uint(uint32_t b) {
	/* Swap the 16-bit halves, then the bytes within each half. */
	b = (b >> 16) | (b << 16);
	return ((b & 0x00ff00ffU) << 8) | ((b & 0xff00ff00U) >> 8);
}
|
||||
|
@ -55,6 +60,17 @@ inline uint32_t byte_swap32_str(const char *b) {
|
|||
return ((b[3]) << 24) | ((b[2]) << 16) | ((b[1]) << 8) | (b[0]);
|
||||
}
|
||||
|
||||
/* Reverse the byte order of a 64-bit value (endianness flip). */
inline uint64_t byte_swap64_uint(uint64_t b) {
	/* Swap-network form: adjacent bytes, then 16-bit pairs, then the
	 * two 32-bit halves — equivalent to moving each byte directly to
	 * its mirrored position. */
	b = ((b & 0x00ff00ff00ff00ffULL) << 8)
	  | ((b & 0xff00ff00ff00ff00ULL) >> 8);
	b = ((b & 0x0000ffff0000ffffULL) << 16)
	  | ((b & 0xffff0000ffff0000ULL) >> 16);
	return (b << 32) | (b >> 32);
}
|
||||
|
||||
static inline uint32_t extract32(uint32_t bitfield, int from, int len) {
|
||||
assert (from >= 0 && len > 0 && from + len <= 32);
|
||||
return (bitfield >> from) & (~0U >> (32 - len));
|
||||
|
|
Loading…
Reference in a new issue