| author | Yang Liu <liuyang22@iscas.ac.cn> | 2025-06-24 22:34:49 +0800 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2025-06-24 16:34:49 +0200 |
| commit | 5e9972f2d383f77e04b55cad9ff2a9d2622512bb (patch) | |
| tree | d844471f83974542fbec11ead8721a75fe4ec5fa /src | |
| parent | acc0d451bb83be5c8e349b497a261742bf8cd6f5 (diff) | |
| download | box64-5e9972f2d383f77e04b55cad9ff2a9d2622512bb.tar.gz box64-5e9972f2d383f77e04b55cad9ff2a9d2622512bb.zip | |
[DYNACACHE][LA64] Added const table for later use in internal relocation (#2770)
Diffstat (limited to 'src')
| Mode | File | Lines |
|---|---|---|
| -rw-r--r-- | src/dynarec/dynacache_reloc.c | 8 |
| -rw-r--r-- | src/dynarec/dynacache_reloc.h | 11 |
| -rw-r--r-- | src/dynarec/dynarec_next.h | 2 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_00.c | 32 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_0f.c | 32 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_66.c | 10 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_660f.c | 18 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_consts.c | 141 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_consts.h | 115 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_helper.c | 8 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_helper.h | 7 |
| -rw-r--r-- | src/dynarec/la64/dynarec_la64_private.h | 2 |
12 files changed, 317 insertions, 69 deletions
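
This patch replaces raw function pointers at the LA64 dynarec's CALL()/CALL_() sites with values of a new `la64_consts_t` enum, resolved through `getConst()` when code is emitted (see `call_c()` in `dynarec_la64_helper.c` in the diff below). Recording a stable enum ID instead of an absolute address is what makes DynaCache blocks relocatable: a block saved by one run can have its call targets re-resolved against the addresses of the next run. The following is a minimal standalone sketch of that indirection; the `demo_*` helpers and the `main()` driver are illustrative stand-ins, not box64 code.

```c
#include <stdint.h>
#include <stdio.h>

/* Illustrative stand-ins for the native helpers that JIT'd code calls. */
static void demo_native_next(void) { puts("native_next"); }
static void demo_native_priv(void) { puts("native_priv"); }

/* Enum IDs are stable across runs; the helpers' addresses are not. */
typedef enum demo_consts_s {
    const_none,
    const_native_next,
    const_native_priv,
    const_last
} demo_consts_t;

/* Resolve an ID to the address that is valid in the current process. */
static uintptr_t getConst(demo_consts_t which)
{
    switch (which) {
        case const_native_next: return (uintptr_t)demo_native_next;
        case const_native_priv: return (uintptr_t)demo_native_priv;
        default:                return 0;
    }
}

int main(void)
{
    /* Emission time: look the helper up by ID and plant its address.
       Reload time: the relocation table stores the same ID, so the
       planted literal can be re-patched with this run's address. */
    void (*fn)(void) = (void (*)(void))getConst(const_native_priv);
    fn();
    return 0;
}
```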
diff --git a/src/dynarec/dynacache_reloc.c b/src/dynarec/dynacache_reloc.c index 3184ad13..d3d4fa0b 100644 --- a/src/dynarec/dynacache_reloc.c +++ b/src/dynarec/dynacache_reloc.c @@ -241,11 +241,3 @@ uintptr_t RelocGetNext() { return getConst(const_native_next); } - -#ifdef FAKE_GETCONST -inline uintptr_t getConst(native_consts_t which) -{ - (void)which; - return 0; // dummy -} -#endif \ No newline at end of file diff --git a/src/dynarec/dynacache_reloc.h b/src/dynarec/dynacache_reloc.h index dc86d8ac..63b8b772 100644 --- a/src/dynarec/dynacache_reloc.h +++ b/src/dynarec/dynacache_reloc.h @@ -7,14 +7,11 @@ #elif defined(RV64) #include "dynarec/rv64/dynarec_rv64_consts.h" #define native_consts_t rv64_consts_t +#elif defined(LA64) +#include "dynarec/la64/dynarec_la64_consts.h" +#define native_consts_t la64_consts_t #else -typedef enum native_consts_s { - const_none, - const_native_next, - const_last -} native_consts_t; -#define FAKE_GETCONST -uintptr_t getConst(native_consts_t which); +#error Unsupported architecture #endif void AddRelocTable64Const(dynarec_native_t* dyn, int ninst, native_consts_t C, int pass); diff --git a/src/dynarec/dynarec_next.h b/src/dynarec/dynarec_next.h index ded0800d..301ec4a5 100644 --- a/src/dynarec/dynarec_next.h +++ b/src/dynarec/dynarec_next.h @@ -12,9 +12,11 @@ void arm64_epilog(void) EXPORTDYN; void la64_next(void) EXPORTDYN; void la64_prolog(x64emu_t* emu, void* addr) EXPORTDYN; void la64_epilog(void) EXPORTDYN; +void la64_epilog_fast(void) EXPORTDYN; #define native_next la64_next #define native_prolog la64_prolog #define native_epilog la64_epilog +#define native_epilog_fast la64_epilog_fast #elif defined(RV64) void rv64_next(void) EXPORTDYN; void rv64_prolog(x64emu_t* emu, void* addr) EXPORTDYN; diff --git a/src/dynarec/la64/dynarec_la64_00.c b/src/dynarec/la64/dynarec_la64_00.c index 9ec6d4b1..30212e92 100644 --- a/src/dynarec/la64/dynarec_la64_00.c +++ b/src/dynarec/la64/dynarec_la64_00.c @@ -671,7 +671,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni } GETIP(ip); STORE_XEMU_CALL(); - CALL(native_priv, -1); + CALL(const_native_priv, -1); LOAD_XEMU_CALL(); jump_to_epilog(dyn, 0, xRIP, ninst); *need_epilog = 0; @@ -687,7 +687,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni } GETIP(ip); STORE_XEMU_CALL(); - CALL(native_priv, -1); + CALL(const_native_priv, -1); LOAD_XEMU_CALL(); jump_to_epilog(dyn, 0, xRIP, ninst); *need_epilog = 0; @@ -1671,7 +1671,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni GETEB(x1, 1); u8 = F8; MOV32w(x2, u8); - CALL_(rol8, ed, x3); + CALL_(const_rol8, ed, x3); EBBACK(); break; case 4: @@ -1982,7 +1982,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni GETIP(ip + 1); // read the 0xCC STORE_XEMU_CALL(); ADDI_D(x1, xEmu, (uint32_t)offsetof(x64emu_t, ip)); // setup addr as &emu->ip - CALL_S(EmuInt3, -1); + CALL_S(const_int3, -1); LOAD_XEMU_CALL(); addr += 8 + 8; TABLE64(x3, addr); // expected return address @@ -2002,7 +2002,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni BEQZ_MARK(x3); GETIP(addr); STORE_XEMU_CALL(); - CALL(native_int3, -1); + CALL(const_native_int3, -1); LOAD_XEMU_CALL(); MARK; jump_to_epilog(dyn, addr, 0, ninst); @@ -2020,7 +2020,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni GETIP(ip); // priviledged instruction, IP not updated STORE_XEMU_CALL(); MOV32w(x1, u8); - CALL(native_int, -1); + 
CALL(const_native_int, -1); LOAD_XEMU_CALL(); } else if (u8 == 0x80) { INST_NAME("32bits SYSCALL"); @@ -2028,7 +2028,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni SMEND(); GETIP(addr); STORE_XEMU_CALL(); - CALL_S(EmuX86Syscall, -1); + CALL_S(const_x86syscall, -1); LOAD_XEMU_CALL(); TABLE64(x3, addr); // expected return address BNE_MARK(xRIP, x3); @@ -2046,7 +2046,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni } GETIP(addr); STORE_XEMU_CALL(); - CALL(native_int3, -1); + CALL(const_native_int3, -1); LOAD_XEMU_CALL(); jump_to_epilog(dyn, 0, xRIP, ninst); *need_epilog = 0; @@ -2060,7 +2060,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni } GETIP(ip); // priviledged instruction, IP not updated STORE_XEMU_CALL(); - CALL(native_priv, -1); + CALL(const_native_priv, -1); LOAD_XEMU_CALL(); jump_to_epilog(dyn, 0, xRIP, ninst); *need_epilog = 0; @@ -2091,7 +2091,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni } MESSAGE(LOG_DUMP, "Need Optimization\n"); SETFLAGS(X_OF | X_CF, SF_SET_DF, NAT_FLAGS_NOFUSION); - CALL_(rol8, ed, x3); + CALL_(const_rol8, ed, x3); EBBACK(); break; case 4: @@ -2173,7 +2173,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni SETFLAGS(X_OF | X_CF, SF_SET_DF, NAT_FLAGS_NOFUSION); MOV32w(x2, 1); GETEDW(x4, x1, 0); - CALL_(rex.w ? ((void*)rcr64) : ((void*)rcr32), ed, x4); + CALL_(rex.w ? const_rcr64 : const_rcr32, ed, x4); WBACK; if (!wback && !rex.w) ZEROUP(ed); break; @@ -2409,7 +2409,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni GETIP_(dyn->insts[ninst].natcall); // read the 0xCC already STORE_XEMU_CALL(); ADDI_D(x1, xEmu, (uint32_t)offsetof(x64emu_t, ip)); // setup addr as &emu->ip - CALL_S(EmuInt3, -1); + CALL_S(const_int3, -1); LOAD_XEMU_CALL(); TABLE64(x3, dyn->insts[ninst].natcall); ADDI_D(x3, x3, 2 + 8 + 8); @@ -2537,7 +2537,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni } GETIP(ip); STORE_XEMU_CALL(); - CALL(native_priv, -1); + CALL(const_native_priv, -1); LOAD_XEMU_CALL(); jump_to_epilog(dyn, 0, xRIP, ninst); *need_epilog = 0; @@ -2603,7 +2603,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni MESSAGE(LOG_DUMP, "Need Optimization\n"); SETFLAGS(X_ALL, SF_SET_DF, NAT_FLAGS_NOFUSION); GETEB(x1, 0); - CALL(div8, -1); + CALL(const_div8, -1); break; default: DEFAULT; @@ -2714,7 +2714,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni GETEDH(x4, x1, 0); // get edd changed addr, so cannot be called 2 times for same op... 
BEQ_MARK(xRDX, xZR); if (ed != x1) { MV(x1, ed); } - CALL(div64, -1); + CALL(const_div64, -1); B_NEXT_nocond; MARK; DIV_DU(x2, xRAX, ed); @@ -2760,7 +2760,7 @@ uintptr_t dynarec64_00(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni BLT_MARK(xRAX, xZR); MARK3; if (ed != x1) MV(x1, ed); - CALL((void*)idiv64, -1); + CALL(const_idiv64, -1); B_NEXT_nocond; MARK; DIV_D(x2, xRAX, ed); diff --git a/src/dynarec/la64/dynarec_la64_0f.c b/src/dynarec/la64/dynarec_la64_0f.c index 18c27852..a8601aa7 100644 --- a/src/dynarec/la64/dynarec_la64_0f.c +++ b/src/dynarec/la64/dynarec_la64_0f.c @@ -88,7 +88,7 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni SMEND(); GETIP(addr); STORE_XEMU_CALL(); - CALL_S(EmuX64Syscall, -1); + CALL_S(const_x64syscall, -1); LOAD_XEMU_CALL(); TABLE64(x3, addr); // expected return address BNE_MARK(xRIP, x3); @@ -107,7 +107,7 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni } GETIP(ip); STORE_XEMU_CALL(); - CALL(native_ud, -1); + CALL(const_native_ud, -1); LOAD_XEMU_CALL(); jump_to_epilog(dyn, 0, xRIP, ninst); *need_epilog = 0; @@ -420,7 +420,7 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni INST_NAME("RDTSC"); NOTEST(x1); if (box64_rdtsc) { - CALL(ReadTSC, x3); // will return the u64 in x3 + CALL(const_readtsc, x3); // will return the u64 in x3 } else { RDTIME_D(x3, xZR); } @@ -636,22 +636,22 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni sse_reflect_reg(dyn, ninst, 0); switch (u8) { case 0xC8: - CALL(sha1nexte, -1); + CALL(const_sha1nexte, -1); break; case 0xC9: - CALL(sha1msg1, -1); + CALL(const_sha1msg1, -1); break; case 0xCA: - CALL(sha1msg2, -1); + CALL(const_sha1msg2, -1); break; case 0xCB: - CALL(sha256rnds2, -1); + CALL(const_sha256rnds2, -1); break; case 0xCC: - CALL(sha256msg1, -1); + CALL(const_sha256msg1, -1); break; case 0xCD: - CALL(sha256msg2, -1); + CALL(const_sha256msg2, -1); break; } break; @@ -723,7 +723,7 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni sse_forget_reg(dyn, ninst, gd); ADDI_D(x1, xEmu, offsetof(x64emu_t, xmm[gd])); MOV32w(x3, u8); - CALL(sha1rnds4, -1); + CALL(const_sha1rnds4, -1); break; default: DEFAULT; @@ -1355,7 +1355,7 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni INST_NAME("CPUID"); NOTEST(x1); MV(A1, xRAX); - CALL_(my_cpuid, -1, 0); + CALL_(const_cpuid, -1, 0); // BX and DX are not synchronized durring the call, so need to force the update LD_D(xRDX, xEmu, offsetof(x64emu_t, regs[_DX])); LD_D(xRBX, xEmu, offsetof(x64emu_t, regs[_BX])); @@ -1484,7 +1484,7 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni } else { addr = geted(dyn, addr, ninst, nextop, &ed, x1, x3, &fixedaddress, rex, NULL, 0, 0); if (ed != x1) { MV(x1, ed); } - CALL(rex.is32bits ? ((void*)fpu_fxsave32) : ((void*)fpu_fxsave64), -1); + CALL(rex.is32bits ? const_fpu_fxsave32 : const_fpu_fxsave64, -1); } break; case 1: @@ -1494,7 +1494,7 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni fpu_purgecache(dyn, ninst, 0, x1, x2, x3); addr = geted(dyn, addr, ninst, nextop, &ed, x1, x3, &fixedaddress, rex, NULL, 0, 0); if (ed != x1) { MV(x1, ed); } - CALL(rex.is32bits ? ((void*)fpu_fxrstor32) : ((void*)fpu_fxrstor64), -1); + CALL(rex.is32bits ? 
const_fpu_fxrstor32 : const_fpu_fxrstor64, -1); break; case 2: INST_NAME("LDMXCSR Md"); @@ -1517,7 +1517,7 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni addr = geted(dyn, addr, ninst, nextop, &ed, x1, x2, &fixedaddress, rex, NULL, 0, 0); if (ed != x1) { MV(x1, ed); } MOV32w(x2, rex.w ? 0 : 1); - CALL((void*)fpu_xsave, -1); + CALL(const_fpu_xsave, -1); break; case 5: INST_NAME("XRSTOR Ed"); @@ -1526,14 +1526,14 @@ uintptr_t dynarec64_0F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni addr = geted(dyn, addr, ninst, nextop, &ed, x1, x2, &fixedaddress, rex, NULL, 0, 0); if (ed != x1) { MV(x1, ed); } MOV32w(x2, rex.w ? 0 : 1); - CALL((void*)fpu_xrstor, -1); + CALL(const_fpu_xrstor, -1); break; case 7: INST_NAME("CLFLUSH Ed"); MESSAGE(LOG_DUMP, "Need Optimization?\n"); addr = geted(dyn, addr, ninst, nextop, &ed, x1, x2, &fixedaddress, rex, NULL, 0, 0); if (ed != x1) { MV(x1, ed); } - CALL_(native_clflush, -1, 0); + CALL_(const_native_clflush, -1, 0); break; default: DEFAULT; diff --git a/src/dynarec/la64/dynarec_la64_66.c b/src/dynarec/la64/dynarec_la64_66.c index ce5a1845..b4d1e50d 100644 --- a/src/dynarec/la64/dynarec_la64_66.c +++ b/src/dynarec/la64/dynarec_la64_66.c @@ -782,7 +782,7 @@ uintptr_t dynarec64_66(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni GETEW(x1, 1); u8 = F8; MOV32w(x2, u8); - CALL_(rol16, x1, x3); + CALL_(const_rol16, x1, x3); EWBACK; break; case 1: @@ -792,7 +792,7 @@ uintptr_t dynarec64_66(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni GETEW(x1, 1); u8 = F8; MOV32w(x2, u8); - CALL_(ror16, x1, x3); + CALL_(const_ror16, x1, x3); EWBACK; break; case 4: @@ -872,7 +872,7 @@ uintptr_t dynarec64_66(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni SETFLAGS(X_OF | X_CF, SF_SET_DF, NAT_FLAGS_NOFUSION); if (BOX64DRENV(dynarec_safeflags) > 1) MAYSETFLAGS(); GETEW(x1, 1); - CALL_(rol16, x1, x3); + CALL_(const_rol16, x1, x3); EWBACK; break; case 5: @@ -970,7 +970,7 @@ uintptr_t dynarec64_66(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni BNE_MARK3(ed, xZR); GETIP_(ip); STORE_XEMU_CALL(); - CALL(native_div0, -1); + CALL(const_native_div0, -1); CLEARIP(); LOAD_XEMU_CALL(); jump_to_epilog(dyn, 0, xRIP, ninst); @@ -991,7 +991,7 @@ uintptr_t dynarec64_66(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int ni BNE_MARK3(ed, xZR); GETIP_(ip); STORE_XEMU_CALL(); - CALL(native_div0, -1); + CALL(const_native_div0, -1); CLEARIP(); LOAD_XEMU_CALL(); jump_to_epilog(dyn, 0, xRIP, ninst); diff --git a/src/dynarec/la64/dynarec_la64_660f.c b/src/dynarec/la64/dynarec_la64_660f.c index 8485057b..4eb11299 100644 --- a/src/dynarec/la64/dynarec_la64_660f.c +++ b/src/dynarec/la64/dynarec_la64_660f.c @@ -801,7 +801,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int } sse_forget_reg(dyn, ninst, gd); MOV32w(x1, gd); - CALL(native_aesimc, -1); + CALL(const_native_aesimc, -1); break; case 0xDC: INST_NAME("AESENC Gx, Ex"); // AES-NI @@ -815,7 +815,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int d0 = -1; sse_forget_reg(dyn, ninst, gd); MOV32w(x1, gd); - CALL(native_aese, -1); + CALL(const_native_aese, -1); GETGX(q0, 1); VXOR_V(q0, q0, (d0 != -1) ? d0 : q1); break; @@ -831,7 +831,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int d0 = -1; sse_forget_reg(dyn, ninst, gd); MOV32w(x1, gd); - CALL(native_aeselast, -1); + CALL(const_native_aeselast, -1); GETGX(q0, 1); VXOR_V(q0, q0, (d0 != -1) ? 
d0 : q1); break; @@ -847,7 +847,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int d0 = -1; sse_forget_reg(dyn, ninst, gd); MOV32w(x1, gd); - CALL(native_aesd, -1); + CALL(const_native_aesd, -1); GETGX(q0, 1); VXOR_V(q0, q0, (d0 != -1) ? d0 : q1); break; @@ -863,7 +863,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int d0 = -1; sse_forget_reg(dyn, ninst, gd); MOV32w(x1, gd); - CALL(native_aesdlast, -1); + CALL(const_native_aesdlast, -1); GETGX(q0, 1); VXOR_V(q0, q0, (d0 != -1) ? d0 : q1); break; @@ -1262,7 +1262,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int } u8 = F8; MOV32w(x4, u8); - CALL(native_pclmul, -1); + CALL(const_native_pclmul, -1); break; case 0x61: INST_NAME("PCMPESTRI Gx, Ex, Ib"); @@ -1286,7 +1286,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int MV(x4, xRAX); u8 = F8; MOV32w(x5, u8); - CALL(sse42_compare_string_explicit_len, x1); + CALL(const_sse42_compare_string_explicit_len, x1); ZEROUP(x1); BNEZ_MARK(x1); MOV32w(xRCX, (u8 & 1) ? 8 : 16); @@ -1317,7 +1317,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int } u8 = F8; MOV32w(x3, u8); - CALL(sse42_compare_string_implicit_len, x1); + CALL(const_sse42_compare_string_implicit_len, x1); BNEZ_MARK(x1); MOV32w(xRCX, (u8 & 1) ? 8 : 16); B_NEXT_nocond; @@ -1350,7 +1350,7 @@ uintptr_t dynarec64_660F(dynarec_la64_t* dyn, uintptr_t addr, uintptr_t ip, int } u8 = F8; MOV32w(x4, u8); - CALL(native_aeskeygenassist, -1); + CALL(const_native_aeskeygenassist, -1); break; default: DEFAULT; diff --git a/src/dynarec/la64/dynarec_la64_consts.c b/src/dynarec/la64/dynarec_la64_consts.c new file mode 100644 index 00000000..fae11205 --- /dev/null +++ b/src/dynarec/la64/dynarec_la64_consts.c @@ -0,0 +1,141 @@ +#include <stdint.h> +#include <string.h> + +#include "dynarec_la64_consts.h" +#include "debug.h" +#include "box64context.h" +#include "box64cpu.h" +#include "emu/x64emu_private.h" +#include "x64emu.h" +#include "box64stack.h" +#include "callback.h" +#include "emu/x64run_private.h" +#include "emu/x87emu_private.h" +#include "emu/x64primop.h" +#include "my_cpuid.h" +#include "freq.h" +#include "debug.h" +#include "custommem.h" +#include "dynarec_la64_functions.h" +#include "emu/x64shaext.h" +#include "emu/x87emu_private.h" +#include "emu/x64compstrings.h" +#include "x64test.h" +#include "dynarec/dynarec_next.h" +#include "bitutils.h" + +#ifndef HAVE_TRACE +void PrintTrace() {} +#endif + +uintptr_t getConst(la64_consts_t which) +{ + switch(which) { + case const_none: dynarec_log(LOG_NONE, "Warning, const none used\n"); + return 0; + case const_daa8: return (uintptr_t)daa8; + case const_das8: return (uintptr_t)das8; + case const_aaa16: return (uintptr_t)aaa16; + case const_aas16: return (uintptr_t)aas16; + case const_aam16: return (uintptr_t)aam16; + case const_aad16: return (uintptr_t)aad16; + case const_native_br: return (uintptr_t)native_br; + case const_native_ud: return (uintptr_t)native_ud; + case const_native_priv: return (uintptr_t)native_priv; + case const_native_int3: return (uintptr_t)native_int3; + case const_native_int: return (uintptr_t)native_int; + case const_native_div0: return (uintptr_t)native_div0; + case const_native_clflush: return (uintptr_t)native_clflush; + case const_native_fprem: return (uintptr_t)native_fprem; + case const_native_fprem1: return (uintptr_t)native_fprem1; + case const_native_frstor16: return (uintptr_t)native_frstor16; + case 
const_native_fsave16: return (uintptr_t)native_fsave16; + case const_native_fsave: return (uintptr_t)native_fsave; + case const_native_aesimc: return (uintptr_t)native_aesimc; + case const_native_aesd: return (uintptr_t)native_aesd; + case const_native_aesd_y: return (uintptr_t)native_aesd_y; + case const_native_aesdlast: return (uintptr_t)native_aesdlast; + case const_native_aesdlast_y: return (uintptr_t)native_aesdlast_y; + case const_native_aese: return (uintptr_t)native_aese; + case const_native_aese_y: return (uintptr_t)native_aese_y; + case const_native_aeselast: return (uintptr_t)native_aeselast; + case const_native_aeselast_y: return (uintptr_t)native_aeselast_y; + case const_native_aeskeygenassist: return (uintptr_t)native_aeskeygenassist; + case const_native_pclmul: return (uintptr_t)native_pclmul; + case const_native_pclmul_x: return (uintptr_t)native_pclmul_x; + case const_native_pclmul_y: return (uintptr_t)native_pclmul_y; + case const_native_f2xm1: return (uintptr_t)native_f2xm1; + case const_native_fyl2x: return (uintptr_t)native_fyl2x; + case const_native_fyl2xp1: return (uintptr_t)native_fyl2xp1; + case const_native_fxtract: return (uintptr_t)native_fxtract; + case const_native_ftan: return (uintptr_t)native_ftan; + case const_native_fpatan: return (uintptr_t)native_fpatan; + case const_native_fcos: return (uintptr_t)native_fcos; + case const_native_fsin: return (uintptr_t)native_fsin; + case const_native_fsincos: return (uintptr_t)native_fsincos; + case const_native_fscale: return (uintptr_t)native_fscale; + case const_native_fld: return (uintptr_t)native_fld; + case const_native_fstp: return (uintptr_t)native_fstp; + case const_native_frstor: return (uintptr_t)native_frstor; + case const_native_next: return (uintptr_t)native_next; + case const_int3: return (uintptr_t)EmuInt3; + case const_x86syscall: return (uintptr_t)EmuX86Syscall; + case const_x64syscall: return (uintptr_t)EmuX64Syscall; + case const_rcl8: return (uintptr_t)rcl8; + case const_rcl16: return (uintptr_t)rcl16; + case const_rcl32: return (uintptr_t)rcl32; + case const_rcl64: return (uintptr_t)rcl64; + case const_rcr8: return (uintptr_t)rcr8; + case const_rcr16: return (uintptr_t)rcr16; + case const_rcr32: return (uintptr_t)rcr32; + case const_rcr64: return (uintptr_t)rcr64; + case const_rol8: return (uintptr_t)rol8; + case const_rol16: return (uintptr_t)rol16; + case const_ror8: return (uintptr_t)ror8; + case const_ror16: return (uintptr_t)ror16; + case const_div64: return (uintptr_t)div64; + case const_div8: return (uintptr_t)div8; + case const_idiv64: return (uintptr_t)idiv64; + case const_idiv8: return (uintptr_t)idiv8; + case const_random32: return (uintptr_t)get_random32; + case const_random64: return (uintptr_t)get_random64; + case const_readtsc: return (uintptr_t)ReadTSC; + case const_helper_getcpu: return (uintptr_t)helper_getcpu; + case const_cpuid: return (uintptr_t)my_cpuid; + case const_getsegmentbase: return (uintptr_t)GetSegmentBaseEmu; + case const_updateflags: return (uintptr_t)UpdateFlags; + case const_reset_fpu: return (uintptr_t)reset_fpu; + case const_sha1nexte: return (uintptr_t)sha1nexte; + case const_sha1msg1: return (uintptr_t)sha1msg1; + case const_sha1msg2: return (uintptr_t)sha1msg2; + case const_sha1rnds4: return (uintptr_t)sha1rnds4; + case const_sha256msg1: return (uintptr_t)sha256msg1; + case const_sha256msg2: return (uintptr_t)sha256msg2; + case const_sha256rnds2: return (uintptr_t)sha256rnds2; + case const_fpu_loadenv: return (uintptr_t)fpu_loadenv; + case const_fpu_savenv: 
return (uintptr_t)fpu_savenv; + case const_fpu_fxsave32: return (uintptr_t)fpu_fxsave32; + case const_fpu_fxsave64: return (uintptr_t)fpu_fxsave64; + case const_fpu_fxrstor32: return (uintptr_t)fpu_fxrstor32; + case const_fpu_fxrstor64: return (uintptr_t)fpu_fxrstor64; + case const_fpu_xsave: return (uintptr_t)fpu_xsave; + case const_fpu_xrstor: return (uintptr_t)fpu_xrstor; + case const_fpu_fbld: return (uintptr_t)fpu_fbld; + case const_fpu_fbst: return (uintptr_t)fpu_fbst; + case const_sse42_compare_string_explicit_len: return (uintptr_t)sse42_compare_string_explicit_len; + case const_sse42_compare_string_implicit_len: return (uintptr_t)sse42_compare_string_implicit_len; + case const_x64test_step: return (uintptr_t)x64test_step; + case const_printtrace: return (uintptr_t)PrintTrace; + case const_epilog: return (uintptr_t)native_epilog; + case const_epilog_fast: return (uintptr_t)native_epilog_fast; + case const_jmptbl32: return getJumpTable32(); + case const_jmptbl48: return getJumpTable48(); + case const_jmptbl64: return getJumpTable64(); + case const_context: return (uintptr_t)my_context; + + case const_last: dynarec_log(LOG_NONE, "Warning, const last used\n"); + return 0; + } + dynarec_log(LOG_NONE, "Warning, Unknown const %d used\n", which); + return 0; +} \ No newline at end of file diff --git a/src/dynarec/la64/dynarec_la64_consts.h b/src/dynarec/la64/dynarec_la64_consts.h new file mode 100644 index 00000000..ddc2ff9c --- /dev/null +++ b/src/dynarec/la64/dynarec_la64_consts.h @@ -0,0 +1,115 @@ +#ifndef __DYNAREC_LA64_CONSTS__ +#define __DYNAREC_LA64_CONSTS__ +#include <stdint.h> + +typedef enum la64_consts_s { + const_none, + const_daa8, + const_das8, + const_aaa16, + const_aas16, + const_aam16, + const_aad16, + const_native_br, + const_native_ud, + const_native_priv, + const_native_int3, + const_native_int, + const_native_div0, + const_native_clflush, + const_native_fprem, + const_native_fprem1, + const_native_frstor16, + const_native_fsave16, + const_native_fsave, + const_native_aesimc, + const_native_aesd, + const_native_aesd_y, + const_native_aesdlast, + const_native_aesdlast_y, + const_native_aese, + const_native_aese_y, + const_native_aeselast, + const_native_aeselast_y, + const_native_aeskeygenassist, + const_native_pclmul, + const_native_pclmul_x, + const_native_pclmul_y, + const_native_f2xm1, + const_native_fyl2x, + const_native_fyl2xp1, + const_native_fxtract, + const_native_ftan, + const_native_fpatan, + const_native_fcos, + const_native_fsin, + const_native_fsincos, + const_native_fscale, + const_native_fld, + const_native_fstp, + const_native_frstor, + const_native_next, + const_int3, + const_x86syscall, + const_x64syscall, + const_rcl8, + const_rcl16, + const_rcl32, + const_rcl64, + const_rcr8, + const_rcr16, + const_rcr32, + const_rcr64, + const_rol8, + const_rol16, + const_ror8, + const_ror16, + const_div64, + const_div8, + const_idiv64, + const_idiv8, + const_random32, + const_random64, + const_readtsc, + const_helper_getcpu, + const_cpuid, + const_getsegmentbase, + const_updateflags, + const_reset_fpu, + const_sha1nexte, + const_sha1msg1, + const_sha1msg2, + const_sha1rnds4, + const_sha256msg1, + const_sha256msg2, + const_sha256rnds2, + const_fpu_loadenv, + const_fpu_savenv, + const_fpu_fxsave32, + const_fpu_fxsave64, + const_fpu_fxrstor32, + const_fpu_fxrstor64, + const_fpu_xsave, + const_fpu_xrstor, + const_fpu_fbld, + const_fpu_fbst, + const_sse42_compare_string_explicit_len, + const_sse42_compare_string_implicit_len, + const_x64test_step, + const_printtrace, + 
const_epilog, + const_epilog_fast, + const_jmptbl32, + const_jmptbl48, + const_jmptbl64, + const_context, + + const_last +} la64_consts_t; + +uintptr_t getConst(la64_consts_t which); + +// temporary define... +#define const_PrintTrace const_printtrace + +#endif //__DYNAREC_LA64_CONSTS__ \ No newline at end of file diff --git a/src/dynarec/la64/dynarec_la64_helper.c b/src/dynarec/la64/dynarec_la64_helper.c index f119c984..265bd117 100644 --- a/src/dynarec/la64/dynarec_la64_helper.c +++ b/src/dynarec/la64/dynarec_la64_helper.c @@ -683,7 +683,7 @@ void iret_to_epilog(dynarec_la64_t* dyn, uintptr_t ip, int ninst, int is64bits) CLEARIP(); } -void call_c(dynarec_la64_t* dyn, int ninst, void* fnc, int reg, int ret, int saveflags, int savereg) +void call_c(dynarec_la64_t* dyn, int ninst, la64_consts_t fnc, int reg, int ret, int saveflags, int savereg) { MAYUSE(fnc); if (savereg == 0) @@ -708,7 +708,7 @@ void call_c(dynarec_la64_t* dyn, int ninst, void* fnc, int reg, int ret, int sav STORE_REG(RDI); ST_D(xRIP, xEmu, offsetof(x64emu_t, ip)); } - TABLE64(reg, (uintptr_t)fnc); + TABLE64(reg, getConst(fnc)); JIRL(xRA, reg, 0); if (ret >= 0) { MV(ret, xEmu); @@ -762,7 +762,7 @@ void grab_segdata(dynarec_la64_t* dyn, uintptr_t addr, int ninst, int reg, int s CBZ_MARKSEG(t1); } MOV64x(x1, segment); - call_c(dyn, ninst, GetSegmentBaseEmu, t2, reg, 0, xFlags); + call_c(dyn, ninst, const_getsegmentbase, t2, reg, 0, xFlags); MARKSEG; MESSAGE(LOG_DUMP, "----%s Offset\n", (segment == _FS) ? "FS" : "GS"); } @@ -1688,7 +1688,7 @@ static void flagsCacheTransform(dynarec_la64_t* dyn, int ninst, int s1) j64 = (GETMARKF2) - (dyn->native_size); BEQZ(s1, j64); } - CALL_(UpdateFlags, -1, 0); + CALL_(const_updateflags, -1, 0); MARKF2; } } diff --git a/src/dynarec/la64/dynarec_la64_helper.h b/src/dynarec/la64/dynarec_la64_helper.h index 6f37c293..7d833547 100644 --- a/src/dynarec/la64/dynarec_la64_helper.h +++ b/src/dynarec/la64/dynarec_la64_helper.h @@ -17,6 +17,7 @@ #include "debug.h" #include "la64_emitter.h" #include "../emu/x64primop.h" +#include "dynarec_la64_consts.h" #define F8 *(uint8_t*)(addr++) #define F8S *(int8_t*)(addr++) @@ -863,7 +864,7 @@ if (dyn->f.pending == SF_PENDING \ && dyn->insts[ninst].x64.need_after \ && !(dyn->insts[ninst].x64.need_after & X_PEND)) { \ - CALL_(UpdateFlags, -1, 0); \ + CALL_(const_updateflags, -1, 0); \ dyn->f.pending = SF_SET; \ SET_NODF(); \ } \ @@ -934,7 +935,7 @@ j64 = (GETMARKF) - (dyn->native_size); \ BEQ(x3, xZR, j64); \ } \ - CALL_(UpdateFlags, -1, 0); \ + CALL_(const_updateflags, -1, 0); \ MARKF; \ dyn->f.pending = SF_SET; \ SET_DFOK(); \ @@ -1235,7 +1236,7 @@ void jump_to_next(dynarec_la64_t* dyn, uintptr_t ip, int reg, int ninst, int is3 void ret_to_epilog(dynarec_la64_t* dyn, uintptr_t ip, int ninst, rex_t rex); void retn_to_epilog(dynarec_la64_t* dyn, uintptr_t ip, int ninst, rex_t rex, int n); void iret_to_epilog(dynarec_la64_t* dyn, uintptr_t ip, int ninst, int is64bits); -void call_c(dynarec_la64_t* dyn, int ninst, void* fnc, int reg, int ret, int saveflags, int save_reg); +void call_c(dynarec_la64_t* dyn, int ninst, la64_consts_t fnc, int reg, int ret, int saveflags, int save_reg); void grab_segdata(dynarec_la64_t* dyn, uintptr_t addr, int ninst, int reg, int segment, int modreg); void emit_cmp8(dynarec_la64_t* dyn, int ninst, int s1, int s2, int s3, int s4, int s5, int s6); void emit_cmp16(dynarec_la64_t* dyn, int ninst, int s1, int s2, int s3, int s4, int s5, int s6); diff --git a/src/dynarec/la64/dynarec_la64_private.h b/src/dynarec/la64/dynarec_la64_private.h 
index a47ab362..5b25edb9 100644
--- a/src/dynarec/la64/dynarec_la64_private.h
+++ b/src/dynarec/la64/dynarec_la64_private.h
@@ -189,7 +189,7 @@ void CreateJmpNext(void* addr, void* next);
     MV(A1, xRIP);        \
     STORE_XEMU_CALL();   \
     MOV64x(A2, B);       \
-    CALL(A, -1);         \
+    CALL(const_##A, -1); \
     LOAD_XEMU_CALL()
 #endif //__DYNAREC_ARM_PRIVATE_H_
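
The last hunk shows how existing macro call sites stay untouched: the macro now token-pastes `const_` onto its argument, so a caller passing the bare name `native_int` ends up referencing the enumerator `const_native_int`. A tiny self-contained illustration of the pasting (names are hypothetical):

```c
#include <stdio.h>

typedef enum { const_none, const_native_priv, const_native_int, const_last } demo_consts_t;

/* Paste "const_" onto the bare helper name supplied by the caller. */
#define AS_CONST(A) const_##A

int main(void)
{
    /* AS_CONST(native_int) expands to the enumerator const_native_int. */
    printf("%d\n", (int)AS_CONST(native_int));
    return 0;
}
```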