diff options
| author | ptitSeb <sebastien.chev@gmail.com> | 2025-05-21 16:38:59 +0200 |
|---|---|---|
| committer | ptitSeb <sebastien.chev@gmail.com> | 2025-05-21 16:39:09 +0200 |
| commit | 1c7a7c98954bfd88c250a879bc8230333a0f4f8f (patch) | |
| tree | 72d0cf1e7e677f9ff75f8d6bcd189ee7a3309e3a /src | |
| parent | bd23f259ccb3b5ed2b7d6fc79f9e7456cbf8dad0 (diff) | |
| download | box64-1c7a7c98954bfd88c250a879bc8230333a0f4f8f.tar.gz box64-1c7a7c98954bfd88c250a879bc8230333a0f4f8f.zip | |
[ARM64_DYNAREC] Fix (and small optim) on VPMASKMOVD/VPMASKMOVQ opcodes
Diffstat (limited to 'src')
| -rw-r--r-- | src/dynarec/arm64/dynarec_arm64_avx_66_0f38.c | 16 |
1 file changed, 8 insertions, 8 deletions
diff --git a/src/dynarec/arm64/dynarec_arm64_avx_66_0f38.c b/src/dynarec/arm64/dynarec_arm64_avx_66_0f38.c index 55daee09..a57d6a3e 100644 --- a/src/dynarec/arm64/dynarec_arm64_avx_66_0f38.c +++ b/src/dynarec/arm64/dynarec_arm64_avx_66_0f38.c @@ -1226,10 +1226,6 @@ uintptr_t dynarec64_AVX_66_0F38(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip if(!l) { GETGX_empty_VX(v0, v2); addr = geted(dyn, addr, ninst, nextop, &ed, x3, &fixedaddress, NULL, 0, 0, rex, NULL, 0, 0); - if(ed!=x3) { - MOVx_REG(x3, ed); - ed = x3; - } v1 = fpu_get_scratch(dyn, ninst); } else { GETGY_empty_VY(v0, v2, 0, -1, -1); @@ -1243,7 +1239,8 @@ uintptr_t dynarec64_AVX_66_0F38(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip VMOVQDto(x4, q0, 0); CBZx(x4, 4+1*4); VLD1_64(v1, 0, ed); - ADDx_U12(ed, ed, 8); + ADDx_U12(x3, ed, 8); + if(ed!=x3) ed=x3; VMOVQDto(x4, q0, 1); CBZx(x4, 4+1*4); VLD1_64(v1, 1, ed); @@ -1254,7 +1251,8 @@ uintptr_t dynarec64_AVX_66_0F38(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip VMOVSto(x4, q0, 0); CBZx(x4, 4+1*4); VLD1_32(v1, 0, ed); - ADDx_U12(ed, ed, 4); + ADDx_U12(x3, ed, 4); + if(ed!=x3) ed=x3; VMOVSto(x4, q0, 1); CBZx(x4, 4+1*4); VLD1_32(v1, 1, ed); @@ -1310,7 +1308,8 @@ uintptr_t dynarec64_AVX_66_0F38(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip VMOVQDto(x4, q0, 0); CBZx(x4, 4+1*4); VST1_64(v0, 0, ed); - ADDx_U12(ed, ed, 8); + ADDx_U12(x3, ed, 8); + if(ed!=x3) ed=x3; VMOVQDto(x4, q0, 1); CBZx(x4, 4+1*4); VST1_64(v0, 1, ed); @@ -1323,7 +1322,8 @@ uintptr_t dynarec64_AVX_66_0F38(dynarec_arm_t* dyn, uintptr_t addr, uintptr_t ip VMOVSto(x4, q0, 0); CBZx(x4, 4+1*4); VST1_32(v0, 0, ed); - ADDx_U12(ed, ed, 4); + ADDx_U12(x3, ed, 4); + if(ed!=x3) ed=x3; VMOVSto(x4, q0, 1); CBZx(x4, 4+1*4); VST1_32(v0, 1, ed); |