| author | ptitSeb <seebastien.chev@gmail.com> | 2023-09-09 16:26:50 +0200 |
|---|---|---|
| committer | ptitSeb <seebastien.chev@gmail.com> | 2023-09-09 16:26:50 +0200 |
| commit | e245dee90b431532fd6fb2296e66f4e03c17095c | |
| tree | cb717fc97f3837ffd07b02141a1851bef51f1f66 | |
| parent | edba2a2e96ca446b22d9258f27250408db2eb949 | |
| download | box64-e245dee90b431532fd6fb2296e66f4e03c17095c.tar.gz, box64-e245dee90b431532fd6fb2296e66f4e03c17095c.zip | |
[DYNAREC] Small improvement on multi-thread reliance for the jumptable
| -rw-r--r-- | src/custommem.c | 2 |
|---|---|---|
| -rw-r--r-- | src/dynarec/arm64/arm64_lock.S | 6 |
| -rw-r--r-- | src/dynarec/arm64/arm64_lock.h | 3 |
| -rw-r--r-- | src/dynarec/dynablock.c | 8 |
| -rw-r--r-- | src/dynarec/native_lock.h | 2 |
| -rw-r--r-- | src/dynarec/rv64/rv64_lock.S | 6 |
| -rw-r--r-- | src/dynarec/rv64/rv64_lock.h | 3 |
7 files changed, 25 insertions, 5 deletions
```diff
diff --git a/src/custommem.c b/src/custommem.c
index a92f9831..3ec46783 100644
--- a/src/custommem.c
+++ b/src/custommem.c
@@ -748,7 +748,7 @@ void setJumpTableDefault64(void* addr)
     if(box64_jmptbl3[idx3][idx2][idx1] == box64_jmptbldefault0)
         return;
     idx0 = (((uintptr_t)addr) )&JMPTABLE_MASK0;
-    box64_jmptbl3[idx3][idx2][idx1][idx0] = (uintptr_t)native_next;
+    native_lock_store_dd(&box64_jmptbl3[idx3][idx2][idx1][idx0], (uintptr_t)native_next);
 }
 void setJumpTableDefaultRef64(void* addr, void* jmp)
 {
diff --git a/src/dynarec/arm64/arm64_lock.S b/src/dynarec/arm64/arm64_lock.S
index b091101a..139625d6 100644
--- a/src/dynarec/arm64/arm64_lock.S
+++ b/src/dynarec/arm64/arm64_lock.S
@@ -29,6 +29,7 @@
 .global arm64_lock_incif0
 .global arm64_lock_decifnot0
 .global arm64_lock_store
+.global arm64_lock_store_dd
 
 arm64_lock_read_b:
     dmb ish
@@ -254,3 +255,8 @@ arm64_lock_store:
     str w1, [x0]
     dmb ish
     ret
+
+arm64_lock_store_dd:
+    str x1, [x0]
+    dmb ish
+    ret
diff --git a/src/dynarec/arm64/arm64_lock.h b/src/dynarec/arm64/arm64_lock.h
index 118c807f..6572adc4 100644
--- a/src/dynarec/arm64/arm64_lock.h
+++ b/src/dynarec/arm64/arm64_lock.h
@@ -69,4 +69,7 @@ extern int arm64_lock_decifnot0(void*p);
 // atomic store (with memory barrier)
 extern void arm64_lock_store(void*p, uint32_t v);
 
+// atomic store (with memory barrier)
+extern void arm64_lock_store_dd(void*p, uint64_t v);
+
 #endif //__ARM64_LOCK__H__
diff --git a/src/dynarec/dynablock.c b/src/dynarec/dynablock.c
index d95fb829..d6240191 100644
--- a/src/dynarec/dynablock.c
+++ b/src/dynarec/dynablock.c
@@ -41,10 +41,10 @@ dynablock_t* InvalidDynablock(dynablock_t* db, int need_lock)
     if(db->gone)
         return NULL;    // already in the process of deletion!
     dynarec_log(LOG_DEBUG, "InvalidDynablock(%p), db->block=%p x64=%p:%p already gone=%d\n", db, db->block, db->x64_addr, db->x64_addr+db->x64_size-1, db->gone);
+    // remove jumptable without waiting
+    setJumpTableDefault64(db->x64_addr);
     if(need_lock)
         mutex_lock(&my_context->mutex_dyndump);
-    // remove jumptable
-    setJumpTableDefault64(db->x64_addr);
     db->done = 0;
     db->gone = 1;
     if(need_lock)
@@ -74,10 +74,10 @@ void FreeDynablock(dynablock_t* db, int need_lock)
     if(db->gone)
         return; // already in the process of deletion!
     dynarec_log(LOG_DEBUG, "FreeDynablock(%p), db->block=%p x64=%p:%p already gone=%d\n", db, db->block, db->x64_addr, db->x64_addr+db->x64_size-1, db->gone);
+    // remove jumptable without waiting
+    setJumpTableDefault64(db->x64_addr);
     if(need_lock)
         mutex_lock(&my_context->mutex_dyndump);
-    // remove jumptable
-    setJumpTableDefault64(db->x64_addr);
     dynarec_log(LOG_DEBUG, " -- FreeDyrecMap(%p, %d)\n", db->actual_block, db->size);
     db->done = 0;
     db->gone = 1;
diff --git a/src/dynarec/native_lock.h b/src/dynarec/native_lock.h
index dd2f3b6f..a27ca514 100644
--- a/src/dynarec/native_lock.h
+++ b/src/dynarec/native_lock.h
@@ -28,6 +28,7 @@
 #define native_lock_incif0(A)       arm64_lock_incif0(A)
 #define native_lock_decifnot0(A)    arm64_lock_decifnot0(A)
 #define native_lock_store(A, B)     arm64_lock_store(A, B)
+#define native_lock_store_dd(A, B)  arm64_lock_store_dd(A, B)
 
 #elif defined(RV64)
 #include "rv64/rv64_lock.h"
@@ -50,6 +51,7 @@
 #define native_lock_incif0(A)       rv64_lock_incif0(A)
 #define native_lock_decifnot0(A)    rv64_lock_decifnot0(A)
 #define native_lock_store(A, B)     rv64_lock_store(A, B)
+#define native_lock_store_dd(A, B)  rv64_lock_store_dd(A, B)
 
 #define native_lock_cas_d(A, B, C)  rv64_lock_cas_d(A, B, C)
 #define native_lock_cas_dd(A, B, C) rv64_lock_cas_dd(A, B, C)
diff --git a/src/dynarec/rv64/rv64_lock.S b/src/dynarec/rv64/rv64_lock.S
index d6ea9c42..505e3f31 100644
--- a/src/dynarec/rv64/rv64_lock.S
+++ b/src/dynarec/rv64/rv64_lock.S
@@ -17,6 +17,7 @@
 .global rv64_lock_incif0
 .global rv64_lock_decifnot0
 .global rv64_lock_store
+.global rv64_lock_store_dd
 .global rv64_lock_cas_d
 .global rv64_lock_cas_dd
 .global rv64_lock_cas_dq
@@ -202,6 +203,11 @@ rv64_lock_store:
     fence rw, rw
     ret
 
+rv64_lock_store_dd:
+    sd a1, 0(a0)
+    fence rw, rw
+    ret
+
 rv64_lock_cas_d:
     lr.w t0, (a0)
     bne t0, a1, 1f
diff --git a/src/dynarec/rv64/rv64_lock.h b/src/dynarec/rv64/rv64_lock.h
index 33ce09a5..c59b876d 100644
--- a/src/dynarec/rv64/rv64_lock.h
+++ b/src/dynarec/rv64/rv64_lock.h
@@ -44,6 +44,9 @@ extern int rv64_lock_decifnot0(void*p);
 // atomic store (with memory barrier)
 extern void rv64_lock_store(void*p, uint32_t v);
 
+// atomic store (with memory barrier)
+extern void rv64_lock_store_dd(void*p, uint64_t v);
+
 // (mostly) Atomically store val1 and val2 at [p] if old [p] is ref. Return 0 if OK, 1 is not. p needs to be aligned
 extern int rv64_lock_cas_dq(void* p, uint64_t ref, uint64_t val1, uint64_t val2);
```
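Both new assembly helpers follow the same pattern as the existing 32-bit `*_lock_store`: a single aligned 64-bit store followed by a full memory barrier (`dmb ish` on AArch64, `fence rw, rw` on RV64), so a thread jumping through the table observes either the old entry or `native_next`, never a torn pointer. A minimal C11 sketch of the equivalent semantics, for illustration only; the function name below is hypothetical and not part of the box64 API:

```c
/* Illustrative sketch, not box64 code: approximates what the new
 * arm64_lock_store_dd / rv64_lock_store_dd helpers do, using C11 atomics. */
#include <stdatomic.h>
#include <stdint.h>

static inline void lock_store_dd_sketch(void* p, uint64_t v)
{
    /* untorn 64-bit store of the new jumptable entry... */
    atomic_store_explicit((_Atomic uint64_t*)p, v, memory_order_relaxed);
    /* ...then a full fence, mirroring "dmb ish" / "fence rw, rw" */
    atomic_thread_fence(memory_order_seq_cst);
}
```

Together with the reordering in dynablock.c ("remove jumptable without waiting"), the jumptable entry is reset through this barriered store before `mutex_dyndump` is taken, so other threads stop entering a block that is being invalidated or freed without first waiting on the mutex.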