author     ptitSeb <sebastien.chev@gmail.com>  2025-03-30 14:56:16 +0200
committer  ptitSeb <sebastien.chev@gmail.com>  2025-03-30 14:56:16 +0200
commit     03c69338f5aec5ae8c47bae3d8d91de3270a941e (patch)
tree       5ef2e341aefb8abee40d7cfa9340a81e5770cace /src
parent     0fa28b87f15b240b79ac2af70bd78da1b5af1014 (diff)
download   box64-03c69338f5aec5ae8c47bae3d8d91de3270a941e.tar.gz
           box64-03c69338f5aec5ae8c47bae3d8d91de3270a941e.zip
[ARM64_DYNAREC] Added atomic support for various lock helpers
Diffstat (limited to 'src')
-rw-r--r--  src/dynarec/arm64/arm64_lock.S | 122
-rw-r--r--  src/dynarec/arm64/arm64_lock.h |   9
2 files changed, 131 insertions(+), 0 deletions(-)
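
The change follows one pattern throughout: each LL/SC lock helper now begins by loading the global arm64_atomics flag (declared .extern here, so defined on the C side, presumably during CPU-feature detection) and branches to an LSE-based twin when the flag is non-zero. A minimal C sketch of that dispatch, using the real entry points declared in arm64_lock.h; the wrapper name xchg_dd_dispatch is hypothetical, since in the actual code the test sits at the top of the lock helper itself:

    #include <stdint.h>

    extern int arm64_atomics;  /* assumed: non-zero when ARMv8.1 LSE atomics are usable */

    extern uintptr_t arm64_lock_xchg_dd(void* p, uintptr_t val);    /* ldaxr/stlxr retry loop */
    extern uintptr_t arm64_atomic_xchg_dd(void* p, uintptr_t val);  /* single swpal           */

    /* Hypothetical C rendering of the branch each helper performs in
       assembly (adrp/add/ldr of arm64_atomics, then cbnz to the twin). */
    static uintptr_t xchg_dd_dispatch(void* p, uintptr_t val)
    {
        if (arm64_atomics)
            return arm64_atomic_xchg_dd(p, val);
        return arm64_lock_xchg_dd(p, val);
    }

Doing the test inside each helper keeps every call site unchanged: callers keep using the arm64_lock_* names and never need to know which path ran.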
diff --git a/src/dynarec/arm64/arm64_lock.S b/src/dynarec/arm64/arm64_lock.S
index e57137e7..2203648b 100644
--- a/src/dynarec/arm64/arm64_lock.S
+++ b/src/dynarec/arm64/arm64_lock.S
@@ -5,6 +5,7 @@
 .text
 .align 4
 
+.extern arm64_atomics
 .global arm64_lock_read_b
 .global arm64_lock_write_b
 .global arm64_lock_read_h
@@ -104,6 +105,10 @@ arm64_lock_write_dq:
 
 
 arm64_lock_xchg_dd:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_xchg_dd
     dmb     ish
 arm64_lock_xchg_dd_0:
     // address is x0, value is x1, return old value in x0
@@ -113,7 +118,17 @@ arm64_lock_xchg_dd_0:
     mov     x0, x2
     ret
 
+arm64_atomic_xchg_dd:
+    dmb     ish
+    // address is x0, value is x1, return old value in x0
+    swpal   x1, x0, [x0]
+    ret
+
 arm64_lock_xchg_d:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_xchg_d
     dmb     ish
 arm64_lock_xchg_d_0:
     // address is x0, value is x1, return old value in x0
@@ -123,7 +138,17 @@ arm64_lock_xchg_d_0:
     mov     w0, w2
     ret
 
+arm64_atomic_xchg_d:
+    dmb     ish
+    // address is x0, value is x1, return old value in x0
+    swpal   w1, w0, [x0]
+    ret
+
 arm64_lock_xchg_h:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_xchg_h
     dmb     ish
 arm64_lock_xchg_h_0:
     // address is x0, value is x1, return old value in x0
@@ -133,7 +158,17 @@ arm64_lock_xchg_h_0:
     mov     w0, w2
     ret
 
+arm64_atomic_xchg_h:
+    dmb     ish
+    // address is x0, value is x1, return old value in x0
+    swpalh  w1, w0, [x0]
+    ret
+
 arm64_lock_xchg_b:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_xchg_b
     dmb     ish
 arm64_lock_xchg_b_0:
     // address is x0, value is x1, return old value in x0
@@ -143,7 +178,17 @@ arm64_lock_xchg_b_0:
     mov     w0, w2
     ret
 
+arm64_atomic_xchg_b:
+    dmb     ish
+    // address is x0, value is x1, return old value in x0
+    swpalb  w1, w0, [x0]
+    ret
+
 arm64_lock_storeifnull:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_storeifnull
     dmb     ish
 1:
     // address is x0, value is x1, x1 store to x0 only if [x0] is 0. return old [x0] value
@@ -155,7 +200,20 @@ arm64_lock_storeifnull:
     mov     x0, x2
     ret
 
+arm64_atomic_storeifnull:
+    dmb     ish
+    // address is x0, value is x1, x1 store to x0 only if [x0] is 0. return old [x0] value
+    mov     x2, xzr
+    casal   x2, x1, [x0]
+    mov     x0, x2
+    ret
+
+
 arm64_lock_storeifnull_d:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_storeifnull_d
     dmb     ish
 1:
     // address is x0, value is w1, w1 store to x0 only if [x0] is 0. return old [x0] value
@@ -168,7 +226,19 @@ arm64_lock_storeifnull_d:
     mov     w0, w2
     ret
 
+arm64_atomic_storeifnull_d:
+    dmb     ish
+    // address is x0, value is w1, w1 store to x0 only if [x0] is 0. return old [x0] value
+    mov     w2, wzr
+    casal   w2, w1, [x0]
+    mov     w0, w2
+    ret
+
 arm64_lock_storeifref:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_storeifref
     dmb     ish
 1:
     // address is x0, value is x1, x1 store to x0 only if [x0] is x2. return new [x0] value (so x1 or old value)
@@ -183,7 +253,24 @@ arm64_lock_storeifref:
     mov     x0, x3
     ret
 
+arm64_atomic_storeifref:
+    dmb     ish
+    // address is x0, value is x1, x1 store to x0 only if [x0] is x2. return new [x0] value (so x1 or old value)
+    mov     x3, x2
+    casal   x2, x1, [x0]
+    cmp     x2, x3
+    bne     2f
+    mov     x0, x1
+    ret
+2:
+    mov     x0, x2
+    ret
+
 arm64_lock_storeifref_d:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_storeifref_d
     dmb     ish
 1:
     // address is x0, value is w1, w1 store to x0 only if [x0] is w2. return new [x0] value (so x1 or old value)
@@ -198,7 +285,24 @@ arm64_lock_storeifref_d:
     mov     w0, w3
     ret
 
+arm64_atomic_storeifref_d:
+    dmb     ish
+    // address is x0, value is w1, w1 store to x0 only if [x0] is w2. return new [x0] value (so w1 or old value)
+    mov     w3, w2
+    casal   w2, w1, [x0]
+    cmp     w2, w3
+    bne     2f
+    mov     w0, w1
+    ret
+2:
+    mov     w0, w2
+    ret
+
 arm64_lock_storeifref2_d:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_storeifref2_d
     dmb     ish
 1:
     // address is x0, value is w1, w1 store to x0 only if [x0] is w2. return old [x0] value
@@ -211,6 +315,13 @@ arm64_lock_storeifref2_d:
     mov     w0, w3
     ret
 
+arm64_atomic_storeifref2_d:
+    dmb     ish
+    // address is x0, value is w1, w1 store to x0 only if [x0] is w2. return old [x0] value
+    casal   w2, w1, [x0]
+    mov     w0, w2
+    ret
+
 arm64_lock_decifnot0b:
     dmb     ish
 1:
@@ -242,6 +353,10 @@ arm64_lock_decifnot0:
     ret
 
 arm64_lock_incif0:
+    adrp    x3, arm64_atomics
+    add     x3, x3, #:lo12:arm64_atomics
+    ldr     w3, [x3]
+    cbnz    w3, arm64_atomic_incif0
     dmb     ish
 1:
     ldaxr   w1, [x0]
@@ -254,6 +369,13 @@ arm64_lock_incif0:
     mov     w0, w1
     ret
 
+arm64_atomic_incif0:
+    mov     w1, #1
+    dmb     ish
+    swpal   w1, wzr, [x0]
+    mov     w0, w1
+    ret
+
 arm64_lock_store:
     str     w1, [x0]
     dmb     ish
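
Where the LL/SC versions loop on ldaxr/stlxr until the exclusive store succeeds, the LSE twins collapse to a single instruction: swpal is an atomic exchange and casal a compare-and-swap, both with acquire/release ordering (the extra dmb ish is kept, mirroring the conservative barriers of the lock versions). As a sketch with hypothetical names, the two new primitives behave roughly like this C11 code:

    #include <stdatomic.h>
    #include <stdint.h>

    /* swpal x1, x0, [x0] -- unconditionally swap, return the old value */
    static uintptr_t xchg_dd_like(_Atomic uintptr_t* p, uintptr_t val)
    {
        return atomic_exchange_explicit(p, val, memory_order_seq_cst);
    }

    /* mov x2, xzr ; casal x2, x1, [x0] -- store val only if *p is 0,
       returning whatever was in *p beforehand */
    static uintptr_t storeifnull_like(_Atomic uintptr_t* p, uintptr_t val)
    {
        uintptr_t expected = 0;
        atomic_compare_exchange_strong_explicit(p, &expected, val,
                memory_order_seq_cst, memory_order_seq_cst);
        return expected;  /* holds the old value on failure; stays 0 on success */
    }

Note how casal writes the previous memory value back into its first operand register: that is why storeifnull can seed x2 with zero, issue one casal, and hand x2 straight back as the return value.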
diff --git a/src/dynarec/arm64/arm64_lock.h b/src/dynarec/arm64/arm64_lock.h
index 1c827d00..cca8d677 100644
--- a/src/dynarec/arm64/arm64_lock.h
+++ b/src/dynarec/arm64/arm64_lock.h
@@ -29,30 +29,39 @@ extern int arm64_lock_write_dq(uint64_t a, uint64_t b, void* addr);
 
 // Atomically exchange value at [p] with val, return old p
 extern uintptr_t arm64_lock_xchg_dd(void* p, uintptr_t val);
+extern uintptr_t arm64_atomic_xchg_dd(void* p, uintptr_t val);
 
 // Atomically exchange value at [p] with val, return old p
 extern uint32_t arm64_lock_xchg_d(void* p, uint32_t val);
+extern uint32_t arm64_atomic_xchg_d(void* p, uint32_t val);
 
 // Atomically exchange value at [p] with val, return old p
 extern uint32_t arm64_lock_xchg_h(void* p, uint32_t val);
+extern uint32_t arm64_atomic_xchg_h(void* p, uint32_t val);
 
 // Atomically exchange value at [p] with val, return old p
 extern uint32_t arm64_lock_xchg_b(void* p, uint32_t val);
+extern uint32_t arm64_atomic_xchg_b(void* p, uint32_t val);
 
 // Atomically store value to [p] only if [p] is NULL. Return old [p] value
 extern uint32_t arm64_lock_storeifnull_d(void*p, uint32_t val);
+extern uint32_t arm64_atomic_storeifnull_d(void*p, uint32_t val);
 
 // Atomically store value to [p] only if [p] is NULL. Return old [p] value
 extern void* arm64_lock_storeifnull(void*p, void* val);
+extern void* arm64_atomic_storeifnull(void*p, void* val);
 
 // Atomically store value to [p] only if [p] is ref. Return new [p] value (so val or old)
 extern void* arm64_lock_storeifref(void*p, void* val, void* ref);
+extern void* arm64_atomic_storeifref(void*p, void* val, void* ref);
 
 // Atomically store value to [p] only if [p] is ref. Return new [p] value (so val or old)
 extern uint32_t arm64_lock_storeifref_d(void*p, uint32_t val, uint32_t ref);
+extern uint32_t arm64_atomic_storeifref_d(void*p, uint32_t val, uint32_t ref);
 
 // Atomically store value to [p] only if [p] is ref. Return new [p] value (so val or old)
 extern uint32_t arm64_lock_storeifref2_d(void*p, uint32_t val, uint32_t ref);
+extern uint32_t arm64_atomic_storeifref2_d(void*p, uint32_t val, uint32_t ref);
 
 // decrement atomically the byte at [p] (but only if p not 0)
 extern void arm64_lock_decifnot0b(void*p);
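
The new declarations are drop-in twins of the existing ones, so no caller changes are needed. As a hypothetical usage sketch (assuming arm64_lock.h is included), here is the storeifnull contract documented above used to publish a lazily created object exactly once:

    /* Hypothetical caller: publish a lazily created table exactly once.
       If another thread won the race, discard ours and use the winner's. */
    void* publish_table(void** slot, void* (*create)(void), void (*destroy)(void*))
    {
        void* mine = create();
        void* old = arm64_lock_storeifnull(slot, mine);
        if (old) {            /* slot was already set: lose the race gracefully */
            destroy(mine);
            return old;
        }
        return mine;          /* our pointer was stored */
    }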