// ARM64 lock helpers
// There are 2 parts: read and write
// write returns 0 on success, 1 on failure (the value has been changed in the meantime)
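//
// Typical use from C (a minimal sketch; these prototypes are assumptions for
// illustration and are not copied from a project header):
//
//   extern uint32_t arm64_lock_read_d(void* addr);              // exclusive load
//   extern int      arm64_lock_write_d(void* addr, uint32_t v); // exclusive store, 0 = success
//
//   uint32_t old, new_val;
//   do {
//       old = arm64_lock_read_d(p);           // take the exclusive monitor on p
//       new_val = old + 1;                    // compute the updated value
//   } while (arm64_lock_write_d(p, new_val)); // non-zero means the store failed, retry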
.text
.align 4
.global arm64_lock_read_b
.global arm64_lock_write_b
.global arm64_lock_read_h
.global arm64_lock_write_h
.global arm64_lock_read_d
.global arm64_lock_write_d
.global arm64_lock_read_dd
.global arm64_lock_write_dd
.global arm64_lock_read_dq
.global arm64_lock_write_dq
.global arm64_lock_xchg
.global arm64_lock_storeifnull
.global arm64_lock_storeifref
arm64_lock_read_b:
dmb ish
// address is x0, return is x0
ldaxrb w0, [x0]
ret
arm64_lock_write_b:
// address is x0, value is x1, return is x0
mov x2, x0
stlxrb w0, w1, [x2]
dmb ish
ret
arm64_lock_read_h:
dmb ish
// address is x0, return is x0
ldaxrh w0, [x0]
ret
arm64_lock_write_h:
// address is x0, value is x1, return is x0
mov x2, x0
stlxrh w0, w1, [x2]
dmb ish
ret
arm64_lock_read_d:
dmb ish
// address is x0, return is x0
ldaxr w0, [x0]
ret
arm64_lock_write_d:
// address is x0, value is w1, return is x0
mov x2, x0
stlxr w0, w1, [x2]
dmb ish
ret
arm64_lock_read_dd:
dmb ish
// address is x0, return is x0
ldaxr x0, [x0]
ret
arm64_lock_write_dd:
// address is x0, value is x1, return is x0
mov x2, x0
stlxr w0, x1, [x2]
dmb ish
ret
arm64_lock_read_dq:
dmb ish
// address is x2, the two 64-bit halves are stored to [x0] and [x1]
ldaxp x4, x3, [x2]
str x4, [x0]
str x3, [x1]
ret
arm64_lock_write_dq:
// address is x2, values are x0 and x1, return (status) is x0
// the address in x2 needs to be 16-byte aligned
stlxp w3, x0, x1, [x2]
mov w0, w3
dmb ish
ret
arm64_lock_xchg:
dmb ish
arm64_lock_xchg_0:
// address is x0, value is x1, return old value in x0
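// C-equivalent of what the LL/SC loop below performs atomically (a sketch):
//   uint64_t old = *(uint64_t*)addr; *(uint64_t*)addr = val; return old;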
ldaxr x2, [x0]
stlxr w3, x1, [x0]
cbnz w3, arm64_lock_xchg_0
dmb ish
mov x0, x2
ret
arm64_lock_storeifnull:
dmb ish
arm64_lock_storeifnull_0:
// address is x0, value is x1; x1 is stored to [x0] only if [x0] is 0. Returns the new [x0] value (so x1 or the old value)
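// C-equivalent of the atomic conditional store below (a sketch):
//   uint64_t old = *(uint64_t*)addr; if (old == 0) { *(uint64_t*)addr = val; return val; } return old;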
ldaxr x2, [x0]
cbnz x2, arm64_lock_storeifnull_exit
mov x2, x1
stlxr w3, x2, [x0]
cbnz w3, arm64_lock_storeifnull_0
arm64_lock_storeifnull_exit:
dmb ish
mov x0, x2
ret
arm64_lock_storeifref:
dmb ish
arm64_lock_storeifref_0:
// address is x0, value is x1, reference is x2; x1 is stored to [x0] only if [x0] equals x2. Returns the new [x0] value (so x1 or the old value)
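// C-equivalent of the atomic compare-and-store below (a sketch):
//   uint64_t old = *(uint64_t*)addr; if (old == ref) { *(uint64_t*)addr = val; return val; } return old;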
ldaxr x3, [x0]
cmp x2, x3
bne arm64_lock_storeifref_exit
stlxr w4, x1, [x0]
cbnz w4, arm64_lock_storeifref_0
dmb ish
mov x0, x1
ret
arm64_lock_storeifref_exit:
dmb ish
mov x0, x3
ret