/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * atomic64_t for 386/486
 *
 * Copyright © 2010  Luca Barbieri
 */

#include <linux/linkage.h>
#include <asm/alternative.h>

/* if you want SMP support, implement these with real spinlocks */
.macro IRQ_SAVE reg
	pushfl
	cli
.endm

.macro IRQ_RESTORE reg
	popfl
.endm

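/*
 * These helpers get their atomicity from running with interrupts disabled
 * between IRQ_SAVE and IRQ_RESTORE, which is only sufficient on UP (see the
 * SMP note above).  BEGIN_IRQ_SAVE(op) opens SYM_FUNC_START(atomic64_<op>_386)
 * and disables interrupts, RET_IRQ_RESTORE restores EFLAGS and returns, and
 * ENDP expands to the temporary 'endp' macro, which emits the matching
 * SYM_FUNC_END and then purges itself so the next BEGIN_IRQ_SAVE can
 * redefine it.
 */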
#define BEGIN_IRQ_SAVE(op) \
.macro endp; \
SYM_FUNC_END(atomic64_##op##_386); \
.purgem endp; \
.endm; \
SYM_FUNC_START(atomic64_##op##_386); \
	IRQ_SAVE v;

#define ENDP endp

#define RET_IRQ_RESTORE \
	IRQ_RESTORE v; \
	RET

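/*
 * For illustration, a BEGIN_IRQ_SAVE(read) ... RET_IRQ_RESTORE ... ENDP
 * sequence expands roughly to:
 *
 *	SYM_FUNC_START(atomic64_read_386)
 *		pushfl
 *		cli
 *		<body>
 *		popfl
 *		RET
 *	SYM_FUNC_END(atomic64_read_386)
 */

/* read: load the 64-bit value at (v) into %edx:%eax */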
#define v %ecx
BEGIN_IRQ_SAVE(read)
	movl  (v), %eax
	movl 4(v), %edx
	RET_IRQ_RESTORE
ENDP
#undef v

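/* set: store a new 64-bit value at (v); low word in %ebx, high word in %ecx */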
#define v %esi
BEGIN_IRQ_SAVE(set)
	movl %ebx,  (v)
	movl %ecx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

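/* xchg: return the old value at (v) in %edx:%eax and store the new value (%ebx low, %ecx high) */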
#define v  %esi
BEGIN_IRQ_SAVE(xchg)
	movl  (v), %eax
	movl 4(v), %edx
	movl %ebx,  (v)
	movl %ecx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

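/* add: *(v) += %edx:%eax */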
#define v %ecx
BEGIN_IRQ_SAVE(add)
	addl %eax,  (v)
	adcl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

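/* add_return: *(v) += %edx:%eax; return the new value in %edx:%eax */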
#define v %ecx
BEGIN_IRQ_SAVE(add_return)
	addl  (v), %eax
	adcl 4(v), %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

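/* sub: *(v) -= %edx:%eax */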
#define v %ecx
BEGIN_IRQ_SAVE(sub)
	subl %eax,  (v)
	sbbl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

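/* sub_return: *(v) -= %edx:%eax (negate, then add); return the new value in %edx:%eax */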
#define v %ecx
BEGIN_IRQ_SAVE(sub_return)
	negl %edx
	negl %eax
	sbbl $0, %edx
	addl  (v), %eax
	adcl 4(v), %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

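/* inc: *(v) += 1 */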
#define v %esi
BEGIN_IRQ_SAVE(inc)
	addl $1,  (v)
	adcl $0, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

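/* inc_return: *(v) += 1; return the new value in %edx:%eax */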
#define v %esi
BEGIN_IRQ_SAVE(inc_return)
	movl  (v), %eax
	movl 4(v), %edx
	addl $1, %eax
	adcl $0, %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

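/* dec: *(v) -= 1 */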
#define v %esi
BEGIN_IRQ_SAVE(dec)
	subl $1,  (v)
	sbbl $0, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

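/* dec_return: *(v) -= 1; return the new value in %edx:%eax */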
#define v %esi
BEGIN_IRQ_SAVE(dec_return)
	movl  (v), %eax
	movl 4(v), %edx
	subl $1, %eax
	sbbl $0, %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	RET_IRQ_RESTORE
ENDP
#undef v

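/*
 * add_unless: if *(v) != u, do *(v) += a and return 1 in %eax, otherwise
 * return 0.  'a' is passed in %edx:%eax and 'u' in %edi:%ecx (high:low);
 * the test is done by comparing a + *(v) against a + u.
 */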
#define v %esi
BEGIN_IRQ_SAVE(add_unless)
	addl %eax, %ecx
	adcl %edx, %edi
	addl  (v), %eax
	adcl 4(v), %edx
	cmpl %eax, %ecx
	je 3f
1:
	movl %eax,  (v)
	movl %edx, 4(v)
	movl $1, %eax
2:
	RET_IRQ_RESTORE
3:
	cmpl %edx, %edi
	jne 1b
	xorl %eax, %eax
	jmp 2b
ENDP
#undef v

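/*
 * inc_not_zero: if *(v) != 0, do *(v) += 1 and return 1 in %eax, otherwise
 * return 0 (%eax already holds the zero low word).
 */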
#define v %esi
BEGIN_IRQ_SAVE(inc_not_zero)
	movl  (v), %eax
	movl 4(v), %edx
	testl %eax, %eax
	je 3f
1:
	addl $1, %eax
	adcl $0, %edx
	movl %eax,  (v)
	movl %edx, 4(v)
	movl $1, %eax
2:
	RET_IRQ_RESTORE
3:
	testl %edx, %edx
	jne 1b
	jmp 2b
ENDP
#undef v

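/*
 * dec_if_positive: compute *(v) - 1 and return it in %edx:%eax; store it
 * back only if the result did not go negative.
 */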
#define v %esi
BEGIN_IRQ_SAVE(dec_if_positive)
	movl  (v), %eax
	movl 4(v), %edx
	subl $1, %eax
	sbbl $0, %edx
	js 1f
	movl %eax,  (v)
	movl %edx, 4(v)
1:
	RET_IRQ_RESTORE
ENDP
#undef v