/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
 * atomic.S: These things are too big to do inline.
 *
 * Copyright (C) 1999 David S. Miller (davem@redhat.com)
 */

#include <linux/config.h>
#include <asm/asi.h>
/* On SMP we need to use memory barriers to ensure
 * correct memory operation ordering; nop these out
 * for uniprocessor builds.
 */
#ifdef CONFIG_SMP
#define ATOMIC_PRE_BARRIER	membar #StoreLoad | #LoadLoad
#define ATOMIC_POST_BARRIER	membar #StoreLoad | #StoreStore
#else
#define ATOMIC_PRE_BARRIER	nop
#define ATOMIC_POST_BARRIER	nop
#endif
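
/* For reference: each membar mask orders one class of earlier
 * accesses against one class of later accesses.  #StoreLoad orders
 * earlier stores before later loads, #LoadLoad earlier loads before
 * later loads, and #StoreStore earlier stores before later stores.
 * The pre-barrier therefore orders everything before the routine
 * against its initial load, and the post-barrier orders the cas
 * store against everything after it, giving the value-returning
 * routines full barrier semantics on SMP.
 */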

	.text

/* Two versions of the atomic routines, one that
 * does not return a value and does not perform
 * memory barriers, and a second that returns
 * a value and performs the barriers.
 */
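
/* For illustration, each routine below is the familiar
 * compare-and-swap loop; in rough C (a sketch only, with cas()
 * standing in for the cas/casx instruction, not a kernel interface):
 *
 *	int atomic_add_ret(int i, atomic_t *v)
 *	{
 *		int old, new;
 *		do {
 *			old = v->counter;
 *			new = old + i;
 *		} while (cas(&v->counter, old, new) != old);
 *		return new;
 *	}
 *
 * cas() atomically stores new if the word still holds old and
 * returns the value it found.  The non-_ret versions are the same
 * loop without the return value and without the barriers.
 */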
	.globl	atomic_add
	.type	atomic_add,#function
atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 nop
	retl
	 nop
	.size	atomic_add, .-atomic_add
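
/* Note: the instruction after a branch or retl above sits in the
 * delay slot and always executes; here it is just a nop, but the
 * value-returning variants below use it to do useful work.
 */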

	.globl	atomic_sub
	.type	atomic_sub,#function
atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 nop
	retl
	 nop
	.size	atomic_sub, .-atomic_sub

	.globl	atomic_add_ret
	.type	atomic_add_ret,#function
atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	add	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 add	%g7, %o0, %g7	/* delay slot: new value = old + %o0 */
	ATOMIC_POST_BARRIER
	retl
	 sra	%g7, 0, %o0	/* sign-extend 32-bit result into %o0 */
	.size	atomic_add_ret, .-atomic_add_ret
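
/* lduw zero-extends the 32-bit counter into %g1, so the arithmetic
 * above operates on the zero-extended value; the final sra by zero
 * sign-extends bits 31:0 of the result so that a negative counter
 * comes back correctly as a C int in %o0.
 */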

	.globl	atomic_sub_ret
	.type	atomic_sub_ret,#function
atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	lduw	[%o1], %g1
	sub	%g1, %o0, %g7
	cas	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%icc, 1b
	 sub	%g7, %o0, %g7	/* delay slot: new value = old - %o0 */
	ATOMIC_POST_BARRIER
	retl
	 sra	%g7, 0, %o0	/* sign-extend 32-bit result into %o0 */
	.size	atomic_sub_ret, .-atomic_sub_ret
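
/* The 64-bit variants below are the same loops, but use ldx/casx on
 * the full 64-bit word, test the %xcc condition codes, and return
 * the result with a plain mov since no sign-extension is needed.
 */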

	.globl	atomic64_add
	.type	atomic64_add,#function
atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 nop
	retl
	 nop
	.size	atomic64_add, .-atomic64_add

	.globl	atomic64_sub
	.type	atomic64_sub,#function
atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 nop
	retl
	 nop
	.size	atomic64_sub, .-atomic64_sub

	.globl	atomic64_add_ret
	.type	atomic64_add_ret,#function
atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	add	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 add	%g7, %o0, %g7	/* delay slot: new value = old + %o0 */
	ATOMIC_POST_BARRIER
	retl
	 mov	%g7, %o0
	.size	atomic64_add_ret, .-atomic64_add_ret

	.globl	atomic64_sub_ret
	.type	atomic64_sub_ret,#function
atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
	ATOMIC_PRE_BARRIER
1:	ldx	[%o1], %g1
	sub	%g1, %o0, %g7
	casx	[%o1], %g1, %g7
	cmp	%g1, %g7
	bne,pn	%xcc, 1b
	 sub	%g7, %o0, %g7	/* delay slot: new value = old - %o0 */
	ATOMIC_POST_BARRIER
	retl
	 mov	%g7, %o0
	.size	atomic64_sub_ret, .-atomic64_sub_ret