;; Machine description for eBPF.
;; Copyright (C) 2023 Free Software Foundation, Inc.

;; This file is part of GCC.

;; GCC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 3, or (at your option)
;; any later version.

;; GCC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GCC; see the file COPYING3.  If not see
;; <http://www.gnu.org/licenses/>.


(define_mode_iterator AMO [SI DI])

;;; Plain atomic modify operations.

;; Non-fetching atomic add predates all other BPF atomic insns.
;; Use xadd{w,dw} for compatibility with older GAS without support
;; for v3 atomics.  Newer GAS supports "aadd[32]" in line with the
;; other atomic operations.
(define_insn "atomic_add<AMO:mode>"
  [(set (match_operand:AMO 0 "memory_operand" "+m")
        (unspec_volatile:AMO
         [(plus:AMO (match_dup 0)
                    (match_operand:AMO 1 "register_operand" "r"))
          (match_operand:SI 2 "const_int_operand")] ;; Memory model.
         UNSPEC_AADD))]
  ""
  "{xadd<mop>\t%0,%1|lock *(<smop> *)(%w0) += %w1}"
  [(set_attr "type" "atomic")])

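;; For illustration only (not part of the compiler sources): a GNU C
;; fragment along these lines is expected to be expanded through the
;; non-fetching pattern above when the result of the addition is unused.
;; The names "counter" and "bump" are made up for the example; a 64-bit
;; "long" selects the DImode variant.
;;
;;   long counter;
;;
;;   void
;;   bump (long n)
;;   {
;;     /* Result discarded, so the non-fetching atomic_add pattern can be
;;        used instead of the fetching one.  */
;;     __atomic_fetch_add (&counter, n, __ATOMIC_SEQ_CST);
;;   }
;;
;; which should assemble to something like "xadddw [%rX+0],%rY" in GAS
;; syntax, or "lock *(u64 *)(rX + 0) += rY" in pseudo-C syntax, modulo
;; register allocation.
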
(define_insn "atomic_and<AMO:mode>"
  [(set (match_operand:AMO 0 "memory_operand" "+m")
        (unspec_volatile:AMO
         [(and:AMO (match_dup 0)
                   (match_operand:AMO 1 "register_operand" "r"))
          (match_operand:SI 2 "const_int_operand")] ;; Memory model.
         UNSPEC_AAND))]
  "bpf_has_v3_atomics"
  "{aand<msuffix>\t%0,%1|lock *(<smop> *)(%w0) &= %w1}")

(define_insn "atomic_or<AMO:mode>"
  [(set (match_operand:AMO 0 "memory_operand" "+m")
        (unspec_volatile:AMO
         [(ior:AMO (match_dup 0)
                   (match_operand:AMO 1 "register_operand" "r"))
          (match_operand:SI 2 "const_int_operand")] ;; Memory model.
         UNSPEC_AOR))]
  "bpf_has_v3_atomics"
  "{aor<msuffix>\t%0,%1|lock *(<smop> *)(%w0) %|= %w1}")

(define_insn "atomic_xor<AMO:mode>"
  [(set (match_operand:AMO 0 "memory_operand" "+m")
        (unspec_volatile:AMO
         [(xor:AMO (match_dup 0)
                   (match_operand:AMO 1 "register_operand" "r"))
          (match_operand:SI 2 "const_int_operand")] ;; Memory model.
         UNSPEC_AXOR))]
  "bpf_has_v3_atomics"
  "{axor<msuffix>\t%0,%1|lock *(<smop> *)(%w0) ^= %w1}")

;;; Fetching (read-modify-store) versions of atomic operations.

(define_insn "atomic_fetch_add<AMO:mode>"
  [(set (match_operand:AMO 0 "register_operand" "=r")   ; output
        (match_operand:AMO 1 "memory_operand" "+m"))
   (set (match_dup 1)
        (unspec_volatile:AMO
         [(plus:AMO (match_dup 1)
                    (match_operand:AMO 2 "nonmemory_operand" "0")) ; second operand to op
          (match_operand:AMO 3 "const_int_operand")]    ;; Memory model
         UNSPEC_AFADD))]
  "bpf_has_v3_atomics"
  "{afadd<msuffix>\t%1,%0|%w0 = atomic_fetch_add((<smop> *)(%1), %w0)}")

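;; Again for illustration only (not part of the compiler sources): when the
;; result is needed, the same builtin is expected to go through the fetching
;; pattern above instead.  The names "counter" and "bump_and_read" are made
;; up for the example.
;;
;;   long counter;
;;
;;   long
;;   bump_and_read (long n)
;;   {
;;     /* Result used, so the fetching pattern (afadd) is required.  */
;;     return __atomic_fetch_add (&counter, n, __ATOMIC_SEQ_CST);
;;   }
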
(define_insn "atomic_fetch_and<AMO:mode>"
  [(set (match_operand:AMO 0 "register_operand" "=r")
        (match_operand:AMO 1 "memory_operand" "+m"))
   (set (match_dup 1)
        (unspec_volatile:AMO
         [(and:AMO (match_dup 1)
                   (match_operand:AMO 2 "nonmemory_operand" "0"))
          (match_operand:AMO 3 "const_int_operand")]
         UNSPEC_AFAND))]
  "bpf_has_v3_atomics"
  "{afand<msuffix>\t%1,%0|%w0 = atomic_fetch_and((<smop> *)(%1), %w0)}")

(define_insn "atomic_fetch_or<AMO:mode>"
  [(set (match_operand:AMO 0 "register_operand" "=r")
        (match_operand:AMO 1 "memory_operand" "+m"))
   (set (match_dup 1)
        (unspec_volatile:AMO
         [(ior:AMO (match_dup 1)
                   (match_operand:AMO 2 "nonmemory_operand" "0"))
          (match_operand:AMO 3 "const_int_operand")]
         UNSPEC_AFOR))]
  "bpf_has_v3_atomics"
  "{afor<msuffix>\t%1,%0|%w0 = atomic_fetch_or((<smop> *)(%1), %w0)}")

(define_insn "atomic_fetch_xor<AMO:mode>"
  [(set (match_operand:AMO 0 "register_operand" "=r")
        (match_operand:AMO 1 "memory_operand" "+m"))
   (set (match_dup 1)
        (unspec_volatile:AMO
         [(xor:AMO (match_dup 1)
                   (match_operand:AMO 2 "nonmemory_operand" "0"))
          (match_operand:AMO 3 "const_int_operand")]
         UNSPEC_AFXOR))]
  "bpf_has_v3_atomics"
  "{afxor<msuffix>\t%1,%0|%w0 = atomic_fetch_xor((<smop> *)(%1), %w0)}")

;; Mode-dependent suffixes used in the pseudo-C syntax of the atomic
;; exchange and compare-exchange insns below.
(define_mode_attr pcaxsuffix [(SI "32_32") (DI "_64")])

(define_insn "atomic_exchange<AMO:mode>"
  [(set (match_operand:AMO 0 "register_operand" "=r")
        (unspec_volatile:AMO
         [(match_operand:AMO 1 "memory_operand" "+m")
          (match_operand:AMO 3 "const_int_operand")]
         UNSPEC_AXCHG))
   (set (match_dup 1)
        (match_operand:AMO 2 "nonmemory_operand" "0"))]
  "bpf_has_v3_atomics"
  "{axchg<msuffix>\t%1,%0|%w0 = xchg<pcaxsuffix>(%1, %w0)}")

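;; For illustration only (not part of the compiler sources): an atomic
;; exchange of a 64-bit variable is expected to map onto the pattern above.
;; The names "val" and "swap_in" are made up for the example.
;;
;;   long val;
;;
;;   long
;;   swap_in (long newval)
;;   {
;;     /* Returns the previous contents of val.  */
;;     return __atomic_exchange_n (&val, newval, __ATOMIC_SEQ_CST);
;;   }
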
;; The eBPF atomic-compare-and-exchange instruction has the form
;;   acmp [%dst+offset], %src
;; The instruction atomically compares the value addressed by %dst+offset
;; with register R0.  If they match, the value at %dst+offset is overwritten
;; with the value of %src.  Otherwise, no write occurs.  In either case, the
;; original value of %dst+offset is zero-extended and loaded back into R0.
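;;
;; For illustration only (not part of the compiler sources): a strong
;; compare-and-swap written with the GNU C builtin is expected to expand
;; through the define_expand below.  The names "val", "try_update",
;; "expected" and "desired" are made up for the example.
;;
;;   long val;
;;
;;   int
;;   try_update (long expected, long desired)
;;   {
;;     /* Returns nonzero iff val was equal to expected and was updated.  */
;;     return __atomic_compare_exchange_n (&val, &expected, desired,
;;                                         0 /* strong */, __ATOMIC_SEQ_CST,
;;                                         __ATOMIC_SEQ_CST);
;;   }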

(define_expand "atomic_compare_and_swap<AMO:mode>"
  [(match_operand:SI 0 "register_operand" "=r")   ;; bool success
   (match_operand:AMO 1 "register_operand" "=r")  ;; old value
   (match_operand:AMO 2 "memory_operand" "+m")    ;; memory
   (match_operand:AMO 3 "register_operand")       ;; expected
   (match_operand:AMO 4 "register_operand")       ;; desired
   (match_operand:SI 5 "const_int_operand")       ;; is_weak (unused)
   (match_operand:SI 6 "const_int_operand")       ;; success model (unused)
   (match_operand:SI 7 "const_int_operand")]      ;; failure model (unused)
  "bpf_has_v3_atomics"
{
  /* Load the expected value; the insn below constrains this operand to R0.  */
  emit_move_insn (operands[1], operands[3]);

  /* Emit the acmp.  */
  emit_insn (gen_atomic_compare_and_swap<AMO:mode>_1 (operands[1], operands[2],
                                                      operands[3], operands[4]));

  /* Assume that the operation was successful.  */
  emit_move_insn (operands[0], const1_rtx);
  rtx_code_label *success_label = gen_label_rtx ();

  /* Compare the value that was in memory (now in R0/operands[1]) with the
     expected value.  If they are equal, the write occurred; otherwise,
     indicate failure in the output.  */
  emit_cmp_and_jump_insns (operands[1], operands[3], EQ, 0,
                           GET_MODE (operands[1]), 1, success_label);
  emit_move_insn (operands[0], const0_rtx);

  if (success_label)
    {
      emit_label (success_label);
      LABEL_NUSES (success_label) = 1;
    }
  DONE;
})

(define_insn "atomic_compare_and_swap<AMO:mode>_1"
  [(set (match_operand:AMO 0 "register_operand" "+t")  ;; R0 is both input (expected value)
        (unspec_volatile:AMO                           ;; and output (original value)
         [(match_dup 0)                                ;; result depends on R0
          (match_operand:AMO 1 "memory_operand")       ;; memory
          (match_operand:AMO 2 "register_operand")     ;; expected
          (match_operand:AMO 3 "register_operand")]    ;; desired
         UNSPEC_ACMP))]
  "bpf_has_v3_atomics"
  "{acmp<msuffix>\t%1,%3|%w0 = cmpxchg<pcaxsuffix>(%1, %w0, %w3)}")