/* RTL manipulation functions exported by Pointer Bounds Checker.
   Copyright (C) 2014-2015 Free Software Foundation, Inc.
   Contributed by Ilya Enkovich (ilya.enkovich@intel.com)

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
20 | ||
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "flags.h"
#include "alias.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "target.h"
#include "internal-fn.h"
#include "fold-const.h"
#include "rtl-chkp.h"
#include "tree-chkp.h"
d5e254e1 IE |
44 | |
/* Map from a tree node to the bounds rtx associated with it via
   chkp_set_rtl_bounds.  Allocated lazily on first insertion and
   destroyed by chkp_reset_rtl_bounds.  */
static hash_map<tree, rtx> *chkp_rtx_bounds_map;
46 | ||
47 | /* Get bounds rtx associated with NODE via | |
48 | chkp_set_rtl_bounds call. */ | |
49 | rtx | |
50 | chkp_get_rtl_bounds (tree node) | |
51 | { | |
52 | rtx *slot; | |
53 | ||
54 | if (!chkp_rtx_bounds_map) | |
55 | return NULL_RTX; | |
56 | ||
57 | slot = chkp_rtx_bounds_map->get (node); | |
58 | return slot ? *slot : NULL_RTX; | |
59 | } | |
60 | ||
61 | /* Associate bounds rtx VAL with NODE. */ | |
62 | void | |
63 | chkp_set_rtl_bounds (tree node, rtx val) | |
64 | { | |
65 | if (!chkp_rtx_bounds_map) | |
66 | chkp_rtx_bounds_map = new hash_map<tree, rtx>; | |
67 | ||
68 | chkp_rtx_bounds_map->put (node, val); | |
69 | } | |
70 | ||
71 | /* Reset all bounds stored via chkp_set_rtl_bounds. */ | |
72 | void | |
73 | chkp_reset_rtl_bounds () | |
74 | { | |
75 | if (!chkp_rtx_bounds_map) | |
76 | return; | |
77 | ||
78 | delete chkp_rtx_bounds_map; | |
79 | chkp_rtx_bounds_map = NULL; | |
80 | } | |
81 | ||
82 | /* Split SLOT identifying slot for function value or | |
83 | argument into two parts SLOT_VAL and SLOT_BND. | |
84 | First is the slot for regular value and the other one is | |
85 | for bounds. */ | |
86 | void | |
87 | chkp_split_slot (rtx slot, rtx *slot_val, rtx *slot_bnd) | |
88 | { | |
89 | int i; | |
90 | int val_num = 0; | |
91 | int bnd_num = 0; | |
92 | rtx *val_tmps; | |
93 | rtx *bnd_tmps; | |
94 | ||
95 | *slot_bnd = 0; | |
96 | ||
97 | if (!slot | |
98 | || GET_CODE (slot) != PARALLEL) | |
99 | { | |
100 | *slot_val = slot; | |
101 | return; | |
102 | } | |
103 | ||
104 | val_tmps = XALLOCAVEC (rtx, XVECLEN (slot, 0)); | |
105 | bnd_tmps = XALLOCAVEC (rtx, XVECLEN (slot, 0)); | |
106 | ||
107 | for (i = 0; i < XVECLEN (slot, 0); i++) | |
108 | { | |
109 | rtx elem = XVECEXP (slot, 0, i); | |
110 | rtx reg = GET_CODE (elem) == EXPR_LIST ? XEXP (elem, 0) : elem; | |
111 | ||
112 | if (!reg) | |
113 | continue; | |
114 | ||
115 | if (POINTER_BOUNDS_MODE_P (GET_MODE (reg)) || CONST_INT_P (reg)) | |
116 | bnd_tmps[bnd_num++] = elem; | |
117 | else | |
118 | val_tmps[val_num++] = elem; | |
119 | } | |
120 | ||
121 | gcc_assert (val_num); | |
122 | ||
123 | if (!bnd_num) | |
124 | { | |
125 | *slot_val = slot; | |
126 | return; | |
127 | } | |
128 | ||
129 | if ((GET_CODE (val_tmps[0]) == EXPR_LIST) || (val_num > 1)) | |
130 | *slot_val = gen_rtx_PARALLEL (GET_MODE (slot), | |
131 | gen_rtvec_v (val_num, val_tmps)); | |
132 | else | |
133 | *slot_val = val_tmps[0]; | |
134 | ||
135 | if ((GET_CODE (bnd_tmps[0]) == EXPR_LIST) || (bnd_num > 1)) | |
136 | *slot_bnd = gen_rtx_PARALLEL (VOIDmode, | |
137 | gen_rtvec_v (bnd_num, bnd_tmps)); | |
138 | else | |
139 | *slot_bnd = bnd_tmps[0]; | |
140 | } | |
141 | ||
142 | /* Join previously splitted to VAL and BND rtx for function | |
143 | value or argument and return it. */ | |
144 | rtx | |
145 | chkp_join_splitted_slot (rtx val, rtx bnd) | |
146 | { | |
147 | rtx res; | |
148 | int i, n = 0; | |
149 | ||
150 | if (!bnd) | |
151 | return val; | |
152 | ||
153 | if (GET_CODE (val) == PARALLEL) | |
154 | n += XVECLEN (val, 0); | |
155 | else | |
156 | n++; | |
157 | ||
158 | if (GET_CODE (bnd) == PARALLEL) | |
159 | n += XVECLEN (bnd, 0); | |
160 | else | |
161 | n++; | |
162 | ||
163 | res = gen_rtx_PARALLEL (GET_MODE (val), rtvec_alloc (n)); | |
164 | ||
165 | n = 0; | |
166 | ||
167 | if (GET_CODE (val) == PARALLEL) | |
168 | for (i = 0; i < XVECLEN (val, 0); i++) | |
169 | XVECEXP (res, 0, n++) = XVECEXP (val, 0, i); | |
170 | else | |
171 | XVECEXP (res, 0, n++) = val; | |
172 | ||
173 | if (GET_CODE (bnd) == PARALLEL) | |
174 | for (i = 0; i < XVECLEN (bnd, 0); i++) | |
175 | XVECEXP (res, 0, n++) = XVECEXP (bnd, 0, i); | |
176 | else | |
177 | XVECEXP (res, 0, n++) = bnd; | |
178 | ||
179 | return res; | |
180 | } | |
181 | ||
182 | /* If PAR is PARALLEL holding registers then transform | |
183 | it into PARALLEL holding EXPR_LISTs of those regs | |
184 | and zero constant (similar to how function value | |
185 | on multiple registers looks like). */ | |
186 | void | |
187 | chkp_put_regs_to_expr_list (rtx par) | |
188 | { | |
189 | int n; | |
190 | ||
191 | if (GET_CODE (par) != PARALLEL | |
192 | || GET_CODE (XVECEXP (par, 0, 0)) == EXPR_LIST) | |
193 | return; | |
194 | ||
195 | for (n = 0; n < XVECLEN (par, 0); n++) | |
196 | XVECEXP (par, 0, n) = gen_rtx_EXPR_LIST (VOIDmode, | |
197 | XVECEXP (par, 0, n), | |
198 | const0_rtx); | |
199 | } | |
200 | ||
201 | /* Search rtx PAR describing function return value for an | |
202 | item related to value at offset OFFS and return it. | |
203 | Return NULL if item was not found. */ | |
204 | rtx | |
205 | chkp_get_value_with_offs (rtx par, rtx offs) | |
206 | { | |
207 | int n; | |
208 | ||
209 | gcc_assert (GET_CODE (par) == PARALLEL); | |
210 | ||
211 | for (n = 0; n < XVECLEN (par, 0); n++) | |
212 | { | |
213 | rtx par_offs = XEXP (XVECEXP (par, 0, n), 1); | |
214 | if (INTVAL (offs) == INTVAL (par_offs)) | |
215 | return XEXP (XVECEXP (par, 0, n), 0); | |
216 | } | |
217 | ||
218 | return NULL; | |
219 | } | |
220 | ||
/* Emit instructions to store BOUNDS for pointer VALUE
   stored in MEM.
   Function is used by expand to pass bounds for args
   passed on stack.  */
void
chkp_emit_bounds_store (rtx bounds, rtx value, rtx mem)
{
  gcc_assert (MEM_P (mem));

  if (REG_P (bounds) || CONST_INT_P (bounds))
    {
      rtx ptr;

      /* Get the pointer value itself: either it already lives in a
	 register, or it has to be loaded from VALUE's memory slot.  */
      if (REG_P (value))
	ptr = value;
      else
	{
	  rtx slot = adjust_address (value, Pmode, 0);
	  ptr = gen_reg_rtx (Pmode);
	  emit_move_insn (ptr, slot);
	}

      /* A CONST_INT here identifies which bounds to load rather than
	 the bounds themselves; materialize them via the target hook
	 before storing.  */
      if (CONST_INT_P (bounds))
	bounds = targetm.calls.load_bounds_for_arg (value, ptr, bounds);

      targetm.calls.store_bounds_for_arg (ptr, mem,
					  bounds, NULL);
    }
  else
    {
      /* Multiple bounds case: BOUNDS is a PARALLEL of
	 (bounds-reg, offset) pairs — store bounds for the pointer
	 found at each offset.  */
      int i;

      gcc_assert (GET_CODE (bounds) == PARALLEL);
      gcc_assert (GET_CODE (value) == PARALLEL || MEM_P (value) || REG_P (value));

      for (i = 0; i < XVECLEN (bounds, 0); i++)
	{
	  rtx reg = XEXP (XVECEXP (bounds, 0, i), 0);
	  rtx offs = XEXP (XVECEXP (bounds, 0, i), 1);
	  rtx slot = adjust_address (mem, Pmode, INTVAL (offs));
	  rtx ptr;

	  /* Fetch the pointer at OFFS from whichever form VALUE
	     takes: a matching PARALLEL element, a load from memory,
	     or a subreg of a multi-word register value.  */
	  if (GET_CODE (value) == PARALLEL)
	    ptr = chkp_get_value_with_offs (value, offs);
	  else if (MEM_P (value))
	    {
	      rtx tmp = adjust_address (value, Pmode, INTVAL (offs));
	      ptr = gen_reg_rtx (Pmode);
	      emit_move_insn (ptr, tmp);
	    }
	  else
	    ptr = gen_rtx_SUBREG (Pmode, value, INTVAL (offs));

	  targetm.calls.store_bounds_for_arg (ptr, slot, reg, NULL);
	}
    }
}
278 | ||
/* Emit code to copy bounds for structure VALUE of type TYPE
   copied to SLOT.  */
void
chkp_copy_bounds_for_stack_parm (rtx slot, rtx value, tree type)
{
  bitmap have_bound;
  bitmap_iterator bi;
  unsigned i;
  rtx tmp = NULL, bnd;

  gcc_assert (TYPE_SIZE (type));
  gcc_assert (MEM_P (value));
  gcc_assert (MEM_P (slot));
  gcc_assert (RECORD_OR_UNION_TYPE_P (type));

  /* Collect into HAVE_BOUND the pointer-sized slot indexes inside
     TYPE that carry bounds.  */
  bitmap_obstack_initialize (NULL);
  have_bound = BITMAP_ALLOC (NULL);
  chkp_find_bound_slots (type, have_bound);

  EXECUTE_IF_SET_IN_BITMAP (have_bound, 0, i, bi)
    {
      /* I counts pointer-sized slots; scale it to a byte offset.  */
      rtx ptr = adjust_address (value, Pmode, i * POINTER_SIZE / 8);
      rtx to = adjust_address (slot, Pmode, i * POINTER_SIZE / 8);

      /* One scratch register is reused for every pointer loaded.  */
      if (!tmp)
	tmp = gen_reg_rtx (Pmode);

      emit_move_insn (tmp, ptr);
      bnd = targetm.calls.load_bounds_for_arg (ptr, tmp, NULL);
      targetm.calls.store_bounds_for_arg (tmp, to, bnd, NULL);
    }

  BITMAP_FREE (have_bound);
  bitmap_obstack_release (NULL);
}