/* Schedule GIMPLE vector statements.
   Copyright (C) 2020-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
23 | #include "backend.h" | |
24 | #include "rtl.h" | |
25 | #include "tree.h" | |
26 | #include "gimple.h" | |
27 | #include "tree-pass.h" | |
28 | #include "ssa.h" | |
29 | #include "expmed.h" | |
30 | #include "optabs-tree.h" | |
31 | #include "tree-eh.h" | |
32 | #include "gimple-iterator.h" | |
33 | #include "gimplify-me.h" | |
34 | #include "gimplify.h" | |
35 | #include "tree-cfg.h" | |
a8d8caca ML |
36 | #include "bitmap.h" |
37 | #include "tree-ssa-dce.h" | |
683e55fa XL |
38 | #include "memmodel.h" |
39 | #include "optabs.h" | |
fddc7f00 | 40 | #include "gimple-fold.h" |
298e76e6 | 41 | #include "internal-fn.h" |
683e55fa XL |
42 | |
43 | /* Expand all ARRAY_REF(VIEW_CONVERT_EXPR) gimple assignments into calls to | |
44 | internal function based on vector type of selected expansion. | |
c30efd8c RD |
45 | |
46 | For vec_set: | |
47 | ||
ddd46293 | 48 | VIEW_CONVERT_EXPR<int[4]>(u)[_1] = i_4(D); |
683e55fa XL |
49 | => |
50 | _7 = u; | |
51 | _8 = .VEC_SET (_7, i_4(D), _1); | |
c30efd8c RD |
52 | u = _8; |
53 | ||
54 | For vec_extract: | |
55 | ||
56 | _3 = VIEW_CONVERT_EXPR<intD.1[4]>(vD.2208)[idx_2(D)]; | |
57 | => | |
58 | _4 = vD.2208; | |
59 | _3 = .VEC_EXTRACT (_4, idx_2(D)); */ | |
683e55fa | 60 | |
e1a41143 | 61 | static bool |
c30efd8c RD |
62 | gimple_expand_vec_set_extract_expr (struct function *fun, |
63 | gimple_stmt_iterator *gsi) | |
683e55fa | 64 | { |
683e55fa XL |
65 | gcall *new_stmt = NULL; |
66 | gassign *ass_stmt = NULL; | |
e1a41143 | 67 | bool cfg_changed = false; |
683e55fa XL |
68 | |
69 | /* Only consider code == GIMPLE_ASSIGN. */ | |
70 | gassign *stmt = dyn_cast<gassign *> (gsi_stmt (*gsi)); | |
71 | if (!stmt) | |
e1a41143 | 72 | return false; |
683e55fa | 73 | |
c30efd8c RD |
74 | bool is_extract = false; |
75 | ||
683e55fa | 76 | tree lhs = gimple_assign_lhs (stmt); |
c30efd8c RD |
77 | tree rhs = gimple_assign_rhs1 (stmt); |
78 | tree val, ref; | |
79 | if (TREE_CODE (lhs) == ARRAY_REF) | |
80 | { | |
81 | /* Assume it is a vec_set. */ | |
82 | val = rhs; | |
83 | ref = lhs; | |
84 | } | |
85 | else if (TREE_CODE (rhs) == ARRAY_REF) | |
86 | { | |
87 | /* vec_extract. */ | |
88 | is_extract = true; | |
89 | val = lhs; | |
90 | ref = rhs; | |
91 | } | |
92 | else | |
e1a41143 | 93 | return false; |
683e55fa | 94 | |
c30efd8c | 95 | tree op0 = TREE_OPERAND (ref, 0); |
683e55fa XL |
96 | if (TREE_CODE (op0) == VIEW_CONVERT_EXPR && DECL_P (TREE_OPERAND (op0, 0)) |
97 | && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))) | |
c30efd8c | 98 | && TYPE_MODE (TREE_TYPE (ref)) |
683e55fa XL |
99 | == TYPE_MODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 0))))) |
100 | { | |
c30efd8c RD |
101 | tree pos = TREE_OPERAND (ref, 1); |
102 | ||
683e55fa XL |
103 | tree view_op0 = TREE_OPERAND (op0, 0); |
104 | machine_mode outermode = TYPE_MODE (TREE_TYPE (view_op0)); | |
c30efd8c RD |
105 | machine_mode extract_mode = TYPE_MODE (TREE_TYPE (ref)); |
106 | ||
871afdc5 | 107 | if (auto_var_in_fn_p (view_op0, fun->decl) |
c30efd8c RD |
108 | && !TREE_ADDRESSABLE (view_op0) |
109 | && ((!is_extract && can_vec_set_var_idx_p (outermode)) | |
110 | || (is_extract | |
111 | && can_vec_extract_var_idx_p (outermode, extract_mode)))) | |
683e55fa XL |
112 | { |
113 | location_t loc = gimple_location (stmt); | |
114 | tree var_src = make_ssa_name (TREE_TYPE (view_op0)); | |
683e55fa XL |
115 | |
116 | ass_stmt = gimple_build_assign (var_src, view_op0); | |
117 | gimple_set_vuse (ass_stmt, gimple_vuse (stmt)); | |
118 | gimple_set_location (ass_stmt, loc); | |
119 | gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT); | |
120 | ||
c30efd8c RD |
121 | if (!is_extract) |
122 | { | |
123 | tree var_dst = make_ssa_name (TREE_TYPE (view_op0)); | |
683e55fa | 124 | |
c30efd8c RD |
125 | new_stmt = gimple_build_call_internal (IFN_VEC_SET, 3, var_src, |
126 | val, pos); | |
127 | ||
128 | gimple_call_set_lhs (new_stmt, var_dst); | |
129 | gimple_set_location (new_stmt, loc); | |
130 | gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); | |
131 | ||
132 | ass_stmt = gimple_build_assign (view_op0, var_dst); | |
133 | gimple_set_location (ass_stmt, loc); | |
134 | gimple_move_vops (ass_stmt, stmt); | |
135 | gsi_insert_before (gsi, ass_stmt, GSI_SAME_STMT); | |
683e55fa | 136 | |
c30efd8c RD |
137 | basic_block bb = gimple_bb (stmt); |
138 | if (gsi_remove (gsi, true) | |
139 | && gimple_purge_dead_eh_edges (bb)) | |
140 | cfg_changed = true; | |
141 | *gsi = gsi_for_stmt (ass_stmt); | |
142 | } | |
143 | else | |
144 | { | |
145 | new_stmt | |
146 | = gimple_build_call_internal (IFN_VEC_EXTRACT, 2, var_src, pos); | |
147 | gimple_call_set_lhs (new_stmt, lhs); | |
148 | ||
149 | gsi_replace (gsi, new_stmt, true); | |
150 | cfg_changed = true; | |
151 | } | |
683e55fa XL |
152 | } |
153 | } | |
154 | ||
e1a41143 | 155 | return cfg_changed; |
683e55fa | 156 | } |
502d63b6 ML |
157 | |
158 | /* Expand all VEC_COND_EXPR gimple assignments into calls to internal | |
159 | function based on type of selected expansion. */ | |
160 | ||
161 | static gimple * | |
871afdc5 | 162 | gimple_expand_vec_cond_expr (struct function *fun, gimple_stmt_iterator *gsi, |
502d63b6 ML |
163 | hash_map<tree, unsigned int> *vec_cond_ssa_name_uses) |
164 | { | |
165 | tree lhs, op0a = NULL_TREE, op0b = NULL_TREE; | |
166 | enum tree_code code; | |
167 | enum tree_code tcode; | |
168 | machine_mode cmp_op_mode; | |
169 | bool unsignedp; | |
170 | enum insn_code icode; | |
171 | imm_use_iterator imm_iter; | |
172 | ||
173 | /* Only consider code == GIMPLE_ASSIGN. */ | |
174 | gassign *stmt = dyn_cast<gassign *> (gsi_stmt (*gsi)); | |
175 | if (!stmt) | |
176 | return NULL; | |
177 | ||
178 | code = gimple_assign_rhs_code (stmt); | |
179 | if (code != VEC_COND_EXPR) | |
180 | return NULL; | |
181 | ||
182 | tree op0 = gimple_assign_rhs1 (stmt); | |
183 | tree op1 = gimple_assign_rhs2 (stmt); | |
184 | tree op2 = gimple_assign_rhs3 (stmt); | |
185 | lhs = gimple_assign_lhs (stmt); | |
186 | machine_mode mode = TYPE_MODE (TREE_TYPE (lhs)); | |
187 | ||
fddc7f00 RB |
188 | /* Lower mask typed, non-vector mode VEC_COND_EXPRs to bitwise operations. |
189 | Those can end up generated by folding and at least for integer mode masks | |
190 | we cannot expect vcond expanders to exist. We lower a ? b : c | |
191 | to (b & a) | (c & ~a). */ | |
5c197b83 RB |
192 | if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (lhs)) |
193 | && !VECTOR_MODE_P (mode)) | |
fddc7f00 | 194 | { |
5c197b83 | 195 | gcc_assert (types_compatible_p (TREE_TYPE (op0), TREE_TYPE (op1))); |
fddc7f00 RB |
196 | gimple_seq stmts = NULL; |
197 | tree type = TREE_TYPE (lhs); | |
198 | location_t loc = gimple_location (stmt); | |
199 | tree tem0 = gimple_build (&stmts, loc, BIT_AND_EXPR, type, op1, op0); | |
200 | tree tem1 = gimple_build (&stmts, loc, BIT_NOT_EXPR, type, op0); | |
201 | tree tem2 = gimple_build (&stmts, loc, BIT_AND_EXPR, type, op2, tem1); | |
202 | tree tem3 = gimple_build (&stmts, loc, BIT_IOR_EXPR, type, tem0, tem2); | |
203 | gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT); | |
204 | return gimple_build_assign (lhs, tem3); | |
205 | } | |
206 | ||
78595e91 | 207 | bool can_compute_op0 = true; |
502d63b6 ML |
208 | gcc_assert (!COMPARISON_CLASS_P (op0)); |
209 | if (TREE_CODE (op0) == SSA_NAME) | |
210 | { | |
211 | unsigned int used_vec_cond_exprs = 0; | |
212 | unsigned int *slot = vec_cond_ssa_name_uses->get (op0); | |
213 | if (slot) | |
214 | used_vec_cond_exprs = *slot; | |
215 | else | |
216 | { | |
217 | gimple *use_stmt; | |
218 | FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, op0) | |
219 | { | |
220 | gassign *assign = dyn_cast<gassign *> (use_stmt); | |
221 | if (assign != NULL | |
222 | && gimple_assign_rhs_code (assign) == VEC_COND_EXPR | |
223 | && gimple_assign_rhs1 (assign) == op0) | |
224 | used_vec_cond_exprs++; | |
225 | } | |
226 | vec_cond_ssa_name_uses->put (op0, used_vec_cond_exprs); | |
227 | } | |
228 | ||
229 | gassign *def_stmt = dyn_cast<gassign *> (SSA_NAME_DEF_STMT (op0)); | |
8f8f8c68 | 230 | if (def_stmt) |
502d63b6 ML |
231 | { |
232 | tcode = gimple_assign_rhs_code (def_stmt); | |
233 | op0a = gimple_assign_rhs1 (def_stmt); | |
234 | op0b = gimple_assign_rhs2 (def_stmt); | |
235 | ||
d0d8a165 | 236 | tree op0_type = TREE_TYPE (op0); |
502d63b6 | 237 | tree op0a_type = TREE_TYPE (op0a); |
78595e91 RS |
238 | if (TREE_CODE_CLASS (tcode) == tcc_comparison) |
239 | can_compute_op0 = expand_vec_cmp_expr_p (op0a_type, op0_type, | |
240 | tcode); | |
3a6e3ad3 PK |
241 | |
242 | /* Try to fold x CMP y ? -1 : 0 to x CMP y. */ | |
78595e91 RS |
243 | if (can_compute_op0 |
244 | && integer_minus_onep (op1) | |
3a6e3ad3 | 245 | && integer_zerop (op2) |
78595e91 | 246 | && TYPE_MODE (TREE_TYPE (lhs)) == TYPE_MODE (TREE_TYPE (op0))) |
3a6e3ad3 PK |
247 | { |
248 | tree conv_op = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), op0); | |
249 | gassign *new_stmt = gimple_build_assign (lhs, conv_op); | |
250 | gsi_replace (gsi, new_stmt, true); | |
251 | return new_stmt; | |
252 | } | |
253 | ||
8f8f8c68 RB |
254 | /* When the compare has EH we do not want to forward it when |
255 | it has multiple uses and in general because of the complication | |
256 | with EH redirection. */ | |
257 | if (stmt_can_throw_internal (fun, def_stmt)) | |
258 | tcode = TREE_CODE (op0); | |
259 | ||
260 | /* If we can compute op0 and have multiple uses, keep the SSA | |
261 | name and use vcond_mask. */ | |
262 | else if (can_compute_op0 | |
263 | && used_vec_cond_exprs >= 2 | |
264 | && (get_vcond_mask_icode (mode, TYPE_MODE (op0_type)) | |
265 | != CODE_FOR_nothing)) | |
266 | tcode = TREE_CODE (op0); | |
502d63b6 ML |
267 | } |
268 | else | |
269 | tcode = TREE_CODE (op0); | |
270 | } | |
271 | else | |
272 | tcode = TREE_CODE (op0); | |
273 | ||
274 | if (TREE_CODE_CLASS (tcode) != tcc_comparison) | |
275 | { | |
276 | gcc_assert (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (op0))); | |
277 | if (get_vcond_mask_icode (mode, TYPE_MODE (TREE_TYPE (op0))) | |
278 | != CODE_FOR_nothing) | |
279 | return gimple_build_call_internal (IFN_VCOND_MASK, 3, op0, op1, op2); | |
280 | /* Fake op0 < 0. */ | |
281 | else | |
282 | { | |
283 | gcc_assert (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (op0))) | |
284 | == MODE_VECTOR_INT); | |
285 | op0a = op0; | |
286 | op0b = build_zero_cst (TREE_TYPE (op0)); | |
287 | tcode = LT_EXPR; | |
288 | } | |
289 | } | |
290 | cmp_op_mode = TYPE_MODE (TREE_TYPE (op0a)); | |
291 | unsignedp = TYPE_UNSIGNED (TREE_TYPE (op0a)); | |
292 | ||
e29dd0eb RS |
293 | gcc_assert (known_eq (GET_MODE_NUNITS (mode), |
294 | GET_MODE_NUNITS (cmp_op_mode))); | |
502d63b6 ML |
295 | |
296 | icode = get_vcond_icode (mode, cmp_op_mode, unsignedp); | |
36f1de95 RB |
297 | /* Some targets do not have vcondeq and only vcond with NE/EQ |
298 | but not vcondu, so make sure to also try vcond here as | |
299 | vcond_icode_p would canonicalize the optab query to. */ | |
300 | if (icode == CODE_FOR_nothing | |
301 | && (tcode == NE_EXPR || tcode == EQ_EXPR) | |
302 | && ((icode = get_vcond_icode (mode, cmp_op_mode, !unsignedp)) | |
303 | != CODE_FOR_nothing)) | |
304 | unsignedp = !unsignedp; | |
502d63b6 ML |
305 | if (icode == CODE_FOR_nothing) |
306 | { | |
307 | if (tcode == LT_EXPR | |
3457dae5 | 308 | && op0a == op0) |
502d63b6 ML |
309 | { |
310 | /* A VEC_COND_EXPR condition could be folded from EQ_EXPR/NE_EXPR | |
311 | into a constant when only get_vcond_eq_icode is supported. | |
3457dae5 | 312 | Try changing it to NE_EXPR. */ |
502d63b6 ML |
313 | tcode = NE_EXPR; |
314 | } | |
298e76e6 RS |
315 | if ((tcode == EQ_EXPR || tcode == NE_EXPR) |
316 | && direct_internal_fn_supported_p (IFN_VCONDEQ, TREE_TYPE (lhs), | |
317 | TREE_TYPE (op0a), | |
318 | OPTIMIZE_FOR_BOTH)) | |
502d63b6 ML |
319 | { |
320 | tree tcode_tree = build_int_cst (integer_type_node, tcode); | |
321 | return gimple_build_call_internal (IFN_VCONDEQ, 5, op0a, op0b, op1, | |
322 | op2, tcode_tree); | |
323 | } | |
502d63b6 | 324 | |
78595e91 RS |
325 | gcc_assert (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (op0)) |
326 | && can_compute_op0 | |
327 | && (get_vcond_mask_icode (mode, TYPE_MODE (TREE_TYPE (op0))) | |
328 | != CODE_FOR_nothing)); | |
329 | return gimple_build_call_internal (IFN_VCOND_MASK, 3, op0, op1, op2); | |
330 | } | |
331 | ||
502d63b6 ML |
332 | tree tcode_tree = build_int_cst (integer_type_node, tcode); |
333 | return gimple_build_call_internal (unsignedp ? IFN_VCONDU : IFN_VCOND, | |
334 | 5, op0a, op0b, op1, op2, tcode_tree); | |
335 | } | |
336 | ||
337 | ||
338 | ||
cbe5f685 RB |
339 | namespace { |
340 | ||
341 | const pass_data pass_data_gimple_isel = | |
342 | { | |
343 | GIMPLE_PASS, /* type */ | |
344 | "isel", /* name */ | |
345 | OPTGROUP_VEC, /* optinfo_flags */ | |
346 | TV_NONE, /* tv_id */ | |
347 | PROP_cfg, /* properties_required */ | |
348 | 0, /* properties_provided */ | |
349 | 0, /* properties_destroyed */ | |
350 | 0, /* todo_flags_start */ | |
351 | TODO_update_ssa, /* todo_flags_finish */ | |
352 | }; | |
502d63b6 | 353 | |
cbe5f685 RB |
354 | class pass_gimple_isel : public gimple_opt_pass |
355 | { | |
356 | public: | |
357 | pass_gimple_isel (gcc::context *ctxt) | |
358 | : gimple_opt_pass (pass_data_gimple_isel, ctxt) | |
359 | {} | |
360 | ||
361 | /* opt_pass methods: */ | |
362 | bool gate (function *) final override | |
363 | { | |
364 | return true; | |
365 | } | |
366 | ||
367 | unsigned int execute (function *fun) final override; | |
368 | }; // class pass_gimple_isel | |
369 | ||
370 | ||
371 | /* Iterate all gimple statements and perform pre RTL expansion | |
372 | GIMPLE massaging to improve instruction selection. */ | |
373 | ||
374 | unsigned int | |
375 | pass_gimple_isel::execute (struct function *fun) | |
502d63b6 ML |
376 | { |
377 | gimple_stmt_iterator gsi; | |
378 | basic_block bb; | |
502d63b6 | 379 | hash_map<tree, unsigned int> vec_cond_ssa_name_uses; |
a8d8caca | 380 | auto_bitmap dce_ssa_names; |
e1a41143 | 381 | bool cfg_changed = false; |
502d63b6 | 382 | |
871afdc5 | 383 | FOR_EACH_BB_FN (bb, fun) |
502d63b6 ML |
384 | { |
385 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | |
386 | { | |
cbe5f685 RB |
387 | /* Pre-expand VEC_COND_EXPRs to .VCOND* internal function |
388 | calls mapping to supported optabs. */ | |
871afdc5 | 389 | gimple *g = gimple_expand_vec_cond_expr (fun, &gsi, |
502d63b6 ML |
390 | &vec_cond_ssa_name_uses); |
391 | if (g != NULL) | |
392 | { | |
393 | tree lhs = gimple_assign_lhs (gsi_stmt (gsi)); | |
394 | gimple_set_lhs (g, lhs); | |
395 | gsi_replace (&gsi, g, false); | |
396 | } | |
683e55fa | 397 | |
cbe5f685 | 398 | /* Recognize .VEC_SET and .VEC_EXTRACT patterns. */ |
c30efd8c | 399 | cfg_changed |= gimple_expand_vec_set_extract_expr (fun, &gsi); |
5877c544 ML |
400 | if (gsi_end_p (gsi)) |
401 | break; | |
cbe5f685 RB |
402 | |
403 | gassign *stmt = dyn_cast <gassign *> (*gsi); | |
404 | if (!stmt) | |
405 | continue; | |
406 | ||
407 | tree_code code = gimple_assign_rhs_code (stmt); | |
408 | tree lhs = gimple_assign_lhs (stmt); | |
409 | if (TREE_CODE_CLASS (code) == tcc_comparison | |
410 | && !has_single_use (lhs)) | |
411 | { | |
412 | /* Duplicate COND_EXPR condition defs when they are | |
413 | comparisons so RTL expansion with the help of TER | |
414 | can perform better if conversion. */ | |
415 | imm_use_iterator imm_iter; | |
416 | use_operand_p use_p; | |
417 | auto_vec<gassign *, 4> cond_exprs; | |
418 | unsigned cnt = 0; | |
419 | FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs) | |
420 | { | |
421 | if (is_gimple_debug (USE_STMT (use_p))) | |
422 | continue; | |
423 | cnt++; | |
424 | if (gimple_bb (USE_STMT (use_p)) == bb | |
425 | && is_gimple_assign (USE_STMT (use_p)) | |
426 | && gimple_assign_rhs1_ptr (USE_STMT (use_p)) == use_p->use | |
427 | && gimple_assign_rhs_code (USE_STMT (use_p)) == COND_EXPR) | |
428 | cond_exprs.safe_push (as_a <gassign *> (USE_STMT (use_p))); | |
429 | } | |
430 | for (unsigned i = cond_exprs.length () == cnt ? 1 : 0; | |
431 | i < cond_exprs.length (); ++i) | |
432 | { | |
433 | gassign *copy = as_a <gassign *> (gimple_copy (stmt)); | |
434 | tree new_def = duplicate_ssa_name (lhs, copy); | |
435 | gimple_assign_set_lhs (copy, new_def); | |
436 | auto gsi2 = gsi_for_stmt (cond_exprs[i]); | |
437 | gsi_insert_before (&gsi2, copy, GSI_SAME_STMT); | |
438 | gimple_assign_set_rhs1 (cond_exprs[i], new_def); | |
439 | update_stmt (cond_exprs[i]); | |
440 | } | |
441 | } | |
502d63b6 ML |
442 | } |
443 | } | |
444 | ||
cbe5f685 | 445 | for (auto it = vec_cond_ssa_name_uses.begin (); |
a8d8caca ML |
446 | it != vec_cond_ssa_name_uses.end (); ++it) |
447 | bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION ((*it).first)); | |
448 | ||
449 | simple_dce_from_worklist (dce_ssa_names); | |
450 | ||
e1a41143 | 451 | return cfg_changed ? TODO_cleanup_cfg : 0; |
502d63b6 ML |
452 | } |
453 | ||
502d63b6 ML |
454 | } // anon namespace |
455 | ||
456 | gimple_opt_pass * | |
457 | make_pass_gimple_isel (gcc::context *ctxt) | |
458 | { | |
459 | return new pass_gimple_isel (ctxt); | |
460 | } | |
461 |