/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  None are needed,
   because the insn that sets CC0 is always immediately before the
   insn that tests it.  So we always regard a branch insn as having
   a logical link to the preceding insn.  The same is true for an
   insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c is not completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
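
/* As a hypothetical illustration (insns and register numbers invented),
   given a log link from the second insn below back to the first,

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (reg:SI 101) (mult:SI (reg:SI 100) (const_int 2)))

   combine would substitute the first SET's source into the second,
   producing

	(set (reg:SI 101) (mult:SI (plus:SI (reg:SI 99) (const_int 4))
				   (const_int 2)))

   and keep the result only if reg 100 is not needed afterward and the
   simplified pattern matches an insn in the machine description.  */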

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
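
/* UIDs above max_uid_cuid belong to insns created after uid_cuid was
   allocated; those are resolved by the out-of-line insn_cuid.  */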

/* In case BITS_PER_WORD == HOST_BITS_PER_WIDE_INT, shifting by
   BITS_PER_WORD would invoke undefined behavior.  Work around it.  */

#define UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD(val) \
  (((unsigned HOST_WIDE_INT) (val) << (BITS_PER_WORD - 1)) << 1)

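/* For example, if BITS_PER_WORD and HOST_BITS_PER_WIDE_INT are both 64,
   `(val) << 64' would be undefined, while `((val) << 63) << 1' is well
   defined and yields 0, the intended modular result.  */
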
/* Maximum register number, which is the size of the tables below.  */

static unsigned int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to nonzero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is nonzero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set nonzero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
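
/* For instance (a made-up example), if register 100 was last set by
   (set (reg:SI 100) (and:SI (reg:SI 99) (const_int 15))), then
   reg_last_set_value[100] lets us see that a later
   (and:SI (reg:SI 100) (const_int 255)) is redundant.  */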
243 | ||
0f41302f | 244 | /* Record last value assigned to (hard or pseudo) register n. */ |
230d793d RS |
245 | |
246 | static rtx *reg_last_set_value; | |
247 | ||
248 | /* Record the value of label_tick when the value for register n is placed in | |
249 | reg_last_set_value[n]. */ | |
250 | ||
568356af | 251 | static int *reg_last_set_label; |
230d793d RS |
252 | |
253 | /* Record the value of label_tick when an expression involving register n | |
0f41302f | 254 | is placed in reg_last_set_value. */ |
230d793d | 255 | |
568356af | 256 | static int *reg_last_set_table_tick; |
230d793d | 257 | |
da7d8304 | 258 | /* Set nonzero if references to register n in expressions should not be |
230d793d RS |
259 | used. */ |
260 | ||
261 | static char *reg_last_set_invalid; | |
262 | ||
0f41302f | 263 | /* Incremented for each label. */ |
230d793d | 264 | |
568356af | 265 | static int label_tick; |
230d793d RS |
266 | |
267 | /* Some registers that are set more than once and used in more than one | |
268 | basic block are nevertheless always set in similar ways. For example, | |
269 | a QImode register may be loaded from memory in two places on a machine | |
270 | where byte loads zero extend. | |
271 | ||
951553af | 272 | We record in the following array what we know about the nonzero |
230d793d RS |
273 | bits of a register, specifically which bits are known to be zero. |
274 | ||
275 | If an entry is zero, it means that we don't know anything special. */ | |
276 | ||
55310dad | 277 | static unsigned HOST_WIDE_INT *reg_nonzero_bits; |
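
/* E.g., on a machine where byte loads zero extend, a pseudo loaded
   only from QImode memory would have an entry of 0xff here: every
   bit above the low byte is known to be zero.  */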

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static unsigned char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};
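
/* Each SUBST or SUBST_INT pushes one of these records on undobuf.undos.
   undo_all walks that chain storing each old_contents back through the
   saved where pointer; undo_commit instead accepts the changes.  Either
   way the records are recycled onto undobuf.frees.  */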

/* Record a bunch of changes to be undone, chained through the `next'
   field of struct undo.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PARAMS ((rtx *, rtx));
static void do_SUBST_INT		PARAMS ((int *, int));
static void init_reg_last_arrays	PARAMS ((void));
static void setup_incoming_promotions   PARAMS ((void));
static void set_nonzero_bits_and_sign_copies  PARAMS ((rtx, rtx, void *));
static int cant_combine_insn_p		PARAMS ((rtx));
static int can_combine_p		PARAMS ((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p		PARAMS ((rtx));
static int combinable_i3pat		PARAMS ((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv		PARAMS ((rtx));
static rtx try_combine			PARAMS ((rtx, rtx, rtx, int *));
static void undo_all			PARAMS ((void));
static void undo_commit			PARAMS ((void));
static rtx *find_split_point		PARAMS ((rtx *, rtx));
static rtx subst			PARAMS ((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx		PARAMS ((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else	PARAMS ((rtx));
static rtx simplify_set			PARAMS ((rtx));
static rtx simplify_logical		PARAMS ((rtx, int));
static rtx expand_compound_operation	PARAMS ((rtx));
static rtx expand_field_assignment	PARAMS ((rtx));
static rtx make_extraction		PARAMS ((enum machine_mode, rtx, HOST_WIDE_INT,
						 rtx, unsigned HOST_WIDE_INT, int,
						 int, int));
static rtx extract_left_shift		PARAMS ((rtx, int));
static rtx make_compound_operation	PARAMS ((rtx, enum rtx_code));
static int get_pos_from_mask		PARAMS ((unsigned HOST_WIDE_INT,
						 unsigned HOST_WIDE_INT *));
static rtx force_to_mode		PARAMS ((rtx, enum machine_mode,
						 unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond		PARAMS ((rtx, rtx *, rtx *));
static rtx known_cond			PARAMS ((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PARAMS ((rtx, rtx));
static rtx make_field_assignment	PARAMS ((rtx));
static rtx apply_distributive_law	PARAMS ((rtx));
static rtx simplify_and_const_int	PARAMS ((rtx, enum machine_mode, rtx,
						 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PARAMS ((rtx, enum machine_mode));
static unsigned int num_sign_bit_copies  PARAMS ((rtx, enum machine_mode));
static int merge_outer_ops		PARAMS ((enum rtx_code *, HOST_WIDE_INT *,
						 enum rtx_code, HOST_WIDE_INT,
						 enum machine_mode, int *));
static rtx simplify_shift_const	PARAMS ((rtx, enum rtx_code, enum machine_mode,
					 rtx, int));
static int recog_for_combine	PARAMS ((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PARAMS ((enum machine_mode, rtx));
static rtx gen_binary		PARAMS ((enum rtx_code, enum machine_mode,
					 rtx, rtx));
static enum rtx_code simplify_comparison  PARAMS ((enum rtx_code, rtx *, rtx *));
static void update_table_tick	PARAMS ((rtx));
static void record_value_for_reg  PARAMS ((rtx, rtx, rtx));
static void check_promoted_subreg PARAMS ((rtx, rtx));
static void record_dead_and_set_regs_1  PARAMS ((rtx, rtx, void *));
static void record_dead_and_set_regs  PARAMS ((rtx));
static int get_last_value_validate  PARAMS ((rtx *, rtx, int, int));
static rtx get_last_value	PARAMS ((rtx));
static int use_crosses_set_p	PARAMS ((rtx, int));
static void reg_dead_at_p_1	PARAMS ((rtx, rtx, void *));
static int reg_dead_at_p	PARAMS ((rtx, rtx));
static void move_deaths		PARAMS ((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p	PARAMS ((rtx, rtx));
static void distribute_notes	PARAMS ((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PARAMS ((rtx));
static void mark_used_regs_combine PARAMS ((rtx));
static int insn_cuid		PARAMS ((rtx));
static void record_promoted_value PARAMS ((rtx, rtx));
static rtx reversed_comparison  PARAMS ((rtx, enum machine_mode, rtx, rtx));
static enum rtx_code combine_reversed_comparison_code PARAMS ((rtx));
\f
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && GET_CODE (newval) == CONST_INT)
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
	 that is a valid sign-extension for the original mode.  */
      if (INTVAL (newval) != trunc_int_for_mode (INTVAL (newval),
						 GET_MODE (oldval)))
	abort ();

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
	 CONST_INT is not valid, because after the replacement, the
	 original mode would be gone.  Unfortunately, we can't tell
	 when do_SUBST is called to replace the operand thereof, so we
	 perform this test on oldval instead, checking whether an
	 invalid replacement took place before we got here.  */
      if ((GET_CODE (oldval) == SUBREG
	   && GET_CODE (SUBREG_REG (oldval)) == CONST_INT)
	  || (GET_CODE (oldval) == ZERO_EXTEND
	      && GET_CODE (XEXP (oldval, 0)) == CONST_INT))
	abort ();
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST(&(INTO), (NEWVAL))
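
/* A typical use is SUBST (SET_SRC (x), new_src): the old source is
   saved in the undo buffer before being overwritten, so a combination
   that ultimately fails can be rolled back by undo_all.  */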
463 | ||
464 | /* Similar to SUBST, but NEWVAL is an int expression. Note that substitution | |
465 | for the value of a HOST_WIDE_INT value (including CONST_INT) is | |
466 | not safe. */ | |
467 | ||
468 | static void | |
663522cb | 469 | do_SUBST_INT (into, newval) |
3129af4c | 470 | int *into, newval; |
76095e2f RH |
471 | { |
472 | struct undo *buf; | |
3129af4c | 473 | int oldval = *into; |
76095e2f RH |
474 | |
475 | if (oldval == newval) | |
476 | return; | |
477 | ||
478 | if (undobuf.frees) | |
479 | buf = undobuf.frees, undobuf.frees = buf->next; | |
480 | else | |
481 | buf = (struct undo *) xmalloc (sizeof (struct undo)); | |
482 | ||
483 | buf->is_int = 1; | |
484 | buf->where.i = into; | |
485 | buf->old_contents.i = oldval; | |
486 | *into = newval; | |
487 | ||
488 | buf->next = undobuf.undos, undobuf.undos = buf; | |
489 | } | |
490 | ||
491 | #define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL)) | |
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
int
combine_instructions (f, nregs)
     rtx f;
     unsigned int nregs;
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  int i;
  rtx links, nextlinks;

  int new_direct_jump_p = 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies
    = (unsigned char *) xcalloc (nregs, sizeof (unsigned char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_zero (refresh_blocks);

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (INSN_P (insn))
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  FOR_EACH_BB (this_basic_block)
    {
      for (insn = this_basic_block->head;
	   insn != NEXT_INSN (this_basic_block->end);
	   insn = next ? next : NEXT_INSN (insn))
	{
	  next = 0;

	  if (GET_CODE (insn) == CODE_LABEL)
	    label_tick++;

	  else if (INSN_P (insn))
	    {
	      /* See if we know about function return values before this
		 insn based upon SUBREG flags.  */
	      check_promoted_subreg (insn, PATTERN (insn));

	      /* Try this insn with each insn it links back to.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if ((next = try_combine (insn, XEXP (links, 0),
					 NULL_RTX, &new_direct_jump_p)) != 0)
		  goto retry;

	      /* Try each sequence of three linked insns ending with this one.  */

	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		{
		  rtx link = XEXP (links, 0);

		  /* If the linked insn has been replaced by a note, then there
		     is no point in pursuing this chain any further.  */
		  if (GET_CODE (link) == NOTE)
		    continue;

		  for (nextlinks = LOG_LINKS (link);
		       nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, link,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

#ifdef HAVE_cc0
	      /* Try to combine a jump insn that uses CC0
		 with a preceding insn that sets CC0, and maybe with its
		 logical predecessor as well.
		 This is how we make decrement-and-branch insns.
		 We need this special code because data flow connections
		 via CC0 do not get entered in LOG_LINKS.  */

	      if (GET_CODE (insn) == JUMP_INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev)))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Do the same for an insn that explicitly references CC0.  */
	      if (GET_CODE (insn) == INSN
		  && (prev = prev_nonnote_insn (insn)) != 0
		  && GET_CODE (prev) == INSN
		  && sets_cc0_p (PATTERN (prev))
		  && GET_CODE (PATTERN (insn)) == SET
		  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
		{
		  if ((next = try_combine (insn, prev,
					   NULL_RTX, &new_direct_jump_p)) != 0)
		    goto retry;

		  for (nextlinks = LOG_LINKS (prev); nextlinks;
		       nextlinks = XEXP (nextlinks, 1))
		    if ((next = try_combine (insn, prev,
					     XEXP (nextlinks, 0),
					     &new_direct_jump_p)) != 0)
		      goto retry;
		}

	      /* Finally, see if any of the insns that this insn links to
		 explicitly references CC0.  If so, try this insn, that insn,
		 and its predecessor if it sets CC0.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		if (GET_CODE (XEXP (links, 0)) == INSN
		    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
		    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		    && GET_CODE (prev) == INSN
		    && sets_cc0_p (PATTERN (prev))
		    && (next = try_combine (insn, XEXP (links, 0),
					    prev, &new_direct_jump_p)) != 0)
		  goto retry;
#endif

	      /* Try combining an insn with two different insns whose results it
		 uses.  */
	      for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
		for (nextlinks = XEXP (links, 1); nextlinks;
		     nextlinks = XEXP (nextlinks, 1))
		  if ((next = try_combine (insn, XEXP (links, 0),
					   XEXP (nextlinks, 0),
					   &new_direct_jump_p)) != 0)
		    goto retry;

	      if (GET_CODE (insn) != NOTE)
		record_dead_and_set_regs (insn);

	    retry:
	      ;
	    }
	}
    }
  clear_bb_flags ();

  EXECUTE_IF_SET_IN_SBITMAP (refresh_blocks, 0, i,
			     BASIC_BLOCK (i)->flags |= BB_DIRTY);
  new_direct_jump_p |= purge_all_dead_edges (0);
  delete_noop_moves (f);

  update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
				    PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
				    | PROP_KILL_DEAD_CODE);

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}
\f
/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  unsigned int nregs = combine_max_regno;

  memset ((char *) reg_last_death, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_value, 0, nregs * sizeof (rtx));
  memset ((char *) reg_last_set_table_tick, 0, nregs * sizeof (int));
  memset ((char *) reg_last_set_label, 0, nregs * sizeof (int));
  memset (reg_last_set_invalid, 0, nregs * sizeof (char));
  memset ((char *) reg_last_set_mode, 0, nregs * sizeof (enum machine_mode));
  memset ((char *) reg_last_set_nonzero_bits, 0, nregs * sizeof (HOST_WIDE_INT));
  memset (reg_last_set_sign_bit_copies, 0, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  unsigned int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  unsigned int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Don't call nonzero_bits if it cannot change anything.  */
	  if (reg_nonzero_bits[REGNO (x)] != ~(unsigned HOST_WIDE_INT) 0)
	    reg_nonzero_bits[REGNO (x)]
	      |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
956d6950 JL |
1214 | /* Check if PAT is an insn - or a part of it - used to set up an |
1215 | argument for a function in a hard register. */ | |
1216 | ||
1217 | static int | |
1218 | sets_function_arg_p (pat) | |
1219 | rtx pat; | |
1220 | { | |
1221 | int i; | |
1222 | rtx inner_dest; | |
1223 | ||
1224 | switch (GET_CODE (pat)) | |
1225 | { | |
1226 | case INSN: | |
1227 | return sets_function_arg_p (PATTERN (pat)); | |
1228 | ||
1229 | case PARALLEL: | |
1230 | for (i = XVECLEN (pat, 0); --i >= 0;) | |
1231 | if (sets_function_arg_p (XVECEXP (pat, 0, i))) | |
1232 | return 1; | |
1233 | ||
1234 | break; | |
1235 | ||
1236 | case SET: | |
1237 | inner_dest = SET_DEST (pat); | |
1238 | while (GET_CODE (inner_dest) == STRICT_LOW_PART | |
1239 | || GET_CODE (inner_dest) == SUBREG | |
1240 | || GET_CODE (inner_dest) == ZERO_EXTRACT) | |
1241 | inner_dest = XEXP (inner_dest, 0); | |
1242 | ||
1243 | return (GET_CODE (inner_dest) == REG | |
1244 | && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER | |
1245 | && FUNCTION_ARG_REGNO_P (REGNO (inner_dest))); | |
1d300e19 KG |
1246 | |
1247 | default: | |
1248 | break; | |
956d6950 JL |
1249 | } |
1250 | ||
1251 | return 0; | |
1252 | } | |
1253 | ||
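/* For illustration only (hypothetical registers; assumes hard reg 0
   satisfies FUNCTION_ARG_REGNO_P on the target):

     (set (reg:SI 0) (reg:SI 100))

   would be recognized as setting up a function argument, while

     (set (reg:SI 100) (reg:SI 0))

   would not, since its destination is a pseudo rather than an
   argument register.  */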
230d793d RS |
1254 | /* LOC is the location within I3 that contains its pattern or the component |
1255 | of a PARALLEL of the pattern. We check that it is valid for combining. |
1256 | ||
1257 | One problem is that if I3 modifies its output, as opposed to replacing it |
1258 | entirely, we can't allow the output to contain I2DEST or I1DEST as doing | |
1259 | so would produce an insn that is not equivalent to the original insns. | |
1260 | ||
1261 | Consider: | |
1262 | ||
1263 | (set (reg:DI 101) (reg:DI 100)) | |
1264 | (set (subreg:SI (reg:DI 101) 0) <foo>) | |
1265 | ||
1266 | This is NOT equivalent to: | |
1267 | ||
1268 | (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>) | |
23190837 | 1269 | (set (reg:DI 101) (reg:DI 100))]) |
230d793d RS |
1270 | |
1271 | Not only does this modify 100 (in which case it might still be valid | |
663522cb | 1272 | if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100. |
230d793d RS |
1273 | |
1274 | We can also run into a problem if I2 sets a register that I1 | |
1275 | uses and I1 gets directly substituted into I3 (not via I2). In that | |
1276 | case, we would be getting the wrong value of I2DEST into I3, so we | |
1277 | must reject the combination. This case occurs when I2 and I1 both | |
1278 | feed into I3, rather than when I1 feeds into I2, which feeds into I3. | |
da7d8304 | 1279 | If I1_NOT_IN_SRC is nonzero, it means that finding I1DEST in the source |
230d793d RS |
1280 | of a SET must prevent combination from occurring. |
1281 | ||
230d793d RS |
1282 | Before doing the above check, we first try to expand a field assignment |
1283 | into a set of logical operations. | |
1284 | ||
da7d8304 | 1285 | If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which |
230d793d RS |
1286 | we place a register that is both set and used within I3. If more than one |
1287 | such register is detected, we fail. | |
1288 | ||
1289 | Return 1 if the combination is valid, zero otherwise. */ | |
1290 | ||
1291 | static int | |
1292 | combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed) | |
1293 | rtx i3; | |
1294 | rtx *loc; | |
1295 | rtx i2dest; | |
1296 | rtx i1dest; | |
1297 | int i1_not_in_src; | |
1298 | rtx *pi3dest_killed; | |
1299 | { | |
1300 | rtx x = *loc; | |
1301 | ||
1302 | if (GET_CODE (x) == SET) | |
1303 | { | |
1304 | rtx set = expand_field_assignment (x); | |
1305 | rtx dest = SET_DEST (set); | |
1306 | rtx src = SET_SRC (set); | |
29a82058 | 1307 | rtx inner_dest = dest; |
663522cb | 1308 | |
29a82058 JL |
1309 | #if 0 |
1310 | rtx inner_src = src; | |
1311 | #endif | |
230d793d RS |
1312 | |
1313 | SUBST (*loc, set); | |
1314 | ||
1315 | while (GET_CODE (inner_dest) == STRICT_LOW_PART | |
1316 | || GET_CODE (inner_dest) == SUBREG | |
1317 | || GET_CODE (inner_dest) == ZERO_EXTRACT) | |
1318 | inner_dest = XEXP (inner_dest, 0); | |
1319 | ||
1320 | /* We probably don't need this any more now that LIMIT_RELOAD_CLASS | |
1321 | was added. */ | |
1322 | #if 0 | |
1323 | while (GET_CODE (inner_src) == STRICT_LOW_PART | |
1324 | || GET_CODE (inner_src) == SUBREG | |
1325 | || GET_CODE (inner_src) == ZERO_EXTRACT) | |
1326 | inner_src = XEXP (inner_src, 0); | |
1327 | ||
1328 | /* If it is better that two different modes keep two different pseudos, | |
1329 | avoid combining them. This avoids producing the following pattern | |
1330 | on a 386: | |
1331 | (set (subreg:SI (reg/v:QI 21) 0) | |
1332 | (lshiftrt:SI (reg/v:SI 20) | |
1333 | (const_int 24))) | |
1334 | If that were made, reload could not handle the pair of | |
1335 | reg 20/21, since it would try to get any GENERAL_REGS | |
1336 | but some of them don't handle QImode. */ | |
1337 | ||
1338 | if (rtx_equal_p (inner_src, i2dest) | |
1339 | && GET_CODE (inner_dest) == REG | |
1340 | && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest))) | |
1341 | return 0; | |
1342 | #endif | |
1343 | ||
1344 | /* Check for the case where I3 modifies its output, as | |
1345 | discussed above. */ | |
1346 | if ((inner_dest != dest | |
1347 | && (reg_overlap_mentioned_p (i2dest, inner_dest) | |
1348 | || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest)))) | |
956d6950 | 1349 | |
53895717 BS |
1350 | /* This is the same test done in can_combine_p except we can't test |
1351 | all_adjacent; we don't have to, since this instruction will stay | |
1352 | in place, and thus we are not considering increasing the lifetime of |
1353 | INNER_DEST. | |
956d6950 JL |
1354 | |
1355 | Also, if this insn sets a function argument, combining it with | |
1356 | something that might need a spill could clobber a previous | |
1357 | function argument; the all_adjacent test in can_combine_p also | |
1358 | checks this; here, we do a more specific test for this case. */ | |
663522cb | 1359 | |
230d793d | 1360 | || (GET_CODE (inner_dest) == REG |
dfbe1b2f | 1361 | && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER |
c448a43e | 1362 | && (! HARD_REGNO_MODE_OK (REGNO (inner_dest), |
53895717 | 1363 | GET_MODE (inner_dest)))) |
230d793d RS |
1364 | || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src))) |
1365 | return 0; | |
1366 | ||
1367 | /* If DEST is used in I3, it is being killed in this insn, | |
663522cb | 1368 | so record that for later. |
36a9c2e9 JL |
1369 | Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the |
1370 | STACK_POINTER_REGNUM, since these are always considered to be | |
1371 | live. Similarly for ARG_POINTER_REGNUM if it is fixed. */ | |
230d793d | 1372 | if (pi3dest_killed && GET_CODE (dest) == REG |
36a9c2e9 JL |
1373 | && reg_referenced_p (dest, PATTERN (i3)) |
1374 | && REGNO (dest) != FRAME_POINTER_REGNUM | |
6d7096b0 DE |
1375 | #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM |
1376 | && REGNO (dest) != HARD_FRAME_POINTER_REGNUM | |
1377 | #endif | |
36a9c2e9 JL |
1378 | #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM |
1379 | && (REGNO (dest) != ARG_POINTER_REGNUM | |
1380 | || ! fixed_regs [REGNO (dest)]) | |
1381 | #endif | |
1382 | && REGNO (dest) != STACK_POINTER_REGNUM) | |
230d793d RS |
1383 | { |
1384 | if (*pi3dest_killed) | |
1385 | return 0; | |
1386 | ||
1387 | *pi3dest_killed = dest; | |
1388 | } | |
1389 | } | |
1390 | ||
1391 | else if (GET_CODE (x) == PARALLEL) | |
1392 | { | |
1393 | int i; | |
1394 | ||
1395 | for (i = 0; i < XVECLEN (x, 0); i++) | |
1396 | if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest, | |
1397 | i1_not_in_src, pi3dest_killed)) | |
1398 | return 0; | |
1399 | } | |
1400 | ||
1401 | return 1; | |
1402 | } | |
1403 | \f | |
14a774a9 RK |
1404 | /* Return 1 if X is an arithmetic expression that contains a multiplication |
1405 | or a division. We don't count multiplications by powers of two here. */ |
1406 | ||
1407 | static int | |
1408 | contains_muldiv (x) | |
1409 | rtx x; | |
1410 | { | |
1411 | switch (GET_CODE (x)) | |
1412 | { | |
1413 | case MOD: case DIV: case UMOD: case UDIV: | |
1414 | return 1; | |
1415 | ||
1416 | case MULT: | |
1417 | return ! (GET_CODE (XEXP (x, 1)) == CONST_INT | |
1418 | && exact_log2 (INTVAL (XEXP (x, 1))) >= 0); | |
1419 | default: | |
1420 | switch (GET_RTX_CLASS (GET_CODE (x))) | |
1421 | { | |
1422 | case 'c': case '<': case '2': | |
1423 | return contains_muldiv (XEXP (x, 0)) | |
1424 | || contains_muldiv (XEXP (x, 1)); | |
1425 | ||
1426 | case '1': | |
1427 | return contains_muldiv (XEXP (x, 0)); | |
1428 | ||
1429 | default: | |
1430 | return 0; | |
1431 | } | |
1432 | } | |
1433 | } | |
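/* A minimal sketch, not in the original sources, showing what
   contains_muldiv accepts and rejects; the pseudo register numbers
   are invented for illustration.  */
#if 0
static void
contains_muldiv_examples ()
{
  rtx reg100 = gen_rtx_REG (SImode, 100);
  rtx reg101 = gen_rtx_REG (SImode, 101);
  rtx mul = gen_rtx_MULT (SImode, reg100, reg101);
  rtx sum = gen_rtx_PLUS (SImode, mul, reg100);
  rtx shiftish = gen_rtx_MULT (SImode, reg100, GEN_INT (8));

  /* A real multiply anywhere inside the expression counts.  */
  if (! contains_muldiv (sum))
    abort ();

  /* A multiply by a power of two is really a shift, so it does not.  */
  if (contains_muldiv (shiftish))
    abort ();
}
#endif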
1434 | \f | |
c3410241 BS |
1435 | /* Determine whether INSN can be used in a combination. Return nonzero if |
1436 | it cannot. This is used in try_combine to detect early some cases where we |
1437 | can't perform combinations. */ | |
1438 | ||
1439 | static int | |
1440 | cant_combine_insn_p (insn) | |
1441 | rtx insn; | |
1442 | { | |
1443 | rtx set; | |
1444 | rtx src, dest; | |
23190837 | 1445 | |
c3410241 BS |
1446 | /* If this isn't really an insn, we can't do anything. |
1447 | This can occur when flow deletes an insn that it has merged into an | |
1448 | auto-increment address. */ | |
1449 | if (! INSN_P (insn)) | |
1450 | return 1; | |
1451 | ||
4c11675d DJ |
1452 | /* Never combine loads and stores involving hard regs. The register |
1453 | allocator can usually handle such reg-reg moves by tying. If we allow | |
1454 | the combiner to make substitutions of hard regs, we risk aborting in | |
1455 | reload on machines that have SMALL_REGISTER_CLASSES. | |
c3410241 BS |
1456 | As an exception, we allow combinations involving fixed regs; these are |
1457 | not available to the register allocator so there's no risk involved. */ | |
1458 | ||
1459 | set = single_set (insn); | |
1460 | if (! set) | |
1461 | return 0; | |
1462 | src = SET_SRC (set); | |
1463 | dest = SET_DEST (set); | |
ad334b51 JH |
1464 | if (GET_CODE (src) == SUBREG) |
1465 | src = SUBREG_REG (src); | |
1466 | if (GET_CODE (dest) == SUBREG) | |
1467 | dest = SUBREG_REG (dest); | |
53895717 BS |
1468 | if (REG_P (src) && REG_P (dest) |
1469 | && ((REGNO (src) < FIRST_PSEUDO_REGISTER | |
4c11675d | 1470 | && ! fixed_regs[REGNO (src)]) |
53895717 | 1471 | || (REGNO (dest) < FIRST_PSEUDO_REGISTER |
4c11675d | 1472 | && ! fixed_regs[REGNO (dest)]))) |
c3410241 | 1473 | return 1; |
53895717 | 1474 | |
c3410241 BS |
1475 | return 0; |
1476 | } | |
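/* Illustrative behavior (assuming hard reg 0 is allocable and reg 14
   is in fixed_regs on the target):

     (set (reg:SI 100) (reg:SI 0))    rejected: a hard-reg move the
                                      allocator should handle by tying
     (set (reg:SI 100) (reg:SI 14))   allowed: fixed registers are not
                                      available to the allocator  */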
1477 | ||
230d793d RS |
1478 | /* Try to combine the insns I1 and I2 into I3. |
1479 | Here I1 and I2 appear earlier than I3. | |
1480 | I1 can be zero; then we combine just I2 into I3. | |
663522cb | 1481 | |
04956a1a | 1482 | If we are combining three insns and the resulting insn is not recognized, |
230d793d RS |
1483 | try splitting it into two insns. If that happens, I2 and I3 are retained |
1484 | and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2 | |
1485 | are pseudo-deleted. | |
1486 | ||
663522cb | 1487 | Return 0 if the combination does not work. Then nothing is changed. |
abe6e52f | 1488 | If we did the combination, return the insn at which combine should |
663522cb KH |
1489 | resume scanning. |
1490 | ||
da7d8304 | 1491 | Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a |
44a76fc8 | 1492 | new direct jump instruction. */ |
230d793d RS |
1493 | |
1494 | static rtx | |
44a76fc8 | 1495 | try_combine (i3, i2, i1, new_direct_jump_p) |
b3694847 SS |
1496 | rtx i3, i2, i1; |
1497 | int *new_direct_jump_p; | |
230d793d | 1498 | { |
02359929 | 1499 | /* New patterns for I3 and I2, respectively. */ |
230d793d | 1500 | rtx newpat, newi2pat = 0; |
cddd8b72 | 1501 | int substed_i2 = 0, substed_i1 = 0; |
230d793d RS |
1502 | /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */ |
1503 | int added_sets_1, added_sets_2; | |
1504 | /* Total number of SETs to put into I3. */ | |
1505 | int total_sets; | |
1506 | /* Nonzero if I2's body now appears in I3. */ |
1507 | int i2_is_used; | |
1508 | /* INSN_CODEs for new I3, new I2, and user of condition code. */ | |
6a651371 | 1509 | int insn_code_number, i2_code_number = 0, other_code_number = 0; |
230d793d RS |
1510 | /* Contains I3 if the destination of I3 is used in its source, which means |
1511 | that the old life of I3 is being killed. If that usage is placed into | |
1512 | I2 and not in I3, a REG_DEAD note must be made. */ | |
1513 | rtx i3dest_killed = 0; | |
1514 | /* SET_DEST and SET_SRC of I2 and I1. */ | |
1515 | rtx i2dest, i2src, i1dest = 0, i1src = 0; | |
1516 | /* PATTERN (I2), or a copy of it in certain cases. */ | |
1517 | rtx i2pat; | |
1518 | /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */ |
c4e861e8 | 1519 | int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0; |
230d793d RS |
1520 | int i1_feeds_i3 = 0; |
1521 | /* Notes that must be added to REG_NOTES in I3 and I2. */ | |
1522 | rtx new_i3_notes, new_i2_notes; | |
176c9e6b JW |
1523 | /* Nonzero if we substituted I3 into I2 instead of the normal case. */ |
1524 | int i3_subst_into_i2 = 0; | |
df7d75de RK |
1525 | /* Nonzero if the source of I1, I2 or I3 is a MULT operation. */ |
1526 | int have_mult = 0; | |
230d793d RS |
1527 | |
1528 | int maxreg; | |
1529 | rtx temp; | |
b3694847 | 1530 | rtx link; |
230d793d RS |
1531 | int i; |
1532 | ||
c3410241 BS |
1533 | /* Exit early if one of the insns involved can't be used for |
1534 | combinations. */ | |
1535 | if (cant_combine_insn_p (i3) | |
1536 | || cant_combine_insn_p (i2) | |
1537 | || (i1 && cant_combine_insn_p (i1)) | |
1538 | /* We also can't do anything if I3 has a | |
1539 | REG_LIBCALL note since we don't want to disrupt the contiguity of a | |
1540 | libcall. */ | |
ec35104c JL |
1541 | #if 0 |
1542 | /* ??? This gives worse code, and appears to be unnecessary, since no | |
1543 | pass after flow uses REG_LIBCALL/REG_RETVAL notes. */ | |
1544 | || find_reg_note (i3, REG_LIBCALL, NULL_RTX) | |
1545 | #endif | |
663522cb | 1546 | ) |
230d793d RS |
1547 | return 0; |
1548 | ||
1549 | combine_attempts++; | |
230d793d RS |
1550 | undobuf.other_insn = 0; |
1551 | ||
6e25d159 RK |
1552 | /* Reset the hard register usage information. */ |
1553 | CLEAR_HARD_REG_SET (newpat_used_regs); | |
1554 | ||
230d793d RS |
1555 | /* If I1 and I2 both feed I3, they can be in any order. To simplify the |
1556 | code below, set I1 to be the earlier of the two insns. */ | |
1557 | if (i1 && INSN_CUID (i1) > INSN_CUID (i2)) | |
1558 | temp = i1, i1 = i2, i2 = temp; | |
1559 | ||
abe6e52f | 1560 | added_links_insn = 0; |
137e889e | 1561 | |
230d793d | 1562 | /* First check for one important special case that the code below will |
c7be4f66 | 1563 | not handle. Namely, the case where I1 is zero, I2 is a PARALLEL |
230d793d RS |
1564 | and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case, |
1565 | we may be able to replace that destination with the destination of I3. | |
1566 | This occurs in the common code where we compute both a quotient and | |
1567 | remainder into a structure, in which case we want to do the computation | |
1568 | directly into the structure to avoid register-register copies. | |
1569 | ||
c7be4f66 RK |
1570 | Note that this case handles both multiple sets in I2 and also |
1571 | cases where I2 has a number of CLOBBERs or PARALLELs. |
1572 | ||
230d793d RS |
1573 | We make very conservative checks below and only try to handle the |
1574 | most common cases of this. For example, we only handle the case | |
1575 | where I2 and I3 are adjacent to avoid making difficult register | |
1576 | usage tests. */ | |
1577 | ||
1578 | if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET | |
1579 | && GET_CODE (SET_SRC (PATTERN (i3))) == REG | |
1580 | && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER | |
230d793d RS |
1581 | && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3))) |
1582 | && GET_CODE (PATTERN (i2)) == PARALLEL | |
1583 | && ! side_effects_p (SET_DEST (PATTERN (i3))) | |
5089e22e RS |
1584 | /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code |
1585 | below would need to check what is inside (and reg_overlap_mentioned_p | |
1586 | doesn't support those codes anyway). Don't allow those destinations; | |
1587 | the resulting insn isn't likely to be recognized anyway. */ | |
1588 | && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT | |
1589 | && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART | |
230d793d RS |
1590 | && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)), |
1591 | SET_DEST (PATTERN (i3))) | |
1592 | && next_real_insn (i2) == i3) | |
5089e22e RS |
1593 | { |
1594 | rtx p2 = PATTERN (i2); | |
1595 | ||
1596 | /* Make sure that the destination of I3, | |
1597 | which we are going to substitute into one output of I2, | |
1598 | is not used within another output of I2. We must avoid making this: | |
1599 | (parallel [(set (mem (reg 69)) ...) | |
1600 | (set (reg 69) ...)]) | |
1601 | which is not well-defined as to order of actions. | |
1602 | (Besides, reload can't handle output reloads for this.) | |
1603 | ||
1604 | The problem can also happen if the dest of I3 is a memory ref, | |
1605 | if another dest in I2 is an indirect memory ref. */ | |
1606 | for (i = 0; i < XVECLEN (p2, 0); i++) | |
7ca919b7 RK |
1607 | if ((GET_CODE (XVECEXP (p2, 0, i)) == SET |
1608 | || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER) | |
5089e22e RS |
1609 | && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)), |
1610 | SET_DEST (XVECEXP (p2, 0, i)))) | |
1611 | break; | |
230d793d | 1612 | |
5089e22e RS |
1613 | if (i == XVECLEN (p2, 0)) |
1614 | for (i = 0; i < XVECLEN (p2, 0); i++) | |
481c7efa FS |
1615 | if ((GET_CODE (XVECEXP (p2, 0, i)) == SET |
1616 | || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER) | |
1617 | && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3))) | |
5089e22e RS |
1618 | { |
1619 | combine_merges++; | |
230d793d | 1620 | |
5089e22e RS |
1621 | subst_insn = i3; |
1622 | subst_low_cuid = INSN_CUID (i2); | |
230d793d | 1623 | |
c4e861e8 | 1624 | added_sets_2 = added_sets_1 = 0; |
5089e22e | 1625 | i2dest = SET_SRC (PATTERN (i3)); |
230d793d | 1626 | |
5089e22e RS |
1627 | /* Replace the dest in I2 with our dest and make the resulting |
1628 | insn the new pattern for I3. Then skip to where we | |
1629 | validate the pattern. Everything was set up above. */ | |
663522cb | 1630 | SUBST (SET_DEST (XVECEXP (p2, 0, i)), |
5089e22e RS |
1631 | SET_DEST (PATTERN (i3))); |
1632 | ||
1633 | newpat = p2; | |
176c9e6b | 1634 | i3_subst_into_i2 = 1; |
5089e22e RS |
1635 | goto validate_replacement; |
1636 | } | |
1637 | } | |
230d793d | 1638 | |
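/* An illustrative instance of this special case (the pseudo register
   numbers are invented):

     I2: (parallel [(set (reg:SI 100) (div:SI (reg:SI 98) (reg:SI 99)))
                    (set (reg:SI 101) (mod:SI (reg:SI 98) (reg:SI 99)))])
     I3: (set (mem:SI (reg:SI 102)) (reg:SI 101))

   If reg 101 dies in I3 and I3 immediately follows I2, the SET_DEST
   of the MOD in I2 is replaced by the MEM, computing the remainder
   directly into the structure slot and making the copy insn
   unnecessary.  */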
667c1c2c RK |
1639 | /* If I2 is setting a double-word pseudo to a constant and I3 is setting |
1640 | one of those words to another constant, merge them by making a new | |
1641 | constant. */ | |
1642 | if (i1 == 0 | |
1643 | && (temp = single_set (i2)) != 0 | |
1644 | && (GET_CODE (SET_SRC (temp)) == CONST_INT | |
1645 | || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE) | |
1646 | && GET_CODE (SET_DEST (temp)) == REG | |
1647 | && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT | |
1648 | && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD | |
1649 | && GET_CODE (PATTERN (i3)) == SET | |
1650 | && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG | |
1651 | && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp) | |
1652 | && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT | |
1653 | && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD | |
1654 | && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT) | |
1655 | { | |
1656 | HOST_WIDE_INT lo, hi; | |
1657 | ||
1658 | if (GET_CODE (SET_SRC (temp)) == CONST_INT) | |
1659 | lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0; | |
1660 | else | |
1661 | { | |
1662 | lo = CONST_DOUBLE_LOW (SET_SRC (temp)); | |
1663 | hi = CONST_DOUBLE_HIGH (SET_SRC (temp)); | |
1664 | } | |
1665 | ||
1666 | if (subreg_lowpart_p (SET_DEST (PATTERN (i3)))) | |
48b4d901 AO |
1667 | { |
1668 | /* We don't handle the case of the target word being wider | |
1669 | than a host wide int. */ | |
1670 | if (HOST_BITS_PER_WIDE_INT < BITS_PER_WORD) | |
1671 | abort (); | |
1672 | ||
42a6ff51 | 1673 | lo &= ~(UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1); |
2ef1a7f9 GK |
1674 | lo |= (INTVAL (SET_SRC (PATTERN (i3))) |
1675 | & (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1)); | |
48b4d901 AO |
1676 | } |
1677 | else if (HOST_BITS_PER_WIDE_INT == BITS_PER_WORD) | |
667c1c2c | 1678 | hi = INTVAL (SET_SRC (PATTERN (i3))); |
48b4d901 AO |
1679 | else if (HOST_BITS_PER_WIDE_INT >= 2 * BITS_PER_WORD) |
1680 | { | |
1681 | int sign = -(int) ((unsigned HOST_WIDE_INT) lo | |
1682 | >> (HOST_BITS_PER_WIDE_INT - 1)); | |
1683 | ||
42a6ff51 AO |
1684 | lo &= ~ (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD |
1685 | (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD (1) - 1)); | |
1686 | lo |= (UWIDE_SHIFT_LEFT_BY_BITS_PER_WORD | |
1687 | (INTVAL (SET_SRC (PATTERN (i3))))); | |
48b4d901 AO |
1688 | if (hi == sign) |
1689 | hi = lo < 0 ? -1 : 0; | |
1690 | } | |
1691 | else | |
1692 | /* We don't handle the case of the higher word not fitting | |
1693 | entirely in either hi or lo. */ | |
1694 | abort (); | |
667c1c2c RK |
1695 | |
1696 | combine_merges++; | |
1697 | subst_insn = i3; | |
1698 | subst_low_cuid = INSN_CUID (i2); | |
1699 | added_sets_2 = added_sets_1 = 0; | |
1700 | i2dest = SET_DEST (temp); | |
1701 | ||
1702 | SUBST (SET_SRC (temp), | |
1703 | immed_double_const (lo, hi, GET_MODE (SET_DEST (temp)))); | |
1704 | ||
1705 | newpat = PATTERN (i2); | |
667c1c2c RK |
1706 | goto validate_replacement; |
1707 | } | |
1708 | ||
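/* Worked example for the merge above (illustrative; assumes a
   little-endian target with BITS_PER_WORD == 32 and a 64-bit
   HOST_WIDE_INT):

     I2: (set (reg:DI 100) (const_int 5))
     I3: (set (subreg:SI (reg:DI 100) 0) (const_int 9))

   subreg_lowpart_p holds, so the low word of the merged constant is
   masked off and replaced, giving lo == 9 and hi == 0.  The pair then
   becomes the single insn

     (set (reg:DI 100) (const_int 9))  */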
230d793d RS |
1709 | #ifndef HAVE_cc0 |
1710 | /* If we have no I1 and I2 looks like: | |
1711 | (parallel [(set (reg:CC X) (compare:CC OP (const_int 0))) | |
1712 | (set Y OP)]) | |
1713 | make up a dummy I1 that is | |
1714 | (set Y OP) | |
1715 | and change I2 to be | |
1716 | (set (reg:CC X) (compare:CC Y (const_int 0))) | |
1717 | ||
1718 | (We can ignore any trailing CLOBBERs.) | |
1719 | ||
1720 | This undoes a previous combination and allows us to match a branch-and- | |
1721 | decrement insn. */ | |
1722 | ||
1723 | if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL | |
1724 | && XVECLEN (PATTERN (i2), 0) >= 2 | |
1725 | && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET | |
1726 | && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0)))) | |
1727 | == MODE_CC) | |
1728 | && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE | |
1729 | && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx | |
1730 | && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET | |
1731 | && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG | |
1732 | && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0), | |
1733 | SET_SRC (XVECEXP (PATTERN (i2), 0, 1)))) | |
1734 | { | |
663522cb | 1735 | for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--) |
230d793d RS |
1736 | if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER) |
1737 | break; | |
1738 | ||
1739 | if (i == 1) | |
1740 | { | |
1741 | /* We make I1 with the same INSN_UID as I2. This gives it | |
1742 | the same INSN_CUID for value tracking. Our fake I1 will | |
1743 | never appear in the insn stream so giving it the same INSN_UID | |
1744 | as I2 will not cause a problem. */ | |
1745 | ||
4977bab6 ZW |
1746 | i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2, |
1747 | BLOCK_FOR_INSN (i2), INSN_SCOPE (i2), | |
1748 | XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX, | |
1749 | NULL_RTX); | |
230d793d RS |
1750 | |
1751 | SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0)); | |
1752 | SUBST (XEXP (SET_SRC (PATTERN (i2)), 0), | |
1753 | SET_DEST (PATTERN (i1))); | |
1754 | } | |
1755 | } | |
1756 | #endif | |
1757 | ||
1758 | /* Verify that I2 and I1 are valid for combining. */ | |
5f4f0e22 CH |
1759 | if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src) |
1760 | || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src))) | |
230d793d RS |
1761 | { |
1762 | undo_all (); | |
1763 | return 0; | |
1764 | } | |
1765 | ||
1766 | /* Record whether I2DEST is used in I2SRC and similarly for the other | |
1767 | cases. Knowing this will help in register status updating below. */ | |
1768 | i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src); | |
1769 | i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src); | |
1770 | i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src); | |
1771 | ||
916f14f1 | 1772 | /* See if I1 directly feeds into I3. It does if I1DEST is not used |
230d793d RS |
1773 | in I2SRC. */ |
1774 | i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src); | |
1775 | ||
1776 | /* Ensure that I3's pattern can be the destination of combines. */ | |
1777 | if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest, | |
1778 | i1 && i2dest_in_i1src && i1_feeds_i3, | |
1779 | &i3dest_killed)) | |
1780 | { | |
1781 | undo_all (); | |
1782 | return 0; | |
1783 | } | |
1784 | ||
df7d75de RK |
1785 | /* See if any of the insns is a MULT operation. Unless one is, we will |
1786 | reject a combination that is, since it must be slower. Be conservative | |
1787 | here. */ | |
1788 | if (GET_CODE (i2src) == MULT | |
1789 | || (i1 != 0 && GET_CODE (i1src) == MULT) | |
1790 | || (GET_CODE (PATTERN (i3)) == SET | |
1791 | && GET_CODE (SET_SRC (PATTERN (i3))) == MULT)) | |
1792 | have_mult = 1; | |
1793 | ||
230d793d RS |
1794 | /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd. |
1795 | We used to do this EXCEPT in one case: I3 has a post-inc in an | |
1796 | output operand. However, that exception can give rise to insns like | |
23190837 | 1797 | mov r3,(r3)+ |
230d793d | 1798 | which is a famous insn on the PDP-11 where the value of r3 used as the |
5089e22e | 1799 | source was model-dependent. Avoid this sort of thing. */ |
230d793d RS |
1800 | |
1801 | #if 0 | |
1802 | if (!(GET_CODE (PATTERN (i3)) == SET | |
1803 | && GET_CODE (SET_SRC (PATTERN (i3))) == REG | |
1804 | && GET_CODE (SET_DEST (PATTERN (i3))) == MEM | |
1805 | && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC | |
1806 | || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC))) | |
1807 | /* It's not the exception. */ | |
1808 | #endif | |
1809 | #ifdef AUTO_INC_DEC | |
1810 | for (link = REG_NOTES (i3); link; link = XEXP (link, 1)) | |
1811 | if (REG_NOTE_KIND (link) == REG_INC | |
1812 | && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2)) | |
1813 | || (i1 != 0 | |
1814 | && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1))))) | |
1815 | { | |
1816 | undo_all (); | |
1817 | return 0; | |
1818 | } | |
1819 | #endif | |
1820 | ||
1821 | /* See if the SETs in I1 or I2 need to be kept around in the merged | |
1822 | instruction: whenever the value set there is still needed past I3. | |
1823 | For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3. | |
1824 | ||
1825 | For the SET in I1, we have two cases: If I1 and I2 independently | |
1826 | feed into I3, the set in I1 needs to be kept around if I1DEST dies | |
1827 | or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set | |
1828 | in I1 needs to be kept around unless I1DEST dies or is set in either | |
1829 | I2 or I3. We can distinguish these cases by seeing if I2SRC mentions | |
1830 | I1DEST. If so, we know I1 feeds into I2. */ | |
1831 | ||
1832 | added_sets_2 = ! dead_or_set_p (i3, i2dest); | |
1833 | ||
1834 | added_sets_1 | |
1835 | = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest) | |
1836 | : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest))); | |
1837 | ||
1838 | /* If the set in I2 needs to be kept around, we must make a copy of | |
1839 | PATTERN (I2), so that when we substitute I1SRC for I1DEST in | |
5089e22e | 1840 | PATTERN (I2), we are only substituting for the original I1DEST, not into |
230d793d RS |
1841 | an already-substituted copy. This also prevents making self-referential |
1842 | rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to | |
1843 | I2DEST. */ | |
1844 | ||
1845 | i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL | |
38a448ca | 1846 | ? gen_rtx_SET (VOIDmode, i2dest, i2src) |
230d793d RS |
1847 | : PATTERN (i2)); |
1848 | ||
1849 | if (added_sets_2) | |
1850 | i2pat = copy_rtx (i2pat); | |
1851 | ||
1852 | combine_merges++; | |
1853 | ||
1854 | /* Substitute in the latest insn for the regs set by the earlier ones. */ | |
1855 | ||
1856 | maxreg = max_reg_num (); | |
1857 | ||
1858 | subst_insn = i3; | |
230d793d RS |
1859 | |
1860 | /* It is possible that the source of I2 or I1 may be performing an | |
1861 | unneeded operation, such as a ZERO_EXTEND of something that is known | |
1862 | to have the high part zero. Handle that case by letting subst look at | |
1863 | the innermost one of them. | |
1864 | ||
1865 | Another way to do this would be to have a function that tries to | |
1866 | simplify a single insn instead of merging two or more insns. We don't | |
1867 | do this because of the potential of infinite loops and because | |
1868 | of the potential extra memory required. However, doing it the way | |
1869 | we are is a bit of a kludge and doesn't catch all cases. | |
1870 | ||
1871 | But only do this if -fexpensive-optimizations since it slows things down | |
1872 | and doesn't usually win. */ | |
1873 | ||
1874 | if (flag_expensive_optimizations) | |
1875 | { | |
1876 | /* Pass pc_rtx so no substitutions are done, just simplifications. | |
1877 | The cases that we are interested in here do not involve the few | |
1878 | cases where is_replaced is checked. */ |
1879 | if (i1) | |
d0ab8cd3 RK |
1880 | { |
1881 | subst_low_cuid = INSN_CUID (i1); | |
1882 | i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0); | |
1883 | } | |
230d793d | 1884 | else |
d0ab8cd3 RK |
1885 | { |
1886 | subst_low_cuid = INSN_CUID (i2); | |
1887 | i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0); | |
1888 | } | |
230d793d RS |
1889 | } |
1890 | ||
1891 | #ifndef HAVE_cc0 | |
1892 | /* Many machines that don't use CC0 have insns that can both perform an | |
1893 | arithmetic operation and set the condition code. These operations will | |
1894 | be represented as a PARALLEL with the first element of the vector | |
1895 | being a COMPARE of an arithmetic operation with the constant zero. | |
1896 | The second element of the vector will set some pseudo to the result | |
1897 | of the same arithmetic operation. If we simplify the COMPARE, we won't | |
1898 | match such a pattern and so will generate an extra insn. Here we test | |
1899 | for this case, where both the comparison and the operation result are | |
1900 | needed, and make the PARALLEL by just replacing I2DEST in I3SRC with | |
1901 | I2SRC. Later we will make the PARALLEL that contains I2. */ | |
1902 | ||
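/* For instance, such a machine might describe an add that also sets
   the flags as (a hypothetical pattern; reg 17 stands for a
   condition-code register):

     (parallel [(set (reg:CC 17)
                     (compare:CC (plus:SI (reg:SI 100) (reg:SI 101))
                                 (const_int 0)))
                (set (reg:SI 102)
                     (plus:SI (reg:SI 100) (reg:SI 101)))])  */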
1903 | if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET | |
1904 | && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE | |
1905 | && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx | |
1906 | && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest)) | |
1907 | { | |
081f5e7e | 1908 | #ifdef EXTRA_CC_MODES |
230d793d RS |
1909 | rtx *cc_use; |
1910 | enum machine_mode compare_mode; | |
081f5e7e | 1911 | #endif |
230d793d RS |
1912 | |
1913 | newpat = PATTERN (i3); | |
1914 | SUBST (XEXP (SET_SRC (newpat), 0), i2src); | |
1915 | ||
1916 | i2_is_used = 1; | |
1917 | ||
1918 | #ifdef EXTRA_CC_MODES | |
1919 | /* See if a COMPARE with the operand we substituted in should be done | |
1920 | with the mode that is currently being used. If not, do the same | |
1921 | processing we do in `subst' for a SET; namely, if the destination | |
1922 | is used only once, try to replace it with a register of the proper | |
1923 | mode and also replace the COMPARE. */ | |
1924 | if (undobuf.other_insn == 0 | |
1925 | && (cc_use = find_single_use (SET_DEST (newpat), i3, | |
1926 | &undobuf.other_insn)) | |
77fa0940 RK |
1927 | && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use), |
1928 | i2src, const0_rtx)) | |
230d793d RS |
1929 | != GET_MODE (SET_DEST (newpat)))) |
1930 | { | |
770ae6cc | 1931 | unsigned int regno = REGNO (SET_DEST (newpat)); |
38a448ca | 1932 | rtx new_dest = gen_rtx_REG (compare_mode, regno); |
230d793d RS |
1933 | |
1934 | if (regno < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 1935 | || (REG_N_SETS (regno) == 1 && ! added_sets_2 |
230d793d RS |
1936 | && ! REG_USERVAR_P (SET_DEST (newpat)))) |
1937 | { | |
1938 | if (regno >= FIRST_PSEUDO_REGISTER) | |
1939 | SUBST (regno_reg_rtx[regno], new_dest); | |
1940 | ||
1941 | SUBST (SET_DEST (newpat), new_dest); | |
1942 | SUBST (XEXP (*cc_use, 0), new_dest); | |
1943 | SUBST (SET_SRC (newpat), | |
f1c6ba8b | 1944 | gen_rtx_COMPARE (compare_mode, i2src, const0_rtx)); |
230d793d RS |
1945 | } |
1946 | else | |
1947 | undobuf.other_insn = 0; | |
1948 | } | |
663522cb | 1949 | #endif |
230d793d RS |
1950 | } |
1951 | else | |
1952 | #endif | |
1953 | { | |
1954 | n_occurrences = 0; /* `subst' counts here */ | |
1955 | ||
1956 | /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we | |
1957 | need to make a unique copy of I2SRC each time we substitute it | |
1958 | to avoid self-referential rtl. */ | |
1959 | ||
d0ab8cd3 | 1960 | subst_low_cuid = INSN_CUID (i2); |
230d793d RS |
1961 | newpat = subst (PATTERN (i3), i2dest, i2src, 0, |
1962 | ! i1_feeds_i3 && i1dest_in_i1src); | |
cddd8b72 | 1963 | substed_i2 = 1; |
230d793d RS |
1964 | |
1965 | /* Record whether i2's body now appears within i3's body. */ | |
1966 | i2_is_used = n_occurrences; | |
1967 | } | |
1968 | ||
1969 | /* If we already got a failure, don't try to do more. Otherwise, | |
1970 | try to substitute in I1 if we have it. */ | |
1971 | ||
1972 | if (i1 && GET_CODE (newpat) != CLOBBER) | |
1973 | { | |
1974 | /* Before we can do this substitution, we must redo the test done | |
1975 | above (see detailed comments there) that ensures that I1DEST | |
0f41302f | 1976 | isn't mentioned in any SETs in NEWPAT that are field assignments. */ |
230d793d | 1977 | |
5f4f0e22 | 1978 | if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, |
cf0d9408 | 1979 | 0, (rtx*) 0)) |
230d793d RS |
1980 | { |
1981 | undo_all (); | |
1982 | return 0; | |
1983 | } | |
1984 | ||
1985 | n_occurrences = 0; | |
d0ab8cd3 | 1986 | subst_low_cuid = INSN_CUID (i1); |
230d793d | 1987 | newpat = subst (newpat, i1dest, i1src, 0, 0); |
cddd8b72 | 1988 | substed_i1 = 1; |
230d793d RS |
1989 | } |
1990 | ||
916f14f1 RK |
1991 | /* Fail if an autoincrement side-effect has been duplicated. Be careful |
1992 | to count all the ways that I2SRC and I1SRC can be used. */ | |
5f4f0e22 | 1993 | if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0 |
916f14f1 | 1994 | && i2_is_used + added_sets_2 > 1) |
5f4f0e22 | 1995 | || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0 |
916f14f1 RK |
1996 | && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3) |
1997 | > 1)) | |
230d793d RS |
1998 | /* Fail if we tried to make a new register (we used to abort, but there's |
1999 | really no reason to). */ | |
2000 | || max_reg_num () != maxreg | |
2001 | /* Fail if we couldn't do something and have a CLOBBER. */ | |
df7d75de RK |
2002 | || GET_CODE (newpat) == CLOBBER |
2003 | /* Fail if this new pattern is a MULT and we didn't have one before | |
2004 | at the outer level. */ | |
2005 | || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT | |
2006 | && ! have_mult)) | |
230d793d RS |
2007 | { |
2008 | undo_all (); | |
2009 | return 0; | |
2010 | } | |
2011 | ||
2012 | /* If the actions of the earlier insns must be kept | |
2013 | in addition to substituting them into the latest one, | |
2014 | we must make a new PARALLEL for the latest insn | |
2015 | to hold the additional SETs. */ |
2016 | ||
2017 | if (added_sets_1 || added_sets_2) | |
2018 | { | |
2019 | combine_extras++; | |
2020 | ||
2021 | if (GET_CODE (newpat) == PARALLEL) | |
2022 | { | |
2023 | rtvec old = XVEC (newpat, 0); | |
2024 | total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2; | |
38a448ca | 2025 | newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets)); |
d38a30c9 KG |
2026 | memcpy (XVEC (newpat, 0)->elem, &old->elem[0], |
2027 | sizeof (old->elem[0]) * old->num_elem); | |
230d793d RS |
2028 | } |
2029 | else | |
2030 | { | |
2031 | rtx old = newpat; | |
2032 | total_sets = 1 + added_sets_1 + added_sets_2; | |
38a448ca | 2033 | newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets)); |
230d793d RS |
2034 | XVECEXP (newpat, 0, 0) = old; |
2035 | } | |
2036 | ||
cf0d9408 KH |
2037 | if (added_sets_1) |
2038 | XVECEXP (newpat, 0, --total_sets) | |
2039 | = (GET_CODE (PATTERN (i1)) == PARALLEL | |
2040 | ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1)); | |
2041 | ||
2042 | if (added_sets_2) | |
2043 | { | |
2044 | /* If there is no I1, use I2's body as is. We used to also not do | |
2045 | the subst call below if I2 was substituted into I3, | |
2046 | but that could lose a simplification. */ | |
2047 | if (i1 == 0) | |
2048 | XVECEXP (newpat, 0, --total_sets) = i2pat; | |
2049 | else | |
2050 | /* See comment where i2pat is assigned. */ | |
2051 | XVECEXP (newpat, 0, --total_sets) | |
2052 | = subst (i2pat, i1dest, i1src, 0, 0); | |
2053 | } | |
230d793d RS |
2054 | } |
2055 | ||
2056 | /* We come here when we are replacing a destination in I2 with the | |
2057 | destination of I3. */ | |
2058 | validate_replacement: | |
2059 | ||
6e25d159 RK |
2060 | /* Note which hard regs this insn has as inputs. */ |
2061 | mark_used_regs_combine (newpat); | |
2062 | ||
230d793d | 2063 | /* Is the result of combination a valid instruction? */ |
8e2f6e35 | 2064 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2065 | |
2066 | /* If the result isn't valid, see if it is a PARALLEL of two SETs where | |
2067 | the second SET's destination is a register that is unused. In that case, | |
2068 | we just need the first SET. This can occur when simplifying a divmod | |
2069 | insn. We *must* test for this case here because the code below that | |
2070 | splits two independent SETs doesn't handle this case correctly when it | |
2071 | updates the register status. Also check the case where the first | |
2072 | SET's destination is unused. That would not cause incorrect code, but | |
2073 | does cause an unneeded insn to remain. */ | |
2074 | ||
2075 | if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL | |
2076 | && XVECLEN (newpat, 0) == 2 | |
2077 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2078 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2079 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG | |
2080 | && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1))) | |
2081 | && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1))) | |
2082 | && asm_noperands (newpat) < 0) | |
2083 | { | |
2084 | newpat = XVECEXP (newpat, 0, 0); | |
8e2f6e35 | 2085 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2086 | } |
2087 | ||
2088 | else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL | |
2089 | && XVECLEN (newpat, 0) == 2 | |
2090 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2091 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2092 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG | |
2093 | && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0))) | |
2094 | && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0))) | |
2095 | && asm_noperands (newpat) < 0) | |
2096 | { | |
2097 | newpat = XVECEXP (newpat, 0, 1); | |
8e2f6e35 | 2098 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2099 | } |
2100 | ||
2101 | /* If we were combining three insns and the result is a simple SET | |
2102 | with no ASM_OPERANDS that wasn't recognized, try to split it into two | |
663522cb | 2103 | insns. There are two ways to do this. It can be split using a |
916f14f1 RK |
2104 | machine-specific method (like when you have an addition of a large |
2105 | constant) or by combine in the function find_split_point. */ | |
2106 | ||
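/* Sketch of the first, machine-specific way (hypothetical target and
   register numbers): if the combined pattern

     (set (reg:SI 103) (plus:SI (reg:SI 101) (const_int 4097)))

   is not a valid insn because the constant is too large, the MD file
   may split it, reusing I2's destination (here reg 100) as a scratch:

     newi2pat: (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4096)))
     newpat:   (set (reg:SI 103) (plus:SI (reg:SI 100) (const_int 1)))  */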
230d793d RS |
2107 | if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET |
2108 | && asm_noperands (newpat) < 0) | |
2109 | { | |
916f14f1 | 2110 | rtx m_split, *split; |
42495ca0 | 2111 | rtx ni2dest = i2dest; |
916f14f1 RK |
2112 | |
2113 | /* See if the MD file can split NEWPAT. If it can't, see if letting it | |
42495ca0 RK |
2114 | use I2DEST as a scratch register will help. In the latter case, |
2115 | convert I2DEST to the mode of the source of NEWPAT if we can. */ | |
916f14f1 RK |
2116 | |
2117 | m_split = split_insns (newpat, i3); | |
a70c61d9 JW |
2118 | |
2119 | /* We can only use I2DEST as a scratch reg if it doesn't overlap any | |
2120 | inputs of NEWPAT. */ | |
2121 | ||
2122 | /* ??? If I2DEST is not safe, and I1DEST exists, then it would be | |
2123 | possible to try that as a scratch reg. This would require adding | |
2124 | more code to make it work though. */ | |
2125 | ||
2126 | if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat)) | |
42495ca0 RK |
2127 | { |
2128 | /* If I2DEST is a hard register or the only use of a pseudo, | |
2129 | we can change its mode. */ | |
2130 | if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest) | |
02f4ada4 | 2131 | && GET_MODE (SET_DEST (newpat)) != VOIDmode |
60654f77 | 2132 | && GET_CODE (i2dest) == REG |
42495ca0 | 2133 | && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER |
b1f21e0a | 2134 | || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2 |
42495ca0 | 2135 | && ! REG_USERVAR_P (i2dest)))) |
38a448ca | 2136 | ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)), |
c5c76735 JL |
2137 | REGNO (i2dest)); |
2138 | ||
2139 | m_split = split_insns (gen_rtx_PARALLEL | |
2140 | (VOIDmode, | |
2141 | gen_rtvec (2, newpat, | |
2142 | gen_rtx_CLOBBER (VOIDmode, | |
2143 | ni2dest))), | |
2144 | i3); | |
5dd3e650 R |
2145 | /* If the split with the mode-changed register didn't work, try |
2146 | the original register. */ | |
2147 | if (! m_split && ni2dest != i2dest) | |
c7ca5912 RK |
2148 | { |
2149 | ni2dest = i2dest; | |
2150 | m_split = split_insns (gen_rtx_PARALLEL | |
2151 | (VOIDmode, | |
2152 | gen_rtvec (2, newpat, | |
2153 | gen_rtx_CLOBBER (VOIDmode, | |
2154 | i2dest))), | |
2155 | i3); | |
2156 | } | |
42495ca0 | 2157 | } |
916f14f1 | 2158 | |
2f937369 | 2159 | if (m_split && NEXT_INSN (m_split) == NULL_RTX) |
d340408c | 2160 | { |
2f937369 | 2161 | m_split = PATTERN (m_split); |
d340408c RH |
2162 | insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes); |
2163 | if (insn_code_number >= 0) | |
2164 | newpat = m_split; | |
23190837 | 2165 | } |
2f937369 | 2166 | else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX |
d340408c | 2167 | && (next_real_insn (i2) == i3 |
2f937369 | 2168 | || ! use_crosses_set_p (PATTERN (m_split), INSN_CUID (i2)))) |
916f14f1 | 2169 | { |
1a26b032 | 2170 | rtx i2set, i3set; |
2f937369 DM |
2171 | rtx newi3pat = PATTERN (NEXT_INSN (m_split)); |
2172 | newi2pat = PATTERN (m_split); | |
916f14f1 | 2173 | |
2f937369 DM |
2174 | i3set = single_set (NEXT_INSN (m_split)); |
2175 | i2set = single_set (m_split); | |
1a26b032 | 2176 | |
42495ca0 RK |
2177 | /* In case we changed the mode of I2DEST, replace it in the |
2178 | pseudo-register table here. We can't do it above in case this | |
2179 | code doesn't get executed and we do a split the other way. */ | |
2180 | ||
2181 | if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER) | |
2182 | SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest); | |
2183 | ||
8e2f6e35 | 2184 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
1a26b032 RK |
2185 | |
2186 | /* If I2 or I3 has multiple SETs, we won't know how to track | |
9cc96794 RK |
2187 | register status, so don't use these insns. If I2's destination |
2188 | is used between I2 and I3, we also can't use these insns. */ | |
1a26b032 | 2189 | |
9cc96794 RK |
2190 | if (i2_code_number >= 0 && i2set && i3set |
2191 | && (next_real_insn (i2) == i3 | |
2192 | || ! reg_used_between_p (SET_DEST (i2set), i2, i3))) | |
8e2f6e35 BS |
2193 | insn_code_number = recog_for_combine (&newi3pat, i3, |
2194 | &new_i3_notes); | |
d0ab8cd3 RK |
2195 | if (insn_code_number >= 0) |
2196 | newpat = newi3pat; | |
2197 | ||
c767f54b | 2198 | /* It is possible that both insns now set the destination of I3. |
22609cbf | 2199 | If so, we must show an extra use of it. */ |
c767f54b | 2200 | |
393de53f RK |
2201 | if (insn_code_number >= 0) |
2202 | { | |
2203 | rtx new_i3_dest = SET_DEST (i3set); | |
2204 | rtx new_i2_dest = SET_DEST (i2set); | |
2205 | ||
2206 | while (GET_CODE (new_i3_dest) == ZERO_EXTRACT | |
2207 | || GET_CODE (new_i3_dest) == STRICT_LOW_PART | |
2208 | || GET_CODE (new_i3_dest) == SUBREG) | |
2209 | new_i3_dest = XEXP (new_i3_dest, 0); | |
2210 | ||
d4096689 RK |
2211 | while (GET_CODE (new_i2_dest) == ZERO_EXTRACT |
2212 | || GET_CODE (new_i2_dest) == STRICT_LOW_PART | |
2213 | || GET_CODE (new_i2_dest) == SUBREG) | |
2214 | new_i2_dest = XEXP (new_i2_dest, 0); | |
2215 | ||
393de53f RK |
2216 | if (GET_CODE (new_i3_dest) == REG |
2217 | && GET_CODE (new_i2_dest) == REG | |
2218 | && REGNO (new_i3_dest) == REGNO (new_i2_dest)) | |
b1f21e0a | 2219 | REG_N_SETS (REGNO (new_i2_dest))++; |
393de53f | 2220 | } |
916f14f1 | 2221 | } |
230d793d RS |
2222 | |
2223 | /* If we can split it and use I2DEST, go ahead and see if that | |
2224 | helps things be recognized. Verify that none of the registers | |
2225 | are set between I2 and I3. */ | |
d0ab8cd3 | 2226 | if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0 |
230d793d RS |
2227 | #ifdef HAVE_cc0 |
2228 | && GET_CODE (i2dest) == REG | |
2229 | #endif | |
2230 | /* We need I2DEST in the proper mode. If it is a hard register | |
2231 | or the only use of a pseudo, we can change its mode. */ | |
2232 | && (GET_MODE (*split) == GET_MODE (i2dest) | |
2233 | || GET_MODE (*split) == VOIDmode | |
2234 | || REGNO (i2dest) < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 2235 | || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2 |
230d793d RS |
2236 | && ! REG_USERVAR_P (i2dest))) |
2237 | && (next_real_insn (i2) == i3 | |
2238 | || ! use_crosses_set_p (*split, INSN_CUID (i2))) | |
2239 | /* We can't overwrite I2DEST if its value is still used by | |
2240 | NEWPAT. */ | |
2241 | && ! reg_referenced_p (i2dest, newpat)) | |
2242 | { | |
2243 | rtx newdest = i2dest; | |
df7d75de RK |
2244 | enum rtx_code split_code = GET_CODE (*split); |
2245 | enum machine_mode split_mode = GET_MODE (*split); | |
230d793d RS |
2246 | |
2247 | /* Get NEWDEST as a register in the proper mode. We have already | |
2248 | validated that we can do this. */ | |
df7d75de | 2249 | if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode) |
230d793d | 2250 | { |
38a448ca | 2251 | newdest = gen_rtx_REG (split_mode, REGNO (i2dest)); |
230d793d RS |
2252 | |
2253 | if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER) | |
2254 | SUBST (regno_reg_rtx[REGNO (i2dest)], newdest); | |
2255 | } | |
2256 | ||
2257 | /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to | |
2258 | an ASHIFT. This can occur if it was inside a PLUS and hence | |
2259 | appeared to be a memory address. This is a kludge. */ | |
df7d75de | 2260 | if (split_code == MULT |
230d793d | 2261 | && GET_CODE (XEXP (*split, 1)) == CONST_INT |
1568d79b | 2262 | && INTVAL (XEXP (*split, 1)) > 0 |
230d793d | 2263 | && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0) |
1dc8a823 | 2264 | { |
f1c6ba8b RK |
2265 | SUBST (*split, gen_rtx_ASHIFT (split_mode, |
2266 | XEXP (*split, 0), GEN_INT (i))); | |
1dc8a823 JW |
2267 | /* Update split_code because we may not have a multiply |
2268 | anymore. */ | |
2269 | split_code = GET_CODE (*split); | |
2270 | } | |
230d793d RS |
2271 | |
2272 | #ifdef INSN_SCHEDULING | |
2273 | /* If *SPLIT is a paradoxical SUBREG, when we split it, it should | |
2274 | be written as a ZERO_EXTEND. */ | |
df7d75de | 2275 | if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM) |
25c25947 R |
2276 | { |
2277 | #ifdef LOAD_EXTEND_OP | |
2278 | /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's | |
2279 | what it really is. */ | |
2280 | if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split))) | |
2281 | == SIGN_EXTEND) | |
2282 | SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode, | |
2283 | SUBREG_REG (*split))); | |
2284 | else | |
2285 | #endif | |
2286 | SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode, | |
2287 | SUBREG_REG (*split))); | |
2288 | } | |
230d793d RS |
2289 | #endif |
2290 | ||
f1c6ba8b | 2291 | newi2pat = gen_rtx_SET (VOIDmode, newdest, *split); |
230d793d | 2292 | SUBST (*split, newdest); |
8e2f6e35 | 2293 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
df7d75de RK |
2294 | |
2295 | /* If the split point was a MULT and we didn't have one before, | |
2296 | don't use one now. */ | |
2297 | if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult)) | |
8e2f6e35 | 2298 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2299 | } |
2300 | } | |
2301 | ||
2302 | /* Check for a case where we loaded from memory in a narrow mode and | |
2303 | then sign extended it, but we need both registers. In that case, | |
2304 | we have a PARALLEL with both loads from the same memory location. | |
2305 | We can split this into a load from memory followed by a register-register | |
2306 | copy. This saves at least one insn, more if register allocation can | |
f0343c74 RK |
2307 | eliminate the copy. |
2308 | ||
a9b2f059 JW |
2309 | We cannot do this if the destination of the first assignment is a |
2310 | condition code register or cc0. We eliminate this case by making sure | |
2311 | the SET_DEST and SET_SRC have the same mode. | |
2312 | ||
f0343c74 RK |
2313 | We cannot do this if the destination of the second assignment is |
2314 | a register that we have already assumed is zero-extended. Similarly | |
2315 | for a SUBREG of such a register. */ | |
230d793d RS |
2316 | |
2317 | else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0 | |
2318 | && GET_CODE (newpat) == PARALLEL | |
2319 | && XVECLEN (newpat, 0) == 2 | |
2320 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2321 | && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND | |
a9b2f059 JW |
2322 | && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0))) |
2323 | == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0)))) | |
230d793d RS |
2324 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET |
2325 | && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2326 | XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0)) | |
2327 | && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2328 | INSN_CUID (i2)) | |
2329 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT | |
2330 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART | |
f0343c74 RK |
2331 | && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)), |
2332 | (GET_CODE (temp) == REG | |
2333 | && reg_nonzero_bits[REGNO (temp)] != 0 | |
2334 | && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD | |
2335 | && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT | |
2336 | && (reg_nonzero_bits[REGNO (temp)] | |
2337 | != GET_MODE_MASK (word_mode)))) | |
2338 | && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG | |
2339 | && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))), | |
2340 | (GET_CODE (temp) == REG | |
2341 | && reg_nonzero_bits[REGNO (temp)] != 0 | |
2342 | && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD | |
2343 | && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT | |
2344 | && (reg_nonzero_bits[REGNO (temp)] | |
2345 | != GET_MODE_MASK (word_mode))))) | |
230d793d RS |
2346 | && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)), |
2347 | SET_SRC (XVECEXP (newpat, 0, 1))) | |
2348 | && ! find_reg_note (i3, REG_UNUSED, | |
2349 | SET_DEST (XVECEXP (newpat, 0, 0)))) | |
2350 | { | |
472fbdd1 RK |
2351 | rtx ni2dest; |
2352 | ||
230d793d | 2353 | newi2pat = XVECEXP (newpat, 0, 0); |
472fbdd1 | 2354 | ni2dest = SET_DEST (XVECEXP (newpat, 0, 0)); |
230d793d RS |
2355 | newpat = XVECEXP (newpat, 0, 1); |
2356 | SUBST (SET_SRC (newpat), | |
472fbdd1 | 2357 | gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest)); |
8e2f6e35 | 2358 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
a29ca9db | 2359 | |
230d793d | 2360 | if (i2_code_number >= 0) |
8e2f6e35 | 2361 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
5089e22e RS |
2362 | |
2363 | if (insn_code_number >= 0) | |
2364 | { | |
2365 | rtx insn; | |
2366 | rtx link; | |
2367 | ||
2368 | /* If we will be able to accept this, we have made a change to the | |
2369 | destination of I3. This can invalidate a LOG_LINKS pointing | |
2370 | to I3. No other part of combine.c makes such a transformation. | |
2371 | ||
2372 | The new I3 will have a destination that was previously the | |
2373 | destination of I1 or I2 and which was used in I2 or I3. Call |
2374 | distribute_links to make a LOG_LINK from the next use of | |
2375 | that destination. */ | |
2376 | ||
2377 | PATTERN (i3) = newpat; | |
38a448ca | 2378 | distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX)); |
5089e22e RS |
2379 | |
2380 | /* I3 now uses what used to be its destination and which is | |
2381 | now I2's destination. That means we need a LOG_LINK from | |
2382 | I3 to I2. But we used to have one, so we still will. | |
2383 | ||
2384 | However, some later insn might be using I2's dest and have | |
2385 | a LOG_LINK pointing at I3. We must remove this link. | |
2386 | The simplest way to remove the link is to point it at I1, | |
2387 | which we know will be a NOTE. */ | |
2388 | ||
2389 | for (insn = NEXT_INSN (i3); | |
f6366fc7 ZD |
2390 | insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR |
2391 | || insn != this_basic_block->next_bb->head); | |
5089e22e RS |
2392 | insn = NEXT_INSN (insn)) |
2393 | { | |
2c3c49de | 2394 | if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn))) |
5089e22e RS |
2395 | { |
2396 | for (link = LOG_LINKS (insn); link; | |
2397 | link = XEXP (link, 1)) | |
2398 | if (XEXP (link, 0) == i3) | |
2399 | XEXP (link, 0) = i1; | |
2400 | ||
2401 | break; | |
2402 | } | |
2403 | } | |
2404 | } | |
230d793d | 2405 | } |
663522cb | 2406 | |
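/* Shape of the case just handled (illustrative; the subreg shown is
   the lowpart on a little-endian target):

     (parallel [(set (reg:SI 100)
                     (sign_extend:SI (mem:HI (reg:SI 102))))
                (set (reg:HI 101) (mem:HI (reg:SI 102)))])

   is split into the load

     newi2pat: (set (reg:SI 100) (sign_extend:SI (mem:HI (reg:SI 102))))

   and the register-register copy

     newpat:   (set (reg:HI 101) (subreg:HI (reg:SI 100) 0))  */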
230d793d RS |
2407 | /* Similarly, check for a case where we have a PARALLEL of two independent |
2408 | SETs but we started with three insns. In this case, we can do the sets | |
2409 | as two separate insns. This case occurs when some SET allows two | |
2410 | other insns to combine, but the destination of that SET is still live. */ | |
2411 | ||
2412 | else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0 | |
2413 | && GET_CODE (newpat) == PARALLEL | |
2414 | && XVECLEN (newpat, 0) == 2 | |
2415 | && GET_CODE (XVECEXP (newpat, 0, 0)) == SET | |
2416 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT | |
2417 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART | |
2418 | && GET_CODE (XVECEXP (newpat, 0, 1)) == SET | |
2419 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT | |
2420 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART | |
2421 | && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)), | |
2422 | INSN_CUID (i2)) | |
2423 | /* Don't pass sets with (USE (MEM ...)) dests to the following. */ | |
2424 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE | |
2425 | && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE | |
2426 | && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)), | |
2427 | XVECEXP (newpat, 0, 0)) | |
2428 | && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)), | |
14a774a9 RK |
2429 | XVECEXP (newpat, 0, 1)) |
2430 | && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0))) | |
2431 | && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))) | |
230d793d | 2432 | { |
e9a25f70 JL |
2433 | /* Normally, it doesn't matter which of the two is done first, |
2434 | but it does if one references cc0. In that case, it has to | |
2435 | be first. */ | |
2436 | #ifdef HAVE_cc0 | |
2437 | if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))) | |
2438 | { | |
2439 | newi2pat = XVECEXP (newpat, 0, 0); | |
2440 | newpat = XVECEXP (newpat, 0, 1); | |
2441 | } | |
2442 | else | |
2443 | #endif | |
2444 | { | |
2445 | newi2pat = XVECEXP (newpat, 0, 1); | |
2446 | newpat = XVECEXP (newpat, 0, 0); | |
2447 | } | |
230d793d | 2448 | |
8e2f6e35 | 2449 | i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes); |
a29ca9db | 2450 | |
230d793d | 2451 | if (i2_code_number >= 0) |
8e2f6e35 | 2452 | insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes); |
230d793d RS |
2453 | } |
2454 | ||
2455 | /* If it still isn't recognized, fail and change things back the way they | |
2456 | were. */ | |
2457 | if ((insn_code_number < 0 | |
2458 | /* Is the result a reasonable ASM_OPERANDS? */ | |
2459 | && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2))) | |
2460 | { | |
2461 | undo_all (); | |
2462 | return 0; | |
2463 | } | |
2464 | ||
2465 | /* If we had to change another insn, make sure it is valid also. */ | |
2466 | if (undobuf.other_insn) | |
2467 | { | |
230d793d RS |
2468 | rtx other_pat = PATTERN (undobuf.other_insn); |
2469 | rtx new_other_notes; | |
2470 | rtx note, next; | |
2471 | ||
6e25d159 RK |
2472 | CLEAR_HARD_REG_SET (newpat_used_regs); |
2473 | ||
8e2f6e35 BS |
2474 | other_code_number = recog_for_combine (&other_pat, undobuf.other_insn, |
2475 | &new_other_notes); | |
230d793d RS |
2476 | |
2477 | if (other_code_number < 0 && ! check_asm_operands (other_pat)) | |
2478 | { | |
2479 | undo_all (); | |
2480 | return 0; | |
2481 | } | |
2482 | ||
2483 | PATTERN (undobuf.other_insn) = other_pat; | |
2484 | ||
2485 | /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they | |
2486 | are still valid. Then add any non-duplicate notes added by | |
2487 | recog_for_combine. */ | |
2488 | for (note = REG_NOTES (undobuf.other_insn); note; note = next) | |
2489 | { | |
2490 | next = XEXP (note, 1); | |
2491 | ||
2492 | if (REG_NOTE_KIND (note) == REG_UNUSED | |
2493 | && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn))) | |
1a26b032 RK |
2494 | { |
2495 | if (GET_CODE (XEXP (note, 0)) == REG) | |
b1f21e0a | 2496 | REG_N_DEATHS (REGNO (XEXP (note, 0)))--; |
1a26b032 RK |
2497 | |
2498 | remove_note (undobuf.other_insn, note); | |
2499 | } | |
230d793d RS |
2500 | } |
2501 | ||
1a26b032 RK |
2502 | for (note = new_other_notes; note; note = XEXP (note, 1)) |
2503 | if (GET_CODE (XEXP (note, 0)) == REG) | |
b1f21e0a | 2504 | REG_N_DEATHS (REGNO (XEXP (note, 0)))++; |
1a26b032 | 2505 | |
230d793d | 2506 | distribute_notes (new_other_notes, undobuf.other_insn, |
5f4f0e22 | 2507 | undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX); |
230d793d | 2508 | } |
5ef17dd2 | 2509 | #ifdef HAVE_cc0 |
663522cb | 2510 | /* If I2 sets CC0 and I3 uses that CC0, check whether they are
ec5c56db | 2511 | adjacent to each other. */ | |
5ef17dd2 CC |
2512 | { |
2513 | rtx p = prev_nonnote_insn (i3); | |
663522cb KH |
2514 | if (p && p != i2 && GET_CODE (p) == INSN && newi2pat |
2515 | && sets_cc0_p (newi2pat)) | |
5ef17dd2 | 2516 | { |
663522cb KH |
2517 | undo_all (); |
2518 | return 0; | |
5ef17dd2 | 2519 | } |
663522cb KH |
2520 | } |
2521 | #endif | |
230d793d | 2522 | |
663522cb | 2523 | /* We now know that we can do this combination. Merge the insns and |
230d793d RS |
2524 | update the status of registers and LOG_LINKS. */ |
2525 | ||
2526 | { | |
2527 | rtx i3notes, i2notes, i1notes = 0; | |
2528 | rtx i3links, i2links, i1links = 0; | |
2529 | rtx midnotes = 0; | |
770ae6cc | 2530 | unsigned int regno; |
ff3467a9 JW |
2531 | /* Compute which registers we expect to eliminate. newi2pat may be setting |
2532 | either i3dest or i2dest, so we must check it. Also, i1dest may be the | |
2533 | same as i3dest, in which case newi2pat may be setting i1dest. */ | |
2534 | rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat)) | |
2535 | || i2dest_in_i2src || i2dest_in_i1src | |
230d793d | 2536 | ? 0 : i2dest); |
ff3467a9 JW |
2537 | rtx elim_i1 = (i1 == 0 || i1dest_in_i1src |
2538 | || (newi2pat && reg_set_p (i1dest, newi2pat)) | |
2539 | ? 0 : i1dest); | |
230d793d RS |
2540 | |
2541 | /* Get the old REG_NOTES and LOG_LINKS from all our insns and | |
2542 | clear them. */ | |
2543 | i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3); | |
2544 | i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2); | |
2545 | if (i1) | |
2546 | i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1); | |
2547 | ||
2548 | /* Ensure that we do not have something that should not be shared but | |
2549 | occurs multiple times in the new insns. Check this by first | |
5089e22e | 2550 | resetting all the `used' flags and then copying anything that is shared. */
230d793d RS |
2551 | |
2552 | reset_used_flags (i3notes); | |
2553 | reset_used_flags (i2notes); | |
2554 | reset_used_flags (i1notes); | |
2555 | reset_used_flags (newpat); | |
2556 | reset_used_flags (newi2pat); | |
2557 | if (undobuf.other_insn) | |
2558 | reset_used_flags (PATTERN (undobuf.other_insn)); | |
2559 | ||
2560 | i3notes = copy_rtx_if_shared (i3notes); | |
2561 | i2notes = copy_rtx_if_shared (i2notes); | |
2562 | i1notes = copy_rtx_if_shared (i1notes); | |
2563 | newpat = copy_rtx_if_shared (newpat); | |
2564 | newi2pat = copy_rtx_if_shared (newi2pat); | |
2565 | if (undobuf.other_insn) | |
2566 | reset_used_flags (PATTERN (undobuf.other_insn)); | |
2567 | ||
2568 | INSN_CODE (i3) = insn_code_number; | |
2569 | PATTERN (i3) = newpat; | |
cddd8b72 AO |
2570 | |
2571 | if (GET_CODE (i3) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (i3)) | |
2572 | { | |
2573 | rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3); | |
2574 | ||
2575 | reset_used_flags (call_usage); | |
2576 | call_usage = copy_rtx (call_usage); | |
2577 | ||
2578 | if (substed_i2) | |
2579 | replace_rtx (call_usage, i2dest, i2src); | |
2580 | ||
2581 | if (substed_i1) | |
2582 | replace_rtx (call_usage, i1dest, i1src); | |
2583 | ||
2584 | CALL_INSN_FUNCTION_USAGE (i3) = call_usage; | |
2585 | } | |
2586 | ||
230d793d RS |
2587 | if (undobuf.other_insn) |
2588 | INSN_CODE (undobuf.other_insn) = other_code_number; | |
2589 | ||
2590 | /* We had one special case above where I2 had more than one set and | |
2591 | we replaced a destination of one of those sets with the destination | |
2592 | of I3. In that case, we have to update LOG_LINKS of insns later | |
176c9e6b JW |
2593 | in this basic block. Note that this (expensive) case is rare. |
2594 | ||
2595 | Also, in this case, we must pretend that all REG_NOTEs for I2 | |
2596 | actually came from I3, so that REG_UNUSED notes from I2 will be | |
2597 | properly handled. */ | |
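  /* Hypothetical example of the fixup below: if I2 now holds a PARALLEL
     setting both (reg 70) and (reg 71), a later insn in this block whose
     LOG_LINKS entry points at I2 because it uses (reg 71) must have that
     link redirected to I3.  */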
2598 | ||
c7be4f66 | 2599 | if (i3_subst_into_i2) |
176c9e6b | 2600 | { |
1786009e | 2601 | for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++) |
95ac07b0 AO |
2602 | if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != USE |
2603 | && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG | |
1786009e ZW |
2604 | && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest |
2605 | && ! find_reg_note (i2, REG_UNUSED, | |
2606 | SET_DEST (XVECEXP (PATTERN (i2), 0, i)))) | |
2607 | for (temp = NEXT_INSN (i2); | |
f6366fc7 ZD |
2608 | temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR |
2609 | || this_basic_block->next_bb->head != temp); | |
1786009e ZW |
2610 | temp = NEXT_INSN (temp)) |
2611 | if (temp != i3 && INSN_P (temp)) | |
2612 | for (link = LOG_LINKS (temp); link; link = XEXP (link, 1)) | |
2613 | if (XEXP (link, 0) == i2) | |
2614 | XEXP (link, 0) = i3; | |
176c9e6b JW |
2615 | |
2616 | if (i3notes) | |
2617 | { | |
2618 | rtx link = i3notes; | |
2619 | while (XEXP (link, 1)) | |
2620 | link = XEXP (link, 1); | |
2621 | XEXP (link, 1) = i2notes; | |
2622 | } | |
2623 | else | |
2624 | i3notes = i2notes; | |
2625 | i2notes = 0; | |
2626 | } | |
230d793d RS |
2627 | |
2628 | LOG_LINKS (i3) = 0; | |
2629 | REG_NOTES (i3) = 0; | |
2630 | LOG_LINKS (i2) = 0; | |
2631 | REG_NOTES (i2) = 0; | |
2632 | ||
2633 | if (newi2pat) | |
2634 | { | |
2635 | INSN_CODE (i2) = i2_code_number; | |
2636 | PATTERN (i2) = newi2pat; | |
2637 | } | |
2638 | else | |
2639 | { | |
2640 | PUT_CODE (i2, NOTE); | |
2641 | NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED; | |
2642 | NOTE_SOURCE_FILE (i2) = 0; | |
2643 | } | |
2644 | ||
2645 | if (i1) | |
2646 | { | |
2647 | LOG_LINKS (i1) = 0; | |
2648 | REG_NOTES (i1) = 0; | |
2649 | PUT_CODE (i1, NOTE); | |
2650 | NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED; | |
2651 | NOTE_SOURCE_FILE (i1) = 0; | |
2652 | } | |
2653 | ||
2654 | /* Get death notes for everything that is now used in either I3 or | |
663522cb | 2655 | I2 and used to die in a previous insn. If we built two new |
6eb12cef RK |
2656 | patterns, move from I1 to I2 then I2 to I3 so that we get the |
2657 | proper movement on registers that I2 modifies. */ | |
230d793d | 2658 | |
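  /* Illustration (hypothetical): a register whose last use is now inside
     newi2pat should receive its REG_DEAD note on I2, while one whose last
     use is in newpat should receive it on I3; processing newi2pat first
     keeps deaths of registers that I2 modifies from landing on the wrong
     insn.  */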
230d793d | 2659 | if (newi2pat) |
6eb12cef RK |
2660 | { |
2661 | move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes); | |
2662 | move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes); | |
2663 | } | |
2664 | else | |
2665 | move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2), | |
2666 | i3, &midnotes); | |
230d793d RS |
2667 | |
2668 | /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */ | |
2669 | if (i3notes) | |
5f4f0e22 CH |
2670 | distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX, |
2671 | elim_i2, elim_i1); | |
230d793d | 2672 | if (i2notes) |
5f4f0e22 CH |
2673 | distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX, |
2674 | elim_i2, elim_i1); | |
230d793d | 2675 | if (i1notes) |
5f4f0e22 CH |
2676 | distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX, |
2677 | elim_i2, elim_i1); | |
230d793d | 2678 | if (midnotes) |
5f4f0e22 CH |
2679 | distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
2680 | elim_i2, elim_i1); | |
230d793d RS |
2681 | |
2682 | /* Distribute any notes added to I2 or I3 by recog_for_combine. We | |
2683 | know these are REG_UNUSED and want them to go to the desired insn, | |
663522cb | 2684 | so we always pass it as i3. We have not counted the notes in |
1a26b032 RK |
2685 | reg_n_deaths yet, so we need to do so now. */ |
2686 | ||
230d793d | 2687 | if (newi2pat && new_i2_notes) |
1a26b032 RK |
2688 | { |
2689 | for (temp = new_i2_notes; temp; temp = XEXP (temp, 1)) | |
2690 | if (GET_CODE (XEXP (temp, 0)) == REG) | |
b1f21e0a | 2691 | REG_N_DEATHS (REGNO (XEXP (temp, 0)))++; |
663522cb | 2692 | |
1a26b032 RK |
2693 | distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
2694 | } | |
2695 | ||
230d793d | 2696 | if (new_i3_notes) |
1a26b032 RK |
2697 | { |
2698 | for (temp = new_i3_notes; temp; temp = XEXP (temp, 1)) | |
2699 | if (GET_CODE (XEXP (temp, 0)) == REG) | |
b1f21e0a | 2700 | REG_N_DEATHS (REGNO (XEXP (temp, 0)))++; |
663522cb | 2701 | |
1a26b032 RK |
2702 | distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX); |
2703 | } | |
230d793d RS |
2704 | |
2705 | /* If I3DEST was used in I3SRC, it really died in I3. We may need to | |
e9a25f70 JL |
2706 | put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets |
2707 | I3DEST, the death must be somewhere before I2, not I3. If we passed I3 | |
2708 | in that case, it might delete I2. Similarly for I2 and I1. | |
1a26b032 RK |
2709 | Show an additional death due to the REG_DEAD note we make here. If |
2710 | we discard it in distribute_notes, we will decrement it again. */ | |
d0ab8cd3 | 2711 | |
230d793d | 2712 | if (i3dest_killed) |
1a26b032 RK |
2713 | { |
2714 | if (GET_CODE (i3dest_killed) == REG) | |
b1f21e0a | 2715 | REG_N_DEATHS (REGNO (i3dest_killed))++; |
1a26b032 | 2716 | |
e9a25f70 | 2717 | if (newi2pat && reg_set_p (i3dest_killed, newi2pat)) |
38a448ca RH |
2718 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed, |
2719 | NULL_RTX), | |
ff3467a9 | 2720 | NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1); |
e9a25f70 | 2721 | else |
38a448ca RH |
2722 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed, |
2723 | NULL_RTX), | |
e9a25f70 | 2724 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
ff3467a9 | 2725 | elim_i2, elim_i1); |
1a26b032 | 2726 | } |
58c8c593 | 2727 | |
230d793d | 2728 | if (i2dest_in_i2src) |
58c8c593 | 2729 | { |
1a26b032 | 2730 | if (GET_CODE (i2dest) == REG) |
b1f21e0a | 2731 | REG_N_DEATHS (REGNO (i2dest))++; |
1a26b032 | 2732 | |
58c8c593 | 2733 | if (newi2pat && reg_set_p (i2dest, newi2pat)) |
38a448ca | 2734 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX), |
58c8c593 RK |
2735 | NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
2736 | else | |
38a448ca | 2737 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX), |
58c8c593 RK |
2738 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
2739 | NULL_RTX, NULL_RTX); | |
2740 | } | |
2741 | ||
230d793d | 2742 | if (i1dest_in_i1src) |
58c8c593 | 2743 | { |
1a26b032 | 2744 | if (GET_CODE (i1dest) == REG) |
b1f21e0a | 2745 | REG_N_DEATHS (REGNO (i1dest))++; |
1a26b032 | 2746 | |
58c8c593 | 2747 | if (newi2pat && reg_set_p (i1dest, newi2pat)) |
38a448ca | 2748 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX), |
58c8c593 RK |
2749 | NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX); |
2750 | else | |
38a448ca | 2751 | distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX), |
58c8c593 RK |
2752 | NULL_RTX, i3, newi2pat ? i2 : NULL_RTX, |
2753 | NULL_RTX, NULL_RTX); | |
2754 | } | |
230d793d RS |
2755 | |
2756 | distribute_links (i3links); | |
2757 | distribute_links (i2links); | |
2758 | distribute_links (i1links); | |
2759 | ||
2760 | if (GET_CODE (i2dest) == REG) | |
2761 | { | |
d0ab8cd3 RK |
2762 | rtx link; |
2763 | rtx i2_insn = 0, i2_val = 0, set; | |
2764 | ||
2765 | /* The insn that used to set this register doesn't exist, and | |
2766 | this life of the register may not exist either. See if one of | |
663522cb | 2767 | I3's links points to an insn that sets I2DEST. If it does, |
d0ab8cd3 RK |
2768 | that is now the last known value for I2DEST. If we don't update |
2769 | this and I2 set the register to a value that depended on its old | |
230d793d RS |
2770 | contents, we will get confused. If this insn is used, things
2771 | will be set correctly in combine_instructions. */
d0ab8cd3 RK |
2772 | |
2773 | for (link = LOG_LINKS (i3); link; link = XEXP (link, 1)) | |
2774 | if ((set = single_set (XEXP (link, 0))) != 0 | |
2775 | && rtx_equal_p (i2dest, SET_DEST (set))) | |
2776 | i2_insn = XEXP (link, 0), i2_val = SET_SRC (set); | |
2777 | ||
2778 | record_value_for_reg (i2dest, i2_insn, i2_val); | |
230d793d RS |
2779 | |
2780 | /* If the reg formerly set in I2 died only once and that was in I3, | |
2781 | zero its use count so it won't make `reload' do any work. */ | |
538fe8cd ILT |
2782 | if (! added_sets_2 |
2783 | && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat)) | |
2784 | && ! i2dest_in_i2src) | |
230d793d RS |
2785 | { |
2786 | regno = REGNO (i2dest); | |
b1f21e0a | 2787 | REG_N_SETS (regno)--; |
230d793d RS |
2788 | } |
2789 | } | |
2790 | ||
2791 | if (i1 && GET_CODE (i1dest) == REG) | |
2792 | { | |
d0ab8cd3 RK |
2793 | rtx link; |
2794 | rtx i1_insn = 0, i1_val = 0, set; | |
2795 | ||
2796 | for (link = LOG_LINKS (i3); link; link = XEXP (link, 1)) | |
2797 | if ((set = single_set (XEXP (link, 0))) != 0 | |
2798 | && rtx_equal_p (i1dest, SET_DEST (set))) | |
2799 | i1_insn = XEXP (link, 0), i1_val = SET_SRC (set); | |
2800 | ||
2801 | record_value_for_reg (i1dest, i1_insn, i1_val); | |
2802 | ||
230d793d | 2803 | regno = REGNO (i1dest); |
5af91171 | 2804 | if (! added_sets_1 && ! i1dest_in_i1src) |
770ae6cc | 2805 | REG_N_SETS (regno)--; |
230d793d RS |
2806 | } |
2807 | ||
951553af | 2808 | /* Update reg_nonzero_bits et al for any changes that may have been made
663522cb | 2809 | to this insn. The order of the set_nonzero_bits_and_sign_copies() calls
5fb7c247 | 2810 | is important, because newi2pat can affect the nonzero_bits of newpat. */
22609cbf | 2811 | if (newi2pat) |
84832317 | 2812 | note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL); |
5fb7c247 | 2813 | note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL); |
22609cbf | 2814 | |
44a76fc8 AG |
2815 | /* Set new_direct_jump_p if a new return or simple jump instruction |
2816 | has been created. | |
2817 | ||
663522cb | 2818 | If I3 is now an unconditional jump, ensure that it has a |
230d793d | 2819 | BARRIER following it since it may have initially been a |
381ee8af | 2820 | conditional jump. It may also be the last nonnote insn. */ |
663522cb | 2821 | |
f40f4c8e | 2822 | if (returnjump_p (i3) || any_uncondjump_p (i3)) |
44a76fc8 AG |
2823 | { |
2824 | *new_direct_jump_p = 1; | |
230d793d | 2825 | |
44a76fc8 AG |
2826 | if ((temp = next_nonnote_insn (i3)) == NULL_RTX |
2827 | || GET_CODE (temp) != BARRIER) | |
2828 | emit_barrier_after (i3); | |
2829 | } | |
f40f4c8e RS |
2830 | |
2831 | if (undobuf.other_insn != NULL_RTX | |
2832 | && (returnjump_p (undobuf.other_insn) | |
2833 | || any_uncondjump_p (undobuf.other_insn))) | |
2834 | { | |
2835 | *new_direct_jump_p = 1; | |
2836 | ||
2837 | if ((temp = next_nonnote_insn (undobuf.other_insn)) == NULL_RTX | |
2838 | || GET_CODE (temp) != BARRIER) | |
2839 | emit_barrier_after (undobuf.other_insn); | |
2840 | } | |
2841 | ||
592a6d1d JH |
2842 | /* A no-op jump does not need a barrier, but it does need the CFG
2843 | to be cleaned up. */
2844 | if (GET_CODE (newpat) == SET | |
2845 | && SET_SRC (newpat) == pc_rtx | |
2846 | && SET_DEST (newpat) == pc_rtx) | |
2847 | *new_direct_jump_p = 1; | |
230d793d RS |
2848 | } |
2849 | ||
2850 | combine_successes++; | |
e7749837 | 2851 | undo_commit (); |
230d793d | 2852 | |
abe6e52f RK |
2853 | if (added_links_insn |
2854 | && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2)) | |
2855 | && INSN_CUID (added_links_insn) < INSN_CUID (i3)) | |
2856 | return added_links_insn; | |
2857 | else | |
2858 | return newi2pat ? i2 : i3; | |
230d793d RS |
2859 | } |
2860 | \f | |
2861 | /* Undo all the modifications recorded in undobuf. */ | |
2862 | ||
2863 | static void | |
2864 | undo_all () | |
2865 | { | |
241cea85 RK |
2866 | struct undo *undo, *next; |
2867 | ||
2868 | for (undo = undobuf.undos; undo; undo = next) | |
7c046e4e | 2869 | { |
241cea85 RK |
2870 | next = undo->next; |
2871 | if (undo->is_int) | |
2872 | *undo->where.i = undo->old_contents.i; | |
7c046e4e | 2873 | else |
241cea85 RK |
2874 | *undo->where.r = undo->old_contents.r; |
2875 | ||
2876 | undo->next = undobuf.frees; | |
2877 | undobuf.frees = undo; | |
7c046e4e | 2878 | } |
230d793d | 2879 | |
f1c6ba8b | 2880 | undobuf.undos = 0; |
230d793d | 2881 | } |
e7749837 RH |
2882 | |
2883 | /* We've committed to accepting the changes we made. Move all | |
2884 | of the undos to the free list. */ | |
2885 | ||
2886 | static void | |
2887 | undo_commit () | |
2888 | { | |
2889 | struct undo *undo, *next; | |
2890 | ||
2891 | for (undo = undobuf.undos; undo; undo = next) | |
2892 | { | |
2893 | next = undo->next; | |
2894 | undo->next = undobuf.frees; | |
2895 | undobuf.frees = undo; | |
2896 | } | |
f1c6ba8b | 2897 | undobuf.undos = 0; |
e7749837 RH |
2898 | } |
2899 | ||
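/* A minimal sketch of the record/undo/commit idiom used by undobuf
   (simplified: only rtx-sized slots, no is_int flag; the names below are
   illustrative, not GCC's):

     struct undo { struct undo *next; rtx *where; rtx old; };
     struct undo *undos, *frees;

     static void
     record (struct undo *u, rtx *where, rtx newval)
     {
       u->where = where, u->old = *where;   (save the old contents)
       *where = newval;                     (make the change)
       u->next = undos, undos = u;          (newest change first)
     }

   undo_all walks `undos' restoring *u->where from u->old; undo_commit
   walks it without restoring.  Both push each record onto `frees' for
   reuse and leave `undos' empty.  */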
230d793d RS |
2900 | \f |
2901 | /* Find the innermost point within the rtx at LOC, possibly LOC itself, | |
d0ab8cd3 RK |
2902 | where we have an arithmetic expression and return that point. LOC will |
2903 | be inside INSN. | |
230d793d RS |
2904 | |
2905 | try_combine will call this function to see if an insn can be split into | |
2906 | two insns. */ | |
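/* For example (hypothetical pattern): if the combined
     (set (reg 60) (ior (and (reg 61) (const_int 15)) (reg 62)))
   fails to match as one insn, splitting at the inner AND lets try_combine
   emit the AND as a separate first insn and retry the remainder.  */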
2907 | ||
2908 | static rtx * | |
d0ab8cd3 | 2909 | find_split_point (loc, insn) |
230d793d | 2910 | rtx *loc; |
d0ab8cd3 | 2911 | rtx insn; |
230d793d RS |
2912 | { |
2913 | rtx x = *loc; | |
2914 | enum rtx_code code = GET_CODE (x); | |
2915 | rtx *split; | |
770ae6cc RK |
2916 | unsigned HOST_WIDE_INT len = 0; |
2917 | HOST_WIDE_INT pos = 0; | |
2918 | int unsignedp = 0; | |
6a651371 | 2919 | rtx inner = NULL_RTX; |
230d793d RS |
2920 | |
2921 | /* First special-case some codes. */ | |
2922 | switch (code) | |
2923 | { | |
2924 | case SUBREG: | |
2925 | #ifdef INSN_SCHEDULING | |
2926 | /* If we are making a paradoxical SUBREG invalid, it becomes a split | |
2927 | point. */ | |
2928 | if (GET_CODE (SUBREG_REG (x)) == MEM) | |
2929 | return loc; | |
2930 | #endif | |
d0ab8cd3 | 2931 | return find_split_point (&SUBREG_REG (x), insn); |
230d793d | 2932 | |
230d793d | 2933 | case MEM: |
916f14f1 | 2934 | #ifdef HAVE_lo_sum |
230d793d RS |
2935 | /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it |
2936 | using LO_SUM and HIGH. */ | |
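      /* For instance (hypothetical symbol), (mem (symbol_ref "x")) becomes
           (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
         and the HIGH subexpression becomes the split point, so it can be
         computed by a separate insn.  */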
2937 | if (GET_CODE (XEXP (x, 0)) == CONST | |
2938 | || GET_CODE (XEXP (x, 0)) == SYMBOL_REF) | |
2939 | { | |
2940 | SUBST (XEXP (x, 0), | |
f1c6ba8b RK |
2941 | gen_rtx_LO_SUM (Pmode, |
2942 | gen_rtx_HIGH (Pmode, XEXP (x, 0)), | |
2943 | XEXP (x, 0))); | |
230d793d RS |
2944 | return &XEXP (XEXP (x, 0), 0); |
2945 | } | |
230d793d RS |
2946 | #endif |
2947 | ||
916f14f1 RK |
2948 | /* If we have a PLUS whose second operand is a constant and the |
2949 | address is not valid, perhaps we can split it up using | |
2950 | the machine-specific way to split large constants. We use | |
ddd5a7c1 | 2951 | the first pseudo-reg (one of the virtual regs) as a placeholder; |
916f14f1 RK |
2952 | it will not remain in the result. */ |
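      /* Sketch (hypothetical target): for an invalid address such as
           (plus (reg 65) (const_int 0x12345))
         split_insns may expand a move of the address into a high-part load
         followed by an add of the low part; the virtual register used as
         the placeholder destination is replaced by the first insn's source
         and drops out of the final result.  */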
2953 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
2954 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
2955 | && ! memory_address_p (GET_MODE (x), XEXP (x, 0))) | |
2956 | { | |
2957 | rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER]; | |
38a448ca | 2958 | rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)), |
916f14f1 RK |
2959 | subst_insn); |
2960 | ||
2961 | /* This should have produced two insns, each of which sets our | |
2962 | placeholder. If the source of the second is a valid address, | |
2963 | we can put both sources together and make a split point | |
2964 | in the middle. */ | |
2965 | ||
2f937369 DM |
2966 | if (seq |
2967 | && NEXT_INSN (seq) != NULL_RTX | |
2968 | && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX | |
2969 | && GET_CODE (seq) == INSN | |
2970 | && GET_CODE (PATTERN (seq)) == SET | |
2971 | && SET_DEST (PATTERN (seq)) == reg | |
916f14f1 | 2972 | && ! reg_mentioned_p (reg, |
2f937369 DM |
2973 | SET_SRC (PATTERN (seq))) |
2974 | && GET_CODE (NEXT_INSN (seq)) == INSN | |
2975 | && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET | |
2976 | && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg | |
916f14f1 | 2977 | && memory_address_p (GET_MODE (x), |
2f937369 | 2978 | SET_SRC (PATTERN (NEXT_INSN (seq))))) |
916f14f1 | 2979 | { |
2f937369 DM |
2980 | rtx src1 = SET_SRC (PATTERN (seq)); |
2981 | rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq))); | |
916f14f1 RK |
2982 | |
2983 | /* Replace the placeholder in SRC2 with SRC1. If we can | |
2984 | find where in SRC2 it was placed, that can become our | |
2985 | split point and we can replace this address with SRC2. | |
2986 | Just try two obvious places. */ | |
2987 | ||
2988 | src2 = replace_rtx (src2, reg, src1); | |
2989 | split = 0; | |
2990 | if (XEXP (src2, 0) == src1) | |
2991 | split = &XEXP (src2, 0); | |
2992 | else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e' | |
2993 | && XEXP (XEXP (src2, 0), 0) == src1) | |
2994 | split = &XEXP (XEXP (src2, 0), 0); | |
2995 | ||
2996 | if (split) | |
2997 | { | |
2998 | SUBST (XEXP (x, 0), src2); | |
2999 | return split; | |
3000 | } | |
3001 | } | |
663522cb | 3002 | |
1a26b032 RK |
3003 | /* If that didn't work, perhaps the first operand is complex and |
3004 | needs to be computed separately, so make a split point there. | |
3005 | This will occur on machines that just support REG + CONST | |
3006 | and have a constant moved through some previous computation. */ | |
3007 | ||
3008 | else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o' | |
3009 | && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG | |
3010 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0)))) | |
3011 | == 'o'))) | |
3012 | return &XEXP (XEXP (x, 0), 0); | |
916f14f1 RK |
3013 | } |
3014 | break; | |
3015 | ||
230d793d RS |
3016 | case SET: |
3017 | #ifdef HAVE_cc0 | |
3018 | /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a | |
3019 | ZERO_EXTRACT, the most likely reason why this doesn't match is that | |
3020 | we need to put the operand into a register. So split at that | |
3021 | point. */ | |
3022 | ||
3023 | if (SET_DEST (x) == cc0_rtx | |
3024 | && GET_CODE (SET_SRC (x)) != COMPARE | |
3025 | && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT | |
3026 | && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o' | |
3027 | && ! (GET_CODE (SET_SRC (x)) == SUBREG | |
3028 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o')) | |
3029 | return &SET_SRC (x); | |
3030 | #endif | |
3031 | ||
3032 | /* See if we can split SET_SRC as it stands. */ | |
d0ab8cd3 | 3033 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3034 | if (split && split != &SET_SRC (x)) |
3035 | return split; | |
3036 | ||
041d7180 JL |
3037 | /* See if we can split SET_DEST as it stands. */ |
3038 | split = find_split_point (&SET_DEST (x), insn); | |
3039 | if (split && split != &SET_DEST (x)) | |
3040 | return split; | |
3041 | ||
230d793d RS |
3042 | /* See if this is a bitfield assignment with everything constant. If |
3043 | so, this is an IOR of an AND, so split it into that. */ | |
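    /* Worked example (hypothetical, assuming !BITS_BIG_ENDIAN):
         (set (zero_extract (reg 60) (const_int 4) (const_int 8))
              (const_int 5))
       becomes
         (set (reg 60) (ior (and (reg 60) (const_int -3841))
                            (const_int 1280)))
       since ~(15 << 8) == -3841 and 5 << 8 == 1280; when the stored value
       equals the field mask, the AND is omitted.  */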
3044 | if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
3045 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))) | |
5f4f0e22 | 3046 | <= HOST_BITS_PER_WIDE_INT) |
230d793d RS |
3047 | && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT |
3048 | && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT | |
3049 | && GET_CODE (SET_SRC (x)) == CONST_INT | |
3050 | && ((INTVAL (XEXP (SET_DEST (x), 1)) | |
cf0d9408 | 3051 | + INTVAL (XEXP (SET_DEST (x), 2))) |
230d793d RS |
3052 | <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))) |
3053 | && ! side_effects_p (XEXP (SET_DEST (x), 0))) | |
3054 | { | |
770ae6cc RK |
3055 | HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2)); |
3056 | unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1)); | |
3057 | unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x)); | |
230d793d RS |
3058 | rtx dest = XEXP (SET_DEST (x), 0); |
3059 | enum machine_mode mode = GET_MODE (dest); | |
5f4f0e22 | 3060 | unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1; |
230d793d | 3061 | |
f76b9db2 ILT |
3062 | if (BITS_BIG_ENDIAN) |
3063 | pos = GET_MODE_BITSIZE (mode) - len - pos; | |
230d793d | 3064 | |
770ae6cc | 3065 | if (src == mask) |
230d793d | 3066 | SUBST (SET_SRC (x), |
5f4f0e22 | 3067 | gen_binary (IOR, mode, dest, GEN_INT (src << pos))); |
230d793d RS |
3068 | else |
3069 | SUBST (SET_SRC (x), | |
3070 | gen_binary (IOR, mode, | |
663522cb | 3071 | gen_binary (AND, mode, dest, |
da6886f6 FS |
3072 | gen_int_mode (~(mask << pos), |
3073 | mode)), | |
5f4f0e22 | 3074 | GEN_INT (src << pos))); |
230d793d RS |
3075 | |
3076 | SUBST (SET_DEST (x), dest); | |
3077 | ||
d0ab8cd3 | 3078 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3079 | if (split && split != &SET_SRC (x)) |
3080 | return split; | |
3081 | } | |
3082 | ||
3083 | /* Otherwise, see if this is an operation that we can split into two. | |
3084 | If so, try to split that. */ | |
3085 | code = GET_CODE (SET_SRC (x)); | |
3086 | ||
3087 | switch (code) | |
3088 | { | |
d0ab8cd3 RK |
3089 | case AND: |
3090 | /* If we are AND'ing with a large constant that is only a single | |
3091 | bit and the result is only being used in a context where we | |
da7d8304 | 3092 | need to know if it is zero or nonzero, replace it with a bit |
d0ab8cd3 RK |
3093 | extraction. This will avoid the large constant, which might |
3094 | have taken more than one insn to make. If the constant were | |
3095 | not a valid argument to the AND but took only one insn to make, | |
3096 | this is no worse, but if it took more than one insn, it will | |
3097 | be better. */ | |
3098 | ||
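	/* Hypothetical instance: if
	     (set (reg 60) (and (reg 61) (const_int 0x8000)))
	   is used only as (ne (reg 60) (const_int 0)), the AND with the
	   large constant can be replaced by a one-bit ZERO_EXTRACT of
	   bit 15, which needs no constant-building insns.  */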
3099 | if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | |
3100 | && GET_CODE (XEXP (SET_SRC (x), 0)) == REG | |
3101 | && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7 | |
3102 | && GET_CODE (SET_DEST (x)) == REG | |
cf0d9408 | 3103 | && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0 |
d0ab8cd3 RK |
3104 | && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE) |
3105 | && XEXP (*split, 0) == SET_DEST (x) | |
3106 | && XEXP (*split, 1) == const0_rtx) | |
3107 | { | |
76184def DE |
3108 | rtx extraction = make_extraction (GET_MODE (SET_DEST (x)), |
3109 | XEXP (SET_SRC (x), 0), | |
3110 | pos, NULL_RTX, 1, 1, 0, 0); | |
3111 | if (extraction != 0) | |
3112 | { | |
3113 | SUBST (SET_SRC (x), extraction); | |
3114 | return find_split_point (loc, insn); | |
3115 | } | |
d0ab8cd3 RK |
3116 | } |
3117 | break; | |
3118 | ||
1a6ec070 RK |
3119 | case NE: |
3120 | /* If STORE_FLAG_VALUE is -1, this is (NE X 0), and only one bit of X
ec5c56db | 3121 | is known to be on, then this can be converted into a NEG of a shift. */
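	/* Hypothetical instance: with STORE_FLAG_VALUE == -1, if
	   nonzero_bits shows that only bit 4 of X can be set,
	   (ne X (const_int 0)) becomes
	     (neg (lshiftrt X (const_int 4)))
	   which yields 0 or -1 directly.  */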
1a6ec070 RK |
3122 | if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx |
3123 | && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0)) | |
4eb2cb10 | 3124 | && 1 <= (pos = exact_log2 |
1a6ec070 RK |
3125 | (nonzero_bits (XEXP (SET_SRC (x), 0), |
3126 | GET_MODE (XEXP (SET_SRC (x), 0)))))) | |
3127 | { | |
3128 | enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0)); | |
3129 | ||
3130 | SUBST (SET_SRC (x), | |
f1c6ba8b RK |
3131 | gen_rtx_NEG (mode, |
3132 | gen_rtx_LSHIFTRT (mode, | |
3133 | XEXP (SET_SRC (x), 0), | |
3134 | GEN_INT (pos)))); | |
1a6ec070 RK |
3135 | |
3136 | split = find_split_point (&SET_SRC (x), insn); | |
3137 | if (split && split != &SET_SRC (x)) | |
3138 | return split; | |
3139 | } | |
3140 | break; | |
3141 | ||
230d793d RS |
3142 | case SIGN_EXTEND: |
3143 | inner = XEXP (SET_SRC (x), 0); | |
101c1a3d JL |
3144 | |
3145 | /* We can't optimize if either mode is a partial integer | |
3146 | mode as we don't know how many bits are significant | |
3147 | in those modes. */ | |
3148 | if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT | |
3149 | || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT) | |
3150 | break; | |
3151 | ||
230d793d RS |
3152 | pos = 0; |
3153 | len = GET_MODE_BITSIZE (GET_MODE (inner)); | |
3154 | unsignedp = 0; | |
3155 | break; | |
3156 | ||
3157 | case SIGN_EXTRACT: | |
3158 | case ZERO_EXTRACT: | |
3159 | if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT | |
3160 | && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT) | |
3161 | { | |
3162 | inner = XEXP (SET_SRC (x), 0); | |
3163 | len = INTVAL (XEXP (SET_SRC (x), 1)); | |
3164 | pos = INTVAL (XEXP (SET_SRC (x), 2)); | |
3165 | ||
f76b9db2 ILT |
3166 | if (BITS_BIG_ENDIAN) |
3167 | pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos; | |
230d793d RS |
3168 | unsignedp = (code == ZERO_EXTRACT); |
3169 | } | |
3170 | break; | |
e9a25f70 JL |
3171 | |
3172 | default: | |
3173 | break; | |
230d793d RS |
3174 | } |
3175 | ||
3176 | if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner))) | |
3177 | { | |
3178 | enum machine_mode mode = GET_MODE (SET_SRC (x)); | |
3179 | ||
d0ab8cd3 RK |
3180 | /* For unsigned, we have a choice of a shift followed by an |
3181 | AND or two shifts. Use two shifts for field sizes where the | |
3182 | constant might be too large. We assume here that we can | |
3183 | always at least get 8-bit constants in an AND insn, which is | |
3184 | true for every current RISC. */ | |
3185 | ||
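	  /* Sketch (hypothetical 32-bit mode): an unsigned 4-bit field at
	     bit 8 can be extracted either as
	       (and (lshiftrt INNER (const_int 8)) (const_int 15))
	     or, for fields wider than 8 bits where the AND mask may be
	     expensive to build, as
	       (lshiftrt (ashift INNER (const_int 20)) (const_int 28)).
	     The signed case always uses the two-shift form with ASHIFTRT.  */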
3186 | if (unsignedp && len <= 8) | |
230d793d RS |
3187 | { |
3188 | SUBST (SET_SRC (x), | |
f1c6ba8b RK |
3189 | gen_rtx_AND (mode, |
3190 | gen_rtx_LSHIFTRT | |
3191 | (mode, gen_lowpart_for_combine (mode, inner), | |
3192 | GEN_INT (pos)), | |
3193 | GEN_INT (((HOST_WIDE_INT) 1 << len) - 1))); | |
230d793d | 3194 | |
d0ab8cd3 | 3195 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3196 | if (split && split != &SET_SRC (x)) |
3197 | return split; | |
3198 | } | |
3199 | else | |
3200 | { | |
3201 | SUBST (SET_SRC (x), | |
f1c6ba8b | 3202 | gen_rtx_fmt_ee |
d0ab8cd3 | 3203 | (unsignedp ? LSHIFTRT : ASHIFTRT, mode, |
f1c6ba8b RK |
3204 | gen_rtx_ASHIFT (mode, |
3205 | gen_lowpart_for_combine (mode, inner), | |
3206 | GEN_INT (GET_MODE_BITSIZE (mode) | |
3207 | - len - pos)), | |
5f4f0e22 | 3208 | GEN_INT (GET_MODE_BITSIZE (mode) - len))); |
230d793d | 3209 | |
d0ab8cd3 | 3210 | split = find_split_point (&SET_SRC (x), insn); |
230d793d RS |
3211 | if (split && split != &SET_SRC (x)) |
3212 | return split; | |
3213 | } | |
3214 | } | |
3215 | ||
3216 | /* See if this is a simple operation with a constant as the second | |
3217 | operand. It might be that this constant is out of range and hence | |
3218 | could be used as a split point. */ | |
3219 | if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2' | |
3220 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c' | |
3221 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<') | |
3222 | && CONSTANT_P (XEXP (SET_SRC (x), 1)) | |
3223 | && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o' | |
3224 | || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG | |
3225 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0)))) | |
3226 | == 'o')))) | |
3227 | return &XEXP (SET_SRC (x), 1); | |
3228 | ||
3229 | /* Finally, see if this is a simple operation with its first operand | |
3230 | not in a register. The operation might require this operand in a | |
3231 | register, so return it as a split point. We can always do this | |
3232 | because if the first operand were another operation, we would have | |
3233 | already found it as a split point. */ | |
3234 | if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2' | |
3235 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c' | |
3236 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<' | |
3237 | || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1') | |
3238 | && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode)) | |
3239 | return &XEXP (SET_SRC (x), 0); | |
3240 | ||
3241 | return 0; | |
3242 | ||
3243 | case AND: | |
3244 | case IOR: | |
3245 | /* We write NOR as (and (not A) (not B)), but if we don't have a NOR, | |
3246 | it is better to write this as (not (ior A B)) so we can split it. | |
3247 | Similarly for IOR. */ | |
3248 | if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT) | |
3249 | { | |
3250 | SUBST (*loc, | |
f1c6ba8b RK |
3251 | gen_rtx_NOT (GET_MODE (x), |
3252 | gen_rtx_fmt_ee (code == IOR ? AND : IOR, | |
3253 | GET_MODE (x), | |
3254 | XEXP (XEXP (x, 0), 0), | |
3255 | XEXP (XEXP (x, 1), 0)))); | |
d0ab8cd3 | 3256 | return find_split_point (loc, insn); |
230d793d RS |
3257 | } |
3258 | ||
3259 | /* Many RISC machines have a large set of logical insns. If the | |
3260 | second operand is a NOT, put it first so we will try to split the | |
3261 | other operand first. */ | |
3262 | if (GET_CODE (XEXP (x, 1)) == NOT) | |
3263 | { | |
3264 | rtx tem = XEXP (x, 0); | |
3265 | SUBST (XEXP (x, 0), XEXP (x, 1)); | |
3266 | SUBST (XEXP (x, 1), tem); | |
3267 | } | |
3268 | break; | |
e9a25f70 JL |
3269 | |
3270 | default: | |
3271 | break; | |
230d793d RS |
3272 | } |
3273 | ||
3274 | /* Otherwise, select our actions depending on our rtx class. */ | |
3275 | switch (GET_RTX_CLASS (code)) | |
3276 | { | |
3277 | case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */ | |
3278 | case '3': | |
d0ab8cd3 | 3279 | split = find_split_point (&XEXP (x, 2), insn); |
230d793d RS |
3280 | if (split) |
3281 | return split; | |
0f41302f | 3282 | /* ... fall through ... */ |
230d793d RS |
3283 | case '2': |
3284 | case 'c': | |
3285 | case '<': | |
d0ab8cd3 | 3286 | split = find_split_point (&XEXP (x, 1), insn); |
230d793d RS |
3287 | if (split) |
3288 | return split; | |
0f41302f | 3289 | /* ... fall through ... */ |
230d793d RS |
3290 | case '1': |
3291 | /* Some machines have (and (shift ...) ...) insns. If X is not | |
3292 | an AND, but XEXP (X, 0) is, use it as our split point. */ | |
3293 | if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND) | |
3294 | return &XEXP (x, 0); | |
3295 | ||
d0ab8cd3 | 3296 | split = find_split_point (&XEXP (x, 0), insn); |
230d793d RS |
3297 | if (split) |
3298 | return split; | |
3299 | return loc; | |
3300 | } | |
3301 | ||
3302 | /* Otherwise, we don't have a split point. */ | |
3303 | return 0; | |
3304 | } | |
3305 | \f | |
3306 | /* Throughout X, replace FROM with TO, and return the result. | |
3307 | The result is TO if X is FROM; | |
3308 | otherwise the result is X, but its contents may have been modified. | |
3309 | If they were modified, a record was made in undobuf so that | |
3310 | undo_all will (among other things) return X to its original state. | |
3311 | ||
3312 | If the number of changes necessary is too great to record for undoing,
3313 | the excess changes are not made, so the result is invalid. | |
3314 | The changes already made can still be undone. | |
3315 | undobuf.num_undo is incremented for such changes, so by testing it
3316 | the caller can tell whether the result is valid. | |
3317 | ||
3318 | `n_occurrences' is incremented each time FROM is replaced. | |
663522cb | 3319 | |
da7d8304 | 3320 | IN_DEST is nonzero if we are processing the SET_DEST of a SET. |
230d793d | 3321 | |
da7d8304 KH |
3322 | UNIQUE_COPY is nonzero if each substitution must be unique. We do this |
3323 | by copying if `n_occurrences' is nonzero. */ | |
230d793d RS |
3324 | |
3325 | static rtx | |
3326 | subst (x, from, to, in_dest, unique_copy) | |
b3694847 | 3327 | rtx x, from, to; |
230d793d RS |
3328 | int in_dest; |
3329 | int unique_copy; | |
3330 | { | |
b3694847 | 3331 | enum rtx_code code = GET_CODE (x); |
230d793d | 3332 | enum machine_mode op0_mode = VOIDmode; |
b3694847 SS |
3333 | const char *fmt; |
3334 | int len, i; | |
8079805d | 3335 | rtx new; |
230d793d RS |
3336 | |
3337 | /* Two expressions are equal if they are identical copies of a shared | |
3338 | RTX or if they are both registers with the same register number | |
3339 | and mode. */ | |
3340 | ||
3341 | #define COMBINE_RTX_EQUAL_P(X,Y) \ | |
3342 | ((X) == (Y) \ | |
3343 | || (GET_CODE (X) == REG && GET_CODE (Y) == REG \ | |
3344 | && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y))) | |
3345 | ||
3346 | if (! in_dest && COMBINE_RTX_EQUAL_P (x, from)) | |
3347 | { | |
3348 | n_occurrences++; | |
3349 | return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to); | |
3350 | } | |
3351 | ||
3352 | /* If X and FROM are the same register but different modes, they will | |
663522cb | 3353 | not have been seen as equal above. However, flow.c will make a |
230d793d RS |
3354 | LOG_LINKS entry for that case. If we do nothing, we will try to |
3355 | rerecognize our original insn and, when it succeeds, we will | |
3356 | delete the feeding insn, which is incorrect. | |
3357 | ||
3358 | So force this insn not to match in this (rare) case. */ | |
3359 | if (! in_dest && code == REG && GET_CODE (from) == REG | |
3360 | && REGNO (x) == REGNO (from)) | |
38a448ca | 3361 | return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
3362 | |
3363 | /* If this is an object, we are done unless it is a MEM or LO_SUM, both | |
3364 | of which may contain things that can be combined. */ | |
3365 | if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o') | |
3366 | return x; | |
3367 | ||
3368 | /* It is possible to have a subexpression appear twice in the insn. | |
3369 | Suppose that FROM is a register that appears within TO. | |
3370 | Then, after that subexpression has been scanned once by `subst', | |
3371 | the second time it is scanned, TO may be found. If we were | |
3372 | to scan TO here, we would find FROM within it and create a | |
3373 | self-referent rtl structure which is completely wrong. */ | |
3374 | if (COMBINE_RTX_EQUAL_P (x, to)) | |
3375 | return to; | |
3376 | ||
4f4b3679 RH |
3377 | /* Parallel asm_operands need special attention because all of the |
3378 | inputs are shared across the arms. Furthermore, unsharing the | |
3379 | rtl results in recognition failures. Failure to handle this case | |
3380 | specially can result in circular rtl. | |
3381 | ||
3382 | Solve this by doing a normal pass across the first entry of the | |
3383 | parallel, and only processing the SET_DESTs of the subsequent | |
3384 | entries. Ug. */ | |
3385 | ||
3386 | if (code == PARALLEL | |
3387 | && GET_CODE (XVECEXP (x, 0, 0)) == SET | |
3388 | && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS) | |
230d793d | 3389 | { |
4f4b3679 RH |
3390 | new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy); |
3391 | ||
3392 | /* If this substitution failed, this whole thing fails. */ | |
3393 | if (GET_CODE (new) == CLOBBER | |
3394 | && XEXP (new, 0) == const0_rtx) | |
3395 | return new; | |
3396 | ||
3397 | SUBST (XVECEXP (x, 0, 0), new); | |
3398 | ||
3399 | for (i = XVECLEN (x, 0) - 1; i >= 1; i--) | |
230d793d | 3400 | { |
4f4b3679 | 3401 | rtx dest = SET_DEST (XVECEXP (x, 0, i)); |
663522cb | 3402 | |
4f4b3679 RH |
3403 | if (GET_CODE (dest) != REG |
3404 | && GET_CODE (dest) != CC0 | |
3405 | && GET_CODE (dest) != PC) | |
230d793d | 3406 | { |
4f4b3679 | 3407 | new = subst (dest, from, to, 0, unique_copy); |
230d793d | 3408 | |
4f4b3679 RH |
3409 | /* If this substitution failed, this whole thing fails. */ |
3410 | if (GET_CODE (new) == CLOBBER | |
3411 | && XEXP (new, 0) == const0_rtx) | |
3412 | return new; | |
230d793d | 3413 | |
4f4b3679 | 3414 | SUBST (SET_DEST (XVECEXP (x, 0, i)), new); |
230d793d RS |
3415 | } |
3416 | } | |
4f4b3679 RH |
3417 | } |
3418 | else | |
3419 | { | |
3420 | len = GET_RTX_LENGTH (code); | |
3421 | fmt = GET_RTX_FORMAT (code); | |
3422 | ||
3423 | /* We don't need to process a SET_DEST that is a register, CC0, | |
3424 | or PC, so set up to skip this common case. All other cases | |
3425 | where we want to suppress replacing something inside a | |
3426 | SET_SRC are handled via the IN_DEST operand. */ | |
3427 | if (code == SET | |
3428 | && (GET_CODE (SET_DEST (x)) == REG | |
3429 | || GET_CODE (SET_DEST (x)) == CC0 | |
3430 | || GET_CODE (SET_DEST (x)) == PC)) | |
3431 | fmt = "ie"; | |
3432 | ||
3433 | /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a | |
3434 | constant. */ | |
3435 | if (fmt[0] == 'e') | |
3436 | op0_mode = GET_MODE (XEXP (x, 0)); | |
3437 | ||
3438 | for (i = 0; i < len; i++) | |
230d793d | 3439 | { |
4f4b3679 | 3440 | if (fmt[i] == 'E') |
230d793d | 3441 | { |
b3694847 | 3442 | int j; |
4f4b3679 RH |
3443 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
3444 | { | |
3445 | if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from)) | |
3446 | { | |
3447 | new = (unique_copy && n_occurrences | |
3448 | ? copy_rtx (to) : to); | |
3449 | n_occurrences++; | |
3450 | } | |
3451 | else | |
3452 | { | |
3453 | new = subst (XVECEXP (x, i, j), from, to, 0, | |
3454 | unique_copy); | |
3455 | ||
3456 | /* If this substitution failed, this whole thing | |
3457 | fails. */ | |
3458 | if (GET_CODE (new) == CLOBBER | |
3459 | && XEXP (new, 0) == const0_rtx) | |
3460 | return new; | |
3461 | } | |
3462 | ||
3463 | SUBST (XVECEXP (x, i, j), new); | |
3464 | } | |
3465 | } | |
3466 | else if (fmt[i] == 'e') | |
3467 | { | |
0a33d11e RH |
3468 | /* If this is a register being set, ignore it. */ |
3469 | new = XEXP (x, i); | |
3470 | if (in_dest | |
3471 | && (code == SUBREG || code == STRICT_LOW_PART | |
3472 | || code == ZERO_EXTRACT) | |
3473 | && i == 0 | |
3474 | && GET_CODE (new) == REG) | |
3475 | ; | |
3476 | ||
3477 | else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from)) | |
4f4b3679 RH |
3478 | { |
3479 | /* In general, don't install a subreg involving two | |
3480 | modes not tieable. It can worsen register | |
3481 | allocation, and can even make invalid reload | |
3482 | insns, since the reg inside may need to be copied | |
3483 | from in the outside mode, and that may be invalid | |
3484 | if it is an fp reg copied in integer mode. | |
3485 | ||
3486 | We allow two exceptions to this: It is valid if | |
3487 | it is inside another SUBREG and the mode of that | |
3488 | SUBREG and the mode of the inside of TO is | |
3489 | tieable and it is valid if X is a SET that copies | |
3490 | FROM to CC0. */ | |
3491 | ||
3492 | if (GET_CODE (to) == SUBREG | |
3493 | && ! MODES_TIEABLE_P (GET_MODE (to), | |
3494 | GET_MODE (SUBREG_REG (to))) | |
3495 | && ! (code == SUBREG | |
3496 | && MODES_TIEABLE_P (GET_MODE (x), | |
3497 | GET_MODE (SUBREG_REG (to)))) | |
42301240 | 3498 | #ifdef HAVE_cc0 |
4f4b3679 | 3499 | && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx) |
42301240 | 3500 | #endif |
4f4b3679 RH |
3501 | ) |
3502 | return gen_rtx_CLOBBER (VOIDmode, const0_rtx); | |
42301240 | 3503 | |
cff9f8d5 | 3504 | #ifdef CANNOT_CHANGE_MODE_CLASS |
ed8afe3a GK |
3505 | if (code == SUBREG |
3506 | && GET_CODE (to) == REG | |
3507 | && REGNO (to) < FIRST_PSEUDO_REGISTER | |
cff9f8d5 AH |
3508 | && REG_CANNOT_CHANGE_MODE_P (REGNO (to), |
3509 | GET_MODE (to), | |
3510 | GET_MODE (x))) | |
ed8afe3a GK |
3511 | return gen_rtx_CLOBBER (VOIDmode, const0_rtx); |
3512 | #endif | |
3513 | ||
4f4b3679 RH |
3514 | new = (unique_copy && n_occurrences ? copy_rtx (to) : to); |
3515 | n_occurrences++; | |
3516 | } | |
3517 | else | |
3518 | /* If we are in a SET_DEST, suppress most cases unless we | |
3519 | have gone inside a MEM, in which case we want to | |
3520 | simplify the address. We assume here that things that | |
3521 | are actually part of the destination have their inner | |
663522cb | 3522 | parts in the first expression. This is true for SUBREG, |
4f4b3679 RH |
3523 | STRICT_LOW_PART, and ZERO_EXTRACT, which are the only |
3524 | things aside from REG and MEM that should appear in a | |
3525 | SET_DEST. */ | |
3526 | new = subst (XEXP (x, i), from, to, | |
3527 | (((in_dest | |
3528 | && (code == SUBREG || code == STRICT_LOW_PART | |
3529 | || code == ZERO_EXTRACT)) | |
3530 | || code == SET) | |
3531 | && i == 0), unique_copy); | |
3532 | ||
3533 | /* If we found that we will have to reject this combination, | |
3534 | indicate that by returning the CLOBBER ourselves, rather than | |
3535 | an expression containing it. This will speed things up as | |
3536 | well as prevent accidents where two CLOBBERs are considered | |
3537 | to be equal, thus producing an incorrect simplification. */ | |
3538 | ||
3539 | if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx) | |
3540 | return new; | |
3541 | ||
4161da12 AO |
3542 | if (GET_CODE (new) == CONST_INT && GET_CODE (x) == SUBREG) |
3543 | { | |
b0dd4808 | 3544 | enum machine_mode mode = GET_MODE (x); |
2e676d78 | 3545 | |
4161da12 AO |
3546 | x = simplify_subreg (GET_MODE (x), new, |
3547 | GET_MODE (SUBREG_REG (x)), | |
3548 | SUBREG_BYTE (x)); | |
3549 | if (! x) | |
b0dd4808 | 3550 | x = gen_rtx_CLOBBER (mode, const0_rtx); |
4161da12 AO |
3551 | } |
3552 | else if (GET_CODE (new) == CONST_INT | |
3553 | && GET_CODE (x) == ZERO_EXTEND) | |
3554 | { | |
3555 | x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
3556 | new, GET_MODE (XEXP (x, 0))); | |
3557 | if (! x) | |
3558 | abort (); | |
3559 | } | |
3560 | else | |
3561 | SUBST (XEXP (x, i), new); | |
230d793d | 3562 | } |
230d793d RS |
3563 | } |
3564 | } | |
3565 | ||
8079805d RK |
3566 | /* Try to simplify X. If the simplification changed the code, it is likely |
3567 | that further simplification will help, so loop, but limit the number | |
3568 | of repetitions that will be performed. */ | |
3569 | ||
3570 | for (i = 0; i < 4; i++) | |
3571 | { | |
3572 | /* If X is sufficiently simple, don't bother trying to do anything | |
3573 | with it. */ | |
3574 | if (code != CONST_INT && code != REG && code != CLOBBER) | |
31ec4e5e | 3575 | x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest); |
d0ab8cd3 | 3576 | |
8079805d RK |
3577 | if (GET_CODE (x) == code) |
3578 | break; | |
d0ab8cd3 | 3579 | |
8079805d | 3580 | code = GET_CODE (x); |
eeb43d32 | 3581 | |
8079805d RK |
3582 | /* We no longer know the original mode of operand 0 since we |
3583 | have changed the form of X. */
3584 | op0_mode = VOIDmode; | |
3585 | } | |
eeb43d32 | 3586 | |
8079805d RK |
3587 | return x; |
3588 | } | |
3589 | \f | |
3590 | /* Simplify X, a piece of RTL. We just operate on the expression at the | |
3591 | outer level; call `subst' to simplify recursively. Return the new | |
3592 | expression. | |
3593 | ||
3594 | OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this | |
3595 | will be the final iteration even if an expression with a code different from
3596 | X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */ | |
eeb43d32 | 3597 | |
8079805d | 3598 | static rtx |
31ec4e5e | 3599 | combine_simplify_rtx (x, op0_mode, last, in_dest) |
8079805d RK |
3600 | rtx x; |
3601 | enum machine_mode op0_mode; | |
3602 | int last; | |
3603 | int in_dest; | |
3604 | { | |
3605 | enum rtx_code code = GET_CODE (x); | |
3606 | enum machine_mode mode = GET_MODE (x); | |
3607 | rtx temp; | |
9a915772 | 3608 | rtx reversed; |
8079805d | 3609 | int i; |
d0ab8cd3 | 3610 | |
230d793d RS |
3611 | /* If this is a commutative operation, put a constant last and a complex |
3612 | expression first. We don't need to do this for comparisons here. */ | |
3613 | if (GET_RTX_CLASS (code) == 'c' | |
e5c56fd9 | 3614 | && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1))) |
230d793d RS |
3615 | { |
3616 | temp = XEXP (x, 0); | |
3617 | SUBST (XEXP (x, 0), XEXP (x, 1)); | |
3618 | SUBST (XEXP (x, 1), temp); | |
3619 | } | |
3620 | ||
22609cbf RK |
3621 | /* If this is a PLUS, MINUS, or MULT, and the first operand is the |
3622 | sign extension of a PLUS with a constant, reverse the order of the sign | |
3623 | extension and the addition. Note that this is not the same as the original
3624 | code, but overflow is undefined for signed values. Also note that the | |
3625 | PLUS will have been partially moved "inside" the sign-extension, so that | |
3626 | the first operand of X will really look like: | |
3627 | (ashiftrt (plus (ashift A C4) C5) C4). | |
3628 | We convert this to | |
3629 | (plus (ashiftrt (ashift A C4) C2) C4) | |
3630 | and replace the first operand of X with that expression. Later parts | |
3631 | of this function may simplify the expression further. | |
3632 | ||
3633 | For example, if we start with (mult (sign_extend (plus A C1)) C2), | |
3634 | we swap the SIGN_EXTEND and PLUS. Later code will apply the | |
3635 | distributive law to produce (plus (mult (sign_extend X) C1) C3). | |
3636 | ||
3637 | We do this to simplify address expressions. */ | |
3638 | ||
3639 | if ((code == PLUS || code == MINUS || code == MULT) | |
3640 | && GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
3641 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS | |
3642 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT | |
3643 | && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT | |
3644 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3645 | && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1) | |
3646 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | |
3647 | && (temp = simplify_binary_operation (ASHIFTRT, mode, | |
3648 | XEXP (XEXP (XEXP (x, 0), 0), 1), | |
3649 | XEXP (XEXP (x, 0), 1))) != 0) | |
3650 | { | |
3651 | rtx new | |
3652 | = simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
3653 | XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0), | |
3654 | INTVAL (XEXP (XEXP (x, 0), 1))); | |
3655 | ||
3656 | new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new, | |
3657 | INTVAL (XEXP (XEXP (x, 0), 1))); | |
3658 | ||
3659 | SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp)); | |
3660 | } | |
3661 | ||
663522cb | 3662 | /* If this is a simple operation applied to an IF_THEN_ELSE, try |
d0ab8cd3 | 3663 | applying it to the arms of the IF_THEN_ELSE. This often simplifies |
abe6e52f RK |
3664 | things. Check for cases where both arms are testing the same |
3665 | condition. | |
3666 | ||
3667 | Don't do anything if all operands are very simple. */ | |
3668 | ||
3669 | if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c' | |
3670 | || GET_RTX_CLASS (code) == '<') | |
3671 | && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o' | |
3672 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG | |
3673 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) | |
3674 | == 'o'))) | |
3675 | || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o' | |
3676 | && ! (GET_CODE (XEXP (x, 1)) == SUBREG | |
3677 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1)))) | |
3678 | == 'o'))))) | |
3679 | || (GET_RTX_CLASS (code) == '1' | |
3680 | && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o' | |
3681 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG | |
3682 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) | |
3683 | == 'o')))))) | |
d0ab8cd3 | 3684 | { |
d6edb99e | 3685 | rtx cond, true_rtx, false_rtx; |
abe6e52f | 3686 | |
d6edb99e | 3687 | cond = if_then_else_cond (x, &true_rtx, &false_rtx); |
0802d516 RK |
3688 | if (cond != 0 |
3689 | /* If everything is a comparison, what we have is highly unlikely | |
3690 | to be simpler, so don't use it. */ | |
3691 | && ! (GET_RTX_CLASS (code) == '<' | |
d6edb99e ZW |
3692 | && (GET_RTX_CLASS (GET_CODE (true_rtx)) == '<' |
3693 | || GET_RTX_CLASS (GET_CODE (false_rtx)) == '<'))) | |
abe6e52f RK |
3694 | { |
3695 | rtx cop1 = const0_rtx; | |
3696 | enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1); | |
3697 | ||
15448afc RK |
3698 | if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<') |
3699 | return x; | |
3700 | ||
663522cb | 3701 | /* Simplify the alternative arms; this may collapse the true and |
9210df58 | 3702 | false arms to store-flag values. */ |
d6edb99e ZW |
3703 | true_rtx = subst (true_rtx, pc_rtx, pc_rtx, 0, 0); |
3704 | false_rtx = subst (false_rtx, pc_rtx, pc_rtx, 0, 0); | |
9210df58 | 3705 | |
d6edb99e | 3706 | /* If true_rtx and false_rtx are not general_operands, an if_then_else |
085f1714 | 3707 | is unlikely to be simpler. */ |
d6edb99e ZW |
3708 | if (general_operand (true_rtx, VOIDmode) |
3709 | && general_operand (false_rtx, VOIDmode)) | |
085f1714 RH |
3710 | { |
3711 | /* Restarting if we generate a store-flag expression will cause | |
3712 | us to loop. Just drop through in this case. */ | |
3713 | ||
3714 | /* If the result values are STORE_FLAG_VALUE and zero, we can | |
3715 | just make the comparison operation. */ | |
d6edb99e | 3716 | if (true_rtx == const_true_rtx && false_rtx == const0_rtx) |
085f1714 | 3717 | x = gen_binary (cond_code, mode, cond, cop1); |
fa4e13e0 RH |
3718 | else if (true_rtx == const0_rtx && false_rtx == const_true_rtx |
3719 | && reverse_condition (cond_code) != UNKNOWN) | |
085f1714 RH |
3720 | x = gen_binary (reverse_condition (cond_code), |
3721 | mode, cond, cop1); | |
3722 | ||
3723 | /* Likewise, we can make the negate of a comparison operation | |
3724 | if the result values are - STORE_FLAG_VALUE and zero. */ | |
d6edb99e ZW |
3725 | else if (GET_CODE (true_rtx) == CONST_INT |
3726 | && INTVAL (true_rtx) == - STORE_FLAG_VALUE | |
3727 | && false_rtx == const0_rtx) | |
f1c6ba8b RK |
3728 | x = simplify_gen_unary (NEG, mode, |
3729 | gen_binary (cond_code, mode, cond, | |
3730 | cop1), | |
3731 | mode); | |
d6edb99e ZW |
3732 | else if (GET_CODE (false_rtx) == CONST_INT |
3733 | && INTVAL (false_rtx) == - STORE_FLAG_VALUE | |
3734 | && true_rtx == const0_rtx) | |
f1c6ba8b RK |
3735 | x = simplify_gen_unary (NEG, mode, |
3736 | gen_binary (reverse_condition | |
3737 | (cond_code), | |
3738 | mode, cond, cop1), | |
3739 | mode); | |
085f1714 RH |
3740 | else |
3741 | return gen_rtx_IF_THEN_ELSE (mode, | |
3742 | gen_binary (cond_code, VOIDmode, | |
3743 | cond, cop1), | |
d6edb99e | 3744 | true_rtx, false_rtx); |
5109d49f | 3745 | |
085f1714 RH |
3746 | code = GET_CODE (x); |
3747 | op0_mode = VOIDmode; | |
3748 | } | |
abe6e52f | 3749 | } |
d0ab8cd3 RK |
3750 | } |
3751 | ||
230d793d RS |
3752 | /* Try to fold this expression in case we have constants that weren't |
3753 | present before. */ | |
3754 | temp = 0; | |
3755 | switch (GET_RTX_CLASS (code)) | |
3756 | { | |
3757 | case '1': | |
3758 | temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode); | |
3759 | break; | |
3760 | case '<': | |
47b1e19b JH |
3761 | { |
3762 | enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0)); | |
3763 | if (cmp_mode == VOIDmode) | |
1cac8785 DD |
3764 | { |
3765 | cmp_mode = GET_MODE (XEXP (x, 1)); | |
3766 | if (cmp_mode == VOIDmode) | |
3767 | cmp_mode = op0_mode; | |
3768 | } | |
47b1e19b JH |
3769 | temp = simplify_relational_operation (code, cmp_mode, |
3770 | XEXP (x, 0), XEXP (x, 1)); | |
3771 | } | |
77fa0940 | 3772 | #ifdef FLOAT_STORE_FLAG_VALUE |
12530dbe RH |
3773 | if (temp != 0 && GET_MODE_CLASS (mode) == MODE_FLOAT) |
3774 | { | |
3775 | if (temp == const0_rtx) | |
3776 | temp = CONST0_RTX (mode); | |
3777 | else | |
5692c7bc ZW |
3778 | temp = CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE (mode), |
3779 | mode); | |
12530dbe | 3780 | } |
77fa0940 | 3781 | #endif |
230d793d RS |
3782 | break; |
3783 | case 'c': | |
3784 | case '2': | |
3785 | temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1)); | |
3786 | break; | |
3787 | case 'b': | |
3788 | case '3': | |
3789 | temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0), | |
3790 | XEXP (x, 1), XEXP (x, 2)); | |
3791 | break; | |
3792 | } | |
3793 | ||
3794 | if (temp) | |
4531c1c7 DN |
3795 | { |
3796 | x = temp; | |
3797 | code = GET_CODE (temp); | |
3798 | op0_mode = VOIDmode; | |
3799 | mode = GET_MODE (temp); | |
3800 | } | |
230d793d | 3801 | |
230d793d | 3802 | /* First see if we can apply the inverse distributive law. */ |
224eeff2 RK |
3803 | if (code == PLUS || code == MINUS |
3804 | || code == AND || code == IOR || code == XOR) | |
230d793d RS |
3805 | { |
3806 | x = apply_distributive_law (x); | |
3807 | code = GET_CODE (x); | |
6e20204f | 3808 | op0_mode = VOIDmode; |
230d793d RS |
3809 | } |
3810 | ||
3811 | /* If CODE is an associative operation not otherwise handled, see if we | |
3812 | can associate some operands. This can win if they are constants or | |
e0e08ac2 | 3813 | if they are logically related (e.g. (a & b) & a). */
493efd37 TM |
3814 | if ((code == PLUS || code == MINUS || code == MULT || code == DIV |
3815 | || code == AND || code == IOR || code == XOR | |
230d793d | 3816 | || code == SMAX || code == SMIN || code == UMAX || code == UMIN) |
493efd37 | 3817 | && ((INTEGRAL_MODE_P (mode) && code != DIV) |
4ba5f925 | 3818 | || (flag_unsafe_math_optimizations && FLOAT_MODE_P (mode)))) |
230d793d RS |
3819 | { |
3820 | if (GET_CODE (XEXP (x, 0)) == code) | |
3821 | { | |
3822 | rtx other = XEXP (XEXP (x, 0), 0); | |
3823 | rtx inner_op0 = XEXP (XEXP (x, 0), 1); | |
3824 | rtx inner_op1 = XEXP (x, 1); | |
3825 | rtx inner; | |
663522cb | 3826 | |
230d793d RS |
3827 | /* Make sure we pass the constant operand, if any, as the second |
3828 | one if this is a commutative operation. */ |
3829 | if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c') | |
3830 | { | |
3831 | rtx tem = inner_op0; | |
3832 | inner_op0 = inner_op1; | |
3833 | inner_op1 = tem; | |
3834 | } | |
3835 | inner = simplify_binary_operation (code == MINUS ? PLUS | |
3836 | : code == DIV ? MULT | |
230d793d RS |
3837 | : code, |
3838 | mode, inner_op0, inner_op1); | |
3839 | ||
3840 | /* For commutative operations, try the other pair if that one | |
3841 | didn't simplify. */ | |
3842 | if (inner == 0 && GET_RTX_CLASS (code) == 'c') | |
3843 | { | |
3844 | other = XEXP (XEXP (x, 0), 1); | |
3845 | inner = simplify_binary_operation (code, mode, | |
3846 | XEXP (XEXP (x, 0), 0), | |
3847 | XEXP (x, 1)); | |
3848 | } | |
3849 | ||
3850 | if (inner) | |
8079805d | 3851 | return gen_binary (code, mode, other, inner); |
230d793d RS |
3852 | } |
3853 | } | |
3854 | ||
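Two concrete instances of the reassociation described above, as a standalone sketch (not GCC code): grouping the two constant operands lets them fold into one, and logically related operands collapse.

#include <assert.h>

int
main (void)
{
  int x = 42;
  unsigned a = 0xa5, b = 0x0f;
  assert ((x + 3) + 5 == x + (3 + 5));   /* constants fold to x + 8 */
  assert (((a & b) & a) == (a & b));     /* (a & b) & a is logically related */
  return 0;
}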
3855 | /* A little bit of algebraic simplification here. */ | |
3856 | switch (code) | |
3857 | { | |
3858 | case MEM: | |
3859 | /* Ensure that our address has any ASHIFTs converted to MULT in case | |
3860 | address-recognizing predicates are called later. */ | |
3861 | temp = make_compound_operation (XEXP (x, 0), MEM); | |
3862 | SUBST (XEXP (x, 0), temp); | |
3863 | break; | |
3864 | ||
3865 | case SUBREG: | |
eea50aa0 JH |
3866 | if (op0_mode == VOIDmode) |
3867 | op0_mode = GET_MODE (SUBREG_REG (x)); | |
230d793d | 3868 | |
eea50aa0 | 3869 | /* simplify_subreg can't use gen_lowpart_for_combine. */ |
3c99d5ff | 3870 | if (CONSTANT_P (SUBREG_REG (x)) |
156755ac JJ |
3871 | && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x) |
3872 | /* Don't call gen_lowpart_for_combine if the inner mode | |
3873 | is VOIDmode and we cannot simplify it, as SUBREG without | |
3874 | inner mode is invalid. */ | |
3875 | && (GET_MODE (SUBREG_REG (x)) != VOIDmode | |
3876 | || gen_lowpart_common (mode, SUBREG_REG (x)))) | |
230d793d RS |
3877 | return gen_lowpart_for_combine (mode, SUBREG_REG (x)); |
3878 | ||
a13287e1 AM |
3879 | if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC) |
3880 | break; | |
eea50aa0 JH |
3881 | { |
3882 | rtx temp; | |
3883 | temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode, | |
23190837 | 3884 | SUBREG_BYTE (x)); |
eea50aa0 JH |
3885 | if (temp) |
3886 | return temp; | |
3887 | } | |
b65c1b5b | 3888 | |
30984c57 JJ |
3889 | /* Don't change the mode of the MEM if that would change the meaning |
3890 | of the address. */ | |
3891 | if (GET_CODE (SUBREG_REG (x)) == MEM | |
3892 | && (MEM_VOLATILE_P (SUBREG_REG (x)) | |
3893 | || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0)))) | |
3894 | return gen_rtx_CLOBBER (mode, const0_rtx); | |
3895 | ||
87e3e0c1 RK |
3896 | /* Note that we cannot do any narrowing for non-constants since |
3897 | we might have been counting on using the fact that some bits were | |
3898 | zero. We now do this in the SET. */ | |
3899 | ||
230d793d RS |
3900 | break; |
3901 | ||
3902 | case NOT: | |
3903 | /* (not (plus X -1)) can become (neg X). */ | |
3904 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
3905 | && XEXP (XEXP (x, 0), 1) == constm1_rtx) | |
f1c6ba8b | 3906 | return gen_rtx_NEG (mode, XEXP (XEXP (x, 0), 0)); |
230d793d RS |
3907 | |
3908 | /* Similarly, (not (neg X)) is (plus X -1). */ | |
3909 | if (GET_CODE (XEXP (x, 0)) == NEG) | |
f1c6ba8b | 3910 | return gen_rtx_PLUS (mode, XEXP (XEXP (x, 0), 0), constm1_rtx); |
230d793d | 3911 | |
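Both rewrites rest on the two's-complement identity ~X == -X - 1. A minimal standalone check (illustrative only; assumes ordinary two's-complement int arithmetic):

#include <assert.h>

int
main (void)
{
  int x = 1234;
  assert (~(x + -1) == -x);    /* (not (plus X -1)) == (neg X) */
  assert (~(-x) == x + -1);    /* (not (neg X)) == (plus X -1) */
  return 0;
}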
663522cb | 3912 | /* (not (xor X C)) for C constant is (xor X D) with D = ~C. */ |
d0ab8cd3 RK |
3913 | if (GET_CODE (XEXP (x, 0)) == XOR |
3914 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3915 | && (temp = simplify_unary_operation (NOT, mode, | |
3916 | XEXP (XEXP (x, 0), 1), | |
3917 | mode)) != 0) | |
787745f5 | 3918 | return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp); |
663522cb | 3919 | |
230d793d RS |
3920 | /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands |
3921 | other than 1, but that is not valid. We could do a similar | |
3922 | simplification for (not (lshiftrt C X)) where C is just the sign bit, | |
3923 | but this doesn't seem common enough to bother with. */ | |
3924 | if (GET_CODE (XEXP (x, 0)) == ASHIFT | |
3925 | && XEXP (XEXP (x, 0), 0) == const1_rtx) | |
f1c6ba8b RK |
3926 | return gen_rtx_ROTATE (mode, simplify_gen_unary (NOT, mode, |
3927 | const1_rtx, mode), | |
38a448ca | 3928 | XEXP (XEXP (x, 0), 1)); |
663522cb | 3929 | |
230d793d RS |
3930 | if (GET_CODE (XEXP (x, 0)) == SUBREG |
3931 | && subreg_lowpart_p (XEXP (x, 0)) | |
3932 | && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) | |
3933 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0))))) | |
3934 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT | |
3935 | && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx) | |
3936 | { | |
3937 | enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0))); | |
3938 | ||
38a448ca | 3939 | x = gen_rtx_ROTATE (inner_mode, |
f1c6ba8b RK |
3940 | simplify_gen_unary (NOT, inner_mode, const1_rtx, |
3941 | inner_mode), | |
38a448ca | 3942 | XEXP (SUBREG_REG (XEXP (x, 0)), 1)); |
8079805d | 3943 | return gen_lowpart_for_combine (mode, x); |
230d793d | 3944 | } |
663522cb | 3945 | |
0802d516 RK |
3946 | /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by |
3947 | reversing the comparison code if valid. */ | |
3948 | if (STORE_FLAG_VALUE == -1 | |
3949 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' | |
9a915772 JH |
3950 | && (reversed = reversed_comparison (x, mode, XEXP (XEXP (x, 0), 0), |
3951 | XEXP (XEXP (x, 0), 1)))) | |
3952 | return reversed; | |
500c518b | 3953 | |
e61465ed GS |
3954 | /* (not (ashiftrt foo C)) where C is the number of bits in FOO minus 1 |
3955 | is (ge foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can | |
0802d516 | 3956 | perform the above simplification. */ |
500c518b | 3957 | |
0802d516 | 3958 | if (STORE_FLAG_VALUE == -1 |
500c518b RK |
3959 | && GET_CODE (XEXP (x, 0)) == ASHIFTRT |
3960 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
3961 | && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1) | |
f1c6ba8b | 3962 | return gen_rtx_GE (mode, XEXP (XEXP (x, 0), 0), const0_rtx); |
230d793d RS |
3963 | |
3964 | /* Apply De Morgan's laws to reduce number of patterns for machines | |
23190837 AJ |
3965 | with negating logical insns (and-not, nand, etc.). If the result has |
3966 | only one NOT, put it first, since that is how the patterns are | |
3967 | coded. */ | |
230d793d RS |
3968 | |
3969 | if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND) | |
23190837 | 3970 | { |
663522cb | 3971 | rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1); |
5bd60ce6 | 3972 | enum machine_mode op_mode; |
230d793d | 3973 | |
5bd60ce6 | 3974 | op_mode = GET_MODE (in1); |
f1c6ba8b | 3975 | in1 = simplify_gen_unary (NOT, op_mode, in1, op_mode); |
230d793d | 3976 | |
5bd60ce6 RH |
3977 | op_mode = GET_MODE (in2); |
3978 | if (op_mode == VOIDmode) | |
3979 | op_mode = mode; | |
f1c6ba8b | 3980 | in2 = simplify_gen_unary (NOT, op_mode, in2, op_mode); |
663522cb | 3981 | |
5bd60ce6 | 3982 | if (GET_CODE (in2) == NOT && GET_CODE (in1) != NOT) |
663522cb KH |
3983 | { |
3984 | rtx tem = in2; | |
3985 | in2 = in1; in1 = tem; | |
3986 | } | |
3987 | ||
f1c6ba8b RK |
3988 | return gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR, |
3989 | mode, in1, in2); | |
663522cb | 3990 | } |
230d793d RS |
3991 | break; |
3992 | ||
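The De Morgan rewrite above is easy to sanity-check in plain C; a standalone sketch, not part of combine.c:

#include <assert.h>

int
main (void)
{
  unsigned a = 0xf0f0, b = 0x3cc3;
  assert (~(a | b) == (~a & ~b));   /* (not (ior A B)) -> (and (not A) (not B)) */
  assert (~(a & b) == (~a | ~b));   /* (not (and A B)) -> (ior (not A) (not B)) */
  return 0;
}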
3993 | case NEG: | |
3994 | /* (neg (plus X 1)) can become (not X). */ | |
3995 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
3996 | && XEXP (XEXP (x, 0), 1) == const1_rtx) | |
f1c6ba8b | 3997 | return gen_rtx_NOT (mode, XEXP (XEXP (x, 0), 0)); |
230d793d RS |
3998 | |
3999 | /* Similarly, (neg (not X)) is (plus X 1). */ | |
4000 | if (GET_CODE (XEXP (x, 0)) == NOT) | |
8079805d | 4001 | return plus_constant (XEXP (XEXP (x, 0), 0), 1); |
230d793d | 4002 | |
71925bc0 RS |
4003 | /* (neg (minus X Y)) can become (minus Y X). This transformation |
4004 | isn't safe for modes with signed zeros, since if X and Y are | |
4005 | both +0, (minus Y X) is the same as (minus X Y). If the rounding | |
4006 | mode is towards +infinity (or -infinity) then the two expressions | |
4007 | will be rounded differently. */ | |
230d793d | 4008 | if (GET_CODE (XEXP (x, 0)) == MINUS |
71925bc0 RS |
4009 | && !HONOR_SIGNED_ZEROS (mode) |
4010 | && !HONOR_SIGN_DEPENDENT_ROUNDING (mode)) | |
8079805d RK |
4011 | return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1), |
4012 | XEXP (XEXP (x, 0), 0)); | |
230d793d | 4013 | |
16823694 GK |
4014 | /* (neg (plus A B)) is canonicalized to (minus (neg A) B). */ |
4015 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
4016 | && !HONOR_SIGNED_ZEROS (mode) | |
4017 | && !HONOR_SIGN_DEPENDENT_ROUNDING (mode)) | |
4018 | { | |
4019 | temp = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 0), 0), mode); | |
4020 | temp = combine_simplify_rtx (temp, mode, last, in_dest); | |
4021 | return gen_binary (MINUS, mode, temp, XEXP (XEXP (x, 0), 1)); | |
4022 | } | |
4023 | ||
4024 | /* (neg (mult A B)) becomes (mult (neg A) B). | |
4025 | This works even for floating-point values. */ | |
4026 | if (GET_CODE (XEXP (x, 0)) == MULT) | |
4027 | { | |
4028 | temp = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 0), 0), mode); | |
4029 | return gen_binary (MULT, mode, temp, XEXP (XEXP (x, 0), 1)); | |
4030 | } | |
4031 | ||
0f41302f | 4032 | /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */ |
d0ab8cd3 | 4033 | if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx |
951553af | 4034 | && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1) |
8079805d | 4035 | return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx); |
d0ab8cd3 | 4036 | |
230d793d RS |
4037 | /* NEG commutes with ASHIFT since it is multiplication. Only do this |
4038 | if we can then eliminate the NEG (e.g., | |
4039 | if the operand is a constant). */ | |
4040 | ||
4041 | if (GET_CODE (XEXP (x, 0)) == ASHIFT) | |
4042 | { | |
4043 | temp = simplify_unary_operation (NEG, mode, | |
4044 | XEXP (XEXP (x, 0), 0), mode); | |
4045 | if (temp) | |
9def18da | 4046 | return gen_binary (ASHIFT, mode, temp, XEXP (XEXP (x, 0), 1)); |
230d793d RS |
4047 | } |
4048 | ||
4049 | temp = expand_compound_operation (XEXP (x, 0)); | |
4050 | ||
4051 | /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be | |
23190837 | 4052 | replaced by (lshiftrt X C). This will convert |
230d793d RS |
4053 | (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */ |
4054 | ||
4055 | if (GET_CODE (temp) == ASHIFTRT | |
4056 | && GET_CODE (XEXP (temp, 1)) == CONST_INT | |
4057 | && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1) | |
8079805d RK |
4058 | return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0), |
4059 | INTVAL (XEXP (temp, 1))); | |
230d793d | 4060 | |
951553af | 4061 | /* If X has only a single bit that might be nonzero, say, bit I, convert |
230d793d RS |
4062 | (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of |
4063 | MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to | |
4064 | (sign_extract X 1 Y). But only do this if TEMP isn't a register | |
4065 | or a SUBREG of one since we'd be making the expression more | |
4066 | complex if it was just a register. */ | |
4067 | ||
4068 | if (GET_CODE (temp) != REG | |
4069 | && ! (GET_CODE (temp) == SUBREG | |
4070 | && GET_CODE (SUBREG_REG (temp)) == REG) | |
951553af | 4071 | && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0) |
230d793d RS |
4072 | { |
4073 | rtx temp1 = simplify_shift_const | |
5f4f0e22 CH |
4074 | (NULL_RTX, ASHIFTRT, mode, |
4075 | simplify_shift_const (NULL_RTX, ASHIFT, mode, temp, | |
230d793d RS |
4076 | GET_MODE_BITSIZE (mode) - 1 - i), |
4077 | GET_MODE_BITSIZE (mode) - 1 - i); | |
4078 | ||
4079 | /* If all we did was surround TEMP with the two shifts, we | |
4080 | haven't improved anything, so don't use it. Otherwise, | |
4081 | we are better off with TEMP1. */ | |
4082 | if (GET_CODE (temp1) != ASHIFTRT | |
4083 | || GET_CODE (XEXP (temp1, 0)) != ASHIFT | |
4084 | || XEXP (XEXP (temp1, 0), 0) != temp) | |
8079805d | 4085 | return temp1; |
230d793d RS |
4086 | } |
4087 | break; | |
4088 | ||
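The last NEG transformation is worth a worked instance. For X known to be 0 or 1 (single bit I == 0) in a 32-bit mode, C - I is 31 and the shift pair reproduces the negation. A standalone sketch, assuming 32-bit two's-complement int and arithmetic right shift of negative values (both hold on the hosts GCC targets):

#include <assert.h>

int
main (void)
{
  int x;
  for (x = 0; x <= 1; x++)
    /* (neg X) == (ashiftrt (ashift X 31) 31) when X is 0 or 1 */
    assert (-x == (int) ((unsigned) x << 31) >> 31);
  return 0;
}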
2ca9ae17 | 4089 | case TRUNCATE: |
e30fb98f JL |
4090 | /* We can't handle truncation to a partial integer mode here |
4091 | because we don't know the real bitsize of the partial | |
4092 | integer mode. */ | |
4093 | if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT) | |
4094 | break; | |
4095 | ||
80608e27 JL |
4096 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
4097 | && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), | |
4098 | GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))) | |
2ca9ae17 JW |
4099 | SUBST (XEXP (x, 0), |
4100 | force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)), | |
4101 | GET_MODE_MASK (mode), NULL_RTX, 0)); | |
0f13a422 ILT |
4102 | |
4103 | /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */ | |
4104 | if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND | |
4105 | || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND) | |
4106 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode) | |
4107 | return XEXP (XEXP (x, 0), 0); | |
4108 | ||
4109 | /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is | |
4110 | (OP:SI foo:SI) if OP is NEG or ABS. */ | |
4111 | if ((GET_CODE (XEXP (x, 0)) == ABS | |
4112 | || GET_CODE (XEXP (x, 0)) == NEG) | |
4113 | && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND | |
4114 | || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND) | |
4115 | && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode) | |
f1c6ba8b RK |
4116 | return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode, |
4117 | XEXP (XEXP (XEXP (x, 0), 0), 0), mode); | |
0f13a422 ILT |
4118 | |
4119 | /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is | |
4120 | (truncate:SI X). */ |
4121 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
4122 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE | |
4123 | && subreg_lowpart_p (XEXP (x, 0))) | |
4124 | return SUBREG_REG (XEXP (x, 0)); | |
4125 | ||
4126 | /* If we know that the value is already truncated, we can | |
14a774a9 RK |
4127 | replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION |
4128 | is nonzero for the corresponding modes. But don't do this | |
4129 | for an (LSHIFTRT (MULT ...)) since this will cause problems | |
4130 | with the umulXi3_highpart patterns. */ | |
6a992214 JL |
4131 | if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode), |
4132 | GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) | |
4133 | && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
26c34780 | 4134 | >= (unsigned int) (GET_MODE_BITSIZE (mode) + 1) |
14a774a9 | 4135 | && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT |
23190837 | 4136 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT)) |
0f13a422 ILT |
4137 | return gen_lowpart_for_combine (mode, XEXP (x, 0)); |
4138 | ||
4139 | /* A truncate of a comparison can be replaced with a subreg if | |
4140 | STORE_FLAG_VALUE permits. This is like the previous test, | |
4141 | but it works even if the comparison is done in a mode larger | |
4142 | than HOST_BITS_PER_WIDE_INT. */ | |
4143 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
4144 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' | |
663522cb | 4145 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0) |
0f13a422 ILT |
4146 | return gen_lowpart_for_combine (mode, XEXP (x, 0)); |
4147 | ||
4148 | /* Similarly, a truncate of a register whose value is a | |
4149 | comparison can be replaced with a subreg if STORE_FLAG_VALUE | |
4150 | permits. */ | |
4151 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
663522cb | 4152 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0 |
0f13a422 ILT |
4153 | && (temp = get_last_value (XEXP (x, 0))) |
4154 | && GET_RTX_CLASS (GET_CODE (temp)) == '<') | |
4155 | return gen_lowpart_for_combine (mode, XEXP (x, 0)); | |
4156 | ||
2ca9ae17 JW |
4157 | break; |
4158 | ||
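As a concrete model of the (truncate ({sign,zero}_extend ...)) rewrite above, take int for SImode and long long for DImode (a host assumption made only for this sketch): widening and then truncating is the identity.

#include <assert.h>

int
main (void)
{
  int foo = -123456;
  long long wide = (long long) foo;   /* (sign_extend:DI foo:SI) */
  assert ((int) wide == foo);         /* (truncate:SI ...) == foo:SI */
  return 0;
}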
230d793d RS |
4159 | case FLOAT_TRUNCATE: |
4160 | /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */ | |
4161 | if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND | |
4162 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode) | |
663522cb | 4163 | return XEXP (XEXP (x, 0), 0); |
4635f748 RK |
4164 | |
4165 | /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is |
4166 | (OP:SF foo:SF) if OP is NEG or ABS. */ | |
4167 | if ((GET_CODE (XEXP (x, 0)) == ABS | |
4168 | || GET_CODE (XEXP (x, 0)) == NEG) | |
4169 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND | |
4170 | && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode) | |
f1c6ba8b RK |
4171 | return simplify_gen_unary (GET_CODE (XEXP (x, 0)), mode, |
4172 | XEXP (XEXP (XEXP (x, 0), 0), 0), mode); | |
1d12df72 RK |
4173 | |
4174 | /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0)) | |
4175 | is (float_truncate:SF x). */ | |
4176 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
4177 | && subreg_lowpart_p (XEXP (x, 0)) | |
4178 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE) | |
4179 | return SUBREG_REG (XEXP (x, 0)); | |
663522cb | 4180 | break; |
230d793d RS |
4181 | |
4182 | #ifdef HAVE_cc0 | |
4183 | case COMPARE: | |
4184 | /* Convert (compare FOO (const_int 0)) to FOO unless we aren't | |
4185 | using cc0, in which case we want to leave it as a COMPARE | |
4186 | so we can distinguish it from a register-register-copy. */ | |
4187 | if (XEXP (x, 1) == const0_rtx) | |
4188 | return XEXP (x, 0); | |
4189 | ||
71925bc0 RS |
4190 | /* x - 0 is the same as x unless x's mode has signed zeros and |
4191 | allows rounding towards -infinity. Under those conditions, | |
4192 | 0 - 0 is -0. */ | |
4193 | if (!(HONOR_SIGNED_ZEROS (GET_MODE (XEXP (x, 0))) | |
4194 | && HONOR_SIGN_DEPENDENT_ROUNDING (GET_MODE (XEXP (x, 0)))) | |
230d793d RS |
4195 | && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0)))) |
4196 | return XEXP (x, 0); | |
4197 | break; | |
4198 | #endif | |
4199 | ||
4200 | case CONST: | |
4201 | /* (const (const X)) can become (const X). Do it this way rather than | |
4202 | returning the inner CONST since CONST can be shared with a | |
4203 | REG_EQUAL note. */ | |
4204 | if (GET_CODE (XEXP (x, 0)) == CONST) | |
4205 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4206 | break; | |
4207 | ||
4208 | #ifdef HAVE_lo_sum | |
4209 | case LO_SUM: | |
4210 | /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we | |
4211 | can add in an offset. find_split_point will split this address up | |
4212 | again if it doesn't match. */ | |
4213 | if (GET_CODE (XEXP (x, 0)) == HIGH | |
4214 | && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))) | |
4215 | return XEXP (x, 1); | |
4216 | break; | |
4217 | #endif | |
4218 | ||
4219 | case PLUS: | |
16823694 GK |
4220 | /* Canonicalize (plus (mult (neg B) C) A) |
4221 | to (minus A (mult B C)). */ |
4222 | if (GET_CODE (XEXP (x, 0)) == MULT | |
4223 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == NEG) | |
4224 | { | |
4225 | rtx in1, in2; | |
4226 | ||
4227 | in1 = XEXP (XEXP (XEXP (x, 0), 0), 0); | |
4228 | in2 = XEXP (XEXP (x, 0), 1); | |
4229 | return gen_binary (MINUS, mode, XEXP (x, 1), | |
4230 | gen_binary (MULT, mode, in1, in2)); | |
4231 | } | |
4232 | ||
230d793d RS |
4233 | /* If we have (plus (plus A const) B), associate it so that CONST is |
4234 | outermost, because that is the way indexed addresses are |
4235 | supposed to appear. This code used to check many more cases, but | |
4236 | they are now checked elsewhere. */ | |
4237 | if (GET_CODE (XEXP (x, 0)) == PLUS | |
4238 | && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1))) | |
4239 | return gen_binary (PLUS, mode, | |
4240 | gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), | |
4241 | XEXP (x, 1)), | |
4242 | XEXP (XEXP (x, 0), 1)); | |
4243 | ||
4244 | /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>) | |
4245 | when c is (const_int (pow2 + 1) / 2) is a sign extension of a | |
4246 | bit-field and can be replaced by either a sign_extend or a | |
e6380233 JL |
4247 | sign_extract. The `and' may be a zero_extend and the two |
4248 | <c>, -<c> constants may be reversed. */ | |
230d793d RS |
4249 | if (GET_CODE (XEXP (x, 0)) == XOR |
4250 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
4251 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
663522cb | 4252 | && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1)) |
e6380233 JL |
4253 | && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0 |
4254 | || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) | |
5f4f0e22 | 4255 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
230d793d RS |
4256 | && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND |
4257 | && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT | |
4258 | && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) | |
5f4f0e22 | 4259 | == ((HOST_WIDE_INT) 1 << (i + 1)) - 1)) |
230d793d RS |
4260 | || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND |
4261 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0))) | |
770ae6cc | 4262 | == (unsigned int) i + 1)))) |
8079805d RK |
4263 | return simplify_shift_const |
4264 | (NULL_RTX, ASHIFTRT, mode, | |
4265 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
4266 | XEXP (XEXP (XEXP (x, 0), 0), 0), | |
4267 | GET_MODE_BITSIZE (mode) - (i + 1)), | |
4268 | GET_MODE_BITSIZE (mode) - (i + 1)); | |
230d793d | 4269 | |
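The xor/plus pattern above is the classic branch-free sign extension of an N-bit field: with pow2 == 2^N and c == pow2 / 2, (x ^ c) - c yields the sign-extended field. A standalone check for N == 8, assuming an 8-bit two's-complement signed char (true of the hosts in question):

#include <assert.h>

int
main (void)
{
  int x;
  for (x = 0; x < 256; x++)
    /* (plus (xor (and X 0xff) 0x80) -0x80) == sign-extended low byte */
    assert (((x ^ 0x80) - 0x80) == (int) (signed char) x);
  return 0;
}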
bc0776c6 RK |
4270 | /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if |
4271 | C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE | |
4272 | is 1. This produces better code than the alternative immediately | |
4273 | below. */ | |
4274 | if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<' | |
bc0776c6 | 4275 | && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx) |
9a915772 JH |
4276 | || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)) |
4277 | && (reversed = reversed_comparison (XEXP (x, 0), mode, | |
4278 | XEXP (XEXP (x, 0), 0), | |
4279 | XEXP (XEXP (x, 0), 1)))) | |
8079805d | 4280 | return |
f1c6ba8b | 4281 | simplify_gen_unary (NEG, mode, reversed, mode); |
bc0776c6 RK |
4282 | |
4283 | /* If only the low-order bit of X is possibly nonzero, (plus x -1) | |
230d793d RS |
4284 | can become (ashiftrt (ashift (xor x 1) C) C) where C is |
4285 | the bitsize of the mode - 1. This allows simplification of | |
4286 | "a = (b & 8) == 0;" */ | |
4287 | if (XEXP (x, 1) == constm1_rtx | |
4288 | && GET_CODE (XEXP (x, 0)) != REG | |
4289 | && ! (GET_CODE (XEXP (x, 0)) == SUBREG |
4290 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG) | |
951553af | 4291 | && nonzero_bits (XEXP (x, 0), mode) == 1) |
8079805d RK |
4292 | return simplify_shift_const (NULL_RTX, ASHIFTRT, mode, |
4293 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
f1c6ba8b | 4294 | gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx), |
8079805d RK |
4295 | GET_MODE_BITSIZE (mode) - 1), |
4296 | GET_MODE_BITSIZE (mode) - 1); | |
02f4ada4 RK |
4297 | |
4298 | /* If we are adding two things that have no bits in common, convert | |
4299 | the addition into an IOR. This will often be further simplified, | |
4300 | for example in cases like ((a & 1) + (a & 2)), which can | |
4301 | become a & 3. */ | |
4302 | ||
ac49a949 | 4303 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
951553af RK |
4304 | && (nonzero_bits (XEXP (x, 0), mode) |
4305 | & nonzero_bits (XEXP (x, 1), mode)) == 0) | |
085f1714 RH |
4306 | { |
4307 | /* Try to simplify the expression further. */ | |
4308 | rtx tor = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1)); | |
4309 | temp = combine_simplify_rtx (tor, mode, last, in_dest); | |
4310 | ||
4311 | /* If we could, great. If not, do not go ahead with the IOR | |
4312 | replacement, since PLUS appears in many special purpose | |
4313 | address arithmetic instructions. */ | |
4314 | if (GET_CODE (temp) != CLOBBER && temp != tor) | |
4315 | return temp; | |
4316 | } | |
230d793d RS |
4317 | break; |
4318 | ||
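The PLUS-to-IOR rewrite, and the follow-on simplification it enables, can be seen directly in C (a standalone sketch, not GCC code): when the operands share no nonzero bits, addition and inclusive-or agree.

#include <assert.h>

int
main (void)
{
  unsigned a = 0x1234;
  assert (((a & 1) + (a & 2)) == (a & 3));      /* the example in the comment */
  assert (((a & 0xff) + (a & ~0xffu)) == a);    /* disjoint bits: plus == ior */
  return 0;
}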
4319 | case MINUS: | |
0802d516 RK |
4320 | /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done |
4321 | by reversing the comparison code if valid. */ | |
4322 | if (STORE_FLAG_VALUE == 1 | |
4323 | && XEXP (x, 0) == const1_rtx | |
5109d49f | 4324 | && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<' |
9a915772 JH |
4325 | && (reversed = reversed_comparison (XEXP (x, 1), mode, |
4326 | XEXP (XEXP (x, 1), 0), | |
4327 | XEXP (XEXP (x, 1), 1)))) | |
4328 | return reversed; | |
5109d49f | 4329 | |
230d793d RS |
4330 | /* (minus <foo> (and <foo> (const_int -pow2))) becomes |
4331 | (and <foo> (const_int pow2-1)) */ | |
4332 | if (GET_CODE (XEXP (x, 1)) == AND | |
4333 | && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT | |
663522cb | 4334 | && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0 |
230d793d | 4335 | && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0))) |
8079805d | 4336 | return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0), |
663522cb | 4337 | -INTVAL (XEXP (XEXP (x, 1), 1)) - 1); |
7bef8680 | 4338 | |
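A worked instance of the MINUS rewrite just above with pow2 == 8 (standalone and illustrative only): subtracting the bits shared with -pow2 leaves the low bits.

#include <assert.h>

int
main (void)
{
  unsigned x = 12345;
  /* (minus X (and X -8)) == (and X 7); ~7u is the unsigned form of -8 */
  assert (x - (x & ~7u) == (x & 7));
  return 0;
}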
16823694 GK |
4339 | /* Canonicalize (minus A (mult (neg B) C)) |
4340 | to (plus (mult B C) A). */ |
4341 | if (GET_CODE (XEXP (x, 1)) == MULT | |
4342 | && GET_CODE (XEXP (XEXP (x, 1), 0)) == NEG) | |
4343 | { | |
4344 | rtx in1, in2; | |
4345 | ||
4346 | in1 = XEXP (XEXP (XEXP (x, 1), 0), 0); | |
4347 | in2 = XEXP (XEXP (x, 1), 1); | |
4348 | return gen_binary (PLUS, mode, gen_binary (MULT, mode, in1, in2), | |
4349 | XEXP (x, 0)); | |
4350 | } | |
4351 | ||
4352 | /* Canonicalize (minus (neg A) (mult B C)) to | |
4353 | (minus (mult (neg B) C) A). */ | |
4354 | if (GET_CODE (XEXP (x, 1)) == MULT | |
4355 | && GET_CODE (XEXP (x, 0)) == NEG) | |
4356 | { | |
4357 | rtx in1, in2; | |
4358 | ||
4359 | in1 = simplify_gen_unary (NEG, mode, XEXP (XEXP (x, 1), 0), mode); | |
4360 | in2 = XEXP (XEXP (x, 1), 1); | |
4361 | return gen_binary (MINUS, mode, gen_binary (MULT, mode, in1, in2), | |
4362 | XEXP (XEXP (x, 0), 0)); | |
4363 | } | |
4364 | ||
7bef8680 RK |
4365 | /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for |
4366 | integers. */ | |
4367 | if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode)) | |
8079805d RK |
4368 | return gen_binary (MINUS, mode, |
4369 | gen_binary (MINUS, mode, XEXP (x, 0), | |
4370 | XEXP (XEXP (x, 1), 0)), | |
4371 | XEXP (XEXP (x, 1), 1)); | |
230d793d RS |
4372 | break; |
4373 | ||
4374 | case MULT: | |
4375 | /* If we have (mult (plus A B) C), apply the distributive law and then | |
4376 | the inverse distributive law to see if things simplify. This | |
4377 | occurs mostly in addresses, often when unrolling loops. */ | |
4378 | ||
4379 | if (GET_CODE (XEXP (x, 0)) == PLUS) | |
4380 | { | |
4381 | x = apply_distributive_law | |
4382 | (gen_binary (PLUS, mode, | |
4383 | gen_binary (MULT, mode, | |
4384 | XEXP (XEXP (x, 0), 0), XEXP (x, 1)), | |
4385 | gen_binary (MULT, mode, | |
3749f4ca BS |
4386 | XEXP (XEXP (x, 0), 1), |
4387 | copy_rtx (XEXP (x, 1))))); | |
230d793d RS |
4388 | |
4389 | if (GET_CODE (x) != MULT) | |
8079805d | 4390 | return x; |
230d793d | 4391 | } |
4ba5f925 JH |
4392 | /* Try to simplify a*(b/c) as (a*b)/c. */ |
4393 | if (FLOAT_MODE_P (mode) && flag_unsafe_math_optimizations | |
4394 | && GET_CODE (XEXP (x, 0)) == DIV) | |
4395 | { | |
4396 | rtx tem = simplify_binary_operation (MULT, mode, | |
4397 | XEXP (XEXP (x, 0), 0), | |
4398 | XEXP (x, 1)); | |
4399 | if (tem) | |
4400 | return gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1)); | |
4401 | } | |
230d793d RS |
4402 | break; |
4403 | ||
4404 | case UDIV: | |
4405 | /* If this is a divide by a power of two, treat it as a shift if | |
4406 | its first operand is a shift. */ | |
4407 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
4408 | && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0 | |
4409 | && (GET_CODE (XEXP (x, 0)) == ASHIFT | |
4410 | || GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
4411 | || GET_CODE (XEXP (x, 0)) == ASHIFTRT | |
4412 | || GET_CODE (XEXP (x, 0)) == ROTATE | |
4413 | || GET_CODE (XEXP (x, 0)) == ROTATERT)) | |
8079805d | 4414 | return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i); |
230d793d RS |
4415 | break; |
4416 | ||
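The UDIV case leans on unsigned division by 2^i being a logical right shift by i; a standalone check (not GCC code):

#include <assert.h>

int
main (void)
{
  unsigned x = 1000003;
  assert (x / 8 == x >> 3);   /* (udiv X 8) == (lshiftrt X 3) */
  return 0;
}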
4417 | case EQ: case NE: | |
4418 | case GT: case GTU: case GE: case GEU: | |
4419 | case LT: case LTU: case LE: case LEU: | |
69bc0a1f | 4420 | case UNEQ: case LTGT: |
23190837 AJ |
4421 | case UNGT: case UNGE: |
4422 | case UNLT: case UNLE: | |
69bc0a1f | 4423 | case UNORDERED: case ORDERED: |
230d793d RS |
4424 | /* If the first operand is a condition code, we can't do anything |
4425 | with it. */ | |
4426 | if (GET_CODE (XEXP (x, 0)) == COMPARE | |
4427 | || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC | |
4428 | #ifdef HAVE_cc0 | |
4429 | && XEXP (x, 0) != cc0_rtx | |
4430 | #endif | |
663522cb | 4431 | )) |
230d793d RS |
4432 | { |
4433 | rtx op0 = XEXP (x, 0); | |
4434 | rtx op1 = XEXP (x, 1); | |
4435 | enum rtx_code new_code; | |
4436 | ||
4437 | if (GET_CODE (op0) == COMPARE) | |
4438 | op1 = XEXP (op0, 1), op0 = XEXP (op0, 0); | |
4439 | ||
4440 | /* Simplify our comparison, if possible. */ | |
4441 | new_code = simplify_comparison (code, &op0, &op1); | |
4442 | ||
230d793d | 4443 | /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X |
951553af | 4444 | if only the low-order bit is possibly nonzero in X (such as when |
5109d49f RK |
4445 | X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to |
4446 | (xor X 1) or (minus 1 X); we use the former. Finally, if X is | |
4447 | known to be either 0 or -1, NE becomes a NEG and EQ becomes | |
4448 | (plus X 1). | |
4449 | ||
4450 | Remove any ZERO_EXTRACT we made when thinking this was a | |
4451 | comparison. It may now be simpler to use, e.g., an AND. If a | |
4452 | ZERO_EXTRACT is indeed appropriate, it will be placed back by | |
4453 | the call to make_compound_operation in the SET case. */ | |
4454 | ||
0802d516 RK |
4455 | if (STORE_FLAG_VALUE == 1 |
4456 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
a191f0ee RH |
4457 | && op1 == const0_rtx |
4458 | && mode == GET_MODE (op0) | |
4459 | && nonzero_bits (op0, mode) == 1) | |
818b11b9 RK |
4460 | return gen_lowpart_for_combine (mode, |
4461 | expand_compound_operation (op0)); | |
5109d49f | 4462 | |
0802d516 RK |
4463 | else if (STORE_FLAG_VALUE == 1 |
4464 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4465 | && op1 == const0_rtx |
a191f0ee | 4466 | && mode == GET_MODE (op0) |
5109d49f RK |
4467 | && (num_sign_bit_copies (op0, mode) |
4468 | == GET_MODE_BITSIZE (mode))) | |
4469 | { | |
4470 | op0 = expand_compound_operation (op0); | |
f1c6ba8b RK |
4471 | return simplify_gen_unary (NEG, mode, |
4472 | gen_lowpart_for_combine (mode, op0), | |
4473 | mode); | |
5109d49f RK |
4474 | } |
4475 | ||
0802d516 RK |
4476 | else if (STORE_FLAG_VALUE == 1 |
4477 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
230d793d | 4478 | && op1 == const0_rtx |
a191f0ee | 4479 | && mode == GET_MODE (op0) |
5109d49f | 4480 | && nonzero_bits (op0, mode) == 1) |
818b11b9 RK |
4481 | { |
4482 | op0 = expand_compound_operation (op0); | |
8079805d RK |
4483 | return gen_binary (XOR, mode, |
4484 | gen_lowpart_for_combine (mode, op0), | |
4485 | const1_rtx); | |
5109d49f | 4486 | } |
818b11b9 | 4487 | |
0802d516 RK |
4488 | else if (STORE_FLAG_VALUE == 1 |
4489 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4490 | && op1 == const0_rtx |
a191f0ee | 4491 | && mode == GET_MODE (op0) |
5109d49f RK |
4492 | && (num_sign_bit_copies (op0, mode) |
4493 | == GET_MODE_BITSIZE (mode))) | |
4494 | { | |
4495 | op0 = expand_compound_operation (op0); | |
8079805d | 4496 | return plus_constant (gen_lowpart_for_combine (mode, op0), 1); |
818b11b9 | 4497 | } |
230d793d | 4498 | |
5109d49f RK |
4499 | /* If STORE_FLAG_VALUE is -1, we have cases similar to |
4500 | those above. */ | |
0802d516 RK |
4501 | if (STORE_FLAG_VALUE == -1 |
4502 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
230d793d | 4503 | && op1 == const0_rtx |
5109d49f RK |
4504 | && (num_sign_bit_copies (op0, mode) |
4505 | == GET_MODE_BITSIZE (mode))) | |
4506 | return gen_lowpart_for_combine (mode, | |
4507 | expand_compound_operation (op0)); | |
4508 | ||
0802d516 RK |
4509 | else if (STORE_FLAG_VALUE == -1 |
4510 | && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4511 | && op1 == const0_rtx |
a191f0ee | 4512 | && mode == GET_MODE (op0) |
5109d49f RK |
4513 | && nonzero_bits (op0, mode) == 1) |
4514 | { | |
4515 | op0 = expand_compound_operation (op0); | |
f1c6ba8b RK |
4516 | return simplify_gen_unary (NEG, mode, |
4517 | gen_lowpart_for_combine (mode, op0), | |
4518 | mode); | |
5109d49f RK |
4519 | } |
4520 | ||
0802d516 RK |
4521 | else if (STORE_FLAG_VALUE == -1 |
4522 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4523 | && op1 == const0_rtx |
a191f0ee | 4524 | && mode == GET_MODE (op0) |
5109d49f RK |
4525 | && (num_sign_bit_copies (op0, mode) |
4526 | == GET_MODE_BITSIZE (mode))) | |
230d793d | 4527 | { |
818b11b9 | 4528 | op0 = expand_compound_operation (op0); |
f1c6ba8b RK |
4529 | return simplify_gen_unary (NOT, mode, |
4530 | gen_lowpart_for_combine (mode, op0), | |
4531 | mode); | |
5109d49f RK |
4532 | } |
4533 | ||
4534 | /* If X is 0/1, (eq X 0) is X-1. */ | |
0802d516 RK |
4535 | else if (STORE_FLAG_VALUE == -1 |
4536 | && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT | |
5109d49f | 4537 | && op1 == const0_rtx |
a191f0ee | 4538 | && mode == GET_MODE (op0) |
5109d49f RK |
4539 | && nonzero_bits (op0, mode) == 1) |
4540 | { | |
4541 | op0 = expand_compound_operation (op0); | |
8079805d | 4542 | return plus_constant (gen_lowpart_for_combine (mode, op0), -1); |
230d793d | 4543 | } |
230d793d RS |
4544 | |
4545 | /* If STORE_FLAG_VALUE says to just test the sign bit and X has just | |
951553af RK |
4546 | one bit that might be nonzero, we can convert (ne x 0) to |
4547 | (ashift x c) where C puts the bit in the sign bit. Remove any | |
4548 | AND with STORE_FLAG_VALUE when we are done, since we are only | |
4549 | going to test the sign bit. */ | |
3f508eca | 4550 | if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT |
5f4f0e22 | 4551 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
0802d516 | 4552 | && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode)) |
e51712db | 4553 | == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1)) |
230d793d RS |
4554 | && op1 == const0_rtx |
4555 | && mode == GET_MODE (op0) | |
5109d49f | 4556 | && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0) |
230d793d | 4557 | { |
818b11b9 RK |
4558 | x = simplify_shift_const (NULL_RTX, ASHIFT, mode, |
4559 | expand_compound_operation (op0), | |
230d793d RS |
4560 | GET_MODE_BITSIZE (mode) - 1 - i); |
4561 | if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx) | |
4562 | return XEXP (x, 0); | |
4563 | else | |
4564 | return x; | |
4565 | } | |
4566 | ||
4567 | /* If the code changed, return a whole new comparison. */ | |
4568 | if (new_code != code) | |
f1c6ba8b | 4569 | return gen_rtx_fmt_ee (new_code, mode, op0, op1); |
230d793d | 4570 | |
663522cb | 4571 | /* Otherwise, keep this operation, but maybe change its operands. |
230d793d RS |
4572 | This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */ |
4573 | SUBST (XEXP (x, 0), op0); | |
4574 | SUBST (XEXP (x, 1), op1); | |
4575 | } | |
4576 | break; | |
663522cb | 4577 | |
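For the STORE_FLAG_VALUE == 1 rewrites in this case, C's own comparison results (always 0 or 1) are a convenient model; a standalone sketch for an X already known to be 0 or 1:

#include <assert.h>

int
main (void)
{
  int x;
  for (x = 0; x <= 1; x++)
    {
      assert ((x != 0) == x);         /* (ne X 0) simplifies to X */
      assert ((x == 0) == (x ^ 1));   /* (eq X 0) simplifies to (xor X 1) */
    }
  return 0;
}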
230d793d | 4578 | case IF_THEN_ELSE: |
8079805d | 4579 | return simplify_if_then_else (x); |
9210df58 | 4580 | |
8079805d RK |
4581 | case ZERO_EXTRACT: |
4582 | case SIGN_EXTRACT: | |
4583 | case ZERO_EXTEND: | |
4584 | case SIGN_EXTEND: | |
0f41302f | 4585 | /* If we are processing SET_DEST, we are done. */ |
8079805d RK |
4586 | if (in_dest) |
4587 | return x; | |
d0ab8cd3 | 4588 | |
8079805d | 4589 | return expand_compound_operation (x); |
d0ab8cd3 | 4590 | |
8079805d RK |
4591 | case SET: |
4592 | return simplify_set (x); | |
1a26b032 | 4593 | |
8079805d RK |
4594 | case AND: |
4595 | case IOR: | |
4596 | case XOR: | |
4597 | return simplify_logical (x, last); | |
d0ab8cd3 | 4598 | |
663522cb | 4599 | case ABS: |
8079805d RK |
4600 | /* (abs (neg <foo>)) -> (abs <foo>) */ |
4601 | if (GET_CODE (XEXP (x, 0)) == NEG) | |
4602 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
1a26b032 | 4603 | |
b472527b JL |
4604 | /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS), |
4605 | do nothing. */ | |
4606 | if (GET_MODE (XEXP (x, 0)) == VOIDmode) | |
4607 | break; | |
f40421ce | 4608 | |
8079805d RK |
4609 | /* If operand is something known to be positive, ignore the ABS. */ |
4610 | if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS | |
4611 | || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
4612 | <= HOST_BITS_PER_WIDE_INT) | |
4613 | && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
4614 | & ((HOST_WIDE_INT) 1 | |
4615 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))) | |
4616 | == 0))) | |
4617 | return XEXP (x, 0); | |
1a26b032 | 4618 | |
8079805d RK |
4619 | /* If operand is known to be only -1 or 0, convert ABS to NEG. */ |
4620 | if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode)) | |
f1c6ba8b | 4621 | return gen_rtx_NEG (mode, XEXP (x, 0)); |
1a26b032 | 4622 | |
8079805d | 4623 | break; |
1a26b032 | 4624 | |
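The first ABS rewrite is the plain identity |-x| == |x|; a standalone check on ints (avoiding INT_MIN, whose negation overflows):

#include <assert.h>

int
main (void)
{
  int x = -57;
  int ax = x < 0 ? -x : x;      /* (abs X) */
  int anx = -x < 0 ? x : -x;    /* (abs (neg X)) */
  assert (anx == ax);
  return 0;
}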
8079805d RK |
4625 | case FFS: |
4626 | /* (ffs (*_extend <X>)) = (ffs <X>) */ | |
4627 | if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND | |
4628 | || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND) | |
4629 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4630 | break; | |
1a26b032 | 4631 | |
8079805d RK |
4632 | case FLOAT: |
4633 | /* (float (sign_extend <X>)) = (float <X>). */ | |
4634 | if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND) | |
4635 | SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0)); | |
4636 | break; | |
1a26b032 | 4637 | |
8079805d RK |
4638 | case ASHIFT: |
4639 | case LSHIFTRT: | |
4640 | case ASHIFTRT: | |
4641 | case ROTATE: | |
4642 | case ROTATERT: | |
4643 | /* If this is a shift by a constant amount, simplify it. */ | |
4644 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
663522cb | 4645 | return simplify_shift_const (x, code, mode, XEXP (x, 0), |
8079805d RK |
4646 | INTVAL (XEXP (x, 1))); |
4647 | ||
4648 | #ifdef SHIFT_COUNT_TRUNCATED | |
4649 | else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG) | |
4650 | SUBST (XEXP (x, 1), | |
f1b1186f | 4651 | force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)), |
663522cb | 4652 | ((HOST_WIDE_INT) 1 |
8079805d RK |
4653 | << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x)))) |
4654 | - 1, | |
4655 | NULL_RTX, 0)); | |
4656 | #endif | |
4657 | ||
4658 | break; | |
e9a25f70 | 4659 | |
82be40f7 BS |
4660 | case VEC_SELECT: |
4661 | { | |
4662 | rtx op0 = XEXP (x, 0); | |
4663 | rtx op1 = XEXP (x, 1); | |
4664 | int len; | |
4665 | ||
4666 | if (GET_CODE (op1) != PARALLEL) | |
4667 | abort (); | |
4668 | len = XVECLEN (op1, 0); | |
4669 | if (len == 1 | |
4670 | && GET_CODE (XVECEXP (op1, 0, 0)) == CONST_INT | |
4671 | && GET_CODE (op0) == VEC_CONCAT) | |
4672 | { | |
4673 | int offset = INTVAL (XVECEXP (op1, 0, 0)) * GET_MODE_SIZE (GET_MODE (x)); | |
4674 | ||
4675 | /* Try to find the element in the VEC_CONCAT. */ | |
4676 | for (;;) | |
4677 | { | |
4678 | if (GET_MODE (op0) == GET_MODE (x)) | |
4679 | return op0; | |
4680 | if (GET_CODE (op0) == VEC_CONCAT) | |
4681 | { | |
4682 | HOST_WIDE_INT op0_size = GET_MODE_SIZE (GET_MODE (XEXP (op0, 0))); | |
4683 | if (op0_size < offset) | |
4684 | op0 = XEXP (op0, 0); | |
4685 | else | |
4686 | { | |
4687 | offset -= op0_size; | |
4688 | op0 = XEXP (op0, 1); | |
4689 | } | |
4690 | } | |
4691 | else | |
4692 | break; | |
4693 | } | |
4694 | } | |
4695 | } | |
4696 | ||
4697 | break; | |
23190837 | 4698 | |
e9a25f70 JL |
4699 | default: |
4700 | break; | |
8079805d RK |
4701 | } |
4702 | ||
4703 | return x; | |
4704 | } | |
4705 | \f | |
4706 | /* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */ | |
5109d49f | 4707 | |
8079805d RK |
4708 | static rtx |
4709 | simplify_if_then_else (x) | |
4710 | rtx x; | |
4711 | { | |
4712 | enum machine_mode mode = GET_MODE (x); | |
4713 | rtx cond = XEXP (x, 0); | |
d6edb99e ZW |
4714 | rtx true_rtx = XEXP (x, 1); |
4715 | rtx false_rtx = XEXP (x, 2); | |
8079805d RK |
4716 | enum rtx_code true_code = GET_CODE (cond); |
4717 | int comparison_p = GET_RTX_CLASS (true_code) == '<'; | |
4718 | rtx temp; | |
4719 | int i; | |
9a915772 JH |
4720 | enum rtx_code false_code; |
4721 | rtx reversed; | |
8079805d | 4722 | |
0f41302f | 4723 | /* Simplify storing of the truth value. */ |
d6edb99e | 4724 | if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx) |
8079805d | 4725 | return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1)); |
663522cb | 4726 | |
0f41302f | 4727 | /* Likewise when the truth value has to be reversed. */ |
9a915772 | 4728 | if (comparison_p |
d6edb99e | 4729 | && true_rtx == const0_rtx && false_rtx == const_true_rtx |
9a915772 JH |
4730 | && (reversed = reversed_comparison (cond, mode, XEXP (cond, 0), |
4731 | XEXP (cond, 1)))) | |
4732 | return reversed; | |
8079805d RK |
4733 | |
4734 | /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used | |
4735 | in it is being compared against certain values. Get the true and false | |
4736 | comparisons and see if that says anything about the value of each arm. */ | |
4737 | ||
9a915772 JH |
4738 | if (comparison_p |
4739 | && ((false_code = combine_reversed_comparison_code (cond)) | |
4740 | != UNKNOWN) | |
8079805d RK |
4741 | && GET_CODE (XEXP (cond, 0)) == REG) |
4742 | { | |
4743 | HOST_WIDE_INT nzb; | |
4744 | rtx from = XEXP (cond, 0); | |
8079805d RK |
4745 | rtx true_val = XEXP (cond, 1); |
4746 | rtx false_val = true_val; | |
4747 | int swapped = 0; | |
9210df58 | 4748 | |
8079805d | 4749 | /* If FALSE_CODE is EQ, swap the codes and arms. */ |
5109d49f | 4750 | |
8079805d | 4751 | if (false_code == EQ) |
1a26b032 | 4752 | { |
8079805d | 4753 | swapped = 1, true_code = EQ, false_code = NE; |
d6edb99e | 4754 | temp = true_rtx, true_rtx = false_rtx, false_rtx = temp; |
8079805d | 4755 | } |
5109d49f | 4756 | |
8079805d RK |
4757 | /* If we are comparing against zero and the expression being tested has |
4758 | only a single bit that might be nonzero, that is its value when it is | |
4759 | not equal to zero. Similarly if it is known to be -1 or 0. */ | |
4760 | ||
4761 | if (true_code == EQ && true_val == const0_rtx | |
4762 | && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0) | |
4763 | false_code = EQ, false_val = GEN_INT (nzb); | |
4764 | else if (true_code == EQ && true_val == const0_rtx | |
4765 | && (num_sign_bit_copies (from, GET_MODE (from)) | |
4766 | == GET_MODE_BITSIZE (GET_MODE (from)))) | |
4767 | false_code = EQ, false_val = constm1_rtx; | |
4768 | ||
4769 | /* Now simplify an arm if we know the value of the register in the | |
4770 | branch and it is used in the arm. Be careful due to the potential | |
4771 | of locally-shared RTL. */ | |
4772 | ||
d6edb99e ZW |
4773 | if (reg_mentioned_p (from, true_rtx)) |
4774 | true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code, | |
4775 | from, true_val), | |
8079805d | 4776 | pc_rtx, pc_rtx, 0, 0); |
d6edb99e ZW |
4777 | if (reg_mentioned_p (from, false_rtx)) |
4778 | false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code, | |
8079805d RK |
4779 | from, false_val), |
4780 | pc_rtx, pc_rtx, 0, 0); | |
4781 | ||
d6edb99e ZW |
4782 | SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx); |
4783 | SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx); | |
8079805d | 4784 | |
d6edb99e ZW |
4785 | true_rtx = XEXP (x, 1); |
4786 | false_rtx = XEXP (x, 2); | |
4787 | true_code = GET_CODE (cond); | |
8079805d | 4788 | } |
5109d49f | 4789 | |
8079805d RK |
4790 | /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be |
4791 | reversed, do so to avoid needing two sets of patterns for | |
4792 | subtract-and-branch insns. Similarly if we have a constant in the true | |
4793 | arm, if the false arm is the same as the first operand of the comparison, |
4794 | or if the false arm is more complicated than the true arm. */ |
4795 | ||
9a915772 JH |
4796 | if (comparison_p |
4797 | && combine_reversed_comparison_code (cond) != UNKNOWN | |
d6edb99e ZW |
4798 | && (true_rtx == pc_rtx |
4799 | || (CONSTANT_P (true_rtx) | |
4800 | && GET_CODE (false_rtx) != CONST_INT && false_rtx != pc_rtx) | |
4801 | || true_rtx == const0_rtx | |
4802 | || (GET_RTX_CLASS (GET_CODE (true_rtx)) == 'o' | |
4803 | && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o') | |
4804 | || (GET_CODE (true_rtx) == SUBREG | |
4805 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true_rtx))) == 'o' | |
4806 | && GET_RTX_CLASS (GET_CODE (false_rtx)) != 'o') | |
4807 | || reg_mentioned_p (true_rtx, false_rtx) | |
4808 | || rtx_equal_p (false_rtx, XEXP (cond, 0)))) | |
8079805d | 4809 | { |
9a915772 | 4810 | true_code = reversed_comparison_code (cond, NULL); |
8079805d | 4811 | SUBST (XEXP (x, 0), |
9a915772 JH |
4812 | reversed_comparison (cond, GET_MODE (cond), XEXP (cond, 0), |
4813 | XEXP (cond, 1))); | |
5109d49f | 4814 | |
d6edb99e ZW |
4815 | SUBST (XEXP (x, 1), false_rtx); |
4816 | SUBST (XEXP (x, 2), true_rtx); | |
1a26b032 | 4817 | |
d6edb99e ZW |
4818 | temp = true_rtx, true_rtx = false_rtx, false_rtx = temp; |
4819 | cond = XEXP (x, 0); | |
bb821298 | 4820 | |
0f41302f | 4821 | /* It is possible that the conditional has been simplified out. */ |
bb821298 RK |
4822 | true_code = GET_CODE (cond); |
4823 | comparison_p = GET_RTX_CLASS (true_code) == '<'; | |
8079805d | 4824 | } |
abe6e52f | 4825 | |
8079805d | 4826 | /* If the two arms are identical, we don't need the comparison. */ |
1a26b032 | 4827 | |
d6edb99e ZW |
4828 | if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond)) |
4829 | return true_rtx; | |
1a26b032 | 4830 | |
5be669c7 RK |
4831 | /* Convert a == b ? b : a to "a". */ |
4832 | if (true_code == EQ && ! side_effects_p (cond) | |
73e42cf3 | 4833 | && !HONOR_NANS (mode) |
d6edb99e ZW |
4834 | && rtx_equal_p (XEXP (cond, 0), false_rtx) |
4835 | && rtx_equal_p (XEXP (cond, 1), true_rtx)) | |
4836 | return false_rtx; | |
5be669c7 | 4837 | else if (true_code == NE && ! side_effects_p (cond) |
73e42cf3 | 4838 | && !HONOR_NANS (mode) |
d6edb99e ZW |
4839 | && rtx_equal_p (XEXP (cond, 0), true_rtx) |
4840 | && rtx_equal_p (XEXP (cond, 1), false_rtx)) | |
4841 | return true_rtx; | |
5be669c7 | 4842 | |
8079805d RK |
4843 | /* Look for cases where we have (abs x) or (neg (abs X)). */ |
4844 | ||
4845 | if (GET_MODE_CLASS (mode) == MODE_INT | |
d6edb99e ZW |
4846 | && GET_CODE (false_rtx) == NEG |
4847 | && rtx_equal_p (true_rtx, XEXP (false_rtx, 0)) | |
8079805d | 4848 | && comparison_p |
d6edb99e ZW |
4849 | && rtx_equal_p (true_rtx, XEXP (cond, 0)) |
4850 | && ! side_effects_p (true_rtx)) | |
8079805d RK |
4851 | switch (true_code) |
4852 | { | |
4853 | case GT: | |
4854 | case GE: | |
f1c6ba8b | 4855 | return simplify_gen_unary (ABS, mode, true_rtx, mode); |
8079805d RK |
4856 | case LT: |
4857 | case LE: | |
f1c6ba8b RK |
4858 | return |
4859 | simplify_gen_unary (NEG, mode, | |
4860 | simplify_gen_unary (ABS, mode, true_rtx, mode), | |
4861 | mode); | |
cf0d9408 KH |
4862 | default: |
4863 | break; | |
8079805d RK |
4864 | } |
4865 | ||
4866 | /* Look for MIN or MAX. */ | |
4867 | ||
de6c5979 | 4868 | if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations) |
8079805d | 4869 | && comparison_p |
d6edb99e ZW |
4870 | && rtx_equal_p (XEXP (cond, 0), true_rtx) |
4871 | && rtx_equal_p (XEXP (cond, 1), false_rtx) | |
8079805d RK |
4872 | && ! side_effects_p (cond)) |
4873 | switch (true_code) | |
4874 | { | |
4875 | case GE: | |
4876 | case GT: | |
d6edb99e | 4877 | return gen_binary (SMAX, mode, true_rtx, false_rtx); |
8079805d RK |
4878 | case LE: |
4879 | case LT: | |
d6edb99e | 4880 | return gen_binary (SMIN, mode, true_rtx, false_rtx); |
8079805d RK |
4881 | case GEU: |
4882 | case GTU: | |
d6edb99e | 4883 | return gen_binary (UMAX, mode, true_rtx, false_rtx); |
8079805d RK |
4884 | case LEU: |
4885 | case LTU: | |
d6edb99e | 4886 | return gen_binary (UMIN, mode, true_rtx, false_rtx); |
e9a25f70 JL |
4887 | default: |
4888 | break; | |
8079805d | 4889 | } |
663522cb | 4890 | |
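The MIN/MAX detection above recognizes a conditional that selects between the two operands of its own comparison; a standalone C model of two of the four signed cases:

#include <assert.h>

int
main (void)
{
  int a = 7, b = 12;
  int smax = a > b ? a : b, smin = a > b ? b : a;
  assert ((a >= b ? a : b) == smax);   /* (if_then_else (ge A B) A B) -> (smax A B) */
  assert ((a <= b ? a : b) == smin);   /* (if_then_else (le A B) A B) -> (smin A B) */
  return 0;
}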
8079805d RK |
4891 | /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its |
4892 | second operand is zero, this can be done as (OP Z (mult COND C2)) where | |
4893 | C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or | |
4894 | SIGN_EXTEND as long as Z is already extended (so we don't destroy it). | |
4895 | We can do this kind of thing in some cases when STORE_FLAG_VALUE is | |
0802d516 | 4896 | neither 1 nor -1, but it isn't worth checking for. */ |
8079805d | 4897 | |
0802d516 RK |
4898 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
4899 | && comparison_p && mode != VOIDmode && ! side_effects_p (x)) | |
8079805d | 4900 | { |
d6edb99e ZW |
4901 | rtx t = make_compound_operation (true_rtx, SET); |
4902 | rtx f = make_compound_operation (false_rtx, SET); | |
8079805d RK |
4903 | rtx cond_op0 = XEXP (cond, 0); |
4904 | rtx cond_op1 = XEXP (cond, 1); | |
6a651371 | 4905 | enum rtx_code op = NIL, extend_op = NIL; |
8079805d | 4906 | enum machine_mode m = mode; |
6a651371 | 4907 | rtx z = 0, c1 = NULL_RTX; |
8079805d | 4908 | |
8079805d RK |
4909 | if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS |
4910 | || GET_CODE (t) == IOR || GET_CODE (t) == XOR | |
4911 | || GET_CODE (t) == ASHIFT | |
4912 | || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT) | |
4913 | && rtx_equal_p (XEXP (t, 0), f)) | |
4914 | c1 = XEXP (t, 1), op = GET_CODE (t), z = f; | |
4915 | ||
4916 | /* If an identity-zero op is commutative, check whether there | |
0f41302f | 4917 | would be a match if we swapped the operands. */ |
8079805d RK |
4918 | else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR |
4919 | || GET_CODE (t) == XOR) | |
4920 | && rtx_equal_p (XEXP (t, 1), f)) | |
4921 | c1 = XEXP (t, 0), op = GET_CODE (t), z = f; | |
4922 | else if (GET_CODE (t) == SIGN_EXTEND | |
4923 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4924 | || GET_CODE (XEXP (t, 0)) == MINUS | |
4925 | || GET_CODE (XEXP (t, 0)) == IOR | |
4926 | || GET_CODE (XEXP (t, 0)) == XOR | |
4927 | || GET_CODE (XEXP (t, 0)) == ASHIFT | |
4928 | || GET_CODE (XEXP (t, 0)) == LSHIFTRT | |
4929 | || GET_CODE (XEXP (t, 0)) == ASHIFTRT) | |
4930 | && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG | |
4931 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 0)) | |
4932 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) | |
4933 | && (num_sign_bit_copies (f, GET_MODE (f)) | |
26c34780 RS |
4934 | > (unsigned int) |
4935 | (GET_MODE_BITSIZE (mode) | |
8079805d RK |
4936 | - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0)))))) |
4937 | { | |
4938 | c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); | |
4939 | extend_op = SIGN_EXTEND; | |
4940 | m = GET_MODE (XEXP (t, 0)); | |
1a26b032 | 4941 | } |
8079805d RK |
4942 | else if (GET_CODE (t) == SIGN_EXTEND |
4943 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4944 | || GET_CODE (XEXP (t, 0)) == IOR | |
4945 | || GET_CODE (XEXP (t, 0)) == XOR) | |
4946 | && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG | |
4947 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 1)) | |
4948 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) | |
4949 | && (num_sign_bit_copies (f, GET_MODE (f)) | |
26c34780 RS |
4950 | > (unsigned int) |
4951 | (GET_MODE_BITSIZE (mode) | |
8079805d RK |
4952 | - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1)))))) |
4953 | { | |
4954 | c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); | |
4955 | extend_op = SIGN_EXTEND; | |
4956 | m = GET_MODE (XEXP (t, 0)); | |
4957 | } | |
4958 | else if (GET_CODE (t) == ZERO_EXTEND | |
4959 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4960 | || GET_CODE (XEXP (t, 0)) == MINUS | |
4961 | || GET_CODE (XEXP (t, 0)) == IOR | |
4962 | || GET_CODE (XEXP (t, 0)) == XOR | |
4963 | || GET_CODE (XEXP (t, 0)) == ASHIFT | |
4964 | || GET_CODE (XEXP (t, 0)) == LSHIFTRT | |
4965 | || GET_CODE (XEXP (t, 0)) == ASHIFTRT) | |
4966 | && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG | |
4967 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
4968 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 0)) | |
4969 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f) | |
4970 | && ((nonzero_bits (f, GET_MODE (f)) | |
663522cb | 4971 | & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0)))) |
8079805d RK |
4972 | == 0)) |
4973 | { | |
4974 | c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0)); | |
4975 | extend_op = ZERO_EXTEND; | |
4976 | m = GET_MODE (XEXP (t, 0)); | |
4977 | } | |
4978 | else if (GET_CODE (t) == ZERO_EXTEND | |
4979 | && (GET_CODE (XEXP (t, 0)) == PLUS | |
4980 | || GET_CODE (XEXP (t, 0)) == IOR | |
4981 | || GET_CODE (XEXP (t, 0)) == XOR) | |
4982 | && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG | |
4983 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
4984 | && subreg_lowpart_p (XEXP (XEXP (t, 0), 1)) | |
4985 | && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f) | |
4986 | && ((nonzero_bits (f, GET_MODE (f)) | |
663522cb | 4987 | & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1)))) |
8079805d RK |
4988 | == 0)) |
4989 | { | |
4990 | c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0)); | |
4991 | extend_op = ZERO_EXTEND; | |
4992 | m = GET_MODE (XEXP (t, 0)); | |
4993 | } | |
663522cb | 4994 | |
8079805d RK |
4995 | if (z) |
4996 | { | |
4997 | temp = subst (gen_binary (true_code, m, cond_op0, cond_op1), | |
4998 | pc_rtx, pc_rtx, 0, 0); | |
4999 | temp = gen_binary (MULT, m, temp, | |
5000 | gen_binary (MULT, m, c1, const_true_rtx)); | |
5001 | temp = subst (temp, pc_rtx, pc_rtx, 0, 0); | |
5002 | temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp); | |
5003 | ||
5004 | if (extend_op != NIL) | |
f1c6ba8b | 5005 | temp = simplify_gen_unary (extend_op, mode, temp, m); |
8079805d RK |
5006 | |
5007 | return temp; | |
5008 | } | |
5009 | } | |
224eeff2 | 5010 | |
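A worked instance of the transformation above with OP == PLUS and STORE_FLAG_VALUE == 1 (standalone sketch, not GCC code): multiplying C1 by the 0/1 comparison flag selects between adding C1 and adding the identity zero.

#include <assert.h>

int
main (void)
{
  int z = 40, c1 = 2, cond;
  for (cond = 0; cond <= 1; cond++)
    /* (if_then_else COND (plus Z C1) Z) == (plus Z (mult C1 COND)) */
    assert ((cond ? z + c1 : z) == z + c1 * cond);
  return 0;
}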
8079805d RK |
5011 | /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or |
5012 | 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the | |
5013 | negation of a single bit, we can convert this operation to a shift. We | |
5014 | can actually do this more generally, but it doesn't seem worth it. */ | |
5015 | ||
5016 | if (true_code == NE && XEXP (cond, 1) == const0_rtx | |
d6edb99e | 5017 | && false_rtx == const0_rtx && GET_CODE (true_rtx) == CONST_INT |
8079805d | 5018 | && ((1 == nonzero_bits (XEXP (cond, 0), mode) |
d6edb99e | 5019 | && (i = exact_log2 (INTVAL (true_rtx))) >= 0) |
8079805d RK |
5020 | || ((num_sign_bit_copies (XEXP (cond, 0), mode) |
5021 | == GET_MODE_BITSIZE (mode)) | |
d6edb99e | 5022 | && (i = exact_log2 (-INTVAL (true_rtx))) >= 0))) |
8079805d RK |
5023 | return |
5024 | simplify_shift_const (NULL_RTX, ASHIFT, mode, | |
5025 | gen_lowpart_for_combine (mode, XEXP (cond, 0)), i); | |
230d793d | 5026 | |
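And a worked instance of this final rewrite (standalone and illustrative only): with A known to be 0 or 1 and C1 == 1 << 4, the conditional reduces to a left shift.

#include <assert.h>

int
main (void)
{
  unsigned a;
  for (a = 0; a <= 1; a++)
    /* (if_then_else (ne A 0) 16 0) == (ashift A 4) */
    assert ((a != 0 ? 16u : 0u) == a << 4);
  return 0;
}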
8079805d RK |
5027 | return x; |
5028 | } | |
5029 | \f | |
5030 | /* Simplify X, a SET expression. Return the new expression. */ | |
230d793d | 5031 | |
8079805d RK |
5032 | static rtx |
5033 | simplify_set (x) | |
5034 | rtx x; | |
5035 | { | |
5036 | rtx src = SET_SRC (x); | |
5037 | rtx dest = SET_DEST (x); | |
5038 | enum machine_mode mode | |
5039 | = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest); | |
5040 | rtx other_insn; | |
5041 | rtx *cc_use; | |
5042 | ||
5043 | /* (set (pc) (return)) gets written as (return). */ | |
5044 | if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN) | |
5045 | return src; | |
230d793d | 5046 | |
87e3e0c1 RK |
5047 | /* Now that we know for sure which bits of SRC we are using, see if we can |
5048 | simplify the expression for the object knowing that we only need the | |
5049 | low-order bits. */ | |
5050 | ||
855c3a2e IS |
5051 | if (GET_MODE_CLASS (mode) == MODE_INT |
5052 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
c5c76735 | 5053 | { |
e8dc6d50 | 5054 | src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, NULL_RTX, 0); |
c5c76735 JL |
5055 | SUBST (SET_SRC (x), src); |
5056 | } | |
87e3e0c1 | 5057 | |
8079805d RK |
5058 | /* If we are setting CC0 or if the source is a COMPARE, look for the use of |
5059 | the comparison result and try to simplify it unless we already have used | |
5060 | undobuf.other_insn. */ | |
dbf4f1a2 RS |
5061 | if ((GET_MODE_CLASS (mode) == MODE_CC |
5062 | || GET_CODE (src) == COMPARE | |
5063 | || CC0_P (dest)) | |
8079805d RK |
5064 | && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0 |
5065 | && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn) | |
5066 | && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<' | |
c0d3ac4d | 5067 | && rtx_equal_p (XEXP (*cc_use, 0), dest)) |
8079805d RK |
5068 | { |
5069 | enum rtx_code old_code = GET_CODE (*cc_use); | |
5070 | enum rtx_code new_code; | |
f40f4c8e | 5071 | rtx op0, op1, tmp; |
8079805d RK |
5072 | int other_changed = 0; |
5073 | enum machine_mode compare_mode = GET_MODE (dest); | |
f40f4c8e | 5074 | enum machine_mode tmp_mode; |
8079805d RK |
5075 | |
5076 | if (GET_CODE (src) == COMPARE) | |
5077 | op0 = XEXP (src, 0), op1 = XEXP (src, 1); | |
5078 | else | |
5079 | op0 = src, op1 = const0_rtx; | |
230d793d | 5080 | |
f40f4c8e RS |
5081 | /* Check whether the comparison is known at compile time. */ |
5082 | if (GET_MODE (op0) != VOIDmode) | |
5083 | tmp_mode = GET_MODE (op0); | |
5084 | else if (GET_MODE (op1) != VOIDmode) | |
5085 | tmp_mode = GET_MODE (op1); | |
5086 | else | |
5087 | tmp_mode = compare_mode; | |
5088 | tmp = simplify_relational_operation (old_code, tmp_mode, op0, op1); | |
5089 | if (tmp != NULL_RTX) | |
5090 | { | |
5091 | rtx pat = PATTERN (other_insn); | |
5092 | undobuf.other_insn = other_insn; | |
5093 | SUBST (*cc_use, tmp); | |
5094 | ||
5095 | /* Attempt to simplify the CC user. */
5096 | if (GET_CODE (pat) == SET) | |
5097 | { | |
5098 | rtx new = simplify_rtx (SET_SRC (pat)); | |
5099 | if (new != NULL_RTX) | |
5100 | SUBST (SET_SRC (pat), new); | |
5101 | } | |
5102 | ||
5103 | /* Convert X into a no-op move. */ | |
5104 | SUBST (SET_DEST (x), pc_rtx); | |
5105 | SUBST (SET_SRC (x), pc_rtx); | |
5106 | return x; | |
5107 | } | |
5108 | ||
8079805d RK |
5109 | /* Simplify our comparison, if possible. */ |
5110 | new_code = simplify_comparison (old_code, &op0, &op1); | |
230d793d | 5111 | |
c141a106 | 5112 | #ifdef EXTRA_CC_MODES |
8079805d RK |
5113 | /* If this machine has CC modes other than CCmode, check to see if we |
5114 | need to use a different CC mode here. */ | |
5115 | compare_mode = SELECT_CC_MODE (new_code, op0, op1); | |
c141a106 | 5116 | #endif /* EXTRA_CC_MODES */ |
230d793d | 5117 | |
c141a106 | 5118 | #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES) |
8079805d RK |
5119 | /* If the mode changed, we have to change SET_DEST, the mode in the |
5120 | compare, and the mode in the place SET_DEST is used. If SET_DEST is | |
5121 | a hard register, just build new versions with the proper mode. If it | |
5122 | is a pseudo, we lose unless it is the only time we set the pseudo, in
5123 | which case we can safely change its mode. */ | |
5124 | if (compare_mode != GET_MODE (dest)) | |
5125 | { | |
770ae6cc | 5126 | unsigned int regno = REGNO (dest); |
38a448ca | 5127 | rtx new_dest = gen_rtx_REG (compare_mode, regno); |
8079805d RK |
5128 | |
5129 | if (regno < FIRST_PSEUDO_REGISTER | |
b1f21e0a | 5130 | || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest))) |
230d793d | 5131 | { |
8079805d RK |
5132 | if (regno >= FIRST_PSEUDO_REGISTER) |
5133 | SUBST (regno_reg_rtx[regno], new_dest); | |
230d793d | 5134 | |
8079805d RK |
5135 | SUBST (SET_DEST (x), new_dest); |
5136 | SUBST (XEXP (*cc_use, 0), new_dest); | |
5137 | other_changed = 1; | |
230d793d | 5138 | |
8079805d | 5139 | dest = new_dest; |
230d793d | 5140 | } |
8079805d | 5141 | } |
230d793d RS |
5142 | #endif |
5143 | ||
8079805d RK |
5144 | /* If the code changed, we have to build a new comparison in |
5145 | undobuf.other_insn. */ | |
5146 | if (new_code != old_code) | |
5147 | { | |
5148 | unsigned HOST_WIDE_INT mask; | |
5149 | ||
f1c6ba8b RK |
5150 | SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use), |
5151 | dest, const0_rtx)); | |
8079805d RK |
5152 | |
5153 | /* If the only change we made was to change an EQ into an NE or | |
5154 | vice versa, OP0 has only one bit that might be nonzero, and OP1 | |
5155 | is zero, check if changing the user of the condition code will | |
5156 | produce a valid insn. If it won't, we can keep the original code | |
5157 | in that insn by surrounding our operation with an XOR. */ | |
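/* Sketch of the fallback (hypothetical values): if nonzero_bits says
   OP0 can only have bit 2 set, OP0 is either 0 or 4, so

       (op0 == 0)      and      ((op0 ^ 4) != 0)

   test the same condition; XORing OP0 with the single-bit mask lets
   the user of the condition code keep its original EQ/NE code.  */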
5158 | ||
5159 | if (((old_code == NE && new_code == EQ) | |
5160 | || (old_code == EQ && new_code == NE)) | |
5161 | && ! other_changed && op1 == const0_rtx | |
5162 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT | |
5163 | && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0) | |
230d793d | 5164 | { |
8079805d | 5165 | rtx pat = PATTERN (other_insn), note = 0; |
230d793d | 5166 | |
8e2f6e35 | 5167 | if ((recog_for_combine (&pat, other_insn, ¬e) < 0 |
8079805d RK |
5168 | && ! check_asm_operands (pat))) |
5169 | { | |
5170 | PUT_CODE (*cc_use, old_code); | |
5171 | other_insn = 0; | |
230d793d | 5172 | |
8079805d | 5173 | op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask)); |
230d793d | 5174 | } |
230d793d RS |
5175 | } |
5176 | ||
8079805d RK |
5177 | other_changed = 1; |
5178 | } | |
5179 | ||
5180 | if (other_changed) | |
5181 | undobuf.other_insn = other_insn; | |
230d793d RS |
5182 | |
5183 | #ifdef HAVE_cc0 | |
8079805d RK |
5184 | /* If we are now comparing against zero, change our source if |
5185 | needed. If we do not use cc0, we always have a COMPARE. */ | |
5186 | if (op1 == const0_rtx && dest == cc0_rtx) | |
5187 | { | |
5188 | SUBST (SET_SRC (x), op0); | |
5189 | src = op0; | |
5190 | } | |
5191 | else | |
230d793d RS |
5192 | #endif |
5193 | ||
8079805d RK |
5194 | /* Otherwise, if we didn't previously have a COMPARE in the |
5195 | correct mode, we need one. */ | |
5196 | if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode) | |
5197 | { | |
f1c6ba8b | 5198 | SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1)); |
8079805d | 5199 | src = SET_SRC (x); |
230d793d RS |
5200 | } |
5201 | else | |
5202 | { | |
8079805d RK |
5203 | /* Otherwise, update the COMPARE if needed. */ |
5204 | SUBST (XEXP (src, 0), op0); | |
5205 | SUBST (XEXP (src, 1), op1); | |
230d793d | 5206 | } |
8079805d RK |
5207 | } |
5208 | else | |
5209 | { | |
5210 | /* Get SET_SRC in a form where we have placed back any | |
5211 | compound expressions. Then do the checks below. */ | |
5212 | src = make_compound_operation (src, SET); | |
5213 | SUBST (SET_SRC (x), src); | |
5214 | } | |
230d793d | 5215 | |
8079805d RK |
5216 | /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation, |
5217 | and X being a REG or (subreg (reg)), we may be able to convert this to | |
663522cb | 5218 | (set (subreg:m2 x) (op)). |
df62f951 | 5219 | |
8079805d RK |
5220 | We can always do this if M1 is narrower than M2 because that means that |
5221 | we only care about the low bits of the result. | |
df62f951 | 5222 | |
8079805d | 5223 | However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot |
9ec36da5 | 5224 | perform a narrower operation than requested since the high-order bits will |
8079805d RK |
5225 | be undefined. On machines where it is defined, this transformation is safe
5226 | as long as M1 and M2 have the same number of words. */ | |
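/* An illustrative instance, with hypothetical modes M1 == QImode and
   M2 == SImode: the source fragment

       char x = (char) (a + b);     with 32-bit a and b

   yields (set x (subreg:QI (plus:SI a b) 0)); only the low 8 bits of
   the sum are live, so it is safe to rewrite this as

       (set (subreg:SI x) (plus:SI a b))

   and let the SImode addition write X's containing word.  */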
663522cb | 5227 | |
8079805d RK |
5228 | if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src) |
5229 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o' | |
5230 | && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1)) | |
5231 | / UNITS_PER_WORD) | |
5232 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))) | |
5233 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)) | |
8baf60bb | 5234 | #ifndef WORD_REGISTER_OPERATIONS |
8079805d RK |
5235 | && (GET_MODE_SIZE (GET_MODE (src)) |
5236 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))) | |
df62f951 | 5237 | #endif |
cff9f8d5 | 5238 | #ifdef CANNOT_CHANGE_MODE_CLASS |
f507a070 | 5239 | && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER |
cff9f8d5 AH |
5240 | && REG_CANNOT_CHANGE_MODE_P (REGNO (dest), |
5241 | GET_MODE (src), | |
5242 | GET_MODE (SUBREG_REG (src)))) | |
663522cb | 5243 | #endif |
8079805d RK |
5244 | && (GET_CODE (dest) == REG |
5245 | || (GET_CODE (dest) == SUBREG | |
5246 | && GET_CODE (SUBREG_REG (dest)) == REG))) | |
5247 | { | |
5248 | SUBST (SET_DEST (x), | |
5249 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)), | |
5250 | dest)); | |
5251 | SUBST (SET_SRC (x), SUBREG_REG (src)); | |
5252 | ||
5253 | src = SET_SRC (x), dest = SET_DEST (x); | |
5254 | } | |
df62f951 | 5255 | |
8c1d52a3 KH |
5256 | #ifdef HAVE_cc0 |
5257 | /* If we have (set (cc0) (subreg ...)), we try to remove the subreg | |
5258 | in SRC. */ | |
5259 | if (dest == cc0_rtx | |
5260 | && GET_CODE (src) == SUBREG | |
5261 | && subreg_lowpart_p (src) | |
5262 | && (GET_MODE_BITSIZE (GET_MODE (src)) | |
5263 | < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src))))) | |
5264 | { | |
5265 | rtx inner = SUBREG_REG (src); | |
5266 | enum machine_mode inner_mode = GET_MODE (inner); | |
5267 | ||
5268 | /* Here we make sure that we don't have a sign bit on. */ | |
5269 | if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT | |
5270 | && (nonzero_bits (inner, inner_mode) | |
5271 | < ((unsigned HOST_WIDE_INT) 1 | |
ff076520 | 5272 | << (GET_MODE_BITSIZE (GET_MODE (src)) - 1)))) |
8c1d52a3 KH |
5273 | { |
5274 | SUBST (SET_SRC (x), inner); | |
5275 | src = SET_SRC (x); | |
5276 | } | |
5277 | } | |
5278 | #endif | |
5279 | ||
8baf60bb | 5280 | #ifdef LOAD_EXTEND_OP |
8079805d RK |
5281 | /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this |
5282 | would require a paradoxical subreg. Replace the subreg with a | |
0f41302f | 5283 | zero_extend to avoid the reload that would otherwise be required. */ |
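/* An illustrative instance: on a target whose QImode loads zero-extend,
   i.e. LOAD_EXTEND_OP (QImode) == ZERO_EXTEND, the paradoxical

       (set FOO:SI (subreg:SI (mem:QI BAR) 0))

   is rewritten as (set FOO:SI (zero_extend:SI (mem:QI BAR))), which
   matches the target's load pattern directly.  */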
8079805d RK |
5284 | |
5285 | if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src) | |
5286 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL | |
ddef6bc7 | 5287 | && SUBREG_BYTE (src) == 0 |
8079805d RK |
5288 | && (GET_MODE_SIZE (GET_MODE (src)) |
5289 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))) | |
5290 | && GET_CODE (SUBREG_REG (src)) == MEM) | |
5291 | { | |
5292 | SUBST (SET_SRC (x), | |
f1c6ba8b | 5293 | gen_rtx (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))), |
ddef6bc7 | 5294 | GET_MODE (src), SUBREG_REG (src))); |
8079805d RK |
5295 | |
5296 | src = SET_SRC (x); | |
5297 | } | |
230d793d RS |
5298 | #endif |
5299 | ||
8079805d RK |
5300 | /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we |
5301 | are comparing an item known to be 0 or -1 against 0, use a logical | |
5302 | operation instead. Check for one of the arms being an IOR of the other | |
5303 | arm with some value. We compute three terms to be IOR'ed together. In | |
5304 | practice, at most two will be nonzero. Then we do the IOR's. */ | |
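/* Sketch of the rewrite: with A known to be 0 or all-ones,

       r = (a != 0) ? t : f;

   becomes the branch-free

       r = (a & t) | (~a & f);

   and when one arm is an IOR of the other, say (a != 0) ? (f | v) : f,
   the common F is factored out as TERM1, leaving f | (a & v).  */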
5305 | ||
5306 | if (GET_CODE (dest) != PC | |
5307 | && GET_CODE (src) == IF_THEN_ELSE | |
36b8d792 | 5308 | && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT |
8079805d RK |
5309 | && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE) |
5310 | && XEXP (XEXP (src, 0), 1) == const0_rtx | |
6dd49058 | 5311 | && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0)) |
ea414472 DE |
5312 | #ifdef HAVE_conditional_move |
5313 | && ! can_conditionally_move_p (GET_MODE (src)) | |
5314 | #endif | |
8079805d RK |
5315 | && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0), |
5316 | GET_MODE (XEXP (XEXP (src, 0), 0))) | |
5317 | == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0)))) | |
5318 | && ! side_effects_p (src)) | |
5319 | { | |
d6edb99e | 5320 | rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE |
8079805d | 5321 | ? XEXP (src, 1) : XEXP (src, 2)); |
d6edb99e | 5322 | rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE |
8079805d RK |
5323 | ? XEXP (src, 2) : XEXP (src, 1)); |
5324 | rtx term1 = const0_rtx, term2, term3; | |
5325 | ||
d6edb99e ZW |
5326 | if (GET_CODE (true_rtx) == IOR |
5327 | && rtx_equal_p (XEXP (true_rtx, 0), false_rtx)) | |
5328 | term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
5329 | else if (GET_CODE (true_rtx) == IOR | |
5330 | && rtx_equal_p (XEXP (true_rtx, 1), false_rtx)) | |
5331 | term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
5332 | else if (GET_CODE (false_rtx) == IOR | |
5333 | && rtx_equal_p (XEXP (false_rtx, 0), true_rtx)) | |
5334 | term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
5335 | else if (GET_CODE (false_rtx) == IOR | |
5336 | && rtx_equal_p (XEXP (false_rtx, 1), true_rtx)) | |
5337 | term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
5338 | ||
5339 | term2 = gen_binary (AND, GET_MODE (src), | |
5340 | XEXP (XEXP (src, 0), 0), true_rtx); | |
8079805d | 5341 | term3 = gen_binary (AND, GET_MODE (src), |
f1c6ba8b RK |
5342 | simplify_gen_unary (NOT, GET_MODE (src), |
5343 | XEXP (XEXP (src, 0), 0), | |
5344 | GET_MODE (src)), | |
d6edb99e | 5345 | false_rtx); |
8079805d RK |
5346 | |
5347 | SUBST (SET_SRC (x), | |
5348 | gen_binary (IOR, GET_MODE (src), | |
5349 | gen_binary (IOR, GET_MODE (src), term1, term2), | |
5350 | term3)); | |
5351 | ||
5352 | src = SET_SRC (x); | |
5353 | } | |
230d793d | 5354 | |
246e00f2 RK |
5355 | /* If either SRC or DEST is a CLOBBER of (const_int 0), make this |
5356 | whole thing fail. */ | |
5357 | if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx) | |
5358 | return src; | |
5359 | else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx) | |
5360 | return dest; | |
5361 | else | |
5362 | /* Convert this into a field assignment operation, if possible. */ | |
5363 | return make_field_assignment (x); | |
8079805d RK |
5364 | } |
5365 | \f | |
5366 | /* Simplify X, an AND, IOR, or XOR operation, and return the simplified
5367 | result. LAST is nonzero if this is the last retry. */ | |
5368 | ||
5369 | static rtx | |
5370 | simplify_logical (x, last) | |
5371 | rtx x; | |
5372 | int last; | |
5373 | { | |
5374 | enum machine_mode mode = GET_MODE (x); | |
5375 | rtx op0 = XEXP (x, 0); | |
5376 | rtx op1 = XEXP (x, 1); | |
9a915772 | 5377 | rtx reversed; |
8079805d RK |
5378 | |
5379 | switch (GET_CODE (x)) | |
5380 | { | |
230d793d | 5381 | case AND: |
663522cb | 5382 | /* Convert (A ^ B) & A to A & (~B) since the latter is often a single |
8079805d RK |
5383 | insn (and may simplify more). */ |
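/* The identity holds bit by bit: where A is 0 both sides are 0; where
   A is 1 each side gives the complement of B.  E.g. with A == 0xf0
   and B == 0x3c, (0xf0 ^ 0x3c) & 0xf0 and 0xf0 & ~0x3c are both
   0xc0.  */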
5384 | if (GET_CODE (op0) == XOR | |
5385 | && rtx_equal_p (XEXP (op0, 0), op1) | |
5386 | && ! side_effects_p (op1)) | |
0c1c8ea6 | 5387 | x = gen_binary (AND, mode, |
f1c6ba8b RK |
5388 | simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode), |
5389 | op1); | |
8079805d RK |
5390 | |
5391 | if (GET_CODE (op0) == XOR | |
5392 | && rtx_equal_p (XEXP (op0, 1), op1) | |
5393 | && ! side_effects_p (op1)) | |
0c1c8ea6 | 5394 | x = gen_binary (AND, mode, |
f1c6ba8b RK |
5395 | simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode), |
5396 | op1); | |
8079805d | 5397 | |
663522cb | 5398 | /* Similarly for (~(A ^ B)) & A. */ |
8079805d RK |
5399 | if (GET_CODE (op0) == NOT |
5400 | && GET_CODE (XEXP (op0, 0)) == XOR | |
5401 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1) | |
5402 | && ! side_effects_p (op1)) | |
5403 | x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1); | |
5404 | ||
5405 | if (GET_CODE (op0) == NOT | |
5406 | && GET_CODE (XEXP (op0, 0)) == XOR | |
5407 | && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1) | |
5408 | && ! side_effects_p (op1)) | |
5409 | x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1); | |
5410 | ||
2e8f9abf DM |
5411 | /* We can call simplify_and_const_int only if we don't lose |
5412 | any (sign) bits when converting INTVAL (op1) to | |
5413 | "unsigned HOST_WIDE_INT". */ | |
5414 | if (GET_CODE (op1) == CONST_INT | |
5415 | && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
5416 | || INTVAL (op1) > 0)) | |
230d793d | 5417 | { |
8079805d | 5418 | x = simplify_and_const_int (x, mode, op0, INTVAL (op1)); |
230d793d RS |
5419 | |
5420 | /* If we have (ior (and X C1) C2) and the next restart would be
5421 | the last, simplify this by making C1 as small as possible | |
0f41302f | 5422 | and then exit. */ |
8079805d RK |
5423 | if (last |
5424 | && GET_CODE (x) == IOR && GET_CODE (op0) == AND | |
5425 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5426 | && GET_CODE (op1) == CONST_INT) | |
5427 | return gen_binary (IOR, mode, | |
5428 | gen_binary (AND, mode, XEXP (op0, 0), | |
5429 | GEN_INT (INTVAL (XEXP (op0, 1)) | |
663522cb | 5430 | & ~INTVAL (op1))), op1); |
230d793d RS |
5431 | |
5432 | if (GET_CODE (x) != AND) | |
8079805d | 5433 | return x; |
0e32506c | 5434 | |
663522cb | 5435 | if (GET_RTX_CLASS (GET_CODE (x)) == 'c' |
0e32506c RK |
5436 | || GET_RTX_CLASS (GET_CODE (x)) == '2') |
5437 | op0 = XEXP (x, 0), op1 = XEXP (x, 1); | |
230d793d RS |
5438 | } |
5439 | ||
5440 | /* Convert (A | B) & A to A. */ | |
8079805d RK |
5441 | if (GET_CODE (op0) == IOR |
5442 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
5443 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
5444 | && ! side_effects_p (XEXP (op0, 0)) | |
5445 | && ! side_effects_p (XEXP (op0, 1))) | |
5446 | return op1; | |
230d793d | 5447 | |
d0ab8cd3 | 5448 | /* In the following group of tests (and those in case IOR below), |
230d793d RS |
5449 | we start with some combination of logical operations and apply |
5450 | the distributive law followed by the inverse distributive law. | |
5451 | Most of the time, this results in no change. However, if some of | |
5452 | the operands are the same or inverses of each other, simplifications | |
5453 | will result. | |
5454 | ||
5455 | For example, (and (ior A B) (not B)) can occur as the result of | |
5456 | expanding a bit field assignment. When we apply the distributive | |
5457 | law to this, we get (ior (and A (not B)) (and B (not B))),
663522cb | 5458 | which then simplifies to (and A (not B)).
230d793d | 5459 | |
8079805d | 5460 | If we have (and (ior A B) C), apply the distributive law and then |
230d793d RS |
5461 | the inverse distributive law to see if things simplify. */ |
5462 | ||
8079805d | 5463 | if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR) |
230d793d RS |
5464 | { |
5465 | x = apply_distributive_law | |
8079805d RK |
5466 | (gen_binary (GET_CODE (op0), mode, |
5467 | gen_binary (AND, mode, XEXP (op0, 0), op1), | |
3749f4ca BS |
5468 | gen_binary (AND, mode, XEXP (op0, 1), |
5469 | copy_rtx (op1)))); | |
230d793d | 5470 | if (GET_CODE (x) != AND) |
8079805d | 5471 | return x; |
230d793d RS |
5472 | } |
5473 | ||
8079805d RK |
5474 | if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR) |
5475 | return apply_distributive_law | |
5476 | (gen_binary (GET_CODE (op1), mode, | |
5477 | gen_binary (AND, mode, XEXP (op1, 0), op0), | |
3749f4ca BS |
5478 | gen_binary (AND, mode, XEXP (op1, 1), |
5479 | copy_rtx (op0)))); | |
230d793d RS |
5480 | |
5481 | /* Similarly, taking advantage of the fact that | |
5482 | (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */ | |
5483 | ||
8079805d RK |
5484 | if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR) |
5485 | return apply_distributive_law | |
5486 | (gen_binary (XOR, mode, | |
5487 | gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)), | |
3749f4ca BS |
5488 | gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)), |
5489 | XEXP (op1, 1)))); | |
663522cb | 5490 | |
8079805d RK |
5491 | else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR) |
5492 | return apply_distributive_law | |
5493 | (gen_binary (XOR, mode, | |
5494 | gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)), | |
3749f4ca | 5495 | gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)), XEXP (op0, 1)))); |
230d793d RS |
5496 | break; |
5497 | ||
5498 | case IOR: | |
951553af | 5499 | /* (ior A C) is C if all bits of A that might be nonzero are on in C. */ |
8079805d | 5500 | if (GET_CODE (op1) == CONST_INT |
ac49a949 | 5501 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
663522cb | 5502 | && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0) |
8079805d | 5503 | return op1; |
d0ab8cd3 | 5504 | |
230d793d | 5505 | /* Convert (A & B) | A to A. */ |
8079805d RK |
5506 | if (GET_CODE (op0) == AND |
5507 | && (rtx_equal_p (XEXP (op0, 0), op1) | |
5508 | || rtx_equal_p (XEXP (op0, 1), op1)) | |
5509 | && ! side_effects_p (XEXP (op0, 0)) | |
5510 | && ! side_effects_p (XEXP (op0, 1))) | |
5511 | return op1; | |
230d793d RS |
5512 | |
5513 | /* If we have (ior (and A B) C), apply the distributive law and then | |
5514 | the inverse distributive law to see if things simplify. */ | |
5515 | ||
8079805d | 5516 | if (GET_CODE (op0) == AND) |
230d793d RS |
5517 | { |
5518 | x = apply_distributive_law | |
5519 | (gen_binary (AND, mode, | |
8079805d | 5520 | gen_binary (IOR, mode, XEXP (op0, 0), op1), |
3749f4ca BS |
5521 | gen_binary (IOR, mode, XEXP (op0, 1), |
5522 | copy_rtx (op1)))); | |
230d793d RS |
5523 | |
5524 | if (GET_CODE (x) != IOR) | |
8079805d | 5525 | return x; |
230d793d RS |
5526 | } |
5527 | ||
8079805d | 5528 | if (GET_CODE (op1) == AND) |
230d793d RS |
5529 | { |
5530 | x = apply_distributive_law | |
5531 | (gen_binary (AND, mode, | |
8079805d | 5532 | gen_binary (IOR, mode, XEXP (op1, 0), op0), |
3749f4ca BS |
5533 | gen_binary (IOR, mode, XEXP (op1, 1), |
5534 | copy_rtx (op0)))); | |
230d793d RS |
5535 | |
5536 | if (GET_CODE (x) != IOR) | |
8079805d | 5537 | return x; |
230d793d RS |
5538 | } |
5539 | ||
5540 | /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the | |
5541 | mode size to (rotate A CX). */ | |
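/* An illustrative instance, assuming a 32-bit mode: the common
   rotation idiom

       (x << 8) | (x >> 24)         with x unsigned

   has CX + CY == 8 + 24 == 32 and so becomes
   (rotate:SI x (const_int 8)), a single insn on most targets.  */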
5542 | ||
8079805d RK |
5543 | if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT) |
5544 | || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT)) | |
5545 | && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)) | |
5546 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5547 | && GET_CODE (XEXP (op1, 1)) == CONST_INT | |
5548 | && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1)) | |
230d793d | 5549 | == GET_MODE_BITSIZE (mode))) |
38a448ca RH |
5550 | return gen_rtx_ROTATE (mode, XEXP (op0, 0), |
5551 | (GET_CODE (op0) == ASHIFT | |
5552 | ? XEXP (op0, 1) : XEXP (op1, 1))); | |
230d793d | 5553 | |
71923da7 RK |
5554 | /* If OP0 is (ashiftrt (plus ...) C), it might actually be |
5555 | a (sign_extend (plus ...)). If so, and if OP1 is a CONST_INT and the PLUS
5556 | does not affect any of the bits in OP1, it can really be done | |
5557 | as a PLUS and we can associate. We do this by seeing if OP1 | |
5558 | can be safely shifted left C bits. */ | |
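/* An illustrative instance with hypothetical constants: in
   ((x + 8) >> 2) | 1 the mask is 1 << 2 == 4, and if bit 2 of
   (x + 8) is known to be zero the IOR commutes with the shift,
   giving (x + (8 | 4)) >> 2, i.e. (x + 12) >> 2, so the constant
   is folded into the PLUS.  */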
5559 | if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT | |
5560 | && GET_CODE (XEXP (op0, 0)) == PLUS | |
5561 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
5562 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5563 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT) | |
5564 | { | |
5565 | int count = INTVAL (XEXP (op0, 1)); | |
5566 | HOST_WIDE_INT mask = INTVAL (op1) << count; | |
5567 | ||
5568 | if (mask >> count == INTVAL (op1) | |
5569 | && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0) | |
5570 | { | |
5571 | SUBST (XEXP (XEXP (op0, 0), 1), | |
5572 | GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask)); | |
5573 | return op0; | |
5574 | } | |
5575 | } | |
230d793d RS |
5576 | break; |
5577 | ||
5578 | case XOR: | |
79e8185c JH |
5579 | /* If we are XORing two things that have no bits in common, |
5580 | convert them into an IOR. This helps to detect rotation encoded | |
5581 | using those methods and possibly other simplifications. */ | |
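/* An illustrative instance, assuming a 32-bit mode: in

       (x << 8) ^ (x >> 24)         with x unsigned

   the operands share no nonzero bits, so the XOR becomes an IOR,
   which the rotate detection in case IOR above can then match.  */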
5582 | ||
5583 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
5584 | && (nonzero_bits (op0, mode) | |
5585 | & nonzero_bits (op1, mode)) == 0) | |
5586 | return (gen_binary (IOR, mode, op0, op1)); | |
5587 | ||
230d793d RS |
5588 | /* Convert (XOR (NOT x) (NOT y)) to (XOR x y). |
5589 | Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for | |
5590 | (NOT y). */ | |
5591 | { | |
5592 | int num_negated = 0; | |
230d793d | 5593 | |
8079805d RK |
5594 | if (GET_CODE (op0) == NOT) |
5595 | num_negated++, op0 = XEXP (op0, 0); | |
5596 | if (GET_CODE (op1) == NOT) | |
5597 | num_negated++, op1 = XEXP (op1, 0); | |
230d793d RS |
5598 | |
5599 | if (num_negated == 2) | |
5600 | { | |
8079805d RK |
5601 | SUBST (XEXP (x, 0), op0); |
5602 | SUBST (XEXP (x, 1), op1); | |
230d793d RS |
5603 | } |
5604 | else if (num_negated == 1) | |
f1c6ba8b RK |
5605 | return |
5606 | simplify_gen_unary (NOT, mode, gen_binary (XOR, mode, op0, op1), | |
5607 | mode); | |
230d793d RS |
5608 | } |
5609 | ||
5610 | /* Convert (xor (and A B) B) to (and (not A) B). The latter may | |
5611 | correspond to a machine insn or result in further simplifications | |
5612 | if B is a constant. */ | |
5613 | ||
8079805d RK |
5614 | if (GET_CODE (op0) == AND |
5615 | && rtx_equal_p (XEXP (op0, 1), op1) | |
5616 | && ! side_effects_p (op1)) | |
0c1c8ea6 | 5617 | return gen_binary (AND, mode, |
f1c6ba8b | 5618 | simplify_gen_unary (NOT, mode, XEXP (op0, 0), mode), |
8079805d | 5619 | op1); |
230d793d | 5620 | |
8079805d RK |
5621 | else if (GET_CODE (op0) == AND |
5622 | && rtx_equal_p (XEXP (op0, 0), op1) | |
5623 | && ! side_effects_p (op1)) | |
0c1c8ea6 | 5624 | return gen_binary (AND, mode, |
f1c6ba8b | 5625 | simplify_gen_unary (NOT, mode, XEXP (op0, 1), mode), |
8079805d | 5626 | op1); |
230d793d | 5627 | |
230d793d | 5628 | /* (xor (comparison foo bar) (const_int 1)) can become the reversed |
0802d516 RK |
5629 | comparison if STORE_FLAG_VALUE is 1. */ |
5630 | if (STORE_FLAG_VALUE == 1 | |
5631 | && op1 == const1_rtx | |
8079805d | 5632 | && GET_RTX_CLASS (GET_CODE (op0)) == '<' |
9a915772 JH |
5633 | && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0), |
5634 | XEXP (op0, 1)))) | |
5635 | return reversed; | |
500c518b RK |
5636 | |
5637 | /* (lshiftrt foo C) where C is the number of bits in FOO minus 1 | |
5638 | is (lt foo (const_int 0)), so we can perform the above | |
0802d516 | 5639 | simplification if STORE_FLAG_VALUE is 1. */ |
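/* An illustrative instance, assuming a 32-bit mode and
   STORE_FLAG_VALUE == 1: (x >> 31) is 1 exactly when X is negative
   (the shift being logical), so ((x >> 31) ^ 1) is rewritten as
   (ge:SI x (const_int 0)).  */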
500c518b | 5640 | |
0802d516 RK |
5641 | if (STORE_FLAG_VALUE == 1 |
5642 | && op1 == const1_rtx | |
8079805d RK |
5643 | && GET_CODE (op0) == LSHIFTRT |
5644 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5645 | && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1) | |
f1c6ba8b | 5646 | return gen_rtx_GE (mode, XEXP (op0, 0), const0_rtx); |
230d793d RS |
5647 | |
5648 | /* (xor (comparison foo bar) (const_int sign-bit)) | |
5649 | when STORE_FLAG_VALUE is the sign bit. */ | |
5f4f0e22 | 5650 | if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
0802d516 | 5651 | && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode)) |
e51712db | 5652 | == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1)) |
8079805d RK |
5653 | && op1 == const_true_rtx |
5654 | && GET_RTX_CLASS (GET_CODE (op0)) == '<' | |
9a915772 JH |
5655 | && (reversed = reversed_comparison (op0, mode, XEXP (op0, 0), |
5656 | XEXP (op0, 1)))) | |
5657 | return reversed; | |
0918eca0 | 5658 | |
230d793d | 5659 | break; |
e9a25f70 JL |
5660 | |
5661 | default: | |
5662 | abort (); | |
230d793d RS |
5663 | } |
5664 | ||
5665 | return x; | |
5666 | } | |
5667 | \f | |
5668 | /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound | |
5669 | operations" because they can be replaced with two more basic operations. | |
5670 | ZERO_EXTEND is also considered "compound" because it can be replaced with | |
5671 | an AND operation, which is simpler, though only one operation. | |
5672 | ||
5673 | The function expand_compound_operation is called with an rtx expression | |
663522cb | 5674 | and will convert it to the appropriate shifts and AND operations, |
230d793d RS |
5675 | simplifying at each stage. |
5676 | ||
5677 | The function make_compound_operation is called to convert an expression | |
5678 | consisting of shifts and ANDs into the equivalent compound expression. | |
5679 | It is the inverse of this function, loosely speaking. */ | |
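/* For instance (modes assumed 32-bit), (zero_extend:SI (reg:QI X))
   expands to the single operation (and:SI (subreg:SI X) (const_int 255)),
   and make_compound_operation turns such an AND back into the
   ZERO_EXTEND form.  */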
5680 | ||
5681 | static rtx | |
5682 | expand_compound_operation (x) | |
5683 | rtx x; | |
5684 | { | |
770ae6cc | 5685 | unsigned HOST_WIDE_INT pos = 0, len; |
230d793d | 5686 | int unsignedp = 0; |
770ae6cc | 5687 | unsigned int modewidth; |
230d793d RS |
5688 | rtx tem; |
5689 | ||
5690 | switch (GET_CODE (x)) | |
5691 | { | |
5692 | case ZERO_EXTEND: | |
5693 | unsignedp = 1; | |
5694 | case SIGN_EXTEND: | |
75473182 RS |
5695 | /* We can't necessarily use a const_int for a multiword mode; |
5696 | it depends on implicitly extending the value. | |
5697 | Since we don't know the right way to extend it, | |
5698 | we can't tell whether the implicit way is right. | |
5699 | ||
5700 | Even for a mode that is no wider than a const_int, | |
5701 | we can't win, because we need to sign extend one of its bits through | |
5702 | the rest of it, and we don't know which bit. */ | |
230d793d | 5703 | if (GET_CODE (XEXP (x, 0)) == CONST_INT) |
75473182 | 5704 | return x; |
230d793d | 5705 | |
8079805d RK |
5706 | /* Return if (subreg:MODE FROM 0) is not a safe replacement for |
5707 | (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM | |
5708 | because (SUBREG (MEM...)) is guaranteed to cause the MEM to be | |
5709 | reloaded. If not for that, MEM's would very rarely be safe. | |
5710 | ||
5711 | Reject MODEs bigger than a word, because we might not be able | |
5712 | to reference a two-register group starting with an arbitrary register | |
5713 | (and currently gen_lowpart might crash for a SUBREG). */ | |
663522cb | 5714 | |
8079805d | 5715 | if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD) |
230d793d RS |
5716 | return x; |
5717 | ||
71012d97 GK |
5718 | /* Reject MODEs that aren't scalar integers because turning vector |
5719 | or complex modes into shifts causes problems. */ | |
5720 | ||
5721 | if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0)))) | |
5722 | return x; | |
5723 | ||
230d793d RS |
5724 | len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))); |
5725 | /* If the inner object has VOIDmode (the only way this can happen | |
e0a2f705 | 5726 | is if it is an ASM_OPERANDS), we can't do anything since we don't |
230d793d RS |
5727 | know how much masking to do. */ |
5728 | if (len == 0) | |
5729 | return x; | |
5730 | ||
5731 | break; | |
5732 | ||
5733 | case ZERO_EXTRACT: | |
5734 | unsignedp = 1; | |
5735 | case SIGN_EXTRACT: | |
5736 | /* If the operand is a CLOBBER, just return it. */ | |
5737 | if (GET_CODE (XEXP (x, 0)) == CLOBBER) | |
5738 | return XEXP (x, 0); | |
5739 | ||
5740 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
5741 | || GET_CODE (XEXP (x, 2)) != CONST_INT | |
5742 | || GET_MODE (XEXP (x, 0)) == VOIDmode) | |
5743 | return x; | |
5744 | ||
71012d97 GK |
5745 | /* Reject MODEs that aren't scalar integers because turning vector |
5746 | or complex modes into shifts causes problems. */ | |
5747 | ||
5748 | if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0)))) | |
5749 | return x; | |
5750 | ||
230d793d RS |
5751 | len = INTVAL (XEXP (x, 1)); |
5752 | pos = INTVAL (XEXP (x, 2)); | |
5753 | ||
5754 | /* If this goes outside the object being extracted, replace the object | |
5755 | with a (use (mem ...)) construct that only combine understands | |
5756 | and is used only for this purpose. */ | |
5757 | if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))) | |
38a448ca | 5758 | SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0))); |
230d793d | 5759 | |
f76b9db2 ILT |
5760 | if (BITS_BIG_ENDIAN) |
5761 | pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos; | |
5762 | ||
230d793d RS |
5763 | break; |
5764 | ||
5765 | default: | |
5766 | return x; | |
5767 | } | |
0f808b6f JH |
5768 | /* Convert sign extension to zero extension, if we know that the high |
5769 | bit is not set, as this is easier to optimize. It will be converted | |
5770 | back to the cheaper alternative in make_extraction. */
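/* An illustrative instance: if X is a QImode value whose nonzero bits
   all fit in 0x7f, i.e. its sign bit is known clear, then
   (sign_extend:SI x) and (zero_extend:SI x) agree on every input, so
   the ZERO_EXTEND form is used for the intermediate work here.  */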
5771 | if (GET_CODE (x) == SIGN_EXTEND | |
5772 | && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5773 | && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0))) | |
663522cb | 5774 | & ~(((unsigned HOST_WIDE_INT) |
0f808b6f JH |
5775 | GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) |
5776 | >> 1)) | |
5777 | == 0))) | |
5778 | { | |
5779 | rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0)); | |
5780 | return expand_compound_operation (temp); | |
5781 | } | |
230d793d | 5782 | |
0f13a422 ILT |
5783 | /* We can optimize some special cases of ZERO_EXTEND. */ |
5784 | if (GET_CODE (x) == ZERO_EXTEND) | |
5785 | { | |
5786 | /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we | |
5787 | know that the last value didn't have any inappropriate bits | |
5788 | set. */ | |
5789 | if (GET_CODE (XEXP (x, 0)) == TRUNCATE | |
5790 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x) | |
5791 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5792 | && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x)) | |
663522cb | 5793 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5794 | return XEXP (XEXP (x, 0), 0); |
5795 | ||
5796 | /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */ | |
5797 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
5798 | && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x) | |
5799 | && subreg_lowpart_p (XEXP (x, 0)) | |
5800 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
5801 | && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x)) | |
663522cb | 5802 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5803 | return SUBREG_REG (XEXP (x, 0)); |
5804 | ||
5805 | /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo | |
5806 | is a comparison and STORE_FLAG_VALUE permits. This is like | |
5807 | the first case, but it works even when GET_MODE (x) is larger | |
5808 | than HOST_WIDE_INT. */ | |
5809 | if (GET_CODE (XEXP (x, 0)) == TRUNCATE | |
5810 | && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x) | |
5811 | && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<' | |
5812 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
5813 | <= HOST_BITS_PER_WIDE_INT) | |
23190837 | 5814 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE |
663522cb | 5815 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5816 | return XEXP (XEXP (x, 0), 0); |
5817 | ||
5818 | /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */ | |
5819 | if (GET_CODE (XEXP (x, 0)) == SUBREG | |
5820 | && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x) | |
5821 | && subreg_lowpart_p (XEXP (x, 0)) | |
5822 | && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<' | |
5823 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) | |
5824 | <= HOST_BITS_PER_WIDE_INT) | |
5825 | && ((HOST_WIDE_INT) STORE_FLAG_VALUE | |
663522cb | 5826 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
0f13a422 ILT |
5827 | return SUBREG_REG (XEXP (x, 0)); |
5828 | ||
0f13a422 ILT |
5829 | } |
5830 | ||
230d793d RS |
5831 | /* If we reach here, we want to return a pair of shifts. The inner |
5832 | shift is a left shift of BITSIZE - POS - LEN bits. The outer | |
5833 | shift is a right shift of BITSIZE - LEN bits. It is arithmetic or | |
5834 | logical depending on the value of UNSIGNEDP. | |
5835 | ||
5836 | If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be | |
5837 | converted into an AND of a shift. | |
5838 | ||
5839 | We must check for the case where the left shift would have a negative | |
5840 | count. This can happen in a case like (x >> 31) & 255 on machines | |
5841 | that can't shift by a constant. On those machines, we would first | |
663522cb | 5842 | combine the shift with the AND to produce a variable-position |
230d793d RS |
5843 | extraction. Then the constant of 31 would be substituted in to produce |
5844 | such a position. */
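/* An illustrative instance, assuming a 32-bit mode: a signed field of
   LEN == 8 bits at POS == 4 is extracted as

       (x << 20) >> 24          with an arithmetic right shift,

   i.e. ASHIFT by BITSIZE - POS - LEN == 20 followed by ASHIFTRT by
   BITSIZE - LEN == 24; the unsigned case instead takes the AND branch
   below, producing (x >> 4) & 0xff.  */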
5845 | ||
5846 | modewidth = GET_MODE_BITSIZE (GET_MODE (x)); | |
770ae6cc | 5847 | if (modewidth + len >= pos) |
5f4f0e22 | 5848 | tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT, |
230d793d | 5849 | GET_MODE (x), |
5f4f0e22 CH |
5850 | simplify_shift_const (NULL_RTX, ASHIFT, |
5851 | GET_MODE (x), | |
230d793d RS |
5852 | XEXP (x, 0), |
5853 | modewidth - pos - len), | |
5854 | modewidth - len); | |
5855 | ||
5f4f0e22 CH |
5856 | else if (unsignedp && len < HOST_BITS_PER_WIDE_INT) |
5857 | tem = simplify_and_const_int (NULL_RTX, GET_MODE (x), | |
5858 | simplify_shift_const (NULL_RTX, LSHIFTRT, | |
230d793d RS |
5859 | GET_MODE (x), |
5860 | XEXP (x, 0), pos), | |
5f4f0e22 | 5861 | ((HOST_WIDE_INT) 1 << len) - 1); |
230d793d RS |
5862 | else |
5863 | /* Any other cases we can't handle. */ | |
5864 | return x; | |
230d793d RS |
5865 | |
5866 | /* If we couldn't do this for some reason, return the original | |
5867 | expression. */ | |
5868 | if (GET_CODE (tem) == CLOBBER) | |
5869 | return x; | |
5870 | ||
5871 | return tem; | |
5872 | } | |
5873 | \f | |
5874 | /* X is a SET which contains an assignment of one object into | |
5875 | a part of another (such as a bit-field assignment, STRICT_LOW_PART, | |
5876 | or certain SUBREGS). If possible, convert it into a series of | |
5877 | logical operations. | |
5878 | ||
5879 | We half-heartedly support variable positions, but do not at all | |
5880 | support variable lengths. */ | |
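/* Sketch of the logical form built below, for constant POS and LEN
   with MASK == (1 << LEN) - 1: the field store

       inner.field = src;

   becomes

       inner = (inner & ~(MASK << POS)) | ((src & MASK) << POS);

   which is exactly the IOR/AND/NOT combination assembled at the
   bottom of the loop.  */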
5881 | ||
5882 | static rtx | |
5883 | expand_field_assignment (x) | |
5884 | rtx x; | |
5885 | { | |
5886 | rtx inner; | |
0f41302f | 5887 | rtx pos; /* Always counts from low bit. */ |
230d793d RS |
5888 | int len; |
5889 | rtx mask; | |
5890 | enum machine_mode compute_mode; | |
5891 | ||
5892 | /* Loop until we find something we can't simplify. */ | |
5893 | while (1) | |
5894 | { | |
5895 | if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART | |
5896 | && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG) | |
5897 | { | |
5898 | inner = SUBREG_REG (XEXP (SET_DEST (x), 0)); | |
5899 | len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))); | |
47073a38 | 5900 | pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0))); |
230d793d RS |
5901 | } |
5902 | else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT | |
5903 | && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT) | |
5904 | { | |
5905 | inner = XEXP (SET_DEST (x), 0); | |
5906 | len = INTVAL (XEXP (SET_DEST (x), 1)); | |
5907 | pos = XEXP (SET_DEST (x), 2); | |
5908 | ||
5909 | /* If the position is constant and spans the width of INNER, | |
5910 | surround INNER with a USE to indicate this. */ | |
5911 | if (GET_CODE (pos) == CONST_INT | |
5912 | && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner))) | |
38a448ca | 5913 | inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner); |
230d793d | 5914 | |
f76b9db2 ILT |
5915 | if (BITS_BIG_ENDIAN) |
5916 | { | |
5917 | if (GET_CODE (pos) == CONST_INT) | |
5918 | pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len | |
5919 | - INTVAL (pos)); | |
5920 | else if (GET_CODE (pos) == MINUS | |
5921 | && GET_CODE (XEXP (pos, 1)) == CONST_INT | |
5922 | && (INTVAL (XEXP (pos, 1)) | |
5923 | == GET_MODE_BITSIZE (GET_MODE (inner)) - len)) | |
5924 | /* If position is ADJUST - X, new position is X. */ | |
5925 | pos = XEXP (pos, 0); | |
5926 | else | |
5927 | pos = gen_binary (MINUS, GET_MODE (pos), | |
5928 | GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) | |
5929 | - len), | |
5930 | pos); | |
5931 | } | |
230d793d RS |
5932 | } |
5933 | ||
5934 | /* A SUBREG between two modes that occupy the same number of words
5935 | can be done by moving the SUBREG to the source. */ | |
5936 | else if (GET_CODE (SET_DEST (x)) == SUBREG | |
b1e9c8a9 AO |
5937 | /* We need SUBREGs to compute nonzero_bits properly. */ |
5938 | && nonzero_sign_valid | |
230d793d RS |
5939 | && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x))) |
5940 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
5941 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x)))) | |
5942 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))) | |
5943 | { | |
38a448ca | 5944 | x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)), |
c5c76735 JL |
5945 | gen_lowpart_for_combine |
5946 | (GET_MODE (SUBREG_REG (SET_DEST (x))), | |
5947 | SET_SRC (x))); | |
230d793d RS |
5948 | continue; |
5949 | } | |
5950 | else | |
5951 | break; | |
5952 | ||
5953 | while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner)) | |
5954 | inner = SUBREG_REG (inner); | |
5955 | ||
5956 | compute_mode = GET_MODE (inner); | |
5957 | ||
71012d97 GK |
5958 | /* Don't attempt bitwise arithmetic on non-scalar integer modes. */
5959 | if (! SCALAR_INT_MODE_P (compute_mode)) | |
861556b4 RH |
5960 | { |
5961 | enum machine_mode imode; | |
5962 | ||
71012d97 | 5963 | /* Don't do anything for vector or complex integral types. */ |
861556b4 RH |
5964 | if (! FLOAT_MODE_P (compute_mode)) |
5965 | break; | |
5966 | ||
5967 | /* Try to find an integral mode to pun with. */ | |
5968 | imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0); | |
5969 | if (imode == BLKmode) | |
5970 | break; | |
5971 | ||
5972 | compute_mode = imode; | |
5973 | inner = gen_lowpart_for_combine (imode, inner); | |
5974 | } | |
5975 | ||
230d793d | 5976 | /* Compute a mask of LEN bits, if we can do this on the host machine. */ |
5f4f0e22 CH |
5977 | if (len < HOST_BITS_PER_WIDE_INT) |
5978 | mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1); | |
230d793d RS |
5979 | else |
5980 | break; | |
5981 | ||
5982 | /* Now compute the equivalent expression. Make a copy of INNER | |
5983 | for the SET_DEST in case it is a MEM into which we will substitute; | |
5984 | we don't want shared RTL in that case. */ | |
c5c76735 JL |
5985 | x = gen_rtx_SET |
5986 | (VOIDmode, copy_rtx (inner), | |
5987 | gen_binary (IOR, compute_mode, | |
5988 | gen_binary (AND, compute_mode, | |
f1c6ba8b RK |
5989 | simplify_gen_unary (NOT, compute_mode, |
5990 | gen_binary (ASHIFT, | |
5991 | compute_mode, | |
5992 | mask, pos), | |
5993 | compute_mode), | |
c5c76735 JL |
5994 | inner), |
5995 | gen_binary (ASHIFT, compute_mode, | |
5996 | gen_binary (AND, compute_mode, | |
5997 | gen_lowpart_for_combine | |
5998 | (compute_mode, SET_SRC (x)), | |
5999 | mask), | |
6000 | pos))); | |
230d793d RS |
6001 | } |
6002 | ||
6003 | return x; | |
6004 | } | |
6005 | \f | |
8999a12e RK |
6006 | /* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero, |
6007 | it is an RTX that represents a variable starting position; otherwise, | |
6008 | POS is the (constant) starting bit position (counted from the LSB). | |
230d793d RS |
6009 | |
6010 | INNER may be a USE. This will occur when we started with a bitfield | |
6011 | that went outside the boundary of the object in memory, which is | |
6012 | allowed on most machines. To isolate this case, we produce a USE | |
6013 | whose mode is wide enough and surround the MEM with it. The only | |
6014 | code that understands the USE is this routine. If it is not removed, | |
6015 | it will cause the resulting insn not to match. | |
6016 | ||
da7d8304 | 6017 | UNSIGNEDP is nonzero for an unsigned reference and zero for a |
230d793d RS |
6018 | signed reference. |
6019 | ||
da7d8304 KH |
6020 | IN_DEST is nonzero if this is a reference in the destination of a |
6021 | SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero, | |
230d793d RS |
6022 | a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will |
6023 | be used. | |
6024 | ||
da7d8304 | 6025 | IN_COMPARE is nonzero if we are in a COMPARE. This means that a |
230d793d RS |
6026 | ZERO_EXTRACT should be built even for bits starting at bit 0. |
6027 | ||
76184def DE |
6028 | MODE is the desired mode of the result (if IN_DEST == 0). |
6029 | ||
6030 | The result is an RTX for the extraction or NULL_RTX if the target | |
6031 | can't handle it. */ | |
230d793d RS |
6032 | |
6033 | static rtx | |
6034 | make_extraction (mode, inner, pos, pos_rtx, len, | |
6035 | unsignedp, in_dest, in_compare) | |
6036 | enum machine_mode mode; | |
6037 | rtx inner; | |
770ae6cc | 6038 | HOST_WIDE_INT pos; |
230d793d | 6039 | rtx pos_rtx; |
770ae6cc | 6040 | unsigned HOST_WIDE_INT len; |
230d793d RS |
6041 | int unsignedp; |
6042 | int in_dest, in_compare; | |
6043 | { | |
94b4b17a RS |
6044 | /* This mode describes the size of the storage area |
6045 | to fetch the overall value from. Within that, we | |
6046 | ignore the POS lowest bits, etc. */ | |
230d793d RS |
6047 | enum machine_mode is_mode = GET_MODE (inner); |
6048 | enum machine_mode inner_mode; | |
d7cd794f RK |
6049 | enum machine_mode wanted_inner_mode = byte_mode; |
6050 | enum machine_mode wanted_inner_reg_mode = word_mode; | |
230d793d RS |
6051 | enum machine_mode pos_mode = word_mode; |
6052 | enum machine_mode extraction_mode = word_mode; | |
6053 | enum machine_mode tmode = mode_for_size (len, MODE_INT, 1); | |
6054 | int spans_byte = 0; | |
6055 | rtx new = 0; | |
8999a12e | 6056 | rtx orig_pos_rtx = pos_rtx; |
770ae6cc | 6057 | HOST_WIDE_INT orig_pos; |
230d793d RS |
6058 | |
6059 | /* Get some information about INNER and get the innermost object. */ | |
6060 | if (GET_CODE (inner) == USE) | |
94b4b17a | 6061 | /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */ |
230d793d RS |
6062 | /* We don't need to adjust the position because we set up the USE |
6063 | to pretend that it was a full-word object. */ | |
6064 | spans_byte = 1, inner = XEXP (inner, 0); | |
6065 | else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner)) | |
94b4b17a RS |
6066 | { |
6067 | /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...), | |
6068 | consider just the QI as the memory to extract from. | |
6069 | The subreg adds or removes high bits; its mode is | |
6070 | irrelevant to the meaning of this extraction, | |
6071 | since POS and LEN count from the lsb. */ | |
6072 | if (GET_CODE (SUBREG_REG (inner)) == MEM) | |
6073 | is_mode = GET_MODE (SUBREG_REG (inner)); | |
6074 | inner = SUBREG_REG (inner); | |
6075 | } | |
988ef418 RS |
6076 | else if (GET_CODE (inner) == ASHIFT |
6077 | && GET_CODE (XEXP (inner, 1)) == CONST_INT | |
6078 | && pos_rtx == 0 && pos == 0 | |
3129af4c | 6079 | && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1))) |
988ef418 RS |
6080 | { |
6081 | /* We're extracting the least significant bits of an rtx | |
6082 | (ashift X (const_int C)), where LEN > C. Extract the | |
6083 | least significant (LEN - C) bits of X, giving an rtx | |
6084 | whose mode is MODE, then shift it left C times. */ | |
6085 | new = make_extraction (mode, XEXP (inner, 0), | |
6086 | 0, 0, len - INTVAL (XEXP (inner, 1)), | |
6087 | unsignedp, in_dest, in_compare); | |
6088 | if (new != 0) | |
6089 | return gen_rtx_ASHIFT (mode, new, XEXP (inner, 1)); | |
6090 | } | |
230d793d RS |
6091 | |
6092 | inner_mode = GET_MODE (inner); | |
6093 | ||
6094 | if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT) | |
8999a12e | 6095 | pos = INTVAL (pos_rtx), pos_rtx = 0; |
230d793d RS |
6096 | |
6097 | /* See if this can be done without an extraction. We never can if the | |
6098 | width of the field is not the same as that of some integer mode. For | |
6099 | registers, we can only avoid the extraction if the position is at the | |
6100 | low-order bit and this is either not in the destination or we have the | |
6101 | appropriate STRICT_LOW_PART operation available. | |
6102 | ||
6103 | For MEM, we can avoid an extract if the field starts on an appropriate | |
6104 | boundary and we can change the mode of the memory reference. However, | |
6105 | we cannot directly access the MEM if we have a USE and the underlying | |
6106 | MEM is not TMODE. This combination means that MEM was being used in a | |
6107 | context where bits outside its mode were being referenced; that is only | |
6108 | valid in bit-field insns. */ | |
6109 | ||
6110 | if (tmode != BLKmode | |
6111 | && ! (spans_byte && inner_mode != tmode) | |
4d9cfc7b RK |
6112 | && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0 |
6113 | && GET_CODE (inner) != MEM | |
230d793d | 6114 | && (! in_dest |
df62f951 | 6115 | || (GET_CODE (inner) == REG |
ef89d648 | 6116 | && have_insn_for (STRICT_LOW_PART, tmode)))) |
8999a12e | 6117 | || (GET_CODE (inner) == MEM && pos_rtx == 0 |
dfbe1b2f RK |
6118 | && (pos |
6119 | % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode) | |
6120 | : BITS_PER_UNIT)) == 0 | |
230d793d RS |
6121 | /* We can't do this if we are widening INNER_MODE (it |
6122 | may not be aligned, for one thing). */ | |
6123 | && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode) | |
6124 | && (inner_mode == tmode | |
6125 | || (! mode_dependent_address_p (XEXP (inner, 0)) | |
6126 | && ! MEM_VOLATILE_P (inner)))))) | |
6127 | { | |
230d793d RS |
6128 | /* If INNER is a MEM, make a new MEM that encompasses just the desired |
6129 | field. If the original and current mode are the same, we need not | |
663522cb | 6130 | adjust the offset. Otherwise, we do if bytes are big-endian.
230d793d | 6131 | |
4d9cfc7b RK |
6132 | If INNER is not a MEM, get a piece consisting of just the field |
6133 | of interest (in this case POS % BITS_PER_WORD must be 0). */ | |
230d793d RS |
6134 | |
6135 | if (GET_CODE (inner) == MEM) | |
6136 | { | |
f1ec5147 RK |
6137 | HOST_WIDE_INT offset; |
6138 | ||
94b4b17a RS |
6139 | /* POS counts from lsb, but make OFFSET count in memory order. */ |
6140 | if (BYTES_BIG_ENDIAN) | |
6141 | offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT; | |
6142 | else | |
6143 | offset = pos / BITS_PER_UNIT; | |
230d793d | 6144 | |
f1ec5147 | 6145 | new = adjust_address_nv (inner, tmode, offset); |
230d793d | 6146 | } |
df62f951 | 6147 | else if (GET_CODE (inner) == REG) |
c0d3ac4d RK |
6148 | { |
6149 | /* We can't call gen_lowpart_for_combine here since we always want | |
6150 | a SUBREG and it would sometimes return a new hard register. */ | |
6151 | if (tmode != inner_mode) | |
ddef6bc7 | 6152 | { |
f1ec5147 | 6153 | HOST_WIDE_INT final_word = pos / BITS_PER_WORD; |
ddef6bc7 JJ |
6154 | |
6155 | if (WORDS_BIG_ENDIAN | |
6156 | && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD) | |
6157 | final_word = ((GET_MODE_SIZE (inner_mode) | |
6158 | - GET_MODE_SIZE (tmode)) | |
6159 | / UNITS_PER_WORD) - final_word; | |
6160 | ||
6161 | final_word *= UNITS_PER_WORD; | |
6162 | if (BYTES_BIG_ENDIAN && | |
6163 | GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode)) | |
6164 | final_word += (GET_MODE_SIZE (inner_mode) | |
6165 | - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD; | |
6166 | ||
307f767b | 6167 | /* Avoid creating invalid subregs, for example when |
991b6592 | 6168 | simplifying (x >> 32) & 255. */
307f767b DJ |
6169 | if (final_word >= GET_MODE_SIZE (inner_mode)) |
6170 | return NULL_RTX; | |
6171 | ||
ddef6bc7 JJ |
6172 | new = gen_rtx_SUBREG (tmode, inner, final_word); |
6173 | } | |
23190837 AJ |
6174 | else |
6175 | new = inner; | |
6176 | } | |
230d793d | 6177 | else |
6139ff20 RK |
6178 | new = force_to_mode (inner, tmode, |
6179 | len >= HOST_BITS_PER_WIDE_INT | |
0345195a | 6180 | ? ~(unsigned HOST_WIDE_INT) 0 |
729a2125 | 6181 | : ((unsigned HOST_WIDE_INT) 1 << len) - 1, |
e3d616e3 | 6182 | NULL_RTX, 0); |
230d793d | 6183 | |
663522cb | 6184 | /* If this extraction is going into the destination of a SET, |
230d793d RS |
6185 | make a STRICT_LOW_PART unless we made a MEM. */ |
6186 | ||
6187 | if (in_dest) | |
6188 | return (GET_CODE (new) == MEM ? new | |
77fa0940 | 6189 | : (GET_CODE (new) != SUBREG |
38a448ca | 6190 | ? gen_rtx_CLOBBER (tmode, const0_rtx) |
f1c6ba8b | 6191 | : gen_rtx_STRICT_LOW_PART (VOIDmode, new))); |
230d793d | 6192 | |
0f808b6f JH |
6193 | if (mode == tmode) |
6194 | return new; | |
6195 | ||
0a7ec763 | 6196 | if (GET_CODE (new) == CONST_INT) |
2496c7bd | 6197 | return gen_int_mode (INTVAL (new), mode); |
0a7ec763 | 6198 | |
0f808b6f JH |
6199 | /* If we know that no extraneous bits are set, and that the high |
6200 | bit is not set, convert the extraction to the cheaper of | |
6201 | sign and zero extension, that are equivalent in these cases. */ | |
6202 | if (flag_expensive_optimizations | |
6203 | && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT | |
6204 | && ((nonzero_bits (new, tmode) | |
663522cb KH |
6205 | & ~(((unsigned HOST_WIDE_INT) |
6206 | GET_MODE_MASK (tmode)) | |
6207 | >> 1)) | |
0f808b6f JH |
6208 | == 0))) |
6209 | { | |
6210 | rtx temp = gen_rtx_ZERO_EXTEND (mode, new); | |
6211 | rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new); | |
6212 | ||
6213 | /* Prefer ZERO_EXTENSION, since it gives more information to | |
6214 | backends. */ | |
25ffb1f6 | 6215 | if (rtx_cost (temp, SET) <= rtx_cost (temp1, SET)) |
0f808b6f JH |
6216 | return temp; |
6217 | return temp1; | |
6218 | } | |
6219 | ||
230d793d RS |
6220 | /* Otherwise, sign- or zero-extend unless we already are in the |
6221 | proper mode. */ | |
6222 | ||
f1c6ba8b RK |
6223 | return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, |
6224 | mode, new)); | |
230d793d RS |
6225 | } |
6226 | ||
cc471082 RS |
6227 | /* Unless this is a COMPARE or we have a funny memory reference, |
6228 | don't do anything with zero-extending field extracts starting at | |
6229 | the low-order bit since they are simple AND operations. */ | |
8999a12e RK |
6230 | if (pos_rtx == 0 && pos == 0 && ! in_dest |
6231 | && ! in_compare && ! spans_byte && unsignedp) | |
230d793d RS |
6232 | return 0; |
6233 | ||
c5c76735 JL |
6234 | /* Unless we are allowed to span bytes or INNER is not MEM, reject this if |
6235 | we would be spanning bytes or if the position is not a constant and the | |
6236 | length is not 1. In all other cases, we would only be going outside | |
6237 | our object in cases when an original shift would have been | |
e7373556 | 6238 | undefined. */ |
c5c76735 | 6239 | if (! spans_byte && GET_CODE (inner) == MEM |
e7373556 RK |
6240 | && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode)) |
6241 | || (pos_rtx != 0 && len != 1))) | |
6242 | return 0; | |
6243 | ||
d7cd794f | 6244 | /* Get the mode to use should INNER not be a MEM, the mode for the position, |
230d793d | 6245 | and the mode for the result. */ |
505ddab6 | 6246 | if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE) |
230d793d | 6247 | { |
da920570 ZW |
6248 | wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0); |
6249 | pos_mode = mode_for_extraction (EP_insv, 2); | |
6250 | extraction_mode = mode_for_extraction (EP_insv, 3); | |
230d793d | 6251 | } |
230d793d | 6252 | |
da920570 ZW |
6253 | if (! in_dest && unsignedp |
6254 | && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE) | |
230d793d | 6255 | { |
da920570 ZW |
6256 | wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1); |
6257 | pos_mode = mode_for_extraction (EP_extzv, 3); | |
6258 | extraction_mode = mode_for_extraction (EP_extzv, 0); | |
230d793d | 6259 | } |
230d793d | 6260 | |
da920570 ZW |
6261 | if (! in_dest && ! unsignedp |
6262 | && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE) | |
230d793d | 6263 | { |
da920570 ZW |
6264 | wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1); |
6265 | pos_mode = mode_for_extraction (EP_extv, 3); | |
6266 | extraction_mode = mode_for_extraction (EP_extv, 0); | |
230d793d | 6267 | } |
230d793d RS |
6268 | |
6269 | /* Never narrow an object, since that might not be safe. */ | |
6270 | ||
6271 | if (mode != VOIDmode | |
6272 | && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode)) | |
6273 | extraction_mode = mode; | |
6274 | ||
6275 | if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode | |
6276 | && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) | |
6277 | pos_mode = GET_MODE (pos_rtx); | |
6278 | ||
d7cd794f RK |
6279 | /* If this is not from memory, the desired mode is wanted_inner_reg_mode; |
6280 | if we have to change the mode of memory and cannot, the desired mode is | |
6281 | EXTRACTION_MODE. */ | |
6282 | if (GET_CODE (inner) != MEM) | |
6283 | wanted_inner_mode = wanted_inner_reg_mode; | |
6284 | else if (inner_mode != wanted_inner_mode | |
6285 | && (mode_dependent_address_p (XEXP (inner, 0)) | |
6286 | || MEM_VOLATILE_P (inner))) | |
6287 | wanted_inner_mode = extraction_mode; | |
230d793d | 6288 | |
6139ff20 RK |
6289 | orig_pos = pos; |
6290 | ||
f76b9db2 ILT |
6291 | if (BITS_BIG_ENDIAN) |
6292 | { | |
cf54c2cd DE |
6293 | /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to |
6294 | BITS_BIG_ENDIAN style. If position is constant, compute new | |
6295 | position. Otherwise, build subtraction. | |
6296 | Note that POS is relative to the mode of the original argument. | |
6297 | If it's a MEM we need to recompute POS relative to that. | |
6298 | However, if we're extracting from (or inserting into) a register, | |
6299 | we want to recompute POS relative to wanted_inner_mode. */ | |
6300 | int width = (GET_CODE (inner) == MEM | |
6301 | ? GET_MODE_BITSIZE (is_mode) | |
6302 | : GET_MODE_BITSIZE (wanted_inner_mode)); | |
6303 | ||
f76b9db2 | 6304 | if (pos_rtx == 0) |
cf54c2cd | 6305 | pos = width - len - pos; |
f76b9db2 ILT |
6306 | else |
6307 | pos_rtx | |
f1c6ba8b | 6308 | = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx); |
cf54c2cd DE |
6309 | /* POS may be less than 0 now, but we check for that below. |
6310 | Note that it can only be less than 0 if GET_CODE (inner) != MEM. */ | |
f76b9db2 | 6311 | } |
230d793d RS |
6312 | |
6313 | /* If INNER has a wider mode, make it smaller. If this is a constant | |
6314 | extract, try to adjust the byte to point to the byte containing | |
6315 | the value. */ | |
d7cd794f RK |
6316 | if (wanted_inner_mode != VOIDmode |
6317 | && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode) | |
230d793d | 6318 | && ((GET_CODE (inner) == MEM |
d7cd794f | 6319 | && (inner_mode == wanted_inner_mode |
230d793d RS |
6320 | || (! mode_dependent_address_p (XEXP (inner, 0)) |
6321 | && ! MEM_VOLATILE_P (inner)))))) | |
6322 | { | |
6323 | int offset = 0; | |
6324 | ||
6325 | /* The computations below will be correct if the machine is big | |
6326 | endian in both bits and bytes or little endian in bits and bytes. | |
6327 | If it is mixed, we must adjust. */ | |
663522cb | 6328 | |
230d793d | 6329 | /* If bytes are big endian and we had a paradoxical SUBREG, we must |
0f41302f | 6330 | adjust OFFSET to compensate. */ |
f76b9db2 ILT |
6331 | if (BYTES_BIG_ENDIAN |
6332 | && ! spans_byte | |
230d793d RS |
6333 | && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode)) |
6334 | offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode); | |
230d793d RS |
6335 | |
6336 | /* If this is a constant position, we can move to the desired byte. */ | |
8999a12e | 6337 | if (pos_rtx == 0) |
230d793d RS |
6338 | { |
6339 | offset += pos / BITS_PER_UNIT; | |
d7cd794f | 6340 | pos %= GET_MODE_BITSIZE (wanted_inner_mode); |
230d793d RS |
6341 | } |
6342 | ||
f76b9db2 ILT |
6343 | if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN |
6344 | && ! spans_byte | |
d7cd794f | 6345 | && is_mode != wanted_inner_mode) |
c6b3f1f2 | 6346 | offset = (GET_MODE_SIZE (is_mode) |
d7cd794f | 6347 | - GET_MODE_SIZE (wanted_inner_mode) - offset); |
c6b3f1f2 | 6348 | |
d7cd794f | 6349 | if (offset != 0 || inner_mode != wanted_inner_mode) |
f1ec5147 | 6350 | inner = adjust_address_nv (inner, wanted_inner_mode, offset); |
230d793d RS |
6351 | } |
6352 | ||
9e74dc41 RK |
6353 | /* If INNER is not memory, we can always get it into the proper mode. If we |
6354 | are changing its mode, POS must be a constant and smaller than the size | |
6355 | of the new mode. */ | |
230d793d | 6356 | else if (GET_CODE (inner) != MEM) |
9e74dc41 RK |
6357 | { |
6358 | if (GET_MODE (inner) != wanted_inner_mode | |
6359 | && (pos_rtx != 0 | |
6360 | || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode))) | |
6361 | return 0; | |
6362 | ||
6363 | inner = force_to_mode (inner, wanted_inner_mode, | |
6364 | pos_rtx | |
6365 | || len + orig_pos >= HOST_BITS_PER_WIDE_INT | |
0345195a | 6366 | ? ~(unsigned HOST_WIDE_INT) 0 |
729a2125 RK |
6367 | : ((((unsigned HOST_WIDE_INT) 1 << len) - 1) |
6368 | << orig_pos), | |
9e74dc41 RK |
6369 | NULL_RTX, 0); |
6370 | } | |
230d793d RS |
6371 | |
6372 | /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we | |
6373 | have to zero extend. Otherwise, we can just use a SUBREG. */ | |
8999a12e | 6374 | if (pos_rtx != 0 |
230d793d | 6375 | && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx))) |
0f808b6f | 6376 | { |
f1c6ba8b | 6377 | rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx); |
0f808b6f JH |
6378 | |
6379 | /* If we know that no extraneous bits are set, and that the high |
eaec9b3d | 6380 | bit is not set, convert the extraction to a cheaper one - either |
0f808b6f JH |
6381 | SIGN_EXTENSION or ZERO_EXTENSION, which are equivalent in these |
6382 | cases. */ | |
6383 | if (flag_expensive_optimizations | |
6384 | && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT | |
6385 | && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx)) | |
663522cb KH |
6386 | & ~(((unsigned HOST_WIDE_INT) |
6387 | GET_MODE_MASK (GET_MODE (pos_rtx))) | |
6388 | >> 1)) | |
0f808b6f JH |
6389 | == 0))) |
6390 | { | |
6391 | rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx); | |
6392 | ||
25ffb1f6 | 6393 | /* Prefer ZERO_EXTENSION, since it gives more information to |
0f808b6f JH |
6394 | backends. */ |
6395 | if (rtx_cost (temp1, SET) < rtx_cost (temp, SET)) | |
6396 | temp = temp1; | |
6397 | } | |
6398 | pos_rtx = temp; | |
6399 | } | |
8999a12e | 6400 | else if (pos_rtx != 0 |
230d793d RS |
6401 | && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx))) |
6402 | pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx); | |
6403 | ||
8999a12e RK |
6404 | /* Make POS_RTX unless we already have it and it is correct. If we don't |
6405 | have a POS_RTX but we do have an ORIG_POS_RTX, the latter must | |
0f41302f | 6406 | be a CONST_INT. */ |
8999a12e RK |
6407 | if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos) |
6408 | pos_rtx = orig_pos_rtx; | |
6409 | ||
6410 | else if (pos_rtx == 0) | |
5f4f0e22 | 6411 | pos_rtx = GEN_INT (pos); |
230d793d RS |
6412 | |
6413 | /* Make the required operation. See if we can use existing rtx. */ | |
f1c6ba8b | 6414 | new = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT, |
5f4f0e22 | 6415 | extraction_mode, inner, GEN_INT (len), pos_rtx); |
230d793d RS |
6416 | if (! in_dest) |
6417 | new = gen_lowpart_for_combine (mode, new); | |
6418 | ||
6419 | return new; | |
6420 | } | |
6421 | \f | |
71923da7 RK |
6422 | /* See if X contains an ASHIFT of COUNT or more bits that can be commuted |
6423 | with any other operations in X. Return X without that shift if so. */ | |
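/* For example, on (plus (ashift X 3) 8) with COUNT == 3 we return
   (plus X 1), since (ashift (plus X 1) 3) computes the same value.  */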
6424 | ||
6425 | static rtx | |
6426 | extract_left_shift (x, count) | |
6427 | rtx x; | |
6428 | int count; | |
6429 | { | |
6430 | enum rtx_code code = GET_CODE (x); | |
6431 | enum machine_mode mode = GET_MODE (x); | |
6432 | rtx tem; | |
6433 | ||
6434 | switch (code) | |
6435 | { | |
6436 | case ASHIFT: | |
6437 | /* This is the shift itself. If it is wide enough, we will return | |
6438 | either the value being shifted if the shift count is equal to | |
6439 | COUNT or a shift for the difference. */ | |
6440 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6441 | && INTVAL (XEXP (x, 1)) >= count) | |
6442 | return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), | |
6443 | INTVAL (XEXP (x, 1)) - count); | |
6444 | break; | |
6445 | ||
6446 | case NEG: case NOT: | |
6447 | if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0) | |
f1c6ba8b | 6448 | return simplify_gen_unary (code, mode, tem, mode); |
71923da7 RK |
6449 | |
6450 | break; | |
6451 | ||
6452 | case PLUS: case IOR: case XOR: case AND: | |
6453 | /* If we can safely shift this constant and we find the inner shift, | |
6454 | make a new operation. */ | |
6455 | if (GET_CODE (XEXP (x, 1)) == CONST_INT |
b729186a | 6456 | && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0 |
71923da7 | 6457 | && (tem = extract_left_shift (XEXP (x, 0), count)) != 0) |
663522cb | 6458 | return gen_binary (code, mode, tem, |
71923da7 RK |
6459 | GEN_INT (INTVAL (XEXP (x, 1)) >> count)); |
6460 | ||
6461 | break; | |
663522cb | 6462 | |
e9a25f70 JL |
6463 | default: |
6464 | break; | |
71923da7 RK |
6465 | } |
6466 | ||
6467 | return 0; | |
6468 | } | |
6469 | \f | |
230d793d RS |
6470 | /* Look at the expression rooted at X. Look for expressions |
6471 | equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND. | |
6472 | Form these expressions. | |
6473 | ||
6474 | Return the new rtx, usually just X. | |
6475 | ||
8aeea6e6 | 6476 | Also, for machines like the VAX that don't have logical shift insns, |
230d793d RS |
6477 | try to convert logical to arithmetic shift operations in cases where |
6478 | they are equivalent. This undoes the canonicalizations to logical | |
6479 | shifts done elsewhere. | |
6480 | ||
6481 | We try, as much as possible, to re-use rtl expressions to save memory. | |
6482 | ||
6483 | IN_CODE says what kind of expression we are processing. Normally, it is | |
42495ca0 RK |
6484 | SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two |
6485 | being kludges), it is MEM. When processing the arguments of a comparison | |
230d793d RS |
6486 | or a COMPARE against zero, it is COMPARE. */ |
6487 | ||
6488 | static rtx | |
6489 | make_compound_operation (x, in_code) | |
6490 | rtx x; | |
6491 | enum rtx_code in_code; | |
6492 | { | |
6493 | enum rtx_code code = GET_CODE (x); | |
6494 | enum machine_mode mode = GET_MODE (x); | |
6495 | int mode_width = GET_MODE_BITSIZE (mode); | |
71923da7 | 6496 | rtx rhs, lhs; |
230d793d | 6497 | enum rtx_code next_code; |
f24ad0e4 | 6498 | int i; |
230d793d | 6499 | rtx new = 0; |
280f58ba | 6500 | rtx tem; |
6f7d635c | 6501 | const char *fmt; |
230d793d RS |
6502 | |
6503 | /* Select the code to be used in recursive calls. Once we are inside an | |
6504 | address, we stay there. If we have a comparison, set to COMPARE, | |
6505 | but once inside, go back to our default of SET. */ | |
6506 | ||
42495ca0 | 6507 | next_code = (code == MEM || code == PLUS || code == MINUS ? MEM |
230d793d RS |
6508 | : ((code == COMPARE || GET_RTX_CLASS (code) == '<') |
6509 | && XEXP (x, 1) == const0_rtx) ? COMPARE | |
6510 | : in_code == COMPARE ? SET : in_code); | |
6511 | ||
6512 | /* Process depending on the code of this operation. If NEW is set | |
da7d8304 | 6513 | nonzero, it will be returned. */ |
230d793d RS |
6514 | |
6515 | switch (code) | |
6516 | { | |
6517 | case ASHIFT: | |
230d793d RS |
6518 | /* Convert shifts by constants into multiplications if inside |
6519 | an address. */ | |
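      /* For instance, (mem (plus (ashift R 2) B)) becomes
         (mem (plus (mult R 4) B)), the canonical form for a scaled
         index within an address.  */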
6520 | if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT | |
5f4f0e22 | 6521 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
230d793d | 6522 | && INTVAL (XEXP (x, 1)) >= 0) |
280f58ba RK |
6523 | { |
6524 | new = make_compound_operation (XEXP (x, 0), next_code); | |
f1c6ba8b RK |
6525 | new = gen_rtx_MULT (mode, new, |
6526 | GEN_INT ((HOST_WIDE_INT) 1 | |
6527 | << INTVAL (XEXP (x, 1)))); | |
280f58ba | 6528 | } |
230d793d RS |
6529 | break; |
6530 | ||
6531 | case AND: | |
6532 | /* If the second operand is not a constant, we can't do anything | |
6533 | with it. */ | |
6534 | if (GET_CODE (XEXP (x, 1)) != CONST_INT) | |
6535 | break; | |
6536 | ||
6537 | /* If the constant is a power of two minus one and the first operand | |
6538 | is a logical right shift, make an extraction. */ | |
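      /* E.g., (and (lshiftrt X 8) 255) selects bits 8..15 of X and so
         becomes (zero_extract X 8 8).  */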
6539 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
6540 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6541 | { |
6542 | new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | |
6543 | new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1, | |
6544 | 0, in_code == COMPARE); | |
6545 | } | |
dfbe1b2f | 6546 | |
230d793d RS |
6547 | /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */ |
6548 | else if (GET_CODE (XEXP (x, 0)) == SUBREG | |
6549 | && subreg_lowpart_p (XEXP (x, 0)) | |
6550 | && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT | |
6551 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6552 | { |
6553 | new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0), | |
6554 | next_code); | |
2f99f437 | 6555 | new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0, |
280f58ba RK |
6556 | XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1, |
6557 | 0, in_code == COMPARE); | |
6558 | } | |
45620ed4 | 6559 | /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */ |
c2f9f64e JW |
6560 | else if ((GET_CODE (XEXP (x, 0)) == XOR |
6561 | || GET_CODE (XEXP (x, 0)) == IOR) | |
6562 | && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT | |
6563 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT | |
6564 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
6565 | { | |
6566 | /* Apply the distributive law, and then try to make extractions. */ | |
f1c6ba8b RK |
6567 | new = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode, |
6568 | gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0), | |
6569 | XEXP (x, 1)), | |
6570 | gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1), | |
6571 | XEXP (x, 1))); | |
c2f9f64e JW |
6572 | new = make_compound_operation (new, in_code); |
6573 | } | |
a7c99304 RK |
6574 | |
6575 | /* If we have (and (rotate X C) M) and C is at least the number |
6576 | of bits in M, this is an extraction. */ |
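      /* In a 32-bit mode, say, (and (rotate X 8) 255) picks up the top
         byte of X, i.e. (zero_extract X 8 24).  */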
6577 | ||
6578 | else if (GET_CODE (XEXP (x, 0)) == ROTATE | |
6579 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
6580 | && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0 | |
6581 | && i <= INTVAL (XEXP (XEXP (x, 0), 1))) | |
280f58ba RK |
6582 | { |
6583 | new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code); | |
6584 | new = make_extraction (mode, new, | |
6585 | (GET_MODE_BITSIZE (mode) | |
6586 | - INTVAL (XEXP (XEXP (x, 0), 1))), | |
6587 | NULL_RTX, i, 1, 0, in_code == COMPARE); | |
6588 | } | |
a7c99304 RK |
6589 | |
6590 | /* On machines without logical shifts, if the operand of the AND is | |
230d793d RS |
6591 | a logical shift and our mask turns off all the propagated sign |
6592 | bits, we can replace the logical shift with an arithmetic shift. */ | |
ef89d648 ZW |
6593 | else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT |
6594 | && !have_insn_for (LSHIFTRT, mode) | |
6595 | && have_insn_for (ASHIFTRT, mode) | |
230d793d RS |
6596 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT |
6597 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
5f4f0e22 CH |
6598 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT |
6599 | && mode_width <= HOST_BITS_PER_WIDE_INT) | |
230d793d | 6600 | { |
5f4f0e22 | 6601 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
230d793d RS |
6602 | |
6603 | mask >>= INTVAL (XEXP (XEXP (x, 0), 1)); | |
6604 | if ((INTVAL (XEXP (x, 1)) & ~mask) == 0) | |
6605 | SUBST (XEXP (x, 0), | |
f1c6ba8b RK |
6606 | gen_rtx_ASHIFTRT (mode, |
6607 | make_compound_operation | |
6608 | (XEXP (XEXP (x, 0), 0), next_code), | |
6609 | XEXP (XEXP (x, 0), 1))); | |
230d793d RS |
6610 | } |
6611 | ||
6612 | /* If the constant is one less than a power of two, this might be | |
6613 | representable by an extraction even if no shift is present. | |
6614 | If it doesn't end up being a ZERO_EXTEND, we will ignore it unless | |
6615 | we are in a COMPARE. */ | |
6616 | else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0) | |
280f58ba RK |
6617 | new = make_extraction (mode, |
6618 | make_compound_operation (XEXP (x, 0), | |
6619 | next_code), | |
6620 | 0, NULL_RTX, i, 1, 0, in_code == COMPARE); | |
230d793d RS |
6621 | |
6622 | /* If we are in a comparison and this is an AND with a power of two, | |
6623 | convert this into the appropriate bit extract. */ | |
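      /* E.g., comparing (and X 8) against zero only tests bit 3 of X,
         so it becomes (zero_extract X 1 3).  */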
6624 | else if (in_code == COMPARE | |
6625 | && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0) | |
280f58ba RK |
6626 | new = make_extraction (mode, |
6627 | make_compound_operation (XEXP (x, 0), | |
6628 | next_code), | |
6629 | i, NULL_RTX, 1, 1, 0, 1); | |
230d793d RS |
6630 | |
6631 | break; | |
6632 | ||
6633 | case LSHIFTRT: | |
6634 | /* If the sign bit is known to be zero, replace this with an | |
6635 | arithmetic shift. */ | |
ef89d648 ZW |
6636 | if (have_insn_for (ASHIFTRT, mode) |
6637 | && ! have_insn_for (LSHIFTRT, mode) | |
5f4f0e22 | 6638 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 6639 | && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0) |
230d793d | 6640 | { |
f1c6ba8b RK |
6641 | new = gen_rtx_ASHIFTRT (mode, |
6642 | make_compound_operation (XEXP (x, 0), | |
6643 | next_code), | |
6644 | XEXP (x, 1)); | |
230d793d RS |
6645 | break; |
6646 | } | |
6647 | ||
0f41302f | 6648 | /* ... fall through ... */ |
230d793d RS |
6649 | |
6650 | case ASHIFTRT: | |
71923da7 RK |
6651 | lhs = XEXP (x, 0); |
6652 | rhs = XEXP (x, 1); | |
6653 | ||
230d793d RS |
6654 | /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1, |
6655 | this is a SIGN_EXTRACT. */ | |
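      /* In a 32-bit mode, (ashiftrt (ashift foo 24) 24) sign-extends the
         low byte of foo, i.e. (sign_extract foo 8 0); in general the
         field has length mode_width - C2 and starts at bit C2 - C1.  */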
71923da7 RK |
6656 | if (GET_CODE (rhs) == CONST_INT |
6657 | && GET_CODE (lhs) == ASHIFT | |
6658 | && GET_CODE (XEXP (lhs, 1)) == CONST_INT | |
6659 | && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))) | |
280f58ba | 6660 | { |
71923da7 | 6661 | new = make_compound_operation (XEXP (lhs, 0), next_code); |
280f58ba | 6662 | new = make_extraction (mode, new, |
71923da7 RK |
6663 | INTVAL (rhs) - INTVAL (XEXP (lhs, 1)), |
6664 | NULL_RTX, mode_width - INTVAL (rhs), | |
d0ab8cd3 | 6665 | code == LSHIFTRT, 0, in_code == COMPARE); |
8231ad94 | 6666 | break; |
d0ab8cd3 RK |
6667 | } |
6668 | ||
71923da7 RK |
6669 | /* See if we have operations between an ASHIFTRT and an ASHIFT. |
6670 | If so, try to merge the shifts into a SIGN_EXTEND. We could | |
6671 | also do this for some cases of SIGN_EXTRACT, but it doesn't | |
6672 | seem worth the effort; the case checked for occurs on Alpha. */ | |
663522cb | 6673 | |
71923da7 RK |
6674 | if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o' |
6675 | && ! (GET_CODE (lhs) == SUBREG | |
6676 | && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o')) | |
6677 | && GET_CODE (rhs) == CONST_INT | |
6678 | && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT | |
6679 | && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0) | |
6680 | new = make_extraction (mode, make_compound_operation (new, next_code), | |
6681 | 0, NULL_RTX, mode_width - INTVAL (rhs), | |
6682 | code == LSHIFTRT, 0, in_code == COMPARE); | |
663522cb | 6683 | |
230d793d | 6684 | break; |
280f58ba RK |
6685 | |
6686 | case SUBREG: | |
6687 | /* Call ourselves recursively on the inner expression. If we are | |
6688 | narrowing the object and it has a different RTL code from | |
6689 | what it originally did, do this SUBREG as a force_to_mode. */ | |
6690 | ||
0a5cbff6 | 6691 | tem = make_compound_operation (SUBREG_REG (x), in_code); |
280f58ba RK |
6692 | if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x)) |
6693 | && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem)) | |
6694 | && subreg_lowpart_p (x)) | |
0a5cbff6 | 6695 | { |
e8dc6d50 JH |
6696 | rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0, |
6697 | NULL_RTX, 0); | |
0a5cbff6 RK |
6698 | |
6699 | /* If we have something other than a SUBREG, we might have | |
eaec9b3d | 6700 | done an expansion, so rerun ourselves. */ |
0a5cbff6 RK |
6701 | if (GET_CODE (newer) != SUBREG) |
6702 | newer = make_compound_operation (newer, in_code); | |
6703 | ||
6704 | return newer; | |
6705 | } | |
6f28d3e9 RH |
6706 | |
6707 | /* If this is a paradoxical subreg, and the new code is a sign or | |
6708 | zero extension, omit the subreg and widen the extension. If it | |
6709 | is a regular subreg, we can still get rid of the subreg by not | |
6710 | widening so much, or in fact removing the extension entirely. */ | |
6711 | if ((GET_CODE (tem) == SIGN_EXTEND | |
6712 | || GET_CODE (tem) == ZERO_EXTEND) | |
6713 | && subreg_lowpart_p (x)) | |
6714 | { | |
6715 | if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem)) | |
6716 | || (GET_MODE_SIZE (mode) > | |
6717 | GET_MODE_SIZE (GET_MODE (XEXP (tem, 0))))) | |
b10f2187 R |
6718 | { |
6719 | if (! INTEGRAL_MODE_P (mode)) | |
6720 | break; | |
6721 | tem = gen_rtx_fmt_e (GET_CODE (tem), mode, XEXP (tem, 0)); | |
6722 | } | |
6f28d3e9 RH |
6723 | else |
6724 | tem = gen_lowpart_for_combine (mode, XEXP (tem, 0)); | |
6725 | return tem; | |
6726 | } | |
e9a25f70 | 6727 | break; |
663522cb | 6728 | |
e9a25f70 JL |
6729 | default: |
6730 | break; | |
230d793d RS |
6731 | } |
6732 | ||
6733 | if (new) | |
6734 | { | |
df62f951 | 6735 | x = gen_lowpart_for_combine (mode, new); |
230d793d RS |
6736 | code = GET_CODE (x); |
6737 | } | |
6738 | ||
6739 | /* Now recursively process each operand of this operation. */ | |
6740 | fmt = GET_RTX_FORMAT (code); | |
6741 | for (i = 0; i < GET_RTX_LENGTH (code); i++) | |
6742 | if (fmt[i] == 'e') | |
6743 | { | |
6744 | new = make_compound_operation (XEXP (x, i), next_code); | |
6745 | SUBST (XEXP (x, i), new); | |
6746 | } | |
6747 | ||
6748 | return x; | |
6749 | } | |
6750 | \f | |
6751 | /* Given M, see if it is a value that would select a field of bits |
663522cb KH |
6752 | within an item, but not the entire word. Return -1 if not. |
6753 | Otherwise, return the starting position of the field, where 0 is the | |
6754 | low-order bit. | |
230d793d RS |
6755 | |
6756 | *PLEN is set to the length of the field. */ | |
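/* For example, M == 0x0ff0 returns 4 with *PLEN == 8: M & -M == 0x10, so
   the field starts at bit 4, and (M >> 4) + 1 == 0x100 is a power of two,
   so the field is 8 bits wide.  A mask with two separate runs of ones,
   such as 0x0ff00ff0, fails the second test and returns -1.  */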
6757 | ||
6758 | static int | |
6759 | get_pos_from_mask (m, plen) | |
5f4f0e22 | 6760 | unsigned HOST_WIDE_INT m; |
770ae6cc | 6761 | unsigned HOST_WIDE_INT *plen; |
230d793d RS |
6762 | { |
6763 | /* Get the bit number of the first 1 bit from the right, -1 if none. */ | |
663522cb | 6764 | int pos = exact_log2 (m & -m); |
d3bc8938 | 6765 | int len; |
230d793d RS |
6766 | |
6767 | if (pos < 0) | |
6768 | return -1; | |
6769 | ||
6770 | /* Now shift off the low-order zero bits and see if we have a power of | |
6771 | two minus 1. */ | |
d3bc8938 | 6772 | len = exact_log2 ((m >> pos) + 1); |
230d793d | 6773 | |
d3bc8938 | 6774 | if (len <= 0) |
230d793d RS |
6775 | return -1; |
6776 | ||
d3bc8938 | 6777 | *plen = len; |
230d793d RS |
6778 | return pos; |
6779 | } | |
6780 | \f | |
6139ff20 RK |
6781 | /* See if X can be simplified knowing that we will only refer to it in |
6782 | MODE and will only refer to those bits that are nonzero in MASK. | |
6783 | If other bits are being computed or if masking operations are done | |
6784 | that select a superset of the bits in MASK, they can sometimes be | |
6785 | ignored. | |
6786 | ||
6787 | Return a possibly simplified expression, but always convert X to | |
6788 | MODE. If X is a CONST_INT, AND the CONST_INT with MASK. | |
dfbe1b2f | 6789 | |
da7d8304 | 6790 | Also, if REG is nonzero and X is a register equal in value to REG, |
e3d616e3 RK |
6791 | replace X with REG. |
6792 | ||
6793 | If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK | |
6794 | are all off in X. This is used when X will be complemented, by either | |
180b8e4b | 6795 | NOT, NEG, or XOR. */ |
dfbe1b2f RK |
6796 | |
6797 | static rtx | |
e3d616e3 | 6798 | force_to_mode (x, mode, mask, reg, just_select) |
dfbe1b2f RK |
6799 | rtx x; |
6800 | enum machine_mode mode; | |
6139ff20 | 6801 | unsigned HOST_WIDE_INT mask; |
dfbe1b2f | 6802 | rtx reg; |
e3d616e3 | 6803 | int just_select; |
dfbe1b2f RK |
6804 | { |
6805 | enum rtx_code code = GET_CODE (x); | |
180b8e4b | 6806 | int next_select = just_select || code == XOR || code == NOT || code == NEG; |
ef026f91 RS |
6807 | enum machine_mode op_mode; |
6808 | unsigned HOST_WIDE_INT fuller_mask, nonzero; | |
6139ff20 RK |
6809 | rtx op0, op1, temp; |
6810 | ||
132d2040 RK |
6811 | /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the |
6812 | code below will do the wrong thing since the mode of such an | |
663522cb | 6813 | expression is VOIDmode. |
be3d27d6 CI |
6814 | |
6815 | Also do nothing if X is a CLOBBER; this can happen if X was | |
6816 | the return value from a call to gen_lowpart_for_combine. */ | |
6817 | if (code == CALL || code == ASM_OPERANDS || code == CLOBBER) | |
246e00f2 RK |
6818 | return x; |
6819 | ||
6139ff20 RK |
6820 | /* We want to perform the operation in its present mode unless we know |
6821 | that the operation is valid in MODE, in which case we do the operation | |
6822 | in MODE. */ | |
1c75dfa4 | 6823 | op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x)) |
ef89d648 | 6824 | && have_insn_for (code, mode)) |
ef026f91 | 6825 | ? mode : GET_MODE (x)); |
e3d616e3 | 6826 | |
aa988991 RS |
6827 | /* It is not valid to do a right-shift in a narrower mode |
6828 | than the one it came in with. */ | |
6829 | if ((code == LSHIFTRT || code == ASHIFTRT) | |
6830 | && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x))) | |
6831 | op_mode = GET_MODE (x); | |
ef026f91 RS |
6832 | |
6833 | /* Truncate MASK to fit OP_MODE. */ | |
6834 | if (op_mode) | |
6835 | mask &= GET_MODE_MASK (op_mode); | |
6139ff20 RK |
6836 | |
6837 | /* When we have an arithmetic operation, or a shift whose count we | |
6838 | do not know, we need to assume that all bits up to the highest-order |
6839 | bit in MASK will be needed. This is how we form such a mask. */ | |
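  /* E.g., if MASK is 0x14 its highest-order bit is bit 4, so FULLER_MASK
     is 0x1f: carries out of bits 0-3 can affect the bits selected by
     MASK, so all five low-order bits must be assumed needed.  */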
ef026f91 RS |
6840 | if (op_mode) |
6841 | fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT | |
6842 | ? GET_MODE_MASK (op_mode) | |
729a2125 RK |
6843 | : (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) |
6844 | - 1)); | |
ef026f91 | 6845 | else |
663522cb | 6846 | fuller_mask = ~(HOST_WIDE_INT) 0; |
ef026f91 RS |
6847 | |
6848 | /* Determine what bits of X are guaranteed to be (non)zero. */ | |
6849 | nonzero = nonzero_bits (x, mode); | |
6139ff20 RK |
6850 | |
6851 | /* If none of the bits in X are needed, return a zero. */ | |
e3d616e3 | 6852 | if (! just_select && (nonzero & mask) == 0) |
ccf7aef4 | 6853 | x = const0_rtx; |
dfbe1b2f | 6854 | |
6139ff20 RK |
6855 | /* If X is a CONST_INT, return a new one. Do this here since the |
6856 | test below will fail. */ | |
6857 | if (GET_CODE (x) == CONST_INT) | |
ccf7aef4 RH |
6858 | { |
6859 | if (SCALAR_INT_MODE_P (mode)) | |
6860 | return gen_int_mode (INTVAL (x) & mask, mode); | |
6861 | else | |
6862 | { | |
6863 | x = GEN_INT (INTVAL (x) & mask); | |
6864 | return gen_lowpart_common (mode, x); | |
6865 | } | |
6866 | } | |
dfbe1b2f | 6867 | |
180b8e4b RK |
6868 | /* If X is narrower than MODE and we want all the bits in X's mode, just |
6869 | get X in the proper mode. */ | |
6870 | if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode) | |
663522cb | 6871 | && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0) |
dfbe1b2f RK |
6872 | return gen_lowpart_for_combine (mode, x); |
6873 | ||
71923da7 RK |
6874 | /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in |
6875 | MASK are already known to be zero in X, we need not do anything. */ | |
663522cb | 6876 | if (GET_MODE (x) == mode && code != SUBREG && (~mask & nonzero) == 0) |
6139ff20 RK |
6877 | return x; |
6878 | ||
dfbe1b2f RK |
6879 | switch (code) |
6880 | { | |
6139ff20 RK |
6881 | case CLOBBER: |
6882 | /* If X is a (clobber (const_int)), return it since we know we are | |
0f41302f | 6883 | generating something that won't match. */ |
6139ff20 RK |
6884 | return x; |
6885 | ||
6139ff20 RK |
6886 | case USE: |
6887 | /* X is a (use (mem ..)) that was made from a bit-field extraction that | |
6888 | spanned the boundary of the MEM. If we are now masking so it is | |
6889 | within that boundary, we don't need the USE any more. */ | |
f76b9db2 | 6890 | if (! BITS_BIG_ENDIAN |
663522cb | 6891 | && (mask & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0) |
e3d616e3 | 6892 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
f76b9db2 | 6893 | break; |
6139ff20 | 6894 | |
dfbe1b2f RK |
6895 | case SIGN_EXTEND: |
6896 | case ZERO_EXTEND: | |
6897 | case ZERO_EXTRACT: | |
6898 | case SIGN_EXTRACT: | |
6899 | x = expand_compound_operation (x); | |
6900 | if (GET_CODE (x) != code) | |
e3d616e3 | 6901 | return force_to_mode (x, mode, mask, reg, next_select); |
dfbe1b2f RK |
6902 | break; |
6903 | ||
6904 | case REG: | |
6905 | if (reg != 0 && (rtx_equal_p (get_last_value (reg), x) | |
6906 | || rtx_equal_p (reg, get_last_value (x)))) | |
6907 | x = reg; | |
6908 | break; | |
6909 | ||
dfbe1b2f | 6910 | case SUBREG: |
6139ff20 | 6911 | if (subreg_lowpart_p (x) |
180b8e4b RK |
6912 | /* We can ignore the effect of this SUBREG if it narrows the mode or |
6913 | if the constant masks to zero all the bits the mode doesn't | |
6914 | have. */ | |
6139ff20 RK |
6915 | && ((GET_MODE_SIZE (GET_MODE (x)) |
6916 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
6139ff20 RK |
6917 | || (0 == (mask |
6918 | & GET_MODE_MASK (GET_MODE (x)) | |
663522cb | 6919 | & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))))))) |
e3d616e3 | 6920 | return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select); |
dfbe1b2f RK |
6921 | break; |
6922 | ||
6923 | case AND: | |
6139ff20 RK |
6924 | /* If this is an AND with a constant, convert it into an AND |
6925 | whose constant is the AND of that constant with MASK. If it | |
6926 | remains an AND of MASK, delete it since it is redundant. */ | |
dfbe1b2f | 6927 | |
2ca9ae17 | 6928 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) |
dfbe1b2f | 6929 | { |
6139ff20 RK |
6930 | x = simplify_and_const_int (x, op_mode, XEXP (x, 0), |
6931 | mask & INTVAL (XEXP (x, 1))); | |
dfbe1b2f RK |
6932 | |
6933 | /* If X is still an AND, see if it is an AND with a mask that | |
71923da7 RK |
6934 | is just some low-order bits. If so, and it is MASK, we don't |
6935 | need it. */ | |
dfbe1b2f RK |
6936 | |
6937 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | |
d0c9db30 | 6938 | && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x))) |
3129af4c | 6939 | == mask)) |
dfbe1b2f | 6940 | x = XEXP (x, 0); |
d0ab8cd3 | 6941 | |
71923da7 RK |
6942 | /* If it remains an AND, try making another AND with the bits |
6943 | in the mode mask that aren't in MASK turned on. If the | |
6944 | constant in the AND is wide enough, this might make a | |
6945 | cheaper constant. */ | |
6946 | ||
6947 | if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT | |
2ca9ae17 JW |
6948 | && GET_MODE_MASK (GET_MODE (x)) != mask |
6949 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT) | |
71923da7 RK |
6950 | { |
6951 | HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1)) | |
663522cb | 6952 | | (GET_MODE_MASK (GET_MODE (x)) & ~mask)); |
71923da7 RK |
6953 | int width = GET_MODE_BITSIZE (GET_MODE (x)); |
6954 | rtx y; | |
6955 | ||
6956 | /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative |
6957 | number, sign extend it. */ | |
6958 | if (width > 0 && width < HOST_BITS_PER_WIDE_INT | |
6959 | && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
6960 | cval |= (HOST_WIDE_INT) -1 << width; | |
6961 | ||
6962 | y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval)); | |
6963 | if (rtx_cost (y, SET) < rtx_cost (x, SET)) | |
6964 | x = y; | |
6965 | } | |
6966 | ||
d0ab8cd3 | 6967 | break; |
dfbe1b2f RK |
6968 | } |
6969 | ||
6139ff20 | 6970 | goto binop; |
dfbe1b2f RK |
6971 | |
6972 | case PLUS: | |
6139ff20 RK |
6973 | /* In (and (plus FOO C1) M), if M is a mask that just turns off |
6974 | low-order bits (as in an alignment operation) and FOO is already | |
6975 | aligned to that boundary, mask C1 to that boundary as well. | |
6976 | This may eliminate that PLUS and, later, the AND. */ | |
9fa6d012 TG |
6977 | |
6978 | { | |
770ae6cc | 6979 | unsigned int width = GET_MODE_BITSIZE (mode); |
9fa6d012 TG |
6980 | unsigned HOST_WIDE_INT smask = mask; |
6981 | ||
6982 | /* If MODE is narrower than HOST_WIDE_INT and mask is a negative | |
6983 | number, sign extend it. */ | |
6984 | ||
6985 | if (width < HOST_BITS_PER_WIDE_INT | |
6986 | && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0) | |
6987 | smask |= (HOST_WIDE_INT) -1 << width; | |
6988 | ||
6989 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
563c12b0 RH |
6990 | && exact_log2 (- smask) >= 0 |
6991 | && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0 | |
6992 | && (INTVAL (XEXP (x, 1)) & ~smask) != 0) | |
6993 | return force_to_mode (plus_constant (XEXP (x, 0), | |
6994 | (INTVAL (XEXP (x, 1)) & smask)), | |
6995 | mode, smask, reg, next_select); | |
9fa6d012 | 6996 | } |
6139ff20 | 6997 | |
0f41302f | 6998 | /* ... fall through ... */ |
6139ff20 | 6999 | |
dfbe1b2f | 7000 | case MULT: |
6139ff20 RK |
7001 | /* For PLUS, MINUS and MULT, we need any bits less significant than the |
7002 | most significant bit in MASK since carries from those bits will | |
7003 | affect the bits we are interested in. */ | |
7004 | mask = fuller_mask; | |
7005 | goto binop; | |
7006 | ||
d41638e4 RH |
7007 | case MINUS: |
7008 | /* If X is (minus C Y) where C's least set bit is larger than any bit | |
7009 | in the mask, then we may replace with (neg Y). */ | |
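      /* E.g., under MASK 0xf, (minus 16 Y) becomes (neg Y), since
         16 - Y and -Y agree modulo 16.  */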
7010 | if (GET_CODE (XEXP (x, 0)) == CONST_INT | |
0345195a RK |
7011 | && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0)) |
7012 | & -INTVAL (XEXP (x, 0)))) | |
7013 | > mask)) | |
d41638e4 | 7014 | { |
f1c6ba8b RK |
7015 | x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1), |
7016 | GET_MODE (x)); | |
d41638e4 RH |
7017 | return force_to_mode (x, mode, mask, reg, next_select); |
7018 | } | |
7019 | ||
bc02f8d3 | 7020 | /* Similarly, if C contains every bit in the fuller_mask, then we may |
d41638e4 RH |
7021 | replace with (not Y). */ |
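      /* E.g., with FULLER_MASK 0xff, (minus 255 Y) becomes (not Y),
         since 255 - Y and ~Y agree in the low eight bits.  */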
7022 | if (GET_CODE (XEXP (x, 0)) == CONST_INT | |
bc02f8d3 | 7023 | && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask) |
0345195a | 7024 | == INTVAL (XEXP (x, 0)))) |
d41638e4 | 7025 | { |
f1c6ba8b RK |
7026 | x = simplify_gen_unary (NOT, GET_MODE (x), |
7027 | XEXP (x, 1), GET_MODE (x)); | |
d41638e4 RH |
7028 | return force_to_mode (x, mode, mask, reg, next_select); |
7029 | } | |
7030 | ||
7031 | mask = fuller_mask; | |
7032 | goto binop; | |
7033 | ||
dfbe1b2f RK |
7034 | case IOR: |
7035 | case XOR: | |
6139ff20 RK |
7036 | /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and |
7037 | LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...) | |
7038 | operation which may be a bitfield extraction. Ensure that the | |
7039 | constant we form is not wider than the mode of X. */ | |
7040 | ||
7041 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
7042 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
7043 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
7044 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT | |
7045 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
7046 | && ((INTVAL (XEXP (XEXP (x, 0), 1)) | |
7047 | + floor_log2 (INTVAL (XEXP (x, 1)))) | |
7048 | < GET_MODE_BITSIZE (GET_MODE (x))) | |
7049 | && (INTVAL (XEXP (x, 1)) | |
663522cb | 7050 | & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0) |
6139ff20 RK |
7051 | { |
7052 | temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask) | |
663522cb | 7053 | << INTVAL (XEXP (XEXP (x, 0), 1))); |
6139ff20 RK |
7054 | temp = gen_binary (GET_CODE (x), GET_MODE (x), |
7055 | XEXP (XEXP (x, 0), 0), temp); | |
d4d2b13f RK |
7056 | x = gen_binary (LSHIFTRT, GET_MODE (x), temp, |
7057 | XEXP (XEXP (x, 0), 1)); | |
e3d616e3 | 7058 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
7059 | } |
7060 | ||
7061 | binop: | |
dfbe1b2f | 7062 | /* For most binary operations, just propagate into the operation and |
6d2f8887 | 7063 | change the mode if we have an operation of that mode. */ |
6139ff20 | 7064 | |
e3d616e3 RK |
7065 | op0 = gen_lowpart_for_combine (op_mode, |
7066 | force_to_mode (XEXP (x, 0), mode, mask, | |
7067 | reg, next_select)); | |
7068 | op1 = gen_lowpart_for_combine (op_mode, | |
7069 | force_to_mode (XEXP (x, 1), mode, mask, | |
7070 | reg, next_select)); | |
6139ff20 RK |
7071 | |
7072 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1)) | |
7073 | x = gen_binary (code, op_mode, op0, op1); | |
d0ab8cd3 | 7074 | break; |
dfbe1b2f RK |
7075 | |
7076 | case ASHIFT: | |
dfbe1b2f | 7077 | /* For left shifts, do the same, but just for the first operand. |
f6785026 RK |
7078 | However, we cannot do anything with shifts where we cannot |
7079 | guarantee that the counts are smaller than the size of the mode | |
7080 | because such a count will have a different meaning in a | |
6139ff20 | 7081 | wider mode. */ |
f6785026 RK |
7082 | |
7083 | if (! (GET_CODE (XEXP (x, 1)) == CONST_INT | |
6139ff20 | 7084 | && INTVAL (XEXP (x, 1)) >= 0 |
f6785026 RK |
7085 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode)) |
7086 | && ! (GET_MODE (XEXP (x, 1)) != VOIDmode | |
7087 | && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1))) | |
adb7a1cb | 7088 | < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode)))) |
f6785026 | 7089 | break; |
663522cb | 7090 | |
6139ff20 RK |
7091 | /* If the shift count is a constant and we can do arithmetic in |
7092 | the mode of the shift, refine which bits we need. Otherwise, use the | |
7093 | conservative form of the mask. */ | |
7094 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
7095 | && INTVAL (XEXP (x, 1)) >= 0 | |
7096 | && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode) | |
7097 | && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) | |
7098 | mask >>= INTVAL (XEXP (x, 1)); | |
7099 | else | |
7100 | mask = fuller_mask; | |
7101 | ||
7102 | op0 = gen_lowpart_for_combine (op_mode, | |
7103 | force_to_mode (XEXP (x, 0), op_mode, | |
e3d616e3 | 7104 | mask, reg, next_select)); |
6139ff20 RK |
7105 | |
7106 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) | |
663522cb | 7107 | x = gen_binary (code, op_mode, op0, XEXP (x, 1)); |
d0ab8cd3 | 7108 | break; |
dfbe1b2f RK |
7109 | |
7110 | case LSHIFTRT: | |
1347292b JW |
7111 | /* Here we can only do something if the shift count is a constant, |
7112 | this shift constant is valid for the host, and we can do arithmetic | |
7113 | in OP_MODE. */ | |
dfbe1b2f RK |
7114 | |
7115 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
1347292b | 7116 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT |
6139ff20 | 7117 | && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT) |
d0ab8cd3 | 7118 | { |
6139ff20 | 7119 | rtx inner = XEXP (x, 0); |
402b6c2a | 7120 | unsigned HOST_WIDE_INT inner_mask; |
6139ff20 RK |
7121 | |
7122 | /* Select the mask of the bits we need for the shift operand. */ | |
402b6c2a | 7123 | inner_mask = mask << INTVAL (XEXP (x, 1)); |
d0ab8cd3 | 7124 | |
6139ff20 | 7125 | /* We can only change the mode of the shift if we can do arithmetic |
402b6c2a JW |
7126 | in the mode of the shift and INNER_MASK is no wider than the |
7127 | width of OP_MODE. */ | |
6139ff20 | 7128 | if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT |
663522cb | 7129 | || (inner_mask & ~GET_MODE_MASK (op_mode)) != 0) |
d0ab8cd3 RK |
7130 | op_mode = GET_MODE (x); |
7131 | ||
402b6c2a | 7132 | inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select); |
6139ff20 RK |
7133 | |
7134 | if (GET_MODE (x) != op_mode || inner != XEXP (x, 0)) | |
7135 | x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1)); | |
d0ab8cd3 | 7136 | } |
6139ff20 RK |
7137 | |
7138 | /* If we have (and (lshiftrt FOO C1) C2) where the combination of the | |
7139 | shift and AND produces only copies of the sign bit (C2 is one less | |
7140 | than a power of two), we can do this with just a shift. */ | |
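      /* E.g., in a 32-bit mode, if FOO has 28 sign-bit copies, then in
         (and (lshiftrt FOO 8) 15) the four selected bits are all copies
         of the sign, and (lshiftrt FOO 28) computes the same value.  */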
7141 | ||
7142 | if (GET_CODE (x) == LSHIFTRT | |
7143 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
cfff35c1 JW |
7144 | /* The shift puts one of the sign bit copies in the least significant |
7145 | bit. */ | |
6139ff20 RK |
7146 | && ((INTVAL (XEXP (x, 1)) |
7147 | + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))) | |
7148 | >= GET_MODE_BITSIZE (GET_MODE (x))) | |
7149 | && exact_log2 (mask + 1) >= 0 | |
cfff35c1 JW |
7150 | /* Number of bits left after the shift must be more than the mask |
7151 | needs. */ | |
7152 | && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1)) | |
7153 | <= GET_MODE_BITSIZE (GET_MODE (x))) | |
7154 | /* Must be more sign bit copies than the mask needs. */ | |
770ae6cc | 7155 | && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))) |
6139ff20 RK |
7156 | >= exact_log2 (mask + 1))) |
7157 | x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), | |
7158 | GEN_INT (GET_MODE_BITSIZE (GET_MODE (x)) | |
7159 | - exact_log2 (mask + 1))); | |
fae2db47 JW |
7160 | |
7161 | goto shiftrt; | |
d0ab8cd3 RK |
7162 | |
7163 | case ASHIFTRT: | |
6139ff20 RK |
7164 | /* If we are just looking for the sign bit, we don't need this shift at |
7165 | all, even if it has a variable count. */ | |
9bf22b75 | 7166 | if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT |
e51712db | 7167 | && (mask == ((unsigned HOST_WIDE_INT) 1 |
9bf22b75 | 7168 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
e3d616e3 | 7169 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
6139ff20 RK |
7170 | |
7171 | /* If this is a shift by a constant, get a mask that contains those bits | |
7172 | that are not copies of the sign bit. We then have two cases: If | |
7173 | MASK only includes those bits, this can be a logical shift, which may | |
7174 | allow simplifications. If MASK is a single-bit field not within | |
7175 | those bits, we are requesting a copy of the sign bit and hence can | |
7176 | shift the sign bit to the appropriate location. */ | |
7177 | ||
7178 | if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0 | |
7179 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) | |
7180 | { | |
7181 | int i = -1; | |
7182 | ||
3e92902c | 7183 | /* If the considered data is wider than HOST_WIDE_INT, we can't |
b69960ac RK |
7184 | represent a mask for all its bits in a single scalar. |
7185 | But we only care about the lower bits, so calculate these. */ | |
7186 | ||
6a11342f | 7187 | if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT) |
b69960ac | 7188 | { |
663522cb | 7189 | nonzero = ~(HOST_WIDE_INT) 0; |
b69960ac RK |
7190 | |
7191 | /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) | |
7192 | is the number of bits a full-width mask would have set. | |
7193 | We need only shift if these are fewer than nonzero can | |
7194 | hold. If not, we must keep all bits set in nonzero. */ | |
7195 | ||
7196 | if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1)) | |
7197 | < HOST_BITS_PER_WIDE_INT) | |
7198 | nonzero >>= INTVAL (XEXP (x, 1)) | |
7199 | + HOST_BITS_PER_WIDE_INT | |
7200 | - GET_MODE_BITSIZE (GET_MODE (x)) ; | |
7201 | } | |
7202 | else | |
7203 | { | |
7204 | nonzero = GET_MODE_MASK (GET_MODE (x)); | |
7205 | nonzero >>= INTVAL (XEXP (x, 1)); | |
7206 | } | |
6139ff20 | 7207 | |
663522cb | 7208 | if ((mask & ~nonzero) == 0 |
6139ff20 RK |
7209 | || (i = exact_log2 (mask)) >= 0) |
7210 | { | |
7211 | x = simplify_shift_const | |
7212 | (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0), | |
7213 | i < 0 ? INTVAL (XEXP (x, 1)) | |
7214 | : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i); | |
7215 | ||
7216 | if (GET_CODE (x) != ASHIFTRT) | |
e3d616e3 | 7217 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
7218 | } |
7219 | } | |
7220 | ||
e0a2f705 | 7221 | /* If MASK is 1, convert this to an LSHIFTRT. This can be done |
6139ff20 RK |
7222 | even if the shift count isn't a constant. */ |
7223 | if (mask == 1) | |
7224 | x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1)); | |
7225 | ||
fae2db47 JW |
7226 | shiftrt: |
7227 | ||
7228 | /* If this is a zero- or sign-extension operation that just affects bits | |
4c002f29 RK |
7229 | we don't care about, remove it. Be sure the call above returned |
7230 | something that is still a shift. */ | |
d0ab8cd3 | 7231 | |
4c002f29 RK |
7232 | if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT) |
7233 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
d0ab8cd3 | 7234 | && INTVAL (XEXP (x, 1)) >= 0 |
6139ff20 RK |
7235 | && (INTVAL (XEXP (x, 1)) |
7236 | <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1)) | |
d0ab8cd3 RK |
7237 | && GET_CODE (XEXP (x, 0)) == ASHIFT |
7238 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
7239 | && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1))) | |
e3d616e3 RK |
7240 | return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, |
7241 | reg, next_select); | |
6139ff20 | 7242 | |
dfbe1b2f RK |
7243 | break; |
7244 | ||
6139ff20 RK |
7245 | case ROTATE: |
7246 | case ROTATERT: | |
7247 | /* If the shift count is constant and we can do computations | |
7248 | in the mode of X, compute where the bits we care about are. | |
7249 | Otherwise, we can't do anything. Don't change the mode of | |
7250 | the shift or propagate MODE into the shift, though. */ | |
7251 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
7252 | && INTVAL (XEXP (x, 1)) >= 0) | |
7253 | { | |
7254 | temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE, | |
7255 | GET_MODE (x), GEN_INT (mask), | |
7256 | XEXP (x, 1)); | |
7d171a1e | 7257 | if (temp && GET_CODE (temp) == CONST_INT) |
6139ff20 RK |
7258 | SUBST (XEXP (x, 0), |
7259 | force_to_mode (XEXP (x, 0), GET_MODE (x), | |
e3d616e3 | 7260 | INTVAL (temp), reg, next_select)); |
6139ff20 RK |
7261 | } |
7262 | break; | |
663522cb | 7263 | |
dfbe1b2f | 7264 | case NEG: |
180b8e4b | 7265 | /* If we just want the low-order bit, the NEG isn't needed since it |
3ef42a0c | 7266 | won't change the low-order bit. */ |
180b8e4b RK |
7267 | if (mask == 1) |
7268 | return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select); | |
7269 | ||
6139ff20 RK |
7270 | /* We need any bits less significant than the most significant bit in |
7271 | MASK since carries from those bits will affect the bits we are | |
7272 | interested in. */ | |
7273 | mask = fuller_mask; | |
7274 | goto unop; | |
7275 | ||
dfbe1b2f | 7276 | case NOT: |
6139ff20 RK |
7277 | /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the |
7278 | same as the XOR case above. Ensure that the constant we form is not | |
7279 | wider than the mode of X. */ | |
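      /* E.g., under MASK 0xf, (not (lshiftrt FOO 4)) becomes
         (lshiftrt (xor FOO 0xf0) 4), moving the complement inside
         the shift.  */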
7280 | ||
7281 | if (GET_CODE (XEXP (x, 0)) == LSHIFTRT | |
7282 | && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT | |
7283 | && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0 | |
7284 | && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask) | |
7285 | < GET_MODE_BITSIZE (GET_MODE (x))) | |
7286 | && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT) | |
7287 | { | |
7288 | temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1))); | |
7289 | temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp); | |
7290 | x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1)); | |
7291 | ||
e3d616e3 | 7292 | return force_to_mode (x, mode, mask, reg, next_select); |
6139ff20 RK |
7293 | } |
7294 | ||
f82da7d2 JW |
7295 | /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must |
7296 | use the full mask inside the NOT. */ | |
7297 | mask = fuller_mask; | |
7298 | ||
6139ff20 | 7299 | unop: |
e3d616e3 RK |
7300 | op0 = gen_lowpart_for_combine (op_mode, |
7301 | force_to_mode (XEXP (x, 0), mode, mask, | |
7302 | reg, next_select)); | |
6139ff20 | 7303 | if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0)) |
f1c6ba8b | 7304 | x = simplify_gen_unary (code, op_mode, op0, op_mode); |
6139ff20 RK |
7305 | break; |
7306 | ||
7307 | case NE: | |
7308 | /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included | |
3aceff0d | 7309 | in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero, |
1a6ec070 | 7310 | which is equal to STORE_FLAG_VALUE. */ |
663522cb | 7311 | if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx |
3aceff0d | 7312 | && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0 |
1a6ec070 | 7313 | && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE) |
e3d616e3 | 7314 | return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select); |
6139ff20 | 7315 | |
d0ab8cd3 RK |
7316 | break; |
7317 | ||
7318 | case IF_THEN_ELSE: | |
7319 | /* We have no way of knowing if the IF_THEN_ELSE can itself be | |
7320 | written in a narrower mode. We play it safe and do not do so. */ | |
7321 | ||
7322 | SUBST (XEXP (x, 1), | |
7323 | gen_lowpart_for_combine (GET_MODE (x), | |
7324 | force_to_mode (XEXP (x, 1), mode, | |
e3d616e3 | 7325 | mask, reg, next_select))); |
d0ab8cd3 RK |
7326 | SUBST (XEXP (x, 2), |
7327 | gen_lowpart_for_combine (GET_MODE (x), | |
7328 | force_to_mode (XEXP (x, 2), mode, | |
e3d616e3 | 7329 | mask, reg, next_select))); |
d0ab8cd3 | 7330 | break; |
663522cb | 7331 | |
e9a25f70 JL |
7332 | default: |
7333 | break; | |
dfbe1b2f RK |
7334 | } |
7335 | ||
d0ab8cd3 | 7336 | /* Ensure we return a value of the proper mode. */ |
dfbe1b2f RK |
7337 | return gen_lowpart_for_combine (mode, x); |
7338 | } | |
7339 | \f | |
abe6e52f RK |
7340 | /* Return nonzero if X is an expression that has one of two values depending on |
7341 | whether some other value is zero or nonzero. In that case, we return the | |
7342 | value that is being tested, *PTRUE is set to the value if the rtx being | |
7343 | returned has a nonzero value, and *PFALSE is set to the other alternative. | |
7344 | ||
7345 | If we return zero, we set *PTRUE and *PFALSE to X. */ | |
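/* For example, given (ne A 0) we return A with *PTRUE == const_true_rtx
   and *PFALSE == const0_rtx; given (if_then_else (ne Z 0) X Y) we return
   Z with *PTRUE == X and *PFALSE == Y.  */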
7346 | ||
7347 | static rtx | |
7348 | if_then_else_cond (x, ptrue, pfalse) | |
7349 | rtx x; | |
7350 | rtx *ptrue, *pfalse; | |
7351 | { | |
7352 | enum machine_mode mode = GET_MODE (x); | |
7353 | enum rtx_code code = GET_CODE (x); | |
abe6e52f RK |
7354 | rtx cond0, cond1, true0, true1, false0, false1; |
7355 | unsigned HOST_WIDE_INT nz; | |
7356 | ||
14a774a9 RK |
7357 | /* If we are comparing a value against zero, we are done. */ |
7358 | if ((code == NE || code == EQ) | |
7359 | && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0) | |
7360 | { | |
e8758a3a JL |
7361 | *ptrue = (code == NE) ? const_true_rtx : const0_rtx; |
7362 | *pfalse = (code == NE) ? const0_rtx : const_true_rtx; | |
14a774a9 RK |
7363 | return XEXP (x, 0); |
7364 | } | |
7365 | ||
abe6e52f RK |
7366 | /* If this is a unary operation whose operand has one of two values, apply |
7367 | our opcode to compute those values. */ | |
14a774a9 RK |
7368 | else if (GET_RTX_CLASS (code) == '1' |
7369 | && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0) | |
abe6e52f | 7370 | { |
f1c6ba8b RK |
7371 | *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0))); |
7372 | *pfalse = simplify_gen_unary (code, mode, false0, | |
7373 | GET_MODE (XEXP (x, 0))); | |
abe6e52f RK |
7374 | return cond0; |
7375 | } | |
7376 | ||
3a19aabc | 7377 | /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would |
ddd5a7c1 | 7378 | make can't possibly match and would suppress other optimizations. */ |
3a19aabc RK |
7379 | else if (code == COMPARE) |
7380 | ; | |
7381 | ||
abe6e52f RK |
7382 | /* If this is a binary operation, see if either side has only one of two |
7383 | values. If either one does or if both do and they are conditional on | |
7384 | the same value, compute the new true and false values. */ | |
7385 | else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2' | |
7386 | || GET_RTX_CLASS (code) == '<') | |
7387 | { | |
7388 | cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0); | |
7389 | cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1); | |
7390 | ||
7391 | if ((cond0 != 0 || cond1 != 0) | |
7392 | && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1))) | |
7393 | { | |
987e845a JW |
7394 | /* If if_then_else_cond returned zero, then true/false are the |
7395 | same rtl. We must copy one of them to prevent invalid rtl | |
7396 | sharing. */ | |
7397 | if (cond0 == 0) | |
7398 | true0 = copy_rtx (true0); | |
7399 | else if (cond1 == 0) | |
7400 | true1 = copy_rtx (true1); | |
7401 | ||
abe6e52f RK |
7402 | *ptrue = gen_binary (code, mode, true0, true1); |
7403 | *pfalse = gen_binary (code, mode, false0, false1); | |
7404 | return cond0 ? cond0 : cond1; | |
7405 | } | |
9210df58 | 7406 | |
9210df58 | 7407 | /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the |
da7d8304 | 7408 | operands is zero when the other is nonzero, and vice-versa, |
0802d516 | 7409 | and STORE_FLAG_VALUE is 1 or -1. */ |
9210df58 | 7410 | |
0802d516 RK |
7411 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
7412 | && (code == PLUS || code == IOR || code == XOR || code == MINUS | |
663522cb | 7413 | || code == UMAX) |
9210df58 RK |
7414 | && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT) |
7415 | { | |
7416 | rtx op0 = XEXP (XEXP (x, 0), 1); | |
7417 | rtx op1 = XEXP (XEXP (x, 1), 1); | |
7418 | ||
7419 | cond0 = XEXP (XEXP (x, 0), 0); | |
7420 | cond1 = XEXP (XEXP (x, 1), 0); | |
7421 | ||
7422 | if (GET_RTX_CLASS (GET_CODE (cond0)) == '<' | |
7423 | && GET_RTX_CLASS (GET_CODE (cond1)) == '<' | |
9a915772 | 7424 | && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1) |
9210df58 RK |
7425 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0)) |
7426 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1))) | |
7427 | || ((swap_condition (GET_CODE (cond0)) | |
9a915772 | 7428 | == combine_reversed_comparison_code (cond1)) |
9210df58 RK |
7429 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1)) |
7430 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0)))) | |
7431 | && ! side_effects_p (x)) | |
7432 | { | |
7433 | *ptrue = gen_binary (MULT, mode, op0, const_true_rtx); | |
663522cb KH |
7434 | *pfalse = gen_binary (MULT, mode, |
7435 | (code == MINUS | |
f1c6ba8b RK |
7436 | ? simplify_gen_unary (NEG, mode, op1, |
7437 | mode) | |
7438 | : op1), | |
9210df58 RK |
7439 | const_true_rtx); |
7440 | return cond0; | |
7441 | } | |
7442 | } | |
7443 | ||
eaec9b3d | 7444 | /* Similarly for MULT, AND and UMIN, except that for these the result |
9210df58 | 7445 | is always zero. */ |
0802d516 RK |
7446 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
7447 | && (code == MULT || code == AND || code == UMIN) | |
9210df58 RK |
7448 | && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT) |
7449 | { | |
7450 | cond0 = XEXP (XEXP (x, 0), 0); | |
7451 | cond1 = XEXP (XEXP (x, 1), 0); | |
7452 | ||
7453 | if (GET_RTX_CLASS (GET_CODE (cond0)) == '<' | |
7454 | && GET_RTX_CLASS (GET_CODE (cond1)) == '<' | |
9a915772 | 7455 | && ((GET_CODE (cond0) == combine_reversed_comparison_code (cond1) |
9210df58 RK |
7456 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0)) |
7457 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1))) | |
7458 | || ((swap_condition (GET_CODE (cond0)) | |
9a915772 | 7459 | == combine_reversed_comparison_code (cond1)) |
9210df58 RK |
7460 | && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1)) |
7461 | && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0)))) | |
7462 | && ! side_effects_p (x)) | |
7463 | { | |
7464 | *ptrue = *pfalse = const0_rtx; | |
7465 | return cond0; | |
7466 | } | |
7467 | } | |
abe6e52f RK |
7468 | } |
7469 | ||
7470 | else if (code == IF_THEN_ELSE) | |
7471 | { | |
7472 | /* If we have IF_THEN_ELSE already, extract the condition and | |
7473 | canonicalize it if it is NE or EQ. */ | |
7474 | cond0 = XEXP (x, 0); | |
7475 | *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2); | |
7476 | if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx) | |
7477 | return XEXP (cond0, 0); | |
7478 | else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx) | |
7479 | { | |
7480 | *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1); | |
7481 | return XEXP (cond0, 0); | |
7482 | } | |
7483 | else | |
7484 | return cond0; | |
7485 | } | |
7486 | ||
0631e0bf JH |
7487 | /* If X is a SUBREG, we can narrow both the true and false values |
7488 | of the inner expression, if there is a condition. */ |
7489 | else if (code == SUBREG | |
abe6e52f RK |
7490 | && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x), |
7491 | &true0, &false0))) | |
7492 | { | |
0631e0bf JH |
7493 | *ptrue = simplify_gen_subreg (mode, true0, |
7494 | GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x)); | |
7495 | *pfalse = simplify_gen_subreg (mode, false0, | |
7496 | GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x)); | |
abe6e52f | 7497 | |
abe6e52f RK |
7498 | return cond0; |
7499 | } | |
7500 | ||
7501 | /* If X is a constant, this isn't special and will cause confusions | |
7502 | if we treat it as such. Likewise if it is equivalent to a constant. */ | |
7503 | else if (CONSTANT_P (x) | |
7504 | || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0))) | |
7505 | ; | |
7506 | ||
1f3f36d1 RH |
7507 | /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that |
7508 | will be least confusing to the rest of the compiler. */ | |
7509 | else if (mode == BImode) | |
7510 | { | |
7511 | *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx; | |
7512 | return x; | |
7513 | } | |
7514 | ||
663522cb | 7515 | /* If X is known to be either 0 or -1, those are the true and |
abe6e52f | 7516 | false values when testing X. */ |
49219895 JH |
7517 | else if (x == constm1_rtx || x == const0_rtx |
7518 | || (mode != VOIDmode | |
7519 | && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode))) | |
abe6e52f RK |
7520 | { |
7521 | *ptrue = constm1_rtx, *pfalse = const0_rtx; | |
7522 | return x; | |
7523 | } | |
7524 | ||
7525 | /* Likewise for 0 or a single bit. */ | |
49219895 JH |
7526 | else if (mode != VOIDmode |
7527 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
7528 | && exact_log2 (nz = nonzero_bits (x, mode)) >= 0) | |
abe6e52f | 7529 | { |
578fc63d | 7530 | *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx; |
abe6e52f RK |
7531 | return x; |
7532 | } | |
7533 | ||
7534 | /* Otherwise fail; show no condition with true and false values the same. */ | |
7535 | *ptrue = *pfalse = x; | |
7536 | return 0; | |
7537 | } | |
7538 | \f | |
1a26b032 RK |
7539 | /* Return the value of expression X given the fact that condition COND |
7540 | is known to be true when applied to REG as its first operand and VAL | |
7541 | as its second. X is known to not be shared and so can be modified in | |
7542 | place. | |
7543 | ||
7544 | We only handle the simplest cases, and specifically those cases that | |
7545 | arise with IF_THEN_ELSE expressions. */ | |
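/* Illustrative example, added for clarity (not part of the original source):
   with COND == GE, REG == (reg:SI 65) and VAL == const0_rtx, the expression
   (abs:SI (reg:SI 65)) is known to equal (reg:SI 65) on this path, since
   REG >= 0 holds there, so known_cond returns (reg:SI 65).  */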
7546 | ||
7547 | static rtx | |
7548 | known_cond (x, cond, reg, val) | |
7549 | rtx x; | |
7550 | enum rtx_code cond; | |
7551 | rtx reg, val; | |
7552 | { | |
7553 | enum rtx_code code = GET_CODE (x); | |
f24ad0e4 | 7554 | rtx temp; |
6f7d635c | 7555 | const char *fmt; |
1a26b032 RK |
7556 | int i, j; |
7557 | ||
7558 | if (side_effects_p (x)) | |
7559 | return x; | |
7560 | ||
805f1694 JL |
7561 | /* If either operand of the condition is a floating point value, |
7562 | then we have to avoid collapsing an EQ comparison. */ | |
7563 | if (cond == EQ | |
7564 | && rtx_equal_p (x, reg) | |
7565 | && ! FLOAT_MODE_P (GET_MODE (x)) | |
7566 | && ! FLOAT_MODE_P (GET_MODE (val))) | |
69bc0a1f | 7567 | return val; |
805f1694 | 7568 | |
69bc0a1f | 7569 | if (cond == UNEQ && rtx_equal_p (x, reg)) |
1a26b032 RK |
7570 | return val; |
7571 | ||
7572 | /* If X is (abs REG) and we know something about REG's relationship | |
7573 | with zero, we may be able to simplify this. */ | |
7574 | ||
7575 | if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx) | |
7576 | switch (cond) | |
7577 | { | |
7578 | case GE: case GT: case EQ: | |
7579 | return XEXP (x, 0); | |
7580 | case LT: case LE: | |
f1c6ba8b RK |
7581 | return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)), |
7582 | XEXP (x, 0), | |
7583 | GET_MODE (XEXP (x, 0))); | |
e9a25f70 JL |
7584 | default: |
7585 | break; | |
1a26b032 RK |
7586 | } |
7587 | ||
7588 | /* The only other cases we handle are MIN, MAX, and comparisons if the | |
7589 | operands are the same as REG and VAL. */ | |
7590 | ||
7591 | else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c') | |
7592 | { | |
7593 | if (rtx_equal_p (XEXP (x, 0), val)) | |
7594 | cond = swap_condition (cond), temp = val, val = reg, reg = temp; | |
7595 | ||
7596 | if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val)) | |
7597 | { | |
7598 | if (GET_RTX_CLASS (code) == '<') | |
1eb8759b RH |
7599 | { |
7600 | if (comparison_dominates_p (cond, code)) | |
7601 | return const_true_rtx; | |
1a26b032 | 7602 | |
9a915772 | 7603 | code = combine_reversed_comparison_code (x); |
1eb8759b RH |
7604 | if (code != UNKNOWN |
7605 | && comparison_dominates_p (cond, code)) | |
7606 | return const0_rtx; | |
7607 | else | |
7608 | return x; | |
7609 | } | |
1a26b032 RK |
7610 | else if (code == SMAX || code == SMIN |
7611 | || code == UMIN || code == UMAX) | |
7612 | { | |
7613 | int unsignedp = (code == UMIN || code == UMAX); | |
7614 | ||
ac4cdf40 JE |
7615 | /* Do not reverse the condition when it is NE or EQ. |
7616 | This is because we cannot conclude anything about | |
7617 | the value of 'SMAX (x, y)' when x is not equal to y, | |
23190837 | 7618 | but we can when x equals y. */ |
ac4cdf40 JE |
7619 | if ((code == SMAX || code == UMAX) |
7620 | && ! (cond == EQ || cond == NE)) | |
1a26b032 RK |
7621 | cond = reverse_condition (cond); |
7622 | ||
7623 | switch (cond) | |
7624 | { | |
7625 | case GE: case GT: | |
7626 | return unsignedp ? x : XEXP (x, 1); | |
7627 | case LE: case LT: | |
7628 | return unsignedp ? x : XEXP (x, 0); | |
7629 | case GEU: case GTU: | |
7630 | return unsignedp ? XEXP (x, 1) : x; | |
7631 | case LEU: case LTU: | |
7632 | return unsignedp ? XEXP (x, 0) : x; | |
e9a25f70 JL |
7633 | default: |
7634 | break; | |
1a26b032 RK |
7635 | } |
7636 | } | |
7637 | } | |
7638 | } | |
9a360704 AO |
7639 | else if (code == SUBREG) |
7640 | { | |
7641 | enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x)); | |
7642 | rtx new, r = known_cond (SUBREG_REG (x), cond, reg, val); | |
7643 | ||
7644 | if (SUBREG_REG (x) != r) | |
7645 | { | |
7646 | /* We must simplify subreg here, before we lose track of the | |
7647 | original inner_mode. */ | |
7648 | new = simplify_subreg (GET_MODE (x), r, | |
7649 | inner_mode, SUBREG_BYTE (x)); | |
7650 | if (new) | |
7651 | return new; | |
7652 | else | |
7653 | SUBST (SUBREG_REG (x), r); | |
7654 | } | |
7655 | ||
7656 | return x; | |
7657 | } | |
4161da12 AO |
7658 | /* We don't have to handle SIGN_EXTEND here, because even in the |
7659 | case of replacing something with a modeless CONST_INT, a | |
7660 | CONST_INT is already (supposed to be) a valid sign extension for | |
7661 | its narrower mode, which implies it's already properly | |
7662 | sign-extended for the wider mode. Now, for ZERO_EXTEND, the | |
7663 | story is different. */ | |
7664 | else if (code == ZERO_EXTEND) | |
7665 | { | |
7666 | enum machine_mode inner_mode = GET_MODE (XEXP (x, 0)); | |
7667 | rtx new, r = known_cond (XEXP (x, 0), cond, reg, val); | |
7668 | ||
7669 | if (XEXP (x, 0) != r) | |
7670 | { | |
7671 | /* We must simplify the zero_extend here, before we lose | |
7672 | track of the original inner_mode. */ | |
7673 | new = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x), | |
7674 | r, inner_mode); | |
7675 | if (new) | |
7676 | return new; | |
7677 | else | |
7678 | SUBST (XEXP (x, 0), r); | |
7679 | } | |
7680 | ||
7681 | return x; | |
7682 | } | |
1a26b032 RK |
7683 | |
7684 | fmt = GET_RTX_FORMAT (code); | |
7685 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
7686 | { | |
7687 | if (fmt[i] == 'e') | |
7688 | SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val)); | |
7689 | else if (fmt[i] == 'E') | |
7690 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) | |
7691 | SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j), | |
7692 | cond, reg, val)); | |
7693 | } | |
7694 | ||
7695 | return x; | |
7696 | } | |
7697 | \f | |
e11fa86f RK |
7698 | /* See if X and Y are equal for the purposes of seeing if we can rewrite an |
7699 | assignment as a field assignment. */ | |
7700 | ||
7701 | static int | |
7702 | rtx_equal_for_field_assignment_p (x, y) | |
7703 | rtx x; | |
7704 | rtx y; | |
7705 | { | |
e11fa86f RK |
7706 | if (x == y || rtx_equal_p (x, y)) |
7707 | return 1; | |
7708 | ||
7709 | if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y)) | |
7710 | return 0; | |
7711 | ||
7712 | /* Check for a paradoxical SUBREG of a MEM compared with the MEM. | |
7713 | Note that all SUBREGs of MEM are paradoxical; otherwise they | |
7714 | would have been rewritten. */ | |
7715 | if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG | |
7716 | && GET_CODE (SUBREG_REG (y)) == MEM | |
7717 | && rtx_equal_p (SUBREG_REG (y), | |
7718 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x))) | |
7719 | return 1; | |
7720 | ||
7721 | if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG | |
7722 | && GET_CODE (SUBREG_REG (x)) == MEM | |
7723 | && rtx_equal_p (SUBREG_REG (x), | |
7724 | gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y))) | |
7725 | return 1; | |
7726 | ||
9ec36da5 JL |
7727 | /* We used to see if get_last_value of X and Y were the same but that's |
7728 | not correct. In one direction, we'll cause the assignment to have | |
7729 | the wrong destination and in the other, we'll import a register into this |
7730 | insn that might already have been dead. So fail if none of the |
7731 | above cases are true. */ | |
7732 | return 0; | |
e11fa86f RK |
7733 | } |
7734 | \f | |
230d793d RS |
7735 | /* See if X, a SET operation, can be rewritten as a bit-field assignment. |
7736 | Return that assignment if so. | |
7737 | ||
7738 | We only handle the most common cases. */ | |
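/* Illustrative example, added for clarity (not part of the original source):
   (set (reg:SI 70) (ior:SI (ashift:SI (const_int 1) (reg:SI 71)) (reg:SI 70)))
   sets the single bit of register 70 whose position is given by register 71,
   so it can be rewritten as a store of (const_int 1) into a one-bit
   ZERO_EXTRACT of register 70 at that position.  */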
7739 | ||
7740 | static rtx | |
7741 | make_field_assignment (x) | |
7742 | rtx x; | |
7743 | { | |
7744 | rtx dest = SET_DEST (x); | |
7745 | rtx src = SET_SRC (x); | |
dfbe1b2f | 7746 | rtx assign; |
e11fa86f | 7747 | rtx rhs, lhs; |
5f4f0e22 | 7748 | HOST_WIDE_INT c1; |
770ae6cc RK |
7749 | HOST_WIDE_INT pos; |
7750 | unsigned HOST_WIDE_INT len; | |
dfbe1b2f RK |
7751 | rtx other; |
7752 | enum machine_mode mode; | |
230d793d RS |
7753 | |
7754 | /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is | |
7755 | a clear of a one-bit field. We will have changed it to | |
7756 | (and (rotate (const_int -2) POS) DEST), so check for that. Also check | |
7757 | for a SUBREG. */ | |
7758 | ||
7759 | if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE | |
7760 | && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT | |
7761 | && INTVAL (XEXP (XEXP (src, 0), 0)) == -2 | |
e11fa86f | 7762 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7763 | { |
8999a12e | 7764 | assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
230d793d | 7765 | 1, 1, 1, 0); |
76184def | 7766 | if (assign != 0) |
38a448ca | 7767 | return gen_rtx_SET (VOIDmode, assign, const0_rtx); |
76184def | 7768 | return x; |
230d793d RS |
7769 | } |
7770 | ||
7771 | else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG | |
7772 | && subreg_lowpart_p (XEXP (src, 0)) | |
663522cb | 7773 | && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0))) |
230d793d RS |
7774 | < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0))))) |
7775 | && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE | |
7776 | && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2 | |
e11fa86f | 7777 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7778 | { |
8999a12e | 7779 | assign = make_extraction (VOIDmode, dest, 0, |
230d793d RS |
7780 | XEXP (SUBREG_REG (XEXP (src, 0)), 1), |
7781 | 1, 1, 1, 0); | |
76184def | 7782 | if (assign != 0) |
38a448ca | 7783 | return gen_rtx_SET (VOIDmode, assign, const0_rtx); |
76184def | 7784 | return x; |
230d793d RS |
7785 | } |
7786 | ||
9dd11dcb | 7787 | /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a |
230d793d RS |
7788 | one-bit field. */ |
7789 | else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT | |
7790 | && XEXP (XEXP (src, 0), 0) == const1_rtx | |
e11fa86f | 7791 | && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1))) |
230d793d | 7792 | { |
8999a12e | 7793 | assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1), |
230d793d | 7794 | 1, 1, 1, 0); |
76184def | 7795 | if (assign != 0) |
38a448ca | 7796 | return gen_rtx_SET (VOIDmode, assign, const1_rtx); |
76184def | 7797 | return x; |
230d793d RS |
7798 | } |
7799 | ||
dfbe1b2f | 7800 | /* The other case we handle is assignments into a constant-position |
9dd11dcb | 7801 | field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents |
dfbe1b2f RK |
7802 | a mask that has all one bits except for a group of zero bits and |
7803 | OTHER is known to have zeros where C1 has ones, this is such an | |
7804 | assignment. Compute the position and length from C1. Shift OTHER | |
7805 | to the appropriate position, force it to the required mode, and | |
7806 | make the extraction. Check for the AND in both operands. */ | |
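/* Illustrative example, added for clarity (not part of the original source):
   in (set (reg D) (ior:SI (and:SI (reg D) (const_int -256)) (reg OTHER))),
   C1 is 0xffffff00, so its zero bits describe a field of length 8 at
   position 0.  If OTHER is known to be zero outside its low 8 bits, the SET
   becomes an assignment of OTHER into an 8-bit ZERO_EXTRACT of D.  */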
7807 | ||
9dd11dcb | 7808 | if (GET_CODE (src) != IOR && GET_CODE (src) != XOR) |
e11fa86f RK |
7809 | return x; |
7810 | ||
7811 | rhs = expand_compound_operation (XEXP (src, 0)); | |
7812 | lhs = expand_compound_operation (XEXP (src, 1)); | |
7813 | ||
7814 | if (GET_CODE (rhs) == AND | |
7815 | && GET_CODE (XEXP (rhs, 1)) == CONST_INT | |
7816 | && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest)) | |
7817 | c1 = INTVAL (XEXP (rhs, 1)), other = lhs; | |
7818 | else if (GET_CODE (lhs) == AND | |
7819 | && GET_CODE (XEXP (lhs, 1)) == CONST_INT | |
7820 | && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest)) | |
7821 | c1 = INTVAL (XEXP (lhs, 1)), other = rhs; | |
dfbe1b2f RK |
7822 | else |
7823 | return x; | |
230d793d | 7824 | |
663522cb | 7825 | pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len); |
dfbe1b2f | 7826 | if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest)) |
e5e809f4 JL |
7827 | || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT |
7828 | || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0) | |
dfbe1b2f | 7829 | return x; |
230d793d | 7830 | |
5f4f0e22 | 7831 | assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0); |
76184def DE |
7832 | if (assign == 0) |
7833 | return x; | |
230d793d | 7834 | |
dfbe1b2f RK |
7835 | /* The mode to use for the source is the mode of the assignment, or of |
7836 | what is inside a possible STRICT_LOW_PART. */ | |
663522cb | 7837 | mode = (GET_CODE (assign) == STRICT_LOW_PART |
dfbe1b2f | 7838 | ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign)); |
230d793d | 7839 | |
dfbe1b2f RK |
7840 | /* Shift OTHER right POS places and make it the source, restricting it |
7841 | to the proper length and mode. */ | |
230d793d | 7842 | |
5f4f0e22 CH |
7843 | src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT, |
7844 | GET_MODE (src), other, pos), | |
6139ff20 RK |
7845 | mode, |
7846 | GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT | |
0345195a | 7847 | ? ~(unsigned HOST_WIDE_INT) 0 |
729a2125 | 7848 | : ((unsigned HOST_WIDE_INT) 1 << len) - 1, |
e3d616e3 | 7849 | dest, 0); |
230d793d | 7850 | |
f1c6ba8b | 7851 | return gen_rtx_SET (VOIDmode, assign, src); |
230d793d RS |
7852 | } |
7853 | \f | |
7854 | /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c) | |
7855 | if so. */ | |
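/* Illustrative example, added for clarity (not part of the original source):
   (ior:SI (and:SI A C) (and:SI B C)) has AND as the common inner operation
   and C as the common operand, so it is rewritten as
   (and:SI (ior:SI A B) C).  */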
7856 | ||
7857 | static rtx | |
7858 | apply_distributive_law (x) | |
7859 | rtx x; | |
7860 | { | |
7861 | enum rtx_code code = GET_CODE (x); | |
7862 | rtx lhs, rhs, other; | |
7863 | rtx tem; | |
7864 | enum rtx_code inner_code; | |
7865 | ||
d8a8a4da RS |
7866 | /* Distributivity is not true for floating point. |
7867 | It can change the value. So don't do it. | |
7868 | -- rms and moshier@world.std.com. */ | |
3ad2180a | 7869 | if (FLOAT_MODE_P (GET_MODE (x))) |
d8a8a4da RS |
7870 | return x; |
7871 | ||
230d793d RS |
7872 | /* The outer operation can only be one of the following: */ |
7873 | if (code != IOR && code != AND && code != XOR | |
7874 | && code != PLUS && code != MINUS) | |
7875 | return x; | |
7876 | ||
7877 | lhs = XEXP (x, 0), rhs = XEXP (x, 1); | |
7878 | ||
0f41302f MS |
7879 | /* If either operand is a primitive we can't do anything, so get out |
7880 | fast. */ | |
230d793d | 7881 | if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o' |
dfbe1b2f | 7882 | || GET_RTX_CLASS (GET_CODE (rhs)) == 'o') |
230d793d RS |
7883 | return x; |
7884 | ||
7885 | lhs = expand_compound_operation (lhs); | |
7886 | rhs = expand_compound_operation (rhs); | |
7887 | inner_code = GET_CODE (lhs); | |
7888 | if (inner_code != GET_CODE (rhs)) | |
7889 | return x; | |
7890 | ||
7891 | /* See if the inner and outer operations distribute. */ | |
7892 | switch (inner_code) | |
7893 | { | |
7894 | case LSHIFTRT: | |
7895 | case ASHIFTRT: | |
7896 | case AND: | |
7897 | case IOR: | |
7898 | /* These all distribute except over PLUS. */ | |
7899 | if (code == PLUS || code == MINUS) | |
7900 | return x; | |
7901 | break; | |
7902 | ||
7903 | case MULT: | |
7904 | if (code != PLUS && code != MINUS) | |
7905 | return x; | |
7906 | break; | |
7907 | ||
7908 | case ASHIFT: | |
45620ed4 | 7909 | /* This is also a multiply, so it distributes over everything. */ |
230d793d RS |
7910 | break; |
7911 | ||
7912 | case SUBREG: | |
dfbe1b2f | 7913 | /* Non-paradoxical SUBREGs distribute over all operations, provided |
ddef6bc7 | 7914 | the inner modes and byte offsets are the same, this is an extraction |
2b4bd1bc JW |
7915 | of a low-order part, we don't convert an fp operation to int or |
7916 | vice versa, and we would not be converting a single-word | |
dfbe1b2f | 7917 | operation into a multi-word operation. The latter test is not |
2b4bd1bc | 7918 | required, but it prevents generating unneeded multi-word operations. |
dfbe1b2f RK |
7919 | Some of the previous tests are redundant given the latter test, but |
7920 | are retained because they are required for correctness. | |
7921 | ||
7922 | We produce the result slightly differently in this case. */ | |
7923 | ||
7924 | if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs)) | |
ddef6bc7 | 7925 | || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs) |
dfbe1b2f | 7926 | || ! subreg_lowpart_p (lhs) |
2b4bd1bc JW |
7927 | || (GET_MODE_CLASS (GET_MODE (lhs)) |
7928 | != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs)))) | |
dfbe1b2f | 7929 | || (GET_MODE_SIZE (GET_MODE (lhs)) |
8af24e26 | 7930 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs)))) |
dfbe1b2f | 7931 | || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD) |
230d793d RS |
7932 | return x; |
7933 | ||
7934 | tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)), | |
7935 | SUBREG_REG (lhs), SUBREG_REG (rhs)); | |
7936 | return gen_lowpart_for_combine (GET_MODE (x), tem); | |
7937 | ||
7938 | default: | |
7939 | return x; | |
7940 | } | |
7941 | ||
7942 | /* Set LHS and RHS to the inner operands (A and B in the example | |
7943 | above) and set OTHER to the common operand (C in the example). | |
7944 | There is only one way to do this unless the inner operation is |
7945 | commutative. */ | |
7946 | if (GET_RTX_CLASS (inner_code) == 'c' | |
7947 | && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0))) | |
7948 | other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1); | |
7949 | else if (GET_RTX_CLASS (inner_code) == 'c' | |
7950 | && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1))) | |
7951 | other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0); | |
7952 | else if (GET_RTX_CLASS (inner_code) == 'c' | |
7953 | && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0))) | |
7954 | other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1); | |
7955 | else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1))) | |
7956 | other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0); | |
7957 | else | |
7958 | return x; | |
7959 | ||
7960 | /* Form the new inner operation, seeing if it simplifies first. */ | |
7961 | tem = gen_binary (code, GET_MODE (x), lhs, rhs); | |
7962 | ||
7963 | /* There is one exception to the general way of distributing: | |
7964 | (a | b) ^ (a | c) -> (b ^ c) & ~a */ |
7965 | if (code == XOR && inner_code == IOR) | |
7966 | { | |
7967 | inner_code = AND; | |
f1c6ba8b | 7968 | other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x)); |
230d793d RS |
7969 | } |
7970 | ||
7971 | /* We may be able to continue distributing the result, so call |
7972 | ourselves recursively on the inner operation before forming the | |
7973 | outer operation, which we return. */ | |
7974 | return gen_binary (inner_code, GET_MODE (x), | |
7975 | apply_distributive_law (tem), other); | |
7976 | } | |
7977 | \f | |
7978 | /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done | |
7979 | in MODE. | |
7980 | ||
7981 | Return an equivalent form, if different from X. Otherwise, return X. If | |
7982 | X is zero, we are to always construct the equivalent form. */ | |
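/* Illustrative example, added for clarity (not part of the original source):
   with MODE == SImode, VAROP == (lshiftrt:SI (reg X) (const_int 24)) and
   CONSTOP == 0xff, every bit that can be nonzero in VAROP already lies
   inside the mask, so the AND is redundant and the shift itself is
   returned.  */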
7983 | ||
7984 | static rtx | |
7985 | simplify_and_const_int (x, mode, varop, constop) | |
7986 | rtx x; | |
7987 | enum machine_mode mode; | |
7988 | rtx varop; | |
5f4f0e22 | 7989 | unsigned HOST_WIDE_INT constop; |
230d793d | 7990 | { |
951553af | 7991 | unsigned HOST_WIDE_INT nonzero; |
42301240 | 7992 | int i; |
230d793d | 7993 | |
6139ff20 | 7994 | /* Simplify VAROP knowing that we will be only looking at some of the |
8bc52806 JL |
7995 | bits in it. |
7996 | ||
7997 | Note by passing in CONSTOP, we guarantee that the bits not set in | |
7998 | CONSTOP are not significant and will never be examined. We must | |
7999 | ensure that is the case by explicitly masking out those bits | |
8000 | before returning. */ | |
e3d616e3 | 8001 | varop = force_to_mode (varop, mode, constop, NULL_RTX, 0); |
230d793d | 8002 | |
8bc52806 JL |
8003 | /* If VAROP is a CLOBBER, we will fail so return it. */ |
8004 | if (GET_CODE (varop) == CLOBBER) | |
6139ff20 | 8005 | return varop; |
230d793d | 8006 | |
8bc52806 JL |
8007 | /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP |
8008 | to VAROP and return the new constant. */ | |
8009 | if (GET_CODE (varop) == CONST_INT) | |
8010 | return GEN_INT (trunc_int_for_mode (INTVAL (varop) & constop, mode)); | |
8011 | ||
fc06d7aa RK |
8012 | /* See what bits may be nonzero in VAROP. Unlike the general case of |
8013 | a call to nonzero_bits, here we don't care about bits outside | |
8014 | MODE. */ | |
8015 | ||
8016 | nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode); | |
9fa6d012 | 8017 | |
230d793d | 8018 | /* Turn off all bits in the constant that are known to already be zero. |
951553af | 8019 | Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS |
230d793d RS |
8020 | which is tested below. */ |
8021 | ||
951553af | 8022 | constop &= nonzero; |
230d793d RS |
8023 | |
8024 | /* If we don't have any bits left, return zero. */ | |
8025 | if (constop == 0) | |
8026 | return const0_rtx; | |
8027 | ||
42301240 | 8028 | /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is |
e0a2f705 | 8029 | a power of two, we can replace this with an ASHIFT. */ |
42301240 RK |
8030 | if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1 |
8031 | && (i = exact_log2 (constop)) >= 0) | |
8032 | return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i); | |
663522cb | 8033 | |
6139ff20 RK |
8034 | /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR |
8035 | or XOR, then try to apply the distributive law. This may eliminate | |
8036 | operations if either branch can be simplified because of the AND. | |
8037 | It may also make some cases more complex, but those cases probably | |
8038 | won't match a pattern either with or without this. */ | |
8039 | ||
8040 | if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR) | |
8041 | return | |
8042 | gen_lowpart_for_combine | |
8043 | (mode, | |
8044 | apply_distributive_law | |
8045 | (gen_binary (GET_CODE (varop), GET_MODE (varop), | |
8046 | simplify_and_const_int (NULL_RTX, GET_MODE (varop), | |
8047 | XEXP (varop, 0), constop), | |
8048 | simplify_and_const_int (NULL_RTX, GET_MODE (varop), | |
8049 | XEXP (varop, 1), constop)))); | |
8050 | ||
8deb7514 RH |
8051 | /* If VAROP is PLUS, and the constant is a mask of low bits, distribute |
8052 | the AND and see if one of the operands simplifies to zero. If so, we | |
8053 | may eliminate it. */ | |
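   /* Illustrative example, added for clarity (not part of the original source):
      for (and:SI (plus:SI (reg X) (const_int 16)) (const_int 15)), CONSTOP + 1
      is a power of two; masking each PLUS operand gives (and (reg X) 15) and
      (const_int 0), so the whole expression reduces to
      (and:SI (reg X) (const_int 15)).  */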
8054 | ||
8055 | if (GET_CODE (varop) == PLUS | |
8056 | && exact_log2 (constop + 1) >= 0) | |
8057 | { | |
8058 | rtx o0, o1; | |
8059 | ||
8060 | o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop); | |
8061 | o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop); | |
8062 | if (o0 == const0_rtx) | |
8063 | return o1; | |
8064 | if (o1 == const0_rtx) | |
8065 | return o0; | |
8066 | } | |
8067 | ||
230d793d RS |
8068 | /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG |
8069 | if we already had one (just check for the simplest cases). */ | |
8070 | if (x && GET_CODE (XEXP (x, 0)) == SUBREG | |
8071 | && GET_MODE (XEXP (x, 0)) == mode | |
8072 | && SUBREG_REG (XEXP (x, 0)) == varop) | |
8073 | varop = XEXP (x, 0); | |
8074 | else | |
8075 | varop = gen_lowpart_for_combine (mode, varop); | |
8076 | ||
0f41302f | 8077 | /* If we can't make the SUBREG, try to return what we were given. */ |
230d793d RS |
8078 | if (GET_CODE (varop) == CLOBBER) |
8079 | return x ? x : varop; | |
8080 | ||
8081 | /* If we are only masking insignificant bits, return VAROP. */ | |
951553af | 8082 | if (constop == nonzero) |
230d793d | 8083 | x = varop; |
230d793d RS |
8084 | else |
8085 | { | |
d0c9db30 | 8086 | /* Otherwise, return an AND. */ |
3b5708e7 | 8087 | constop = trunc_int_for_mode (constop, mode); |
d0c9db30 AM |
8088 | /* See how much, if any, of X we can use. */ |
8089 | if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode) | |
8090 | x = gen_binary (AND, mode, varop, GEN_INT (constop)); | |
230d793d | 8091 | |
d0c9db30 AM |
8092 | else |
8093 | { | |
8094 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
8095 | || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop) | |
8096 | SUBST (XEXP (x, 1), GEN_INT (constop)); | |
8097 | ||
8098 | SUBST (XEXP (x, 0), varop); | |
8099 | } | |
230d793d RS |
8100 | } |
8101 | ||
8102 | return x; | |
8103 | } | |
8104 | \f | |
b3728b0e JW |
8105 | /* We let num_sign_bit_copies recur into nonzero_bits as that is useful. |
8106 | We don't let nonzero_bits recur into num_sign_bit_copies, because that | |
8107 | is less useful. We can't allow both, because that results in exponential | |
956d6950 | 8108 | run time recursion. There is a nullstone testcase that triggered |
b3728b0e JW |
8109 | this. This macro avoids accidental uses of num_sign_bit_copies. */ |
8110 | #define num_sign_bit_copies() | |
8111 | ||
da7d8304 | 8112 | /* Given an expression, X, compute which bits in X can be nonzero. |
230d793d RS |
8113 | We don't care about bits outside of those defined in MODE. |
8114 | ||
8115 | For most X this is simply GET_MODE_MASK (MODE), but if X is |
8116 | a shift, AND, or zero_extract, we can do better. */ | |
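/* Illustrative examples, added for clarity (not part of the original source):
   for (and:SI (reg X) (const_int 12)) only bits 2 and 3 can be nonzero, so
   the result is at most 12; for (lshiftrt:SI (reg X) (const_int 28)) the
   shift clears the upper 28 bits, so the result is at most 15.  */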
8117 | ||
5f4f0e22 | 8118 | static unsigned HOST_WIDE_INT |
951553af | 8119 | nonzero_bits (x, mode) |
230d793d RS |
8120 | rtx x; |
8121 | enum machine_mode mode; | |
8122 | { | |
951553af RK |
8123 | unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); |
8124 | unsigned HOST_WIDE_INT inner_nz; | |
230d793d | 8125 | enum rtx_code code; |
770ae6cc | 8126 | unsigned int mode_width = GET_MODE_BITSIZE (mode); |
230d793d RS |
8127 | rtx tem; |
8128 | ||
1c75dfa4 RK |
8129 | /* For floating-point values, assume all bits are needed. */ |
8130 | if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)) | |
8131 | return nonzero; | |
8132 | ||
230d793d RS |
8133 | /* If X is wider than MODE, use its mode instead. */ |
8134 | if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width) | |
8135 | { | |
8136 | mode = GET_MODE (x); | |
951553af | 8137 | nonzero = GET_MODE_MASK (mode); |
230d793d RS |
8138 | mode_width = GET_MODE_BITSIZE (mode); |
8139 | } | |
8140 | ||
5f4f0e22 | 8141 | if (mode_width > HOST_BITS_PER_WIDE_INT) |
230d793d RS |
8142 | /* Our only callers in this case look for single bit values. So |
8143 | just return the mode mask. Those tests will then be false. */ | |
951553af | 8144 | return nonzero; |
230d793d | 8145 | |
8baf60bb | 8146 | #ifndef WORD_REGISTER_OPERATIONS |
c6965c0f | 8147 | /* If MODE is wider than X, but both are a single word for both the host |
663522cb | 8148 | and target machines, we can compute this from which bits of the |
0840fd91 RK |
8149 | object might be nonzero in its own mode, taking into account the fact |
8150 | that on many CISC machines, accessing an object in a wider mode | |
8151 | causes the high-order bits to become undefined. So they are | |
8152 | not known to be zero. */ | |
8153 | ||
8154 | if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode | |
8155 | && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD | |
8156 | && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT | |
c6965c0f | 8157 | && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x))) |
0840fd91 RK |
8158 | { |
8159 | nonzero &= nonzero_bits (x, GET_MODE (x)); | |
663522cb | 8160 | nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)); |
0840fd91 RK |
8161 | return nonzero; |
8162 | } | |
8163 | #endif | |
8164 | ||
230d793d RS |
8165 | code = GET_CODE (x); |
8166 | switch (code) | |
8167 | { | |
8168 | case REG: | |
6dd12198 | 8169 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) |
320dd7a7 RK |
8170 | /* If pointers extend unsigned and this is a pointer in Pmode, say that |
8171 | all the bits above ptr_mode are known to be zero. */ | |
8172 | if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
3502dc9c | 8173 | && REG_POINTER (x)) |
320dd7a7 RK |
8174 | nonzero &= GET_MODE_MASK (ptr_mode); |
8175 | #endif | |
8176 | ||
563c12b0 | 8177 | /* Include declared information about alignment of pointers. */ |
ebbb0a63 RH |
8178 | /* ??? We don't properly preserve REG_POINTER changes across |
8179 | pointer-to-integer casts, so we can't trust it except for | |
8180 | things that we know must be pointers. See execute/960116-1.c. */ | |
8181 | if ((x == stack_pointer_rtx | |
8182 | || x == frame_pointer_rtx | |
8183 | || x == arg_pointer_rtx) | |
8184 | && REGNO_POINTER_ALIGN (REGNO (x))) | |
230d793d | 8185 | { |
563c12b0 RH |
8186 | unsigned HOST_WIDE_INT alignment |
8187 | = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT; | |
230d793d RS |
8188 | |
8189 | #ifdef PUSH_ROUNDING | |
563c12b0 RH |
8190 | /* If PUSH_ROUNDING is defined, it is possible for the |
8191 | stack to be momentarily aligned only to that amount, | |
8192 | so we pick the least alignment. */ | |
8193 | if (x == stack_pointer_rtx && PUSH_ARGS) | |
8194 | alignment = MIN (PUSH_ROUNDING (1), alignment); | |
230d793d RS |
8195 | #endif |
8196 | ||
563c12b0 | 8197 | nonzero &= ~(alignment - 1); |
230d793d | 8198 | } |
230d793d | 8199 | |
55310dad RK |
8200 | /* If X is a register whose nonzero bits value is current, use it. |
8201 | Otherwise, if X is a register whose value we can find, use that | |
8202 | value. Otherwise, use the previously-computed global nonzero bits | |
8203 | for this register. */ | |
8204 | ||
8205 | if (reg_last_set_value[REGNO (x)] != 0 | |
0a0440c9 JJ |
8206 | && (reg_last_set_mode[REGNO (x)] == mode |
8207 | || (GET_MODE_CLASS (reg_last_set_mode[REGNO (x)]) == MODE_INT | |
8208 | && GET_MODE_CLASS (mode) == MODE_INT)) | |
57cf50a4 GRK |
8209 | && (reg_last_set_label[REGNO (x)] == label_tick |
8210 | || (REGNO (x) >= FIRST_PSEUDO_REGISTER | |
8211 | && REG_N_SETS (REGNO (x)) == 1 | |
f6366fc7 | 8212 | && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, |
57cf50a4 | 8213 | REGNO (x)))) |
55310dad | 8214 | && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid) |
563c12b0 | 8215 | return reg_last_set_nonzero_bits[REGNO (x)] & nonzero; |
230d793d RS |
8216 | |
8217 | tem = get_last_value (x); | |
9afa3d54 | 8218 | |
230d793d | 8219 | if (tem) |
9afa3d54 RK |
8220 | { |
8221 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND | |
8222 | /* If X is narrower than MODE and TEM is a non-negative | |
8223 | constant that would appear negative in the mode of X, | |
8224 | sign-extend it for use in reg_nonzero_bits because some | |
8225 | machines (maybe most) will actually do the sign-extension | |
663522cb | 8226 | and this is the conservative approach. |
9afa3d54 RK |
8227 | |
8228 | ??? For 2.5, try to tighten up the MD files in this regard | |
8229 | instead of this kludge. */ | |
8230 | ||
8231 | if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width | |
8232 | && GET_CODE (tem) == CONST_INT | |
8233 | && INTVAL (tem) > 0 | |
8234 | && 0 != (INTVAL (tem) | |
8235 | & ((HOST_WIDE_INT) 1 | |
9e69be8c | 8236 | << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))) |
9afa3d54 RK |
8237 | tem = GEN_INT (INTVAL (tem) |
8238 | | ((HOST_WIDE_INT) (-1) | |
8239 | << GET_MODE_BITSIZE (GET_MODE (x)))); | |
8240 | #endif | |
563c12b0 | 8241 | return nonzero_bits (tem, mode) & nonzero; |
9afa3d54 | 8242 | } |
951553af | 8243 | else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)]) |
7958f3c7 JJ |
8244 | { |
8245 | unsigned HOST_WIDE_INT mask = reg_nonzero_bits[REGNO (x)]; | |
8246 | ||
8247 | if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width) | |
8248 | /* We don't know anything about the upper bits. */ | |
8249 | mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x)); | |
8250 | return nonzero & mask; | |
8251 | } | |
230d793d | 8252 | else |
951553af | 8253 | return nonzero; |
230d793d RS |
8254 | |
8255 | case CONST_INT: | |
9afa3d54 RK |
8256 | #ifdef SHORT_IMMEDIATES_SIGN_EXTEND |
8257 | /* If X is negative in MODE, sign-extend the value. */ | |
9e69be8c RK |
8258 | if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD |
8259 | && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1)))) | |
8260 | return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width)); | |
9afa3d54 RK |
8261 | #endif |
8262 | ||
230d793d RS |
8263 | return INTVAL (x); |
8264 | ||
230d793d | 8265 | case MEM: |
8baf60bb | 8266 | #ifdef LOAD_EXTEND_OP |
230d793d RS |
8267 | /* In many, if not most, RISC machines, reading a byte from memory |
8268 | zeros the rest of the register. Noticing that fact saves a lot | |
8269 | of extra zero-extends. */ | |
8baf60bb RK |
8270 | if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND) |
8271 | nonzero &= GET_MODE_MASK (GET_MODE (x)); | |
230d793d | 8272 | #endif |
8baf60bb | 8273 | break; |
230d793d | 8274 | |
230d793d | 8275 | case EQ: case NE: |
69bc0a1f JH |
8276 | case UNEQ: case LTGT: |
8277 | case GT: case GTU: case UNGT: | |
8278 | case LT: case LTU: case UNLT: | |
8279 | case GE: case GEU: case UNGE: | |
8280 | case LE: case LEU: case UNLE: | |
8281 | case UNORDERED: case ORDERED: | |
3f508eca | 8282 | |
c6965c0f RK |
8283 | /* If this produces an integer result, we know which bits are set. |
8284 | Code here used to clear bits outside the mode of X, but that is | |
8285 | now done above. */ | |
230d793d | 8286 | |
c6965c0f RK |
8287 | if (GET_MODE_CLASS (mode) == MODE_INT |
8288 | && mode_width <= HOST_BITS_PER_WIDE_INT) | |
8289 | nonzero = STORE_FLAG_VALUE; | |
230d793d | 8290 | break; |
230d793d | 8291 | |
230d793d | 8292 | case NEG: |
b3728b0e JW |
8293 | #if 0 |
8294 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
8295 | and num_sign_bit_copies. */ | |
d0ab8cd3 RK |
8296 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) |
8297 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
951553af | 8298 | nonzero = 1; |
b3728b0e | 8299 | #endif |
230d793d RS |
8300 | |
8301 | if (GET_MODE_SIZE (GET_MODE (x)) < mode_width) | |
663522cb | 8302 | nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x))); |
230d793d | 8303 | break; |
d0ab8cd3 RK |
8304 | |
8305 | case ABS: | |
b3728b0e JW |
8306 | #if 0 |
8307 | /* Disabled to avoid exponential mutual recursion between nonzero_bits | |
8308 | and num_sign_bit_copies. */ | |
d0ab8cd3 RK |
8309 | if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x)) |
8310 | == GET_MODE_BITSIZE (GET_MODE (x))) | |
951553af | 8311 | nonzero = 1; |
b3728b0e | 8312 | #endif |
d0ab8cd3 | 8313 | break; |
230d793d RS |
8314 | |
8315 | case TRUNCATE: | |
951553af | 8316 | nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode)); |
230d793d RS |
8317 | break; |
8318 | ||
8319 | case ZERO_EXTEND: | |
951553af | 8320 | nonzero &= nonzero_bits (XEXP (x, 0), mode); |
230d793d | 8321 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) |
951553af | 8322 | nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); |
230d793d RS |
8323 | break; |
8324 | ||
8325 | case SIGN_EXTEND: | |
8326 | /* If the sign bit is known clear, this is the same as ZERO_EXTEND. | |
8327 | Otherwise, show all the bits in the outer mode but not the inner | |
da7d8304 | 8328 | may be nonzero. */ |
951553af | 8329 | inner_nz = nonzero_bits (XEXP (x, 0), mode); |
230d793d RS |
8330 | if (GET_MODE (XEXP (x, 0)) != VOIDmode) |
8331 | { | |
951553af | 8332 | inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0))); |
e3da301d MS |
8333 | if (inner_nz |
8334 | & (((HOST_WIDE_INT) 1 | |
8335 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))) | |
951553af | 8336 | inner_nz |= (GET_MODE_MASK (mode) |
663522cb | 8337 | & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))); |
230d793d RS |
8338 | } |
8339 | ||
951553af | 8340 | nonzero &= inner_nz; |
230d793d RS |
8341 | break; |
8342 | ||
8343 | case AND: | |
951553af RK |
8344 | nonzero &= (nonzero_bits (XEXP (x, 0), mode) |
8345 | & nonzero_bits (XEXP (x, 1), mode)); | |
230d793d RS |
8346 | break; |
8347 | ||
d0ab8cd3 RK |
8348 | case XOR: case IOR: |
8349 | case UMIN: case UMAX: case SMIN: case SMAX: | |
0a0440c9 JJ |
8350 | { |
8351 | unsigned HOST_WIDE_INT nonzero0 = nonzero_bits (XEXP (x, 0), mode); | |
8352 | ||
8353 | /* Don't call nonzero_bits for the second time if it cannot change | |
8354 | anything. */ | |
8355 | if ((nonzero & nonzero0) != nonzero) | |
8356 | nonzero &= (nonzero0 | nonzero_bits (XEXP (x, 1), mode)); | |
8357 | } | |
230d793d RS |
8358 | break; |
8359 | ||
8360 | case PLUS: case MINUS: | |
8361 | case MULT: | |
8362 | case DIV: case UDIV: | |
8363 | case MOD: case UMOD: | |
8364 | /* We can apply the rules of arithmetic to compute the number of | |
8365 | high- and low-order zero bits of these operations. We start by | |
da7d8304 | 8366 | computing the width (position of the highest-order nonzero bit) |
230d793d RS |
8367 | and the number of low-order zero bits for each value. */ |
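      /* Illustrative example, added for clarity (not part of the original
	 source): if each operand can be nonzero only in its low 8 bits, a
	 MULT can be nonzero only in its low 16 bits (width0 + width1) and a
	 PLUS only in its low 9 bits (MAX (width0, width1) + 1).  */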
8368 | { | |
951553af RK |
8369 | unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode); |
8370 | unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode); | |
8371 | int width0 = floor_log2 (nz0) + 1; | |
8372 | int width1 = floor_log2 (nz1) + 1; | |
8373 | int low0 = floor_log2 (nz0 & -nz0); | |
8374 | int low1 = floor_log2 (nz1 & -nz1); | |
318b149c RK |
8375 | HOST_WIDE_INT op0_maybe_minusp |
8376 | = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))); | |
8377 | HOST_WIDE_INT op1_maybe_minusp | |
8378 | = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))); | |
770ae6cc | 8379 | unsigned int result_width = mode_width; |
230d793d RS |
8380 | int result_low = 0; |
8381 | ||
8382 | switch (code) | |
8383 | { | |
8384 | case PLUS: | |
8385 | result_width = MAX (width0, width1) + 1; | |
8386 | result_low = MIN (low0, low1); | |
8387 | break; | |
8388 | case MINUS: | |
8389 | result_low = MIN (low0, low1); | |
8390 | break; | |
8391 | case MULT: | |
8392 | result_width = width0 + width1; | |
8393 | result_low = low0 + low1; | |
8394 | break; | |
8395 | case DIV: | |
2a8bb5cf AH |
8396 | if (width1 == 0) |
8397 | break; | |
230d793d RS |
8398 | if (! op0_maybe_minusp && ! op1_maybe_minusp) |
8399 | result_width = width0; | |
8400 | break; | |
8401 | case UDIV: | |
2a8bb5cf AH |
8402 | if (width1 == 0) |
8403 | break; | |
230d793d RS |
8404 | result_width = width0; |
8405 | break; | |
8406 | case MOD: | |
2a8bb5cf AH |
8407 | if (width1 == 0) |
8408 | break; | |
230d793d RS |
8409 | if (! op0_maybe_minusp && ! op1_maybe_minusp) |
8410 | result_width = MIN (width0, width1); | |
8411 | result_low = MIN (low0, low1); | |
8412 | break; | |
8413 | case UMOD: | |
2a8bb5cf AH |
8414 | if (width1 == 0) |
8415 | break; | |
230d793d RS |
8416 | result_width = MIN (width0, width1); |
8417 | result_low = MIN (low0, low1); | |
8418 | break; | |
e9a25f70 JL |
8419 | default: |
8420 | abort (); | |
230d793d RS |
8421 | } |
8422 | ||
8423 | if (result_width < mode_width) | |
951553af | 8424 | nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1; |
230d793d RS |
8425 | |
8426 | if (result_low > 0) | |
663522cb | 8427 | nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1); |
d1405722 RK |
8428 | |
8429 | #ifdef POINTERS_EXTEND_UNSIGNED | |
8430 | /* If pointers extend unsigned and this is an addition or subtraction | |
8431 | to a pointer in Pmode, all the bits above ptr_mode are known to be | |
8432 | zero. */ | |
6dd12198 | 8433 | if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode |
d1405722 RK |
8434 | && (code == PLUS || code == MINUS) |
8435 | && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0))) | |
8436 | nonzero &= GET_MODE_MASK (ptr_mode); | |
8437 | #endif | |
230d793d RS |
8438 | } |
8439 | break; | |
8440 | ||
8441 | case ZERO_EXTRACT: | |
8442 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
5f4f0e22 | 8443 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
951553af | 8444 | nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1; |
230d793d RS |
8445 | break; |
8446 | ||
8447 | case SUBREG: | |
c3c2cb37 RK |
8448 | /* If this is a SUBREG formed for a promoted variable that has |
8449 | been zero-extended, we know that at least the high-order bits | |
8450 | are zero, though others might be too. */ | |
8451 | ||
7879b81e | 8452 | if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0) |
951553af RK |
8453 | nonzero = (GET_MODE_MASK (GET_MODE (x)) |
8454 | & nonzero_bits (SUBREG_REG (x), GET_MODE (x))); | |
c3c2cb37 | 8455 | |
230d793d RS |
8456 | /* If the inner mode is a single word for both the host and target |
8457 | machines, we can compute this from which bits of the inner | |
951553af | 8458 | object might be nonzero. */ |
230d793d | 8459 | if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD |
5f4f0e22 CH |
8460 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) |
8461 | <= HOST_BITS_PER_WIDE_INT)) | |
230d793d | 8462 | { |
951553af | 8463 | nonzero &= nonzero_bits (SUBREG_REG (x), mode); |
8baf60bb | 8464 | |
b52ce03d R |
8465 | #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP) |
8466 | /* If this is a typical RISC machine, we only have to worry | |
8467 | about the way loads are extended. */ | |
0e603223 RS |
8468 | if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND |
8469 | ? (((nonzero | |
8470 | & (((unsigned HOST_WIDE_INT) 1 | |
8471 | << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))) | |
8472 | != 0)) | |
8473 | : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND) | |
8474 | || GET_CODE (SUBREG_REG (x)) != MEM) | |
230d793d | 8475 | #endif |
b52ce03d R |
8476 | { |
8477 | /* On many CISC machines, accessing an object in a wider mode | |
8478 | causes the high-order bits to become undefined. So they are | |
8479 | not known to be zero. */ | |
8480 | if (GET_MODE_SIZE (GET_MODE (x)) | |
8481 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
8482 | nonzero |= (GET_MODE_MASK (GET_MODE (x)) | |
663522cb | 8483 | & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))); |
b52ce03d | 8484 | } |
230d793d RS |
8485 | } |
8486 | break; | |
8487 | ||
8488 | case ASHIFTRT: | |
8489 | case LSHIFTRT: | |
8490 | case ASHIFT: | |
230d793d | 8491 | case ROTATE: |
951553af | 8492 | /* The nonzero bits are in two classes: any bits within MODE |
230d793d | 8493 | that aren't in GET_MODE (x) are always significant. The rest of the |
951553af | 8494 | nonzero bits are those that are significant in the operand of |
230d793d RS |
8495 | the shift when shifted the appropriate number of bits. This |
8496 | shows that high-order bits are cleared by the right shift and | |
8497 | low-order bits by left shifts. */ | |
8498 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
8499 | && INTVAL (XEXP (x, 1)) >= 0 | |
5f4f0e22 | 8500 | && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT) |
230d793d RS |
8501 | { |
8502 | enum machine_mode inner_mode = GET_MODE (x); | |
770ae6cc | 8503 | unsigned int width = GET_MODE_BITSIZE (inner_mode); |
230d793d | 8504 | int count = INTVAL (XEXP (x, 1)); |
5f4f0e22 | 8505 | unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode); |
951553af RK |
8506 | unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode); |
8507 | unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask; | |
5f4f0e22 | 8508 | unsigned HOST_WIDE_INT outer = 0; |
230d793d RS |
8509 | |
8510 | if (mode_width > width) | |
663522cb | 8511 | outer = (op_nonzero & nonzero & ~mode_mask); |
230d793d RS |
8512 | |
8513 | if (code == LSHIFTRT) | |
8514 | inner >>= count; | |
8515 | else if (code == ASHIFTRT) | |
8516 | { | |
8517 | inner >>= count; | |
8518 | ||
951553af | 8519 | /* If the sign bit may have been nonzero before the shift, we |
230d793d | 8520 | need to mark all the places it could have been copied to |
951553af | 8521 | by the shift as possibly nonzero. */ |
5f4f0e22 CH |
8522 | if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count))) |
8523 | inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count); | |
230d793d | 8524 | } |
45620ed4 | 8525 | else if (code == ASHIFT) |
230d793d RS |
8526 | inner <<= count; |
8527 | else | |
8528 | inner = ((inner << (count % width) | |
8529 | | (inner >> (width - (count % width)))) & mode_mask); | |
8530 | ||
951553af | 8531 | nonzero &= (outer | inner); |
230d793d RS |
8532 | } |
8533 | break; | |
8534 | ||
8535 | case FFS: | |
8536 | /* This is at most the number of bits in the mode. */ | |
951553af | 8537 | nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1; |
230d793d | 8538 | break; |
d0ab8cd3 RK |
8539 | |
8540 | case IF_THEN_ELSE: | |
951553af RK |
8541 | nonzero &= (nonzero_bits (XEXP (x, 1), mode) |
8542 | | nonzero_bits (XEXP (x, 2), mode)); | |
d0ab8cd3 | 8543 | break; |
663522cb | 8544 | |
e9a25f70 JL |
8545 | default: |
8546 | break; | |
230d793d RS |
8547 | } |
8548 | ||
951553af | 8549 | return nonzero; |
230d793d | 8550 | } |
b3728b0e JW |
8551 | |
8552 | /* See the macro definition above. */ | |
8553 | #undef num_sign_bit_copies | |
230d793d | 8554 | \f |
d0ab8cd3 | 8555 | /* Return the number of bits at the high-order end of X that are known to |
5109d49f RK |
8556 | be equal to the sign bit. X will be used in mode MODE; if MODE is |
8557 | VOIDmode, X will be used in its own mode. The returned value will always | |
8558 | be between 1 and the number of bits in MODE. */ | |
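/* Illustrative examples, added for clarity (not part of the original source):
   in SImode, (sign_extend:SI (reg:QI X)) has at least 32 - 8 + 1 = 25 sign
   bit copies, and (ashiftrt:SI (reg:SI X) (const_int 28)) has at least
   1 + 28 = 29, since an arithmetic right shift replicates the sign bit.  */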
d0ab8cd3 | 8559 | |
770ae6cc | 8560 | static unsigned int |
d0ab8cd3 RK |
8561 | num_sign_bit_copies (x, mode) |
8562 | rtx x; | |
8563 | enum machine_mode mode; | |
8564 | { | |
8565 | enum rtx_code code = GET_CODE (x); | |
770ae6cc | 8566 | unsigned int bitwidth; |
d0ab8cd3 | 8567 | int num0, num1, result; |
951553af | 8568 | unsigned HOST_WIDE_INT nonzero; |
d0ab8cd3 RK |
8569 | rtx tem; |
8570 | ||
8571 | /* If we weren't given a mode, use the mode of X. If the mode is still | |
1c75dfa4 RK |
8572 | VOIDmode, we don't know anything. Likewise if one of the modes is |
8573 | floating-point. */ | |
d0ab8cd3 RK |
8574 | |
8575 | if (mode == VOIDmode) | |
8576 | mode = GET_MODE (x); | |
8577 | ||
1c75dfa4 | 8578 | if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))) |
6752e8d2 | 8579 | return 1; |
d0ab8cd3 RK |
8580 | |
8581 | bitwidth = GET_MODE_BITSIZE (mode); | |
8582 | ||
0f41302f | 8583 | /* For a smaller object, just ignore the high bits. */ |
312def2e | 8584 | if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x))) |
770ae6cc RK |
8585 | { |
8586 | num0 = num_sign_bit_copies (x, GET_MODE (x)); | |
8587 | return MAX (1, | |
8588 | num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)); | |
8589 | } | |
663522cb | 8590 | |
e9a25f70 JL |
8591 | if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x))) |
8592 | { | |
0c314d1a RK |
8593 | #ifndef WORD_REGISTER_OPERATIONS |
8594 | /* If this machine does not do all register operations on the entire | |
8595 | register and MODE is wider than the mode of X, we can say nothing | |
8596 | at all about the high-order bits. */ | |
e9a25f70 JL |
8597 | return 1; |
8598 | #else | |
8599 | /* Likewise on machines that do, if the mode of the object is smaller | |
8600 | than a word and loads of that size don't sign extend, we can say | |
8601 | nothing about the high order bits. */ | |
8602 | if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD | |
8603 | #ifdef LOAD_EXTEND_OP | |
8604 | && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND | |
8605 | #endif | |
8606 | ) | |
8607 | return 1; | |
0c314d1a | 8608 | #endif |
e9a25f70 | 8609 | } |
0c314d1a | 8610 | |
d0ab8cd3 RK |
8611 | switch (code) |
8612 | { | |
8613 | case REG: | |
55310dad | 8614 | |
6dd12198 | 8615 | #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend) |
ff0dbdd1 RK |
8616 | /* If pointers extend signed and this is a pointer in Pmode, say that |
8617 | all the bits above ptr_mode are known to be sign bit copies. */ | |
8618 | if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode | |
3502dc9c | 8619 | && REG_POINTER (x)) |
ff0dbdd1 RK |
8620 | return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1; |
8621 | #endif | |
8622 | ||
55310dad RK |
8623 | if (reg_last_set_value[REGNO (x)] != 0 |
8624 | && reg_last_set_mode[REGNO (x)] == mode | |
57cf50a4 GRK |
8625 | && (reg_last_set_label[REGNO (x)] == label_tick |
8626 | || (REGNO (x) >= FIRST_PSEUDO_REGISTER | |
8627 | && REG_N_SETS (REGNO (x)) == 1 | |
f6366fc7 | 8628 | && ! REGNO_REG_SET_P (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, |
57cf50a4 | 8629 | REGNO (x)))) |
55310dad RK |
8630 | && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid) |
8631 | return reg_last_set_sign_bit_copies[REGNO (x)]; | |
d0ab8cd3 | 8632 | |
663522cb | 8633 | tem = get_last_value (x); |
d0ab8cd3 RK |
8634 | if (tem != 0) |
8635 | return num_sign_bit_copies (tem, mode); | |
55310dad | 8636 | |
7958f3c7 JJ |
8637 | if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0 |
8638 | && GET_MODE_BITSIZE (GET_MODE (x)) == bitwidth) | |
55310dad | 8639 | return reg_sign_bit_copies[REGNO (x)]; |
d0ab8cd3 RK |
8640 | break; |
8641 | ||
457816e2 | 8642 | case MEM: |
8baf60bb | 8643 | #ifdef LOAD_EXTEND_OP |
457816e2 | 8644 | /* Some RISC machines sign-extend all loads of smaller than a word. */ |
8baf60bb | 8645 | if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND) |
770ae6cc RK |
8646 | return MAX (1, ((int) bitwidth |
8647 | - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1)); | |
457816e2 | 8648 | #endif |
8baf60bb | 8649 | break; |
457816e2 | 8650 | |
d0ab8cd3 RK |
8651 | case CONST_INT: |
8652 | /* If the constant is negative, take its 1's complement and remask. | |
8653 | Then see how many zero bits we have. */ | |
951553af | 8654 | nonzero = INTVAL (x) & GET_MODE_MASK (mode); |
ac49a949 | 8655 | if (bitwidth <= HOST_BITS_PER_WIDE_INT |
951553af | 8656 | && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) |
663522cb | 8657 | nonzero = (~nonzero) & GET_MODE_MASK (mode); |
d0ab8cd3 | 8658 | |
951553af | 8659 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); |
d0ab8cd3 RK |
8660 | |
8661 | case SUBREG: | |
c3c2cb37 RK |
8662 | /* If this is a SUBREG for a promoted object that is sign-extended |
8663 | and we are looking at it in a wider mode, we know that at least the | |
8664 | high-order bits are known to be sign bit copies. */ | |
8665 | ||
8666 | if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x)) | |
770ae6cc RK |
8667 | { |
8668 | num0 = num_sign_bit_copies (SUBREG_REG (x), mode); | |
8669 | return MAX ((int) bitwidth | |
8670 | - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1, | |
8671 | num0); | |
8672 | } | |
663522cb | 8673 | |
0f41302f | 8674 | /* For a smaller object, just ignore the high bits. */ |
d0ab8cd3 RK |
8675 | if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))) |
8676 | { | |
8677 | num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode); | |
8678 | return MAX (1, (num0 | |
770ae6cc RK |
8679 | - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) |
8680 | - bitwidth))); | |
d0ab8cd3 | 8681 | } |
457816e2 | 8682 | |
8baf60bb | 8683 | #ifdef WORD_REGISTER_OPERATIONS |
2aec5b7a | 8684 | #ifdef LOAD_EXTEND_OP |
8baf60bb RK |
8685 | /* For paradoxical SUBREGs on machines where all register operations |
8686 | affect the entire register, just look inside. Note that we are | |
8687 | passing MODE to the recursive call, so the number of sign bit copies | |
8688 | will remain relative to that mode, not the inner mode. */ | |
457816e2 | 8689 | |
2aec5b7a JW |
8690 | /* This works only if loads sign extend. Otherwise, if we get a |
8691 | reload for the inner part, it may be loaded from the stack, and | |
8692 | then we lose all sign bit copies that existed before the store | |
8693 | to the stack. */ | |
8694 | ||
8695 | if ((GET_MODE_SIZE (GET_MODE (x)) | |
8696 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
0e603223 RS |
8697 | && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND |
8698 | && GET_CODE (SUBREG_REG (x)) == MEM) | |
457816e2 | 8699 | return num_sign_bit_copies (SUBREG_REG (x), mode); |
2aec5b7a | 8700 | #endif |
457816e2 | 8701 | #endif |
d0ab8cd3 RK |
8702 | break; |
8703 | ||
8704 | case SIGN_EXTRACT: | |
8705 | if (GET_CODE (XEXP (x, 1)) == CONST_INT) | |
770ae6cc | 8706 | return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1))); |
d0ab8cd3 RK |
8707 | break; |
8708 | ||
663522cb | 8709 | case SIGN_EXTEND: |
d0ab8cd3 RK |
8710 | return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) |
8711 | + num_sign_bit_copies (XEXP (x, 0), VOIDmode)); | |
8712 | ||
8713 | case TRUNCATE: | |
0f41302f | 8714 | /* For a smaller object, just ignore the high bits. */ |
d0ab8cd3 | 8715 | num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode); |
770ae6cc RK |
8716 | return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) |
8717 | - bitwidth))); | |
d0ab8cd3 RK |
8718 | |
8719 | case NOT: | |
8720 | return num_sign_bit_copies (XEXP (x, 0), mode); | |
8721 | ||
8722 | case ROTATE: case ROTATERT: | |
8723 | /* If we are rotating left by a number of bits less than the number | |
8724 | of sign bit copies, we can just subtract that amount from the | |
8725 | number. */ | |
8726 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
ae0ed63a JM |
8727 | && INTVAL (XEXP (x, 1)) >= 0 |
8728 | && INTVAL (XEXP (x, 1)) < (int) bitwidth) | |
d0ab8cd3 RK |
8729 | { |
8730 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8731 | return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1)) | |
770ae6cc | 8732 | : (int) bitwidth - INTVAL (XEXP (x, 1)))); |
d0ab8cd3 RK |
8733 | } |
8734 | break; | |
8735 | ||
8736 | case NEG: | |
8737 | /* In general, this subtracts one sign bit copy. But if the value | |
8738 | is known to be positive, the number of sign bit copies is the | |
951553af RK |
8739 | same as that of the input. Finally, if the input has just one bit |
8740 | that might be nonzero, all the bits are copies of the sign bit. */ | |
70186b34 BS |
8741 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); |
8742 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
8743 | return num0 > 1 ? num0 - 1 : 1; | |
8744 | ||
951553af RK |
8745 | nonzero = nonzero_bits (XEXP (x, 0), mode); |
8746 | if (nonzero == 1) | |
d0ab8cd3 RK |
8747 | return bitwidth; |
8748 | ||
d0ab8cd3 | 8749 | if (num0 > 1 |
951553af | 8750 | && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero)) |
d0ab8cd3 RK |
8751 | num0--; |
8752 | ||
8753 | return num0; | |
8754 | ||
8755 | case IOR: case AND: case XOR: | |
8756 | case SMIN: case SMAX: case UMIN: case UMAX: | |
8757 | /* Logical operations will preserve the number of sign-bit copies. | |
8758 | MIN and MAX operations always return one of the operands. */ | |
8759 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8760 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
8761 | return MIN (num0, num1); | |
8762 | ||
8763 | case PLUS: case MINUS: | |
8764 | /* For addition and subtraction, we can have a 1-bit carry. However, | |
8765 | if we are subtracting 1 from a positive number, there will not | |
8766 | be such a carry. Furthermore, if the positive number is known to | |
8767 | be 0 or 1, we know the result is either -1 or 0. */ | |
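      /* Illustrative example, added for clarity (not part of the original
	 source): if both operands of a PLUS have at least 8 sign bit copies
	 in SImode, the sum still has at least 7, because the carry can
	 invalidate at most one of them.  */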
8768 | ||
3e3ea975 | 8769 | if (code == PLUS && XEXP (x, 1) == constm1_rtx |
9295e6af | 8770 | && bitwidth <= HOST_BITS_PER_WIDE_INT) |
d0ab8cd3 | 8771 | { |
951553af RK |
8772 | nonzero = nonzero_bits (XEXP (x, 0), mode); |
8773 | if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0) | |
8774 | return (nonzero == 1 || nonzero == 0 ? bitwidth | |
8775 | : bitwidth - floor_log2 (nonzero) - 1); | |
d0ab8cd3 RK |
8776 | } |
8777 | ||
8778 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8779 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
d1405722 RK |
8780 | result = MAX (1, MIN (num0, num1) - 1); |
8781 | ||
8782 | #ifdef POINTERS_EXTEND_UNSIGNED | |
8783 | /* If pointers extend signed and this is an addition or subtraction | |
8784 | to a pointer in Pmode, all the bits above ptr_mode are known to be | |
8785 | sign bit copies. */ | |
8786 | if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode | |
8787 | && (code == PLUS || code == MINUS) | |
8788 | && GET_CODE (XEXP (x, 0)) == REG && REG_POINTER (XEXP (x, 0))) | |
505ddab6 KH |
8789 | result = MAX ((int) (GET_MODE_BITSIZE (Pmode) |
8790 | - GET_MODE_BITSIZE (ptr_mode) + 1), | |
d1405722 RK |
8791 | result); |
8792 | #endif | |
8793 | return result; | |
663522cb | 8794 | |
d0ab8cd3 RK |
8795 | case MULT: |
8796 | /* The number of bits of the product is the sum of the number of | |
8797 | bits of both terms. However, unless one of the terms is known |
8798 | to be positive, we must allow for an additional bit since negating | |
8799 | a negative number can remove one sign bit copy. */ | |
8800 | ||
8801 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8802 | num1 = num_sign_bit_copies (XEXP (x, 1), mode); | |
8803 | ||
8804 | result = bitwidth - (bitwidth - num0) - (bitwidth - num1); | |
8805 | if (result > 0 | |
70186b34 BS |
8806 | && (bitwidth > HOST_BITS_PER_WIDE_INT |
8807 | || (((nonzero_bits (XEXP (x, 0), mode) | |
8808 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
8809 | && ((nonzero_bits (XEXP (x, 1), mode) | |
8810 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)))) | |
d0ab8cd3 RK |
8811 | result--; |
8812 | ||
8813 | return MAX (1, result); | |
8814 | ||
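Editorial aside, a rough check that is not part of combine.c: 8-bit operands with 5 sign-bit copies (4 significant bits) multiply into at most 8 significant bits, and (-8) * (-8) = 64 is the case that forces the extra bit when neither operand is known positive.

/* Illustration only: product range of 4-significant-bit operands.  */
#include <assert.h>

int
main (void)
{
  for (int a = -8; a <= 7; a++)
    for (int b = -8; b <= 7; b++)
      assert (a * b >= -64 && a * b <= 64);   /* 64 needs the extra bit */
  return 0;
}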
8815 | case UDIV: | |
70186b34 BS |
8816 | /* The result must be <= the first operand. If the first operand |
8817 | has the high bit set, we know nothing about the number of sign | |
8818 | bit copies. */ | |
8819 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
8820 | return 1; | |
8821 | else if ((nonzero_bits (XEXP (x, 0), mode) | |
8822 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
8823 | return 1; | |
8824 | else | |
8825 | return num_sign_bit_copies (XEXP (x, 0), mode); | |
663522cb | 8826 | |
d0ab8cd3 | 8827 | case UMOD: |
eaec9b3d | 8828 | /* The result must be <= the second operand. */ |
d0ab8cd3 RK |
8829 | return num_sign_bit_copies (XEXP (x, 1), mode); |
8830 | ||
8831 | case DIV: | |
8832 | /* Similar to unsigned division, except that we have to worry about | |
8833 | the case where the divisor is negative, in which case we have | |
8834 | to add 1. */ | |
8835 | result = num_sign_bit_copies (XEXP (x, 0), mode); | |
8836 | if (result > 1 | |
70186b34 BS |
8837 | && (bitwidth > HOST_BITS_PER_WIDE_INT |
8838 | || (nonzero_bits (XEXP (x, 1), mode) | |
8839 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
8840 | result--; | |
d0ab8cd3 RK |
8841 | |
8842 | return result; | |
8843 | ||
8844 | case MOD: | |
8845 | result = num_sign_bit_copies (XEXP (x, 1), mode); | |
8846 | if (result > 1 | |
70186b34 BS |
8847 | && (bitwidth > HOST_BITS_PER_WIDE_INT |
8848 | || (nonzero_bits (XEXP (x, 1), mode) | |
8849 | & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)) | |
8850 | result--; | |
d0ab8cd3 RK |
8851 | |
8852 | return result; | |
8853 | ||
8854 | case ASHIFTRT: | |
8855 | /* Shifts by a constant add to the number of bits equal to the | |
8856 | sign bit. */ | |
8857 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8858 | if (GET_CODE (XEXP (x, 1)) == CONST_INT | |
8859 | && INTVAL (XEXP (x, 1)) > 0) | |
ae0ed63a | 8860 | num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1))); |
d0ab8cd3 RK |
8861 | |
8862 | return num0; | |
8863 | ||
8864 | case ASHIFT: | |
d0ab8cd3 RK |
8865 | /* Left shifts destroy copies. */ |
8866 | if (GET_CODE (XEXP (x, 1)) != CONST_INT | |
8867 | || INTVAL (XEXP (x, 1)) < 0 | |
ae0ed63a | 8868 | || INTVAL (XEXP (x, 1)) >= (int) bitwidth) |
d0ab8cd3 RK |
8869 | return 1; |
8870 | ||
8871 | num0 = num_sign_bit_copies (XEXP (x, 0), mode); | |
8872 | return MAX (1, num0 - INTVAL (XEXP (x, 1))); | |
8873 | ||
8874 | case IF_THEN_ELSE: | |
8875 | num0 = num_sign_bit_copies (XEXP (x, 1), mode); | |
8876 | num1 = num_sign_bit_copies (XEXP (x, 2), mode); | |
8877 | return MIN (num0, num1); | |
8878 | ||
d0ab8cd3 | 8879 | case EQ: case NE: case GE: case GT: case LE: case LT: |
69bc0a1f | 8880 | case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT: |
d0ab8cd3 | 8881 | case GEU: case GTU: case LEU: case LTU: |
69bc0a1f JH |
8882 | case UNORDERED: case ORDERED: |
8883 | /* If the constant is negative, take its 1's complement and remask. | |
8884 | Then see how many zero bits we have. */ | |
8885 | nonzero = STORE_FLAG_VALUE; | |
8886 | if (bitwidth <= HOST_BITS_PER_WIDE_INT | |
8887 | && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0) | |
8888 | nonzero = (~nonzero) & GET_MODE_MASK (mode); | |
8889 | ||
8890 | return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1); | |
e9a25f70 | 8891 | break; |
663522cb | 8892 | |
e9a25f70 JL |
8893 | default: |
8894 | break; | |
d0ab8cd3 RK |
8895 | } |
8896 | ||
8897 | /* If we haven't been able to figure it out by one of the above rules, | |
8898 | see if some of the high-order bits are known to be zero. If so, | |
ac49a949 RS |
8899 | count those bits and return one less than that amount. If we can't |
8900 | safely compute the mask for this mode, just return 1. */ | |
8901 | ||
8902 | if (bitwidth > HOST_BITS_PER_WIDE_INT) | |
6752e8d2 | 8903 | return 1; |
d0ab8cd3 | 8904 | |
951553af | 8905 | nonzero = nonzero_bits (x, mode); |
df6f4086 | 8906 | return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) |
951553af | 8907 | ? 1 : bitwidth - floor_log2 (nonzero) - 1); |
d0ab8cd3 RK |
8908 | } |
8909 | \f | |
1a26b032 RK |
8910 | /* Return the number of "extended" bits there are in X, when interpreted |
8911 | as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For | |
8912 | unsigned quantities, this is the number of high-order zero bits. | |
8913 | For signed quantities, this is the number of copies of the sign bit | |
8914 | minus 1.  In both cases, this function returns the number of "spare" |
8915 | bits. For example, if two quantities for which this function returns | |
8916 | at least 1 are added, the addition is known not to overflow. | |
8917 | ||
8918 | This function will always return 0 unless called during combine, which | |
8919 | implies that it must be called from a define_split. */ | |
8920 | ||
770ae6cc | 8921 | unsigned int |
1a26b032 RK |
8922 | extended_count (x, mode, unsignedp) |
8923 | rtx x; | |
8924 | enum machine_mode mode; | |
8925 | int unsignedp; | |
8926 | { | |
951553af | 8927 | if (nonzero_sign_valid == 0) |
1a26b032 RK |
8928 | return 0; |
8929 | ||
8930 | return (unsignedp | |
ac49a949 | 8931 | ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT |
26c34780 RS |
8932 | ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1 |
8933 | - floor_log2 (nonzero_bits (x, mode))) | |
770ae6cc | 8934 | : 0) |
1a26b032 RK |
8935 | : num_sign_bit_copies (x, mode) - 1); |
8936 | } | |
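Editorial aside, not part of combine.c: the overflow guarantee in the comment above can be checked concretely for 8-bit signed values.

/* Illustration only: a value with at least one "spare" bit lies in
   [-64, 63], so the sum of two such values stays within the 8-bit
   range [-128, 127] and cannot overflow.  */
#include <assert.h>

int
main (void)
{
  for (int a = -64; a <= 63; a++)
    for (int b = -64; b <= 63; b++)
      assert (a + b >= -128 && a + b <= 127);
  return 0;
}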
8937 | \f | |
230d793d RS |
8938 | /* This function is called from `simplify_shift_const' to merge two |
8939 | outer operations. Specifically, we have already found that we need | |
8940 | to perform operation *POP0 with constant *PCONST0 at the outermost | |
8941 | position. We would now like to also perform OP1 with constant CONST1 | |
8942 | (with *POP0 being done last). | |
8943 | ||
8944 | Return 1 if we can do the operation and update *POP0 and *PCONST0 with | |
663522cb | 8945 | the resulting operation. *PCOMP_P is set to 1 if we would need to |
230d793d RS |
8946 | complement the innermost operand, otherwise it is unchanged. |
8947 | ||
8948 | MODE is the mode in which the operation will be done. No bits outside | |
8949 | the width of this mode matter. It is assumed that the width of this mode | |
5f4f0e22 | 8950 | is smaller than or equal to HOST_BITS_PER_WIDE_INT. |
230d793d RS |
8951 | |
8952 | If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS, | |
8953 | IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper | |
8954 | result is simply *PCONST0. | |
8955 | ||
8956 | If the resulting operation cannot be expressed as one operation, we | |
8957 | return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */ | |
8958 | ||
8959 | static int | |
8960 | merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p) | |
8961 | enum rtx_code *pop0; | |
5f4f0e22 | 8962 | HOST_WIDE_INT *pconst0; |
230d793d | 8963 | enum rtx_code op1; |
5f4f0e22 | 8964 | HOST_WIDE_INT const1; |
230d793d RS |
8965 | enum machine_mode mode; |
8966 | int *pcomp_p; | |
8967 | { | |
8968 | enum rtx_code op0 = *pop0; | |
5f4f0e22 | 8969 | HOST_WIDE_INT const0 = *pconst0; |
230d793d RS |
8970 | |
8971 | const0 &= GET_MODE_MASK (mode); | |
8972 | const1 &= GET_MODE_MASK (mode); | |
8973 | ||
8974 | /* If OP0 is an AND, clear unimportant bits in CONST1. */ | |
8975 | if (op0 == AND) | |
8976 | const1 &= const0; | |
8977 | ||
8978 | /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or | |
8979 | if OP0 is SET. */ | |
8980 | ||
8981 | if (op1 == NIL || op0 == SET) | |
8982 | return 1; | |
8983 | ||
8984 | else if (op0 == NIL) | |
8985 | op0 = op1, const0 = const1; | |
8986 | ||
8987 | else if (op0 == op1) | |
8988 | { | |
8989 | switch (op0) | |
8990 | { | |
8991 | case AND: | |
8992 | const0 &= const1; | |
8993 | break; | |
8994 | case IOR: | |
8995 | const0 |= const1; | |
8996 | break; | |
8997 | case XOR: | |
8998 | const0 ^= const1; | |
8999 | break; | |
9000 | case PLUS: | |
9001 | const0 += const1; | |
9002 | break; | |
9003 | case NEG: | |
9004 | op0 = NIL; | |
9005 | break; | |
e9a25f70 JL |
9006 | default: |
9007 | break; | |
230d793d RS |
9008 | } |
9009 | } | |
9010 | ||
9011 | /* Otherwise, if either is a PLUS or NEG, we can't do anything. */ | |
9012 | else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG) | |
9013 | return 0; | |
9014 | ||
9015 | /* If the two constants aren't the same, we can't do anything. The | |
9016 | remaining six cases can all be done. */ | |
9017 | else if (const0 != const1) | |
9018 | return 0; | |
9019 | ||
9020 | else | |
9021 | switch (op0) | |
9022 | { | |
9023 | case IOR: | |
9024 | if (op1 == AND) | |
9025 | /* (a & b) | b == b */ | |
9026 | op0 = SET; | |
9027 | else /* op1 == XOR */ | |
9028 | /* (a ^ b) | b == a | b */ | |
b729186a | 9029 | {;} |
230d793d RS |
9030 | break; |
9031 | ||
9032 | case XOR: | |
9033 | if (op1 == AND) | |
9034 | /* (a & b) ^ b == (~a) & b */ | |
9035 | op0 = AND, *pcomp_p = 1; | |
9036 | else /* op1 == IOR */ | |
9037 | /* (a | b) ^ b == a & ~b */ | |
663522cb | 9038 | op0 = AND, *pconst0 = ~const0; |
230d793d RS |
9039 | break; |
9040 | ||
9041 | case AND: | |
9042 | if (op1 == IOR) | |
9043 | /* (a | b) & b == b */ | |
9044 | op0 = SET; | |
9045 | else /* op1 == XOR */ | |
9046 | /* (a ^ b) & b == (~a) & b */ |
9047 | *pcomp_p = 1; | |
9048 | break; | |
e9a25f70 JL |
9049 | default: |
9050 | break; | |
230d793d RS |
9051 | } |
9052 | ||
9053 | /* Check for NO-OP cases. */ | |
9054 | const0 &= GET_MODE_MASK (mode); | |
9055 | if (const0 == 0 | |
9056 | && (op0 == IOR || op0 == XOR || op0 == PLUS)) | |
9057 | op0 = NIL; | |
9058 | else if (const0 == 0 && op0 == AND) | |
9059 | op0 = SET; | |
e51712db KG |
9060 | else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode) |
9061 | && op0 == AND) | |
230d793d RS |
9062 | op0 = NIL; |
9063 | ||
7e4ce834 RH |
9064 | /* ??? Slightly redundant with the above mask, but not entirely. |
9065 | Moving this above means we'd have to sign-extend the mode mask | |
9066 | for the final test. */ | |
9067 | const0 = trunc_int_for_mode (const0, mode); | |
9fa6d012 | 9068 | |
230d793d RS |
9069 | *pop0 = op0; |
9070 | *pconst0 = const0; | |
9071 | ||
9072 | return 1; | |
9073 | } | |
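Editorial aside: the six mergeable cases above rest on Boolean identities that can be verified exhaustively for 8-bit operands. This check is illustrative only and not part of combine.c.

/* Illustration only: op0 (op1 (a, c), c) with a shared constant c.  */
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 256; a++)
    for (unsigned c = 0; c < 256; c++)
      {
        assert (((a & c) | c) == c);          /* IOR of AND: SET */
        assert (((a ^ c) | c) == (a | c));    /* IOR of XOR: IOR */
        assert (((a & c) ^ c) == (~a & c));   /* XOR of AND: complement */
        assert (((a | c) ^ c) == (a & ~c));   /* XOR of IOR: AND ~c */
        assert (((a | c) & c) == c);          /* AND of IOR: SET */
        assert (((a ^ c) & c) == (~a & c));   /* AND of XOR: complement */
      }
  return 0;
}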
9074 | \f | |
9075 | /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift. | |
da7d8304 | 9076 | The result of the shift is RESULT_MODE. X, if nonzero, is an expression |
230d793d RS |
9077 | that we started with. |
9078 | ||
9079 | The shift is normally computed in the widest mode we find in VAROP, as | |
9080 | long as it isn't a different number of words than RESULT_MODE. Exceptions | |
9081 | are ASHIFTRT and ROTATE, which are always done in their original mode. */ |
9082 | ||
9083 | static rtx | |
0051b6ca | 9084 | simplify_shift_const (x, code, result_mode, varop, orig_count) |
230d793d RS |
9085 | rtx x; |
9086 | enum rtx_code code; | |
9087 | enum machine_mode result_mode; | |
9088 | rtx varop; | |
0051b6ca | 9089 | int orig_count; |
230d793d RS |
9090 | { |
9091 | enum rtx_code orig_code = code; | |
770ae6cc RK |
9092 | unsigned int count; |
9093 | int signed_count; | |
230d793d RS |
9094 | enum machine_mode mode = result_mode; |
9095 | enum machine_mode shift_mode, tmode; | |
770ae6cc | 9096 | unsigned int mode_words |
230d793d RS |
9097 | = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; |
9098 | /* We form (outer_op (code varop count) (outer_const)). */ | |
9099 | enum rtx_code outer_op = NIL; | |
c4e861e8 | 9100 | HOST_WIDE_INT outer_const = 0; |
230d793d RS |
9101 | rtx const_rtx; |
9102 | int complement_p = 0; | |
9103 | rtx new; | |
9104 | ||
0051b6ca RH |
9105 | /* Make sure to truncate the "natural" shift on the way in.  We don't |
9106 | want to do this inside the loop as it makes it more difficult to | |
9107 | combine shifts. */ | |
9108 | #ifdef SHIFT_COUNT_TRUNCATED | |
9109 | if (SHIFT_COUNT_TRUNCATED) | |
9110 | orig_count &= GET_MODE_BITSIZE (mode) - 1; | |
9111 | #endif | |
9112 | ||
230d793d RS |
9113 | /* If we were given an invalid count, don't do anything except exactly |
9114 | what was requested. */ | |
9115 | ||
0051b6ca | 9116 | if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode)) |
230d793d RS |
9117 | { |
9118 | if (x) | |
9119 | return x; | |
9120 | ||
0051b6ca | 9121 | return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (orig_count)); |
230d793d RS |
9122 | } |
9123 | ||
0051b6ca | 9124 | count = orig_count; |
853d8828 | 9125 | |
230d793d RS |
9126 | /* Unless one of the branches of the `if' in this loop does a `continue', |
9127 | we will `break' the loop after the `if'. */ | |
9128 | ||
9129 | while (count != 0) | |
9130 | { | |
9131 | /* If we have an operand of (clobber (const_int 0)), just return that | |
9132 | value. */ | |
9133 | if (GET_CODE (varop) == CLOBBER) | |
9134 | return varop; | |
9135 | ||
9136 | /* If we discovered we had to complement VAROP, leave. Making a NOT | |
9137 | here would cause an infinite loop. */ | |
9138 | if (complement_p) | |
9139 | break; | |
9140 | ||
abc95ed3 | 9141 | /* Convert ROTATERT to ROTATE. */ |
230d793d | 9142 | if (code == ROTATERT) |
ad9df12f IS |
9143 | { |
9144 | unsigned int bitsize = GET_MODE_BITSIZE (result_mode); |
9145 | code = ROTATE; | |
9146 | if (VECTOR_MODE_P (result_mode)) | |
9147 | count = bitsize / GET_MODE_NUNITS (result_mode) - count; | |
9148 | else | |
9149 | count = bitsize - count; | |
9150 | } | |
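Editorial aside, not part of combine.c: for the scalar case above, a right rotate by N is a left rotate by (bitsize - N); the vector branch applies the same idea per element. A minimal check:

/* Illustration only: 32-bit rotate identity.  */
#include <assert.h>
#include <stdint.h>

static uint32_t
rotl (uint32_t x, unsigned n) { return (x << n) | (x >> (32 - n)); }

static uint32_t
rotr (uint32_t x, unsigned n) { return (x >> n) | (x << (32 - n)); }

int
main (void)
{
  for (unsigned n = 1; n < 32; n++)   /* n == 0 or 32 would shift by 32 */
    assert (rotr (0xdeadbeefu, n) == rotl (0xdeadbeefu, 32 - n));
  return 0;
}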
230d793d | 9151 | |
230d793d | 9152 | /* We need to determine what mode we will do the shift in. If the |
f6789c77 RK |
9153 | shift is a right shift or a ROTATE, we must always do it in the mode |
9154 | it was originally done in. Otherwise, we can do it in MODE, the | |
0f41302f | 9155 | widest mode encountered. */ |
f6789c77 RK |
9156 | shift_mode |
9157 | = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
9158 | ? result_mode : mode); | |
230d793d RS |
9159 | |
9160 | /* Handle cases where the count is greater than the size of the mode | |
853d8828 RH |
9161 | minus 1. For ASHIFT, use the size minus one as the count (this can |
9162 | occur when simplifying (lshiftrt (ashiftrt ..))). For rotates, | |
9163 | take the count modulo the size. For other shifts, the result is | |
9164 | zero. | |
230d793d RS |
9165 | |
9166 | Since these shifts are being produced by the compiler by combining | |
9167 | multiple operations, each of which are defined, we know what the | |
9168 | result is supposed to be. */ | |
663522cb | 9169 | |
26c34780 | 9170 | if (count > (unsigned int) (GET_MODE_BITSIZE (shift_mode) - 1)) |
230d793d RS |
9171 | { |
9172 | if (code == ASHIFTRT) | |
9173 | count = GET_MODE_BITSIZE (shift_mode) - 1; | |
9174 | else if (code == ROTATE || code == ROTATERT) | |
9175 | count %= GET_MODE_BITSIZE (shift_mode); | |
9176 | else | |
9177 | { | |
9178 | /* We can't simply return zero because there may be an | |
9179 | outer op. */ | |
9180 | varop = const0_rtx; | |
9181 | count = 0; | |
9182 | break; | |
9183 | } | |
9184 | } | |
9185 | ||
312def2e RK |
9186 | /* An arithmetic right shift of a quantity known to be -1 or 0 |
9187 | is a no-op. */ | |
9188 | if (code == ASHIFTRT | |
9189 | && (num_sign_bit_copies (varop, shift_mode) | |
9190 | == GET_MODE_BITSIZE (shift_mode))) | |
d0ab8cd3 | 9191 | { |
312def2e RK |
9192 | count = 0; |
9193 | break; | |
9194 | } | |
d0ab8cd3 | 9195 | |
312def2e RK |
9196 | /* If we are doing an arithmetic right shift and discarding all but |
9197 | the sign bit copies, this is equivalent to doing a shift by the | |
9198 | bitsize minus one. Convert it into that shift because it will often | |
9199 | allow other simplifications. */ | |
500c518b | 9200 | |
312def2e RK |
9201 | if (code == ASHIFTRT |
9202 | && (count + num_sign_bit_copies (varop, shift_mode) | |
9203 | >= GET_MODE_BITSIZE (shift_mode))) | |
9204 | count = GET_MODE_BITSIZE (shift_mode) - 1; | |
500c518b | 9205 | |
230d793d RS |
9206 | /* We simplify the tests below and elsewhere by converting |
9207 | ASHIFTRT to LSHIFTRT if we know the sign bit is clear. | |
e0a2f705 KH |
9208 | `make_compound_operation' will convert it to an ASHIFTRT for |
9209 | those machines (such as VAX) that don't have an LSHIFTRT. */ | |
5f4f0e22 | 9210 | if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT |
230d793d | 9211 | && code == ASHIFTRT |
951553af | 9212 | && ((nonzero_bits (varop, shift_mode) |
5f4f0e22 CH |
9213 | & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1))) |
9214 | == 0)) | |
230d793d RS |
9215 | code = LSHIFTRT; |
9216 | ||
9217 | switch (GET_CODE (varop)) | |
9218 | { | |
9219 | case SIGN_EXTEND: | |
9220 | case ZERO_EXTEND: | |
9221 | case SIGN_EXTRACT: | |
9222 | case ZERO_EXTRACT: | |
9223 | new = expand_compound_operation (varop); | |
9224 | if (new != varop) | |
9225 | { | |
9226 | varop = new; | |
9227 | continue; | |
9228 | } | |
9229 | break; | |
9230 | ||
9231 | case MEM: | |
9232 | /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH | |
9233 | minus the width of a smaller mode, we can do this with a | |
9234 | SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */ | |
9235 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
9236 | && ! mode_dependent_address_p (XEXP (varop, 0)) | |
9237 | && ! MEM_VOLATILE_P (varop) | |
9238 | && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count, | |
9239 | MODE_INT, 1)) != BLKmode) | |
9240 | { | |
f1ec5147 RK |
9241 | new = adjust_address_nv (varop, tmode, |
9242 | BYTES_BIG_ENDIAN ? 0 | |
9243 | : count / BITS_PER_UNIT); | |
bf49b139 | 9244 | |
f1c6ba8b RK |
9245 | varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND |
9246 | : ZERO_EXTEND, mode, new); | |
230d793d RS |
9247 | count = 0; |
9248 | continue; | |
9249 | } | |
9250 | break; | |
9251 | ||
9252 | case USE: | |
9253 | /* Similar to the case above, except that we can only do this if | |
9254 | the resulting mode is the same as that of the underlying | |
9255 | MEM and adjust the address depending on the *bits* endianness | |
9256 | because of the way that bit-field extract insns are defined. */ | |
9257 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
9258 | && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count, | |
9259 | MODE_INT, 1)) != BLKmode | |
9260 | && tmode == GET_MODE (XEXP (varop, 0))) | |
9261 | { | |
f76b9db2 ILT |
9262 | if (BITS_BIG_ENDIAN) |
9263 | new = XEXP (varop, 0); | |
9264 | else | |
9265 | { | |
9266 | new = copy_rtx (XEXP (varop, 0)); | |
663522cb | 9267 | SUBST (XEXP (new, 0), |
f76b9db2 ILT |
9268 | plus_constant (XEXP (new, 0), |
9269 | count / BITS_PER_UNIT)); | |
9270 | } | |
230d793d | 9271 | |
f1c6ba8b RK |
9272 | varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND |
9273 | : ZERO_EXTEND, mode, new); | |
230d793d RS |
9274 | count = 0; |
9275 | continue; | |
9276 | } | |
9277 | break; | |
9278 | ||
9279 | case SUBREG: | |
9280 | /* If VAROP is a SUBREG, strip it as long as the inner operand has | |
9281 | the same number of words as what we've seen so far. Then store | |
9282 | the widest mode in MODE. */ | |
f9e67232 RS |
9283 | if (subreg_lowpart_p (varop) |
9284 | && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))) | |
9285 | > GET_MODE_SIZE (GET_MODE (varop))) | |
26c34780 RS |
9286 | && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))) |
9287 | + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD) | |
9288 | == mode_words) | |
230d793d RS |
9289 | { |
9290 | varop = SUBREG_REG (varop); | |
9291 | if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode)) | |
9292 | mode = GET_MODE (varop); | |
9293 | continue; | |
9294 | } | |
9295 | break; | |
9296 | ||
9297 | case MULT: | |
9298 | /* Some machines use MULT instead of ASHIFT because MULT | |
9299 | is cheaper. But it is still better on those machines to | |
9300 | merge two shifts into one. */ | |
9301 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9302 | && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | |
9303 | { | |
770ae6cc RK |
9304 | varop |
9305 | = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0), | |
9306 | GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1))))); | |
230d793d RS |
9307 | continue; |
9308 | } | |
9309 | break; | |
9310 | ||
9311 | case UDIV: | |
9312 | /* Similar, for when divides are cheaper. */ | |
9313 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9314 | && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0) | |
9315 | { | |
770ae6cc RK |
9316 | varop |
9317 | = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0), | |
9318 | GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1))))); | |
230d793d RS |
9319 | continue; |
9320 | } | |
9321 | break; | |
9322 | ||
9323 | case ASHIFTRT: | |
8f8d8d6e AO |
9324 | /* If we are extracting just the sign bit of an arithmetic |
9325 | right shift, that shift is not needed. However, the sign | |
9326 | bit of a wider mode may be different from what would be | |
9327 | interpreted as the sign bit in a narrower mode, so, if | |
9328 | the result is narrower, don't discard the shift. */ | |
26c34780 RS |
9329 | if (code == LSHIFTRT |
9330 | && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1) | |
8f8d8d6e AO |
9331 | && (GET_MODE_BITSIZE (result_mode) |
9332 | >= GET_MODE_BITSIZE (GET_MODE (varop)))) | |
230d793d RS |
9333 | { |
9334 | varop = XEXP (varop, 0); | |
9335 | continue; | |
9336 | } | |
9337 | ||
0f41302f | 9338 | /* ... fall through ... */ |
230d793d RS |
9339 | |
9340 | case LSHIFTRT: | |
9341 | case ASHIFT: | |
230d793d RS |
9342 | case ROTATE: |
9343 | /* Here we have two nested shifts. The result is usually the | |
9344 | AND of a new shift with a mask. We compute the result below. */ | |
9345 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9346 | && INTVAL (XEXP (varop, 1)) >= 0 | |
9347 | && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop)) | |
5f4f0e22 CH |
9348 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
9349 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
230d793d RS |
9350 | { |
9351 | enum rtx_code first_code = GET_CODE (varop); | |
770ae6cc | 9352 | unsigned int first_count = INTVAL (XEXP (varop, 1)); |
5f4f0e22 | 9353 | unsigned HOST_WIDE_INT mask; |
230d793d | 9354 | rtx mask_rtx; |
230d793d | 9355 | |
230d793d RS |
9356 | /* We have one common special case. We can't do any merging if |
9357 | the inner code is an ASHIFTRT of a smaller mode. However, if | |
9358 | we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2) | |
9359 | with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2), | |
9360 | we can convert it to | |
9361 | (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1). |
9362 | This simplifies certain SIGN_EXTEND operations. */ | |
9363 | if (code == ASHIFT && first_code == ASHIFTRT | |
26c34780 RS |
9364 | && count == (unsigned int) |
9365 | (GET_MODE_BITSIZE (result_mode) | |
9366 | - GET_MODE_BITSIZE (GET_MODE (varop)))) | |
230d793d RS |
9367 | { |
9368 | /* C3 has the low-order C1 bits zero. */ | |
663522cb | 9369 | |
5f4f0e22 | 9370 | mask = (GET_MODE_MASK (mode) |
663522cb | 9371 | & ~(((HOST_WIDE_INT) 1 << first_count) - 1)); |
230d793d | 9372 | |
5f4f0e22 | 9373 | varop = simplify_and_const_int (NULL_RTX, result_mode, |
230d793d | 9374 | XEXP (varop, 0), mask); |
5f4f0e22 | 9375 | varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode, |
230d793d RS |
9376 | varop, count); |
9377 | count = first_count; | |
9378 | code = ASHIFTRT; | |
9379 | continue; | |
9380 | } | |
663522cb | 9381 | |
d0ab8cd3 RK |
9382 | /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more |
9383 | than C1 high-order bits equal to the sign bit, we can convert | |
e0a2f705 | 9384 | this to either an ASHIFT or an ASHIFTRT depending on the |
663522cb | 9385 | two counts. |
230d793d RS |
9386 | |
9387 | We cannot do this if VAROP's mode is not SHIFT_MODE. */ | |
9388 | ||
9389 | if (code == ASHIFTRT && first_code == ASHIFT | |
9390 | && GET_MODE (varop) == shift_mode | |
d0ab8cd3 RK |
9391 | && (num_sign_bit_copies (XEXP (varop, 0), shift_mode) |
9392 | > first_count)) | |
230d793d | 9393 | { |
d0ab8cd3 | 9394 | varop = XEXP (varop, 0); |
770ae6cc RK |
9395 | |
9396 | signed_count = count - first_count; | |
9397 | if (signed_count < 0) | |
663522cb | 9398 | count = -signed_count, code = ASHIFT; |
770ae6cc RK |
9399 | else |
9400 | count = signed_count; | |
9401 | ||
d0ab8cd3 | 9402 | continue; |
230d793d RS |
9403 | } |
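Editorial aside: the transformation above can be seen on a concrete case. This sketch is not part of combine.c and assumes GCC's two's complement semantics for left-shifting negative values.

/* Illustration only: when FOO has more sign-bit copies than C1,
   (ashiftrt (ashift FOO C1) C2) is one shift by the count
   difference.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (int32_t x = -64; x <= 63; x++)   /* at least 26 sign-bit copies */
    {
      assert (((x << 10) >> 4) == (x << 6));   /* C1 = 10 > C2 = 4 */
      assert (((x << 4) >> 10) == (x >> 6));   /* C1 = 4 < C2 = 10 */
    }
  return 0;
}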
9404 | ||
9405 | /* There are some cases we can't do. If CODE is ASHIFTRT, | |
9406 | we can only do this if FIRST_CODE is also ASHIFTRT. | |
9407 | ||
9408 | We can't do the case when CODE is ROTATE and FIRST_CODE is | |
9409 | ASHIFTRT. | |
9410 | ||
9411 | If the mode of this shift is not the mode of the outer shift, | |
bdaae9a0 | 9412 | we can't do this if either shift is a right shift or ROTATE. |
230d793d RS |
9413 | |
9414 | Finally, we can't do any of these if the mode is too wide | |
9415 | unless the codes are the same. | |
9416 | ||
9417 | Handle the case where the shift codes are the same | |
9418 | first. */ | |
9419 | ||
9420 | if (code == first_code) | |
9421 | { | |
9422 | if (GET_MODE (varop) != result_mode | |
bdaae9a0 RK |
9423 | && (code == ASHIFTRT || code == LSHIFTRT |
9424 | || code == ROTATE)) | |
230d793d RS |
9425 | break; |
9426 | ||
9427 | count += first_count; | |
9428 | varop = XEXP (varop, 0); | |
9429 | continue; | |
9430 | } | |
9431 | ||
9432 | if (code == ASHIFTRT | |
9433 | || (code == ROTATE && first_code == ASHIFTRT) | |
5f4f0e22 | 9434 | || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT |
230d793d | 9435 | || (GET_MODE (varop) != result_mode |
bdaae9a0 RK |
9436 | && (first_code == ASHIFTRT || first_code == LSHIFTRT |
9437 | || first_code == ROTATE | |
230d793d RS |
9438 | || code == ROTATE))) |
9439 | break; | |
9440 | ||
9441 | /* To compute the mask to apply after the shift, shift the | |
663522cb | 9442 | nonzero bits of the inner shift the same way the |
230d793d RS |
9443 | outer shift will. */ |
9444 | ||
951553af | 9445 | mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop))); |
230d793d RS |
9446 | |
9447 | mask_rtx | |
9448 | = simplify_binary_operation (code, result_mode, mask_rtx, | |
5f4f0e22 | 9449 | GEN_INT (count)); |
663522cb | 9450 | |
230d793d RS |
9451 | /* Give up if we can't compute an outer operation to use. */ |
9452 | if (mask_rtx == 0 | |
9453 | || GET_CODE (mask_rtx) != CONST_INT | |
9454 | || ! merge_outer_ops (&outer_op, &outer_const, AND, | |
9455 | INTVAL (mask_rtx), | |
9456 | result_mode, &complement_p)) | |
9457 | break; | |
9458 | ||
9459 | /* If the shifts are in the same direction, we add the | |
9460 | counts. Otherwise, we subtract them. */ | |
770ae6cc | 9461 | signed_count = count; |
230d793d RS |
9462 | if ((code == ASHIFTRT || code == LSHIFTRT) |
9463 | == (first_code == ASHIFTRT || first_code == LSHIFTRT)) | |
770ae6cc | 9464 | signed_count += first_count; |
230d793d | 9465 | else |
770ae6cc | 9466 | signed_count -= first_count; |
230d793d | 9467 | |
663522cb | 9468 | /* If COUNT is positive, the new shift is usually CODE, |
230d793d RS |
9469 | except for the two exceptions below, in which case it is |
9470 | FIRST_CODE. If the count is negative, FIRST_CODE should | |
9471 | always be used. */ |
770ae6cc | 9472 | if (signed_count > 0 |
230d793d RS |
9473 | && ((first_code == ROTATE && code == ASHIFT) |
9474 | || (first_code == ASHIFTRT && code == LSHIFTRT))) | |
770ae6cc RK |
9475 | code = first_code, count = signed_count; |
9476 | else if (signed_count < 0) | |
663522cb | 9477 | code = first_code, count = -signed_count; |
770ae6cc RK |
9478 | else |
9479 | count = signed_count; | |
230d793d RS |
9480 | |
9481 | varop = XEXP (varop, 0); | |
9482 | continue; | |
9483 | } | |
9484 | ||
9485 | /* If we have (A << B << C) for any shift, we can convert this to | |
9486 | (A << C << B). This wins if A is a constant. Only try this if | |
9487 | B is not a constant. */ | |
9488 | ||
9489 | else if (GET_CODE (varop) == code | |
9490 | && GET_CODE (XEXP (varop, 1)) != CONST_INT | |
9491 | && 0 != (new | |
9492 | = simplify_binary_operation (code, mode, | |
9493 | XEXP (varop, 0), | |
5f4f0e22 | 9494 | GEN_INT (count)))) |
230d793d | 9495 | { |
f1c6ba8b | 9496 | varop = gen_rtx_fmt_ee (code, mode, new, XEXP (varop, 1)); |
230d793d RS |
9497 | count = 0; |
9498 | continue; | |
9499 | } | |
9500 | break; | |
9501 | ||
9502 | case NOT: | |
9503 | /* Make this fit the case below. */ | |
f1c6ba8b RK |
9504 | varop = gen_rtx_XOR (mode, XEXP (varop, 0), |
9505 | GEN_INT (GET_MODE_MASK (mode))); | |
230d793d RS |
9506 | continue; |
9507 | ||
9508 | case IOR: | |
9509 | case AND: | |
9510 | case XOR: | |
9511 | /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C) | |
9512 | with C the size of VAROP - 1 and the shift is logical if | |
9513 | STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1, | |
9514 | we have an (le X 0) operation. If we have an arithmetic shift | |
9515 | and STORE_FLAG_VALUE is 1 or we have a logical shift with | |
9516 | STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */ | |
9517 | ||
9518 | if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS | |
9519 | && XEXP (XEXP (varop, 0), 1) == constm1_rtx | |
9520 | && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) | |
9521 | && (code == LSHIFTRT || code == ASHIFTRT) | |
26c34780 RS |
9522 | && count == (unsigned int) |
9523 | (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) | |
230d793d RS |
9524 | && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) |
9525 | { | |
9526 | count = 0; | |
f1c6ba8b RK |
9527 | varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1), |
9528 | const0_rtx); | |
230d793d RS |
9529 | |
9530 | if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT) | |
f1c6ba8b | 9531 | varop = gen_rtx_NEG (GET_MODE (varop), varop); |
230d793d RS |
9532 | |
9533 | continue; | |
9534 | } | |
9535 | ||
9536 | /* If we have (shift (logical)), move the logical to the outside | |
9537 | to allow it to possibly combine with another logical and the | |
9538 | shift to combine with another shift. This also canonicalizes to | |
9539 | what a ZERO_EXTRACT looks like. Also, some machines have | |
9540 | (and (shift)) insns. */ | |
9541 | ||
9542 | if (GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9543 | && (new = simplify_binary_operation (code, result_mode, | |
9544 | XEXP (varop, 1), | |
5f4f0e22 | 9545 | GEN_INT (count))) != 0 |
663522cb | 9546 | && GET_CODE (new) == CONST_INT |
230d793d RS |
9547 | && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop), |
9548 | INTVAL (new), result_mode, &complement_p)) | |
9549 | { | |
9550 | varop = XEXP (varop, 0); | |
9551 | continue; | |
9552 | } | |
9553 | ||
9554 | /* If we can't do that, try to simplify the shift in each arm of the | |
9555 | logical expression, make a new logical expression, and apply | |
9556 | the inverse distributive law. */ | |
9557 | { | |
00d4ca1c | 9558 | rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode, |
230d793d | 9559 | XEXP (varop, 0), count); |
00d4ca1c | 9560 | rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode, |
230d793d RS |
9561 | XEXP (varop, 1), count); |
9562 | ||
21a64bf1 | 9563 | varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs); |
230d793d RS |
9564 | varop = apply_distributive_law (varop); |
9565 | ||
9566 | count = 0; | |
9567 | } | |
9568 | break; | |
9569 | ||
9570 | case EQ: | |
45620ed4 | 9571 | /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE |
230d793d | 9572 | says that the sign bit can be tested, FOO has mode MODE, C is |
45620ed4 RK |
9573 | GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit |
9574 | that may be nonzero. */ | |
9575 | if (code == LSHIFTRT | |
230d793d RS |
9576 | && XEXP (varop, 1) == const0_rtx |
9577 | && GET_MODE (XEXP (varop, 0)) == result_mode | |
26c34780 | 9578 | && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1) |
5f4f0e22 | 9579 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
230d793d | 9580 | && ((STORE_FLAG_VALUE |
663522cb | 9581 | & ((HOST_WIDE_INT) 1 |
770ae6cc | 9582 | << (GET_MODE_BITSIZE (result_mode) - 1)))) |
951553af | 9583 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1 |
5f4f0e22 CH |
9584 | && merge_outer_ops (&outer_op, &outer_const, XOR, |
9585 | (HOST_WIDE_INT) 1, result_mode, | |
9586 | &complement_p)) | |
230d793d RS |
9587 | { |
9588 | varop = XEXP (varop, 0); | |
9589 | count = 0; | |
9590 | continue; | |
9591 | } | |
9592 | break; | |
9593 | ||
9594 | case NEG: | |
d0ab8cd3 RK |
9595 | /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less |
9596 | than the number of bits in the mode is equivalent to A. */ | |
26c34780 RS |
9597 | if (code == LSHIFTRT |
9598 | && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1) | |
951553af | 9599 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1) |
230d793d | 9600 | { |
d0ab8cd3 | 9601 | varop = XEXP (varop, 0); |
230d793d RS |
9602 | count = 0; |
9603 | continue; | |
9604 | } | |
9605 | ||
9606 | /* NEG commutes with ASHIFT since it is multiplication. Move the | |
9607 | NEG outside to allow shifts to combine. */ | |
9608 | if (code == ASHIFT | |
5f4f0e22 CH |
9609 | && merge_outer_ops (&outer_op, &outer_const, NEG, |
9610 | (HOST_WIDE_INT) 0, result_mode, | |
9611 | &complement_p)) | |
230d793d RS |
9612 | { |
9613 | varop = XEXP (varop, 0); | |
9614 | continue; | |
9615 | } | |
9616 | break; | |
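Editorial aside, not part of combine.c: the first NEG transformation above is the identity that, for A known to be 0 or 1, the logical shift of (neg A) by bitsize - 1 gives A back.

/* Illustration only: (lshiftrt (neg A) 31) == A for A in {0, 1}.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t a = 0; a <= 1; a++)
    assert (((0u - a) >> 31) == a);
  return 0;
}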
9617 | ||
9618 | case PLUS: | |
d0ab8cd3 RK |
9619 | /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C |
9620 | is one less than the number of bits in the mode is | |
9621 | equivalent to (xor A 1). */ | |
26c34780 RS |
9622 | if (code == LSHIFTRT |
9623 | && count == (unsigned int) (GET_MODE_BITSIZE (result_mode) - 1) | |
230d793d | 9624 | && XEXP (varop, 1) == constm1_rtx |
951553af | 9625 | && nonzero_bits (XEXP (varop, 0), result_mode) == 1 |
5f4f0e22 CH |
9626 | && merge_outer_ops (&outer_op, &outer_const, XOR, |
9627 | (HOST_WIDE_INT) 1, result_mode, | |
9628 | &complement_p)) | |
230d793d RS |
9629 | { |
9630 | count = 0; | |
9631 | varop = XEXP (varop, 0); | |
9632 | continue; | |
9633 | } | |
9634 | ||
3f508eca | 9635 | /* If we have (xshiftrt (plus FOO BAR) C), and the only bits |
951553af | 9636 | that might be nonzero in BAR are those being shifted out and those |
3f508eca RK |
9637 | bits are known zero in FOO, we can replace the PLUS with FOO. |
9638 | Similarly in the other operand order. This code occurs when | |
9639 | we are computing the size of a variable-size array. */ | |
9640 | ||
9641 | if ((code == ASHIFTRT || code == LSHIFTRT) | |
5f4f0e22 | 9642 | && count < HOST_BITS_PER_WIDE_INT |
951553af RK |
9643 | && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0 |
9644 | && (nonzero_bits (XEXP (varop, 1), result_mode) | |
9645 | & nonzero_bits (XEXP (varop, 0), result_mode)) == 0) | |
3f508eca RK |
9646 | { |
9647 | varop = XEXP (varop, 0); | |
9648 | continue; | |
9649 | } | |
9650 | else if ((code == ASHIFTRT || code == LSHIFTRT) | |
5f4f0e22 | 9651 | && count < HOST_BITS_PER_WIDE_INT |
ac49a949 | 9652 | && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT |
951553af | 9653 | && 0 == (nonzero_bits (XEXP (varop, 0), result_mode) |
3f508eca | 9654 | >> count) |
951553af RK |
9655 | && 0 == (nonzero_bits (XEXP (varop, 0), result_mode) |
9656 | & nonzero_bits (XEXP (varop, 1), | |
3f508eca RK |
9657 | result_mode))) |
9658 | { | |
9659 | varop = XEXP (varop, 1); | |
9660 | continue; | |
9661 | } | |
9662 | ||
230d793d RS |
9663 | /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */ |
9664 | if (code == ASHIFT | |
9665 | && GET_CODE (XEXP (varop, 1)) == CONST_INT | |
9666 | && (new = simplify_binary_operation (ASHIFT, result_mode, | |
9667 | XEXP (varop, 1), | |
5f4f0e22 | 9668 | GEN_INT (count))) != 0 |
770ae6cc | 9669 | && GET_CODE (new) == CONST_INT |
230d793d RS |
9670 | && merge_outer_ops (&outer_op, &outer_const, PLUS, |
9671 | INTVAL (new), result_mode, &complement_p)) | |
9672 | { | |
9673 | varop = XEXP (varop, 0); | |
9674 | continue; | |
9675 | } | |
9676 | break; | |
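Editorial aside, not part of combine.c: the first PLUS transformation above rests on a one-line identity for A in {0, 1}.

/* Illustration only: (lshiftrt (plus A -1) 31) == (xor A 1).  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t a = 0; a <= 1; a++)
    assert (((a - 1u) >> 31) == (a ^ 1u));
  return 0;
}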
9677 | ||
9678 | case MINUS: | |
9679 | /* If we have (xshiftrt (minus (ashiftrt X C) X) C) |
9680 | with C the size of VAROP - 1 and the shift is logical if | |
9681 | STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1, | |
9682 | we have a (gt X 0) operation. If the shift is arithmetic with | |
9683 | STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1, | |
9684 | we have a (neg (gt X 0)) operation. */ | |
9685 | ||
0802d516 RK |
9686 | if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1) |
9687 | && GET_CODE (XEXP (varop, 0)) == ASHIFTRT | |
26c34780 RS |
9688 | && count == (unsigned int) |
9689 | (GET_MODE_BITSIZE (GET_MODE (varop)) - 1) | |
230d793d RS |
9690 | && (code == LSHIFTRT || code == ASHIFTRT) |
9691 | && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | |
26c34780 RS |
9692 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (varop, 0), 1)) |
9693 | == count | |
230d793d RS |
9694 | && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1))) |
9695 | { | |
9696 | count = 0; | |
f1c6ba8b RK |
9697 | varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1), |
9698 | const0_rtx); | |
230d793d RS |
9699 | |
9700 | if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT) | |
f1c6ba8b | 9701 | varop = gen_rtx_NEG (GET_MODE (varop), varop); |
230d793d RS |
9702 | |
9703 | continue; | |
9704 | } | |
9705 | break; | |
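Editorial aside, not part of combine.c: with C = 31, STORE_FLAG_VALUE = 1, and a logical outer shift, the MINUS pattern above computes (gt X 0). The inner shift is arithmetic; two's complement is assumed.

/* Illustration only: ((X >> 31) - X) >> 31, taken logically, is 1
   exactly when X > 0.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int32_t tests[] = { -2147483647, -5, -1, 0, 1, 7, 2147483647 };
  for (unsigned i = 0; i < sizeof tests / sizeof *tests; i++)
    {
      int32_t x = tests[i];
      uint32_t gt = (uint32_t) ((x >> 31) - x) >> 31;
      assert (gt == (uint32_t) (x > 0));
    }
  return 0;
}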
6e0ef100 JC |
9706 | |
9707 | case TRUNCATE: | |
9708 | /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt)) | |
9709 | if the truncate does not affect the value. */ | |
9710 | if (code == LSHIFTRT | |
9711 | && GET_CODE (XEXP (varop, 0)) == LSHIFTRT | |
9712 | && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT | |
9713 | && (INTVAL (XEXP (XEXP (varop, 0), 1)) | |
b577a8ff JL |
9714 | >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0))) |
9715 | - GET_MODE_BITSIZE (GET_MODE (varop))))) | |
6e0ef100 JC |
9716 | { |
9717 | rtx varop_inner = XEXP (varop, 0); | |
9718 | ||
770ae6cc | 9719 | varop_inner |
f1c6ba8b RK |
9720 | = gen_rtx_LSHIFTRT (GET_MODE (varop_inner), |
9721 | XEXP (varop_inner, 0), | |
9722 | GEN_INT | |
9723 | (count + INTVAL (XEXP (varop_inner, 1)))); | |
9724 | varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner); | |
6e0ef100 JC |
9725 | count = 0; |
9726 | continue; | |
9727 | } | |
9728 | break; | |
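Editorial aside, not part of combine.c: the TRUNCATE rule above, instantiated for a 64-to-32-bit truncation. Once the inner logical shift count is at least 64 - 32, the bits the truncation would drop are already zero, so the truncate commutes with a further right shift.

/* Illustration only: truncate of a double logical right shift.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  uint64_t x = 0x123456789abcdef0ull;
  for (unsigned c1 = 32; c1 <= 60; c1++)
    assert (((uint32_t) (x >> c1) >> 3) == (uint32_t) (x >> (c1 + 3)));
  return 0;
}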
663522cb | 9729 | |
e9a25f70 JL |
9730 | default: |
9731 | break; | |
230d793d RS |
9732 | } |
9733 | ||
9734 | break; | |
9735 | } | |
9736 | ||
9737 | /* We need to determine what mode to do the shift in. If the shift is | |
f6789c77 RK |
9738 | a right shift or ROTATE, we must always do it in the mode it was |
9739 | originally done in. Otherwise, we can do it in MODE, the widest mode | |
9740 | encountered. The code we care about is that of the shift that will | |
9741 | actually be done, not the shift that was originally requested. */ | |
9742 | shift_mode | |
9743 | = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE | |
9744 | ? result_mode : mode); | |
230d793d RS |
9745 | |
9746 | /* We have now finished analyzing the shift. The result should be | |
9747 | a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If | |
9748 | OUTER_OP is non-NIL, it is an operation that needs to be applied | |
9749 | to the result of the shift. OUTER_CONST is the relevant constant, | |
9750 | but we must turn off all bits turned off in the shift. | |
9751 | ||
9752 | If we were passed a value for X, see if we can use any pieces of | |
9753 | it. If not, make new rtx. */ | |
9754 | ||
9755 | if (x && GET_RTX_CLASS (GET_CODE (x)) == '2' | |
9756 | && GET_CODE (XEXP (x, 1)) == CONST_INT | |
26c34780 | 9757 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == count) |
230d793d RS |
9758 | const_rtx = XEXP (x, 1); |
9759 | else | |
5f4f0e22 | 9760 | const_rtx = GEN_INT (count); |
230d793d RS |
9761 | |
9762 | if (x && GET_CODE (XEXP (x, 0)) == SUBREG | |
9763 | && GET_MODE (XEXP (x, 0)) == shift_mode | |
9764 | && SUBREG_REG (XEXP (x, 0)) == varop) | |
9765 | varop = XEXP (x, 0); | |
9766 | else if (GET_MODE (varop) != shift_mode) | |
9767 | varop = gen_lowpart_for_combine (shift_mode, varop); | |
9768 | ||
0f41302f | 9769 | /* If we can't make the SUBREG, try to return what we were given. */ |
230d793d RS |
9770 | if (GET_CODE (varop) == CLOBBER) |
9771 | return x ? x : varop; | |
9772 | ||
9773 | new = simplify_binary_operation (code, shift_mode, varop, const_rtx); | |
9774 | if (new != 0) | |
9775 | x = new; | |
9776 | else | |
6c2d03d0 | 9777 | x = gen_rtx_fmt_ee (code, shift_mode, varop, const_rtx); |
230d793d | 9778 | |
224eeff2 RK |
9779 | /* If we have an outer operation and we just made a shift, it is |
9780 | possible that we could have simplified the shift were it not | |
9781 | for the outer operation. So try to do the simplification | |
9782 | recursively. */ | |
9783 | ||
9784 | if (outer_op != NIL && GET_CODE (x) == code | |
9785 | && GET_CODE (XEXP (x, 1)) == CONST_INT) | |
9786 | x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0), | |
9787 | INTVAL (XEXP (x, 1))); | |
9788 | ||
e0a2f705 | 9789 | /* If we were doing an LSHIFTRT in a wider mode than it was originally, |
230d793d RS |
9790 | turn off all the bits that the shift would have turned off. */ |
9791 | if (orig_code == LSHIFTRT && result_mode != shift_mode) | |
5f4f0e22 | 9792 | x = simplify_and_const_int (NULL_RTX, shift_mode, x, |
230d793d | 9793 | GET_MODE_MASK (result_mode) >> orig_count); |
663522cb | 9794 | |
230d793d RS |
9795 | /* Do the remainder of the processing in RESULT_MODE. */ |
9796 | x = gen_lowpart_for_combine (result_mode, x); | |
9797 | ||
9798 | /* If COMPLEMENT_P is set, we have to complement X before doing the outer | |
9799 | operation. */ | |
9800 | if (complement_p) | |
f1c6ba8b | 9801 | x = simplify_gen_unary (NOT, result_mode, x, result_mode); |
230d793d RS |
9802 | |
9803 | if (outer_op != NIL) | |
9804 | { | |
5f4f0e22 | 9805 | if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT) |
7e4ce834 | 9806 | outer_const = trunc_int_for_mode (outer_const, result_mode); |
230d793d RS |
9807 | |
9808 | if (outer_op == AND) | |
5f4f0e22 | 9809 | x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const); |
230d793d RS |
9810 | else if (outer_op == SET) |
9811 | /* This means that we have determined that the result is | |
9812 | equivalent to a constant. This should be rare. */ | |
5f4f0e22 | 9813 | x = GEN_INT (outer_const); |
230d793d | 9814 | else if (GET_RTX_CLASS (outer_op) == '1') |
f1c6ba8b | 9815 | x = simplify_gen_unary (outer_op, result_mode, x, result_mode); |
230d793d | 9816 | else |
5f4f0e22 | 9817 | x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const)); |
230d793d RS |
9818 | } |
9819 | ||
9820 | return x; | |
663522cb | 9821 | } |
230d793d RS |
9822 | \f |
9823 | /* Like recog, but we receive the address of a pointer to a new pattern. | |
9824 | We try to match the rtx that the pointer points to. | |
9825 | If that fails, we may try to modify or replace the pattern, | |
9826 | storing the replacement into the same pointer object. | |
9827 | ||
9828 | Modifications include deletion or addition of CLOBBERs. | |
9829 | ||
9830 | PNOTES is a pointer to a location where any REG_UNUSED notes added for | |
9831 | the CLOBBERs are placed. | |
9832 | ||
9833 | The value is the final insn code from the pattern ultimately matched, | |
9834 | or -1. */ | |
9835 | ||
9836 | static int | |
8e2f6e35 | 9837 | recog_for_combine (pnewpat, insn, pnotes) |
230d793d RS |
9838 | rtx *pnewpat; |
9839 | rtx insn; | |
9840 | rtx *pnotes; | |
9841 | { | |
b3694847 | 9842 | rtx pat = *pnewpat; |
230d793d RS |
9843 | int insn_code_number; |
9844 | int num_clobbers_to_add = 0; | |
9845 | int i; | |
9846 | rtx notes = 0; | |
fa852403 | 9847 | rtx dummy_insn; |
230d793d | 9848 | |
974f4146 RK |
9849 | /* If PAT is a PARALLEL, check to see if it contains the CLOBBER |
9850 | we use to indicate that something didn't match. If we find such a | |
9851 | thing, force rejection. */ | |
d96023cf | 9852 | if (GET_CODE (pat) == PARALLEL) |
974f4146 | 9853 | for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) |
d96023cf RK |
9854 | if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER |
9855 | && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx) | |
974f4146 RK |
9856 | return -1; |
9857 | ||
fa852403 JJ |
9858 | /* *pnewpat does not have to be actual PATTERN (insn), so make a dummy |
9859 | instruction for pattern recognition. */ | |
9860 | dummy_insn = shallow_copy_rtx (insn); | |
9861 | PATTERN (dummy_insn) = pat; | |
9862 | REG_NOTES (dummy_insn) = 0; | |
c1194d74 | 9863 | |
fa852403 | 9864 | insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add); |
230d793d RS |
9865 | |
9866 | /* If it isn't recognized, there is the possibility that we previously |
9867 | had an insn that clobbered some register as a side effect, but the combined |
9868 | insn doesn't need to do that. So try once more without the clobbers | |
9869 | unless this represents an ASM insn. */ | |
9870 | ||
9871 | if (insn_code_number < 0 && ! check_asm_operands (pat) | |
9872 | && GET_CODE (pat) == PARALLEL) | |
9873 | { | |
9874 | int pos; | |
9875 | ||
9876 | for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++) | |
9877 | if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER) | |
9878 | { | |
9879 | if (i != pos) | |
9880 | SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i)); | |
9881 | pos++; | |
9882 | } | |
9883 | ||
9884 | SUBST_INT (XVECLEN (pat, 0), pos); | |
9885 | ||
9886 | if (pos == 1) | |
9887 | pat = XVECEXP (pat, 0, 0); | |
9888 | ||
fa852403 JJ |
9889 | PATTERN (dummy_insn) = pat; |
9890 | insn_code_number = recog (pat, dummy_insn, &num_clobbers_to_add); | |
230d793d RS |
9891 | } |
9892 | ||
b5832b43 JH |
9893 | /* Recognize all no-op sets; these will be killed by a follow-up pass. */ |
9894 | if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat)) | |
9895 | insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0; | |
9896 | ||
230d793d RS |
9897 | /* If we had any clobbers to add, make a new pattern that contains |
9898 | them. Then check to make sure that all of them are dead. */ | |
9899 | if (num_clobbers_to_add) | |
9900 | { | |
38a448ca | 9901 | rtx newpat = gen_rtx_PARALLEL (VOIDmode, |
bf103ec2 R |
9902 | rtvec_alloc (GET_CODE (pat) == PARALLEL |
9903 | ? (XVECLEN (pat, 0) | |
9904 | + num_clobbers_to_add) | |
9905 | : num_clobbers_to_add + 1)); | |
230d793d RS |
9906 | |
9907 | if (GET_CODE (pat) == PARALLEL) | |
9908 | for (i = 0; i < XVECLEN (pat, 0); i++) | |
9909 | XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i); | |
9910 | else | |
9911 | XVECEXP (newpat, 0, 0) = pat; | |
9912 | ||
9913 | add_clobbers (newpat, insn_code_number); | |
9914 | ||
9915 | for (i = XVECLEN (newpat, 0) - num_clobbers_to_add; | |
9916 | i < XVECLEN (newpat, 0); i++) | |
9917 | { | |
9918 | if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG | |
9919 | && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn)) | |
9920 | return -1; | |
38a448ca RH |
9921 | notes = gen_rtx_EXPR_LIST (REG_UNUSED, |
9922 | XEXP (XVECEXP (newpat, 0, i), 0), notes); | |
230d793d RS |
9923 | } |
9924 | pat = newpat; | |
9925 | } | |
9926 | ||
9927 | *pnewpat = pat; | |
9928 | *pnotes = notes; | |
9929 | ||
9930 | return insn_code_number; | |
9931 | } | |
9932 | \f | |
9933 | /* Like gen_lowpart but for use by combine. In combine it is not possible | |
9934 | to create any new pseudoregs. However, it is safe to create | |
9935 | invalid memory addresses, because combine will try to recognize | |
9936 | them and all they will do is make the combine attempt fail. | |
9937 | ||
9938 | If for some reason this cannot do its job, an rtx | |
9939 | (clobber (const_int 0)) is returned. | |
9940 | An insn containing that will not be recognized. */ | |
9941 | ||
9942 | #undef gen_lowpart | |
9943 | ||
9944 | static rtx | |
9945 | gen_lowpart_for_combine (mode, x) | |
9946 | enum machine_mode mode; | |
b3694847 | 9947 | rtx x; |
230d793d RS |
9948 | { |
9949 | rtx result; | |
9950 | ||
9951 | if (GET_MODE (x) == mode) | |
9952 | return x; | |
9953 | ||
eae957a8 RK |
9954 | /* We can only support MODE being wider than a word if X is a |
9955 | constant integer or has a mode the same size. */ | |
9956 | ||
9957 | if (GET_MODE_SIZE (mode) > UNITS_PER_WORD | |
9958 | && ! ((GET_MODE (x) == VOIDmode | |
9959 | && (GET_CODE (x) == CONST_INT | |
9960 | || GET_CODE (x) == CONST_DOUBLE)) | |
9961 | || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode))) | |
38a448ca | 9962 | return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
9963 | |
9964 | /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart | |
9965 | won't know what to do. So we will strip off the SUBREG here and | |
9966 | process normally. */ | |
9967 | if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM) | |
9968 | { | |
9969 | x = SUBREG_REG (x); | |
9970 | if (GET_MODE (x) == mode) | |
9971 | return x; | |
9972 | } | |
9973 | ||
9974 | result = gen_lowpart_common (mode, x); | |
cff9f8d5 | 9975 | #ifdef CANNOT_CHANGE_MODE_CLASS |
64bf47a2 RK |
9976 | if (result != 0 |
9977 | && GET_CODE (result) == SUBREG | |
9978 | && GET_CODE (SUBREG_REG (result)) == REG | |
cff9f8d5 AH |
9979 | && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER) |
9980 | SET_REGNO_REG_SET (&subregs_of_mode[GET_MODE (result)], | |
9981 | REGNO (SUBREG_REG (result))); | |
02188693 | 9982 | #endif |
64bf47a2 | 9983 | |
230d793d RS |
9984 | if (result) |
9985 | return result; | |
9986 | ||
9987 | if (GET_CODE (x) == MEM) | |
9988 | { | |
b3694847 | 9989 | int offset = 0; |
230d793d RS |
9990 | |
9991 | /* Refuse to work on a volatile memory ref or one with a mode-dependent | |
9992 | address. */ | |
9993 | if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0))) | |
38a448ca | 9994 | return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
9995 | |
9996 | /* If we want to refer to something bigger than the original memref, | |
9997 | generate a perverse subreg instead. That will force a reload | |
9998 | of the original memref X. */ | |
9999 | if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)) | |
38a448ca | 10000 | return gen_rtx_SUBREG (mode, x, 0); |
230d793d | 10001 | |
f76b9db2 ILT |
10002 | if (WORDS_BIG_ENDIAN) |
10003 | offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD) | |
10004 | - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD)); | |
c5c76735 | 10005 | |
f76b9db2 ILT |
10006 | if (BYTES_BIG_ENDIAN) |
10007 | { | |
10008 | /* Adjust the address so that the address-after-the-data is | |
10009 | unchanged. */ | |
10010 | offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)) | |
10011 | - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x)))); | |
10012 | } | |
f1ec5147 RK |
10013 | |
10014 | return adjust_address_nv (x, mode, offset); | |
230d793d RS |
10015 | } |
10016 | ||
10017 | /* If X is a comparison operator, rewrite it in a new mode. This | |
10018 | probably won't match, but may allow further simplifications. */ | |
10019 | else if (GET_RTX_CLASS (GET_CODE (x)) == '<') | |
f1c6ba8b | 10020 | return gen_rtx_fmt_ee (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1)); |
230d793d RS |
10021 | |
10022 | /* If we couldn't simplify X any other way, just enclose it in a | |
10023 | SUBREG. Normally, this SUBREG won't match, but some patterns may | |
a7c99304 | 10024 | include an explicit SUBREG or we may simplify it further in combine. */ |
230d793d | 10025 | else |
dfbe1b2f | 10026 | { |
ddef6bc7 | 10027 | int offset = 0; |
e0e08ac2 | 10028 | rtx res; |
80ba02b1 | 10029 | enum machine_mode sub_mode = GET_MODE (x); |
dfbe1b2f | 10030 | |
80ba02b1 R |
10031 | offset = subreg_lowpart_offset (mode, sub_mode); |
10032 | if (sub_mode == VOIDmode) | |
10033 | { | |
10034 | sub_mode = int_mode_for_mode (mode); | |
10035 | x = gen_lowpart_common (sub_mode, x); | |
10036 | } | |
10037 | res = simplify_gen_subreg (mode, x, sub_mode, offset); | |
e0e08ac2 JH |
10038 | if (res) |
10039 | return res; | |
10040 | return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); | |
dfbe1b2f | 10041 | } |
230d793d RS |
10042 | } |
10043 | \f | |
230d793d RS |
10044 | /* These routines make binary and unary operations by first seeing if they |
10045 | fold; if not, a new expression is allocated. */ | |
10046 | ||
10047 | static rtx | |
10048 | gen_binary (code, mode, op0, op1) | |
10049 | enum rtx_code code; | |
10050 | enum machine_mode mode; | |
10051 | rtx op0, op1; | |
10052 | { | |
10053 | rtx result; | |
1a26b032 RK |
10054 | rtx tem; |
10055 | ||
10056 | if (GET_RTX_CLASS (code) == 'c' | |
8c9864f3 | 10057 | && swap_commutative_operands_p (op0, op1)) |
1a26b032 | 10058 | tem = op0, op0 = op1, op1 = tem; |
230d793d | 10059 | |
663522cb | 10060 | if (GET_RTX_CLASS (code) == '<') |
230d793d RS |
10061 | { |
10062 | enum machine_mode op_mode = GET_MODE (op0); | |
9210df58 | 10063 | |
663522cb | 10064 | /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get |
0f41302f | 10065 | just (REL_OP X Y). */ |
9210df58 RK |
10066 | if (GET_CODE (op0) == COMPARE && op1 == const0_rtx) |
10067 | { | |
10068 | op1 = XEXP (op0, 1); | |
10069 | op0 = XEXP (op0, 0); | |
10070 | op_mode = GET_MODE (op0); | |
10071 | } | |
10072 | ||
230d793d RS |
10073 | if (op_mode == VOIDmode) |
10074 | op_mode = GET_MODE (op1); | |
10075 | result = simplify_relational_operation (code, op_mode, op0, op1); | |
10076 | } | |
10077 | else | |
10078 | result = simplify_binary_operation (code, mode, op0, op1); | |
10079 | ||
10080 | if (result) | |
10081 | return result; | |
10082 | ||
10083 | /* Put complex operands first and constants second. */ | |
10084 | if (GET_RTX_CLASS (code) == 'c' | |
e5c56fd9 | 10085 | && swap_commutative_operands_p (op0, op1)) |
f1c6ba8b | 10086 | return gen_rtx_fmt_ee (code, mode, op1, op0); |
230d793d | 10087 | |
e5e809f4 JL |
10088 | /* If we are turning off bits already known off in OP0, we need not do |
10089 | an AND. */ | |
10090 | else if (code == AND && GET_CODE (op1) == CONST_INT | |
10091 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT | |
663522cb | 10092 | && (nonzero_bits (op0, mode) & ~INTVAL (op1)) == 0) |
e5e809f4 JL |
10093 | return op0; |
10094 | ||
f1c6ba8b | 10095 | return gen_rtx_fmt_ee (code, mode, op0, op1); |
230d793d RS |
10096 | } |
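Editorial aside, not part of combine.c: the final shortcut in gen_binary. If every bit the AND mask would clear is already zero in OP0, the AND is an identity and OP0 can be returned unchanged.

/* Illustration only: with nonzero_bits (op0) == 0x0f, an AND with
   0xff clears nothing, so it is redundant.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  for (uint32_t x = 0; x < 4096; x++)
    {
      uint32_t op0 = x & 0x0f;          /* only the low four bits live */
      assert ((op0 & 0xffu) == op0);    /* 0x0f & ~0xff == 0 */
    }
  return 0;
}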
10097 | \f | |
10098 | /* Simplify a comparison between *POP0 and *POP1 where CODE is the | |
10099 | comparison code that will be tested. | |
10100 | ||
10101 | The result is a possibly different comparison code to use. *POP0 and | |
10102 | *POP1 may be updated. | |
10103 | ||
10104 | It is possible that we might detect that a comparison is either always | |
10105 | true or always false. However, we do not perform general constant | |
5089e22e | 10106 | folding in combine, so this knowledge isn't useful. Such tautologies |
230d793d RS |
10107 | should have been detected earlier. Hence we ignore all such cases. */ |
10108 | ||
10109 | static enum rtx_code | |
10110 | simplify_comparison (code, pop0, pop1) | |
10111 | enum rtx_code code; | |
10112 | rtx *pop0; | |
10113 | rtx *pop1; | |
10114 | { | |
10115 | rtx op0 = *pop0; | |
10116 | rtx op1 = *pop1; | |
10117 | rtx tem, tem1; | |
10118 | int i; | |
10119 | enum machine_mode mode, tmode; | |
10120 | ||
10121 | /* Try a few ways of applying the same transformation to both operands. */ | |
10122 | while (1) | |
10123 | { | |
3a19aabc RK |
10124 | #ifndef WORD_REGISTER_OPERATIONS |
10125 | /* The test below this one won't handle SIGN_EXTENDs on these machines, | |
10126 | so check specially. */ | |
10127 | if (code != GTU && code != GEU && code != LTU && code != LEU | |
10128 | && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT | |
10129 | && GET_CODE (XEXP (op0, 0)) == ASHIFT | |
10130 | && GET_CODE (XEXP (op1, 0)) == ASHIFT | |
10131 | && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG | |
10132 | && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG | |
10133 | && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))) | |
ad25ba17 | 10134 | == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0)))) |
3a19aabc RK |
10135 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
10136 | && GET_CODE (XEXP (op1, 1)) == CONST_INT | |
10137 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
10138 | && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT | |
10139 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1)) | |
10140 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1)) | |
10141 | && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1)) | |
10142 | && (INTVAL (XEXP (op0, 1)) | |
10143 | == (GET_MODE_BITSIZE (GET_MODE (op0)) | |
10144 | - (GET_MODE_BITSIZE | |
10145 | (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))))))) | |
10146 | { | |
10147 | op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0)); | |
10148 | op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0)); | |
10149 | } | |
10150 | #endif | |
10151 | ||
10152 | /* If both operands are the same constant shift, see if we can ignore the |
10153 | shift. We can if the shift is a rotate or if the bits shifted out of | |
951553af | 10154 | this shift are known to be zero for both inputs and if the type of |
230d793d | 10155 | comparison is compatible with the shift. */ |
10156 | if (GET_CODE (op0) == GET_CODE (op1) |
10157 | && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT | |
10158 | && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ)) | |
45620ed4 | 10159 | || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT) |
10160 | && (code != GT && code != LT && code != GE && code != LE)) |
10161 | || (GET_CODE (op0) == ASHIFTRT | |
10162 | && (code != GTU && code != LTU | |
99dc5306 | 10163 | && code != GEU && code != LEU))) |
10164 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
10165 | && INTVAL (XEXP (op0, 1)) >= 0 | |
10166 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT | |
10167 | && XEXP (op0, 1) == XEXP (op1, 1)) | |
10168 | { |
10169 | enum machine_mode mode = GET_MODE (op0); | |
5f4f0e22 | 10170 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
10171 | int shift_count = INTVAL (XEXP (op0, 1)); |
10172 | ||
10173 | if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT) | |
10174 | mask &= (mask >> shift_count) << shift_count; | |
45620ed4 | 10175 | else if (GET_CODE (op0) == ASHIFT) |
10176 | mask = (mask & (mask << shift_count)) >> shift_count; |
10177 | ||
10178 | if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0 |
10179 | && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0) | |
10180 | op0 = XEXP (op0, 0), op1 = XEXP (op1, 0); |
10181 | else | |
10182 | break; | |
10183 | } | |
10184 | ||
10185 | /* If both operands are AND's of a paradoxical SUBREG by constant, the | |
10186 | SUBREGs are of the same mode, and, in both cases, the AND would | |
10187 | be redundant if the comparison was done in the narrower mode, | |
10188 | do the comparison in the narrower mode (e.g., we are AND'ing with 1 | |
10189 | and the operand's possibly nonzero bits are 0xffffff01; in that case |
10190 | if we only care about QImode, we don't need the AND). This case | |
10191 | occurs if the output mode of an scc insn is not SImode and | |
10192 | STORE_FLAG_VALUE == 1 (e.g., the 386). |
10193 | ||
10194 | Similarly, check for a case where the AND's are ZERO_EXTEND | |
10195 | operations from some narrower mode even though a SUBREG is not | |
10196 | present. */ | |
230d793d | 10197 | |
10198 | else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND |
10199 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10200 | && GET_CODE (XEXP (op1, 1)) == CONST_INT) | |
230d793d | 10201 | { |
10202 | rtx inner_op0 = XEXP (op0, 0); |
10203 | rtx inner_op1 = XEXP (op1, 0); | |
10204 | HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1)); | |
10205 | HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1)); | |
10206 | int changed = 0; | |
663522cb | 10207 | |
10208 | if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG |
10209 | && (GET_MODE_SIZE (GET_MODE (inner_op0)) | |
10210 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0)))) | |
10211 | && (GET_MODE (SUBREG_REG (inner_op0)) | |
10212 | == GET_MODE (SUBREG_REG (inner_op1))) | |
729a2bc6 | 10213 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0))) |
7e4dc511 | 10214 | <= HOST_BITS_PER_WIDE_INT) |
01c82bbb | 10215 | && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0), |
729a2bc6 | 10216 | GET_MODE (SUBREG_REG (inner_op0))))) |
10217 | && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1), |
10218 | GET_MODE (SUBREG_REG (inner_op1)))))) | |
10219 | { |
10220 | op0 = SUBREG_REG (inner_op0); | |
10221 | op1 = SUBREG_REG (inner_op1); | |
10222 | ||
10223 | /* The resulting comparison is always unsigned since we masked | |
0f41302f | 10224 | off the original sign bit. */ |
10225 | code = unsigned_condition (code); |
10226 | ||
10227 | changed = 1; | |
10228 | } | |
230d793d | 10229 | |
10230 | else if (c0 == c1) |
10231 | for (tmode = GET_CLASS_NARROWEST_MODE | |
10232 | (GET_MODE_CLASS (GET_MODE (op0))); | |
10233 | tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode)) | |
e51712db | 10234 | if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode)) |
10235 | { |
10236 | op0 = gen_lowpart_for_combine (tmode, inner_op0); | |
10237 | op1 = gen_lowpart_for_combine (tmode, inner_op1); | |
66415c8b | 10238 | code = unsigned_condition (code); |
10239 | changed = 1; |
10240 | break; | |
10241 | } | |
10242 | ||
10243 | if (! changed) | |
10244 | break; | |
230d793d | 10245 | } |
3a19aabc | 10246 | |
10247 | /* If both operands are NOT, we can strip off the outer operation |
10248 | and adjust the comparison code for swapped operands; similarly for | |
10249 | NEG, except that this must be an equality comparison. */ | |
10250 | else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT) | |
10251 | || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG | |
10252 | && (code == EQ || code == NE))) | |
10253 | op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code); | |
3a19aabc | 10254 | |
10255 | else |
10256 | break; | |
10257 | } | |
663522cb | 10258 | |
230d793d | 10259 | /* If the first operand is a constant, swap the operands and adjust the |
10260 | comparison code appropriately, but don't do this if the second operand |
10261 | is already a constant integer. */ | |
8c9864f3 | 10262 | if (swap_commutative_operands_p (op0, op1)) |
10263 | { |
10264 | tem = op0, op0 = op1, op1 = tem; | |
10265 | code = swap_condition (code); | |
10266 | } | |
10267 | ||
10268 | /* We now enter a loop during which we will try to simplify the comparison. | |
10269 | For the most part, we only are concerned with comparisons with zero, | |
10270 | but some things may really be comparisons with zero but not start | |
10271 | out looking that way. */ | |
10272 | ||
10273 | while (GET_CODE (op1) == CONST_INT) | |
10274 | { | |
10275 | enum machine_mode mode = GET_MODE (op0); | |
770ae6cc | 10276 | unsigned int mode_width = GET_MODE_BITSIZE (mode); |
5f4f0e22 | 10277 | unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode); |
10278 | int equality_comparison_p; |
10279 | int sign_bit_comparison_p; | |
10280 | int unsigned_comparison_p; | |
5f4f0e22 | 10281 | HOST_WIDE_INT const_op; |
10282 | |
10283 | /* We only want to handle integral modes. This catches VOIDmode, | |
10284 | CCmode, and the floating-point modes. An exception is that we | |
10285 | can handle VOIDmode if OP0 is a COMPARE or a comparison | |
10286 | operation. */ | |
10287 | ||
10288 | if (GET_MODE_CLASS (mode) != MODE_INT | |
10289 | && ! (mode == VOIDmode | |
10290 | && (GET_CODE (op0) == COMPARE | |
10291 | || GET_RTX_CLASS (GET_CODE (op0)) == '<'))) | |
10292 | break; | |
10293 | ||
10294 | /* Get the constant we are comparing against and turn off all bits | |
10295 | not on in our mode. */ | |
10296 | const_op = INTVAL (op1); |
10297 | if (mode != VOIDmode) | |
10298 | const_op = trunc_int_for_mode (const_op, mode); | |
b4fbaca7 | 10299 | op1 = GEN_INT (const_op); |
10300 | |
10301 | /* If we are comparing against a constant power of two and the value | |
951553af | 10302 | being compared can only have that single bit nonzero (e.g., it was |
10303 | `and'ed with that bit), we can replace this with a comparison |
10304 | with zero. */ | |
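      /* A worked instance of the rewrite below, with an illustrative
	 constant: if OP0 is (and X 8), so that only bit 3 can be nonzero,
	 (eq (and X 8) (const_int 8)) becomes (ne (and X 8) (const_int 0)).  */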
10305 | if (const_op | |
10306 | && (code == EQ || code == NE || code == GE || code == GEU | |
10307 | || code == LT || code == LTU) | |
5f4f0e22 | 10308 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d | 10309 | && exact_log2 (const_op) >= 0 |
e51712db | 10310 | && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op) |
10311 | { |
10312 | code = (code == EQ || code == GE || code == GEU ? NE : EQ); | |
10313 | op1 = const0_rtx, const_op = 0; | |
10314 | } | |
10315 | ||
10316 | /* Similarly, if we are comparing a value known to be either -1 or |
10317 | 0 with -1, change it to the opposite comparison against zero. */ | |
10318 | ||
10319 | if (const_op == -1 | |
10320 | && (code == EQ || code == NE || code == GT || code == LE | |
10321 | || code == GEU || code == LTU) | |
10322 | && num_sign_bit_copies (op0, mode) == mode_width) | |
10323 | { | |
10324 | code = (code == EQ || code == LE || code == GEU ? NE : EQ); | |
10325 | op1 = const0_rtx, const_op = 0; | |
10326 | } | |
10327 | ||
230d793d | 10328 | /* Do some canonicalizations based on the comparison code. We prefer |
663522cb | 10329 | comparisons against zero and then prefer equality comparisons. |
4803a34a | 10330 | If we can reduce the size of a constant, we will do that too. */ |
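      /* Two illustrative instances of the canonicalizations below:
	 (lt X (const_int 5)) becomes (le X (const_int 4)), and in a
	 32-bit mode (ltu X (const_int 0x80000000)) becomes
	 (ge X (const_int 0)), i.e. a plain sign-bit test.  */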
10331 | |
10332 | switch (code) | |
10333 | { | |
10334 | case LT: | |
10335 | /* < C is equivalent to <= (C - 1) */ |
10336 | if (const_op > 0) | |
230d793d | 10337 | { |
4803a34a | 10338 | const_op -= 1; |
5f4f0e22 | 10339 | op1 = GEN_INT (const_op); |
10340 | code = LE; |
10341 | /* ... fall through to LE case below. */ | |
10342 | } | |
10343 | else | |
10344 | break; | |
10345 | ||
10346 | case LE: | |
10347 | /* <= C is equivalent to < (C + 1); we do this for C < 0 */ |
10348 | if (const_op < 0) | |
10349 | { | |
10350 | const_op += 1; | |
5f4f0e22 | 10351 | op1 = GEN_INT (const_op); |
10352 | code = LT; |
10353 | } | |
10354 | |
10355 | /* If we are doing a <= 0 comparison on a value known to have | |
10356 | a zero sign bit, we can replace this with == 0. */ | |
10357 | else if (const_op == 0 | |
5f4f0e22 | 10358 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10359 | && (nonzero_bits (op0, mode) |
5f4f0e22 | 10360 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
10361 | code = EQ; |
10362 | break; | |
10363 | ||
10364 | case GE: | |
0f41302f | 10365 | /* >= C is equivalent to > (C - 1). */ |
4803a34a | 10366 | if (const_op > 0) |
230d793d | 10367 | { |
4803a34a | 10368 | const_op -= 1; |
5f4f0e22 | 10369 | op1 = GEN_INT (const_op); |
10370 | code = GT; |
10371 | /* ... fall through to GT below. */ | |
10372 | } | |
10373 | else | |
10374 | break; | |
10375 | ||
10376 | case GT: | |
663522cb | 10377 | /* > C is equivalent to >= (C + 1); we do this for C < 0. */ |
10378 | if (const_op < 0) |
10379 | { | |
10380 | const_op += 1; | |
5f4f0e22 | 10381 | op1 = GEN_INT (const_op); |
10382 | code = GE; |
10383 | } | |
10384 | |
10385 | /* If we are doing a > 0 comparison on a value known to have | |
10386 | a zero sign bit, we can replace this with != 0. */ | |
10387 | else if (const_op == 0 | |
5f4f0e22 | 10388 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10389 | && (nonzero_bits (op0, mode) |
5f4f0e22 | 10390 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0) |
10391 | code = NE; |
10392 | break; | |
10393 | ||
230d793d | 10394 | case LTU: |
10395 | /* < C is equivalent to <= (C - 1). */ |
10396 | if (const_op > 0) | |
10397 | { | |
10398 | const_op -= 1; | |
5f4f0e22 | 10399 | op1 = GEN_INT (const_op); |
4803a34a | 10400 | code = LEU; |
0f41302f | 10401 | /* ... fall through ... */ |
4803a34a | 10402 | } |
10403 | |
10404 | /* (unsigned) < 0x80000000 is equivalent to >= 0. */ | |
10405 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
10406 | && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))) | |
10407 | { |
10408 | const_op = 0, op1 = const0_rtx; | |
10409 | code = GE; | |
10410 | break; | |
10411 | } | |
10412 | else |
10413 | break; | |
10414 | |
10415 | case LEU: | |
10416 | /* unsigned <= 0 is equivalent to == 0 */ | |
10417 | if (const_op == 0) | |
10418 | code = EQ; | |
d0ab8cd3 | 10419 | |
0f41302f | 10420 | /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */ |
10421 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
10422 | && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)) | |
10423 | { |
10424 | const_op = 0, op1 = const0_rtx; | |
10425 | code = GE; | |
10426 | } | |
10427 | break; |
10428 | ||
10429 | case GEU: |
10430 | /* >= C is equivalent to > (C - 1). */ |
10431 | if (const_op > 1) | |
10432 | { | |
10433 | const_op -= 1; | |
5f4f0e22 | 10434 | op1 = GEN_INT (const_op); |
4803a34a | 10435 | code = GTU; |
0f41302f | 10436 | /* ... fall through ... */ |
4803a34a | 10437 | } |
10438 | |
10439 | /* (unsigned) >= 0x80000000 is equivalent to < 0. */ | |
10440 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
10441 | && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))) | |
10442 | { |
10443 | const_op = 0, op1 = const0_rtx; | |
10444 | code = LT; | |
8b2e69e1 | 10445 | break; |
d0ab8cd3 | 10446 | } |
10447 | else |
10448 | break; | |
10449 | ||
10450 | case GTU: |
10451 | /* unsigned > 0 is equivalent to != 0 */ | |
10452 | if (const_op == 0) | |
10453 | code = NE; | |
10454 | |
10455 | /* (unsigned) > 0x7fffffff is equivalent to < 0. */ | |
10456 | else if ((mode_width <= HOST_BITS_PER_WIDE_INT) |
10457 | && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)) | |
10458 | { |
10459 | const_op = 0, op1 = const0_rtx; | |
10460 | code = LT; | |
10461 | } | |
230d793d | 10462 | break; |
10463 | |
10464 | default: | |
10465 | break; | |
10466 | } |
10467 | ||
10468 | /* Compute some predicates to simplify code below. */ | |
10469 | ||
10470 | equality_comparison_p = (code == EQ || code == NE); | |
10471 | sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0); | |
10472 | unsigned_comparison_p = (code == LTU || code == LEU || code == GTU | |
d5010e66 | 10473 | || code == GEU); |
230d793d | 10474 | |
10475 | /* If this is a sign bit comparison and we can do arithmetic in |
10476 | MODE, say that we will only be needing the sign bit of OP0. */ | |
10477 | if (sign_bit_comparison_p | |
10478 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
10479 | op0 = force_to_mode (op0, mode, | |
10480 | ((HOST_WIDE_INT) 1 | |
10481 | << (GET_MODE_BITSIZE (mode) - 1)), | |
e3d616e3 | 10482 | NULL_RTX, 0); |
6139ff20 | 10483 | |
10484 | /* Now try cases based on the opcode of OP0. If none of the cases |
10485 | does a "continue", we exit this loop immediately after the | |
10486 | switch. */ | |
10487 | ||
10488 | switch (GET_CODE (op0)) | |
10489 | { | |
10490 | case ZERO_EXTRACT: | |
10491 | /* If we are extracting a single bit from a variable position in | |
10492 | a constant that has only a single bit set and are comparing it | |
663522cb | 10493 | with zero, we can convert this into an equality comparison |
d7cd794f | 10494 | between the position and the location of the single bit. */ |
230d793d | 10495 | |
10496 | if (GET_CODE (XEXP (op0, 0)) == CONST_INT |
10497 | && XEXP (op0, 1) == const1_rtx | |
10498 | && equality_comparison_p && const_op == 0 | |
d7cd794f | 10499 | && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0) |
230d793d | 10500 | { |
f76b9db2 | 10501 | if (BITS_BIG_ENDIAN) |
0d8e55d8 | 10502 | { |
10503 | enum machine_mode new_mode |
10504 | = mode_for_extraction (EP_extzv, 1); | |
10505 | if (new_mode == MAX_MACHINE_MODE) | |
10506 | i = BITS_PER_WORD - 1 - i; | |
10507 | else | |
10508 | { | |
10509 | mode = new_mode; | |
10510 | i = (GET_MODE_BITSIZE (mode) - 1 - i); | |
10511 | } | |
0d8e55d8 | 10512 | } |
10513 | |
10514 | op0 = XEXP (op0, 2); | |
5f4f0e22 | 10515 | op1 = GEN_INT (i); |
10516 | const_op = i; |
10517 | ||
10518 | /* Result is nonzero iff shift count is equal to I. */ | |
10519 | code = reverse_condition (code); | |
10520 | continue; | |
10521 | } | |
230d793d | 10522 | |
0f41302f | 10523 | /* ... fall through ... */ |
10524 | |
10525 | case SIGN_EXTRACT: | |
10526 | tem = expand_compound_operation (op0); | |
10527 | if (tem != op0) | |
10528 | { | |
10529 | op0 = tem; | |
10530 | continue; | |
10531 | } | |
10532 | break; | |
10533 | ||
10534 | case NOT: | |
10535 | /* If testing for equality, we can take the NOT of the constant. */ | |
10536 | if (equality_comparison_p | |
10537 | && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0) | |
10538 | { | |
10539 | op0 = XEXP (op0, 0); | |
10540 | op1 = tem; | |
10541 | continue; | |
10542 | } | |
10543 | ||
10544 | /* If just looking at the sign bit, reverse the sense of the | |
10545 | comparison. */ | |
10546 | if (sign_bit_comparison_p) | |
10547 | { | |
10548 | op0 = XEXP (op0, 0); | |
10549 | code = (code == GE ? LT : GE); | |
10550 | continue; | |
10551 | } | |
10552 | break; | |
10553 | ||
10554 | case NEG: | |
10555 | /* If testing for equality, we can take the NEG of the constant. */ | |
10556 | if (equality_comparison_p | |
10557 | && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0) | |
10558 | { | |
10559 | op0 = XEXP (op0, 0); | |
10560 | op1 = tem; | |
10561 | continue; | |
10562 | } | |
10563 | ||
10564 | /* The remaining cases only apply to comparisons with zero. */ | |
10565 | if (const_op != 0) | |
10566 | break; | |
10567 | ||
10568 | /* When X is ABS or is known positive, | |
10569 | (neg X) is < 0 if and only if X != 0. */ | |
10570 | ||
10571 | if (sign_bit_comparison_p | |
10572 | && (GET_CODE (XEXP (op0, 0)) == ABS | |
5f4f0e22 | 10573 | || (mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10574 | && (nonzero_bits (XEXP (op0, 0), mode) |
5f4f0e22 | 10575 | & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0))) |
10576 | { |
10577 | op0 = XEXP (op0, 0); | |
10578 | code = (code == LT ? NE : EQ); | |
10579 | continue; | |
10580 | } | |
10581 | ||
3bed8141 | 10582 | /* If we have NEG of something whose two high-order bits are the |
0f41302f | 10583 | same, we know that "(-a) < 0" is equivalent to "a > 0". */ |
3bed8141 | 10584 | if (num_sign_bit_copies (op0, mode) >= 2) |
10585 | { |
10586 | op0 = XEXP (op0, 0); | |
10587 | code = swap_condition (code); | |
10588 | continue; | |
10589 | } | |
10590 | break; | |
10591 | ||
10592 | case ROTATE: | |
10593 | /* If we are testing equality and our count is a constant, we | |
10594 | can perform the inverse operation on our RHS. */ | |
10595 | if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10596 | && (tem = simplify_binary_operation (ROTATERT, mode, | |
10597 | op1, XEXP (op0, 1))) != 0) | |
10598 | { | |
10599 | op0 = XEXP (op0, 0); | |
10600 | op1 = tem; | |
10601 | continue; | |
10602 | } | |
10603 | ||
10604 | /* If we are doing a < 0 or >= 0 comparison, it means we are testing | |
10605 | a particular bit. Convert it to an AND of a constant of that | |
10606 | bit. This will be converted into a ZERO_EXTRACT. */ | |
10607 | if (const_op == 0 && sign_bit_comparison_p | |
10608 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5f4f0e22 | 10609 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
230d793d | 10610 | { |
10611 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
10612 | ((HOST_WIDE_INT) 1 | |
10613 | << (mode_width - 1 | |
10614 | - INTVAL (XEXP (op0, 1))))); | |
10615 | code = (code == LT ? NE : EQ); |
10616 | continue; | |
10617 | } | |
10618 | ||
663522cb | 10619 | /* Fall through. */ |
10620 | |
10621 | case ABS: | |
10622 | /* ABS is ignorable inside an equality comparison with zero. */ | |
10623 | if (const_op == 0 && equality_comparison_p) | |
10624 | { | |
10625 | op0 = XEXP (op0, 0); | |
10626 | continue; | |
10627 | } | |
10628 | break; | |
10629 | |
10630 | case SIGN_EXTEND: | |
10631 | /* Can simplify (compare (zero/sign_extend FOO) CONST) | |
663522cb | 10632 | to (compare FOO CONST) if CONST fits in FOO's mode and we |
230d793d RS |
10633 | are either testing inequality or have an unsigned comparison |
10634 | with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */ | |
10635 | if (! unsigned_comparison_p | |
10636 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
10637 | <= HOST_BITS_PER_WIDE_INT) |
10638 | && ((unsigned HOST_WIDE_INT) const_op | |
e51712db | 10639 | < (((unsigned HOST_WIDE_INT) 1 |
5f4f0e22 | 10640 | << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1))))) |
10641 | { |
10642 | op0 = XEXP (op0, 0); | |
10643 | continue; | |
10644 | } | |
10645 | break; | |
10646 | ||
10647 | case SUBREG: | |
a687e897 | 10648 | /* Check for the case where we are comparing A - C1 with C2, |
abc95ed3 | 10649 | both constants are smaller than 1/2 the maximum positive |
10650 | value in MODE, and the comparison is equality or unsigned. |
10651 | In that case, if A is either zero-extended to MODE or has | |
10652 | sufficient sign bits so that the high-order bit in MODE | |
10653 | is a copy of the sign in the inner mode, we can prove that it is | |
10654 | safe to do the operation in the wider mode. This simplifies | |
10655 | many range checks. */ | |
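	  /* Range checks are the usual source of this shape: a test such as
	     10 <= X && X < 30 is lowered to (unsigned) (X - 10) < 20, with
	     the subtraction done in the wider inner mode and only the low
	     part compared here; the constants are illustrative.  */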
10656 | ||
10657 | if (mode_width <= HOST_BITS_PER_WIDE_INT | |
10658 | && subreg_lowpart_p (op0) | |
10659 | && GET_CODE (SUBREG_REG (op0)) == PLUS | |
10660 | && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT | |
10661 | && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0 | |
10662 | && (-INTVAL (XEXP (SUBREG_REG (op0), 1)) |
10663 | < (HOST_WIDE_INT) (GET_MODE_MASK (mode) / 2)) | |
adb7a1cb | 10664 | && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2 |
10665 | && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0), |
10666 | GET_MODE (SUBREG_REG (op0))) | |
663522cb | 10667 | & ~GET_MODE_MASK (mode)) |
10668 | || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0), |
10669 | GET_MODE (SUBREG_REG (op0))) | |
10670 | > (unsigned int) |
10671 | (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) | |
10672 | - GET_MODE_BITSIZE (mode))))) |
10673 | { | |
10674 | op0 = SUBREG_REG (op0); | |
10675 | continue; | |
10676 | } | |
10677 | ||
10678 | /* If the inner mode is narrower and we are extracting the low part, |
10679 | we can treat the SUBREG as if it were a ZERO_EXTEND. */ | |
10680 | if (subreg_lowpart_p (op0) | |
10681 | && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width) |
10682 | /* Fall through */ ; | |
10683 | else | |
10684 | break; |
10685 | ||
0f41302f | 10686 | /* ... fall through ... */ |
10687 | |
10688 | case ZERO_EXTEND: | |
10689 | if ((unsigned_comparison_p || equality_comparison_p) | |
10690 | && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) | |
10691 | <= HOST_BITS_PER_WIDE_INT) |
10692 | && ((unsigned HOST_WIDE_INT) const_op | |
10693 | < GET_MODE_MASK (GET_MODE (XEXP (op0, 0))))) |
10694 | { | |
10695 | op0 = XEXP (op0, 0); | |
10696 | continue; | |
10697 | } | |
10698 | break; | |
10699 | ||
10700 | case PLUS: | |
20fdd649 | 10701 | /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do |
5089e22e | 10702 | this for equality comparisons due to pathological cases involving |
230d793d | 10703 | overflows. */ |
10704 | if (equality_comparison_p |
10705 | && 0 != (tem = simplify_binary_operation (MINUS, mode, | |
10706 | op1, XEXP (op0, 1)))) | |
10707 | { |
10708 | op0 = XEXP (op0, 0); | |
10709 | op1 = tem; | |
10710 | continue; | |
10711 | } | |
10712 | ||
10713 | /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */ | |
10714 | if (const_op == 0 && XEXP (op0, 1) == constm1_rtx | |
10715 | && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p) | |
10716 | { | |
10717 | op0 = XEXP (XEXP (op0, 0), 0); | |
10718 | code = (code == LT ? EQ : NE); | |
10719 | continue; | |
10720 | } | |
10721 | break; | |
10722 | ||
10723 | case MINUS: | |
10724 | /* We used to optimize signed comparisons against zero, but that |
10725 | was incorrect. Unsigned comparisons against zero (GTU, LEU) | |
10726 | arrive here as equality comparisons, or (GEU, LTU) are | |
10727 | optimized away. No need to special-case them. */ | |
0bd4b461 | 10728 | |
10729 | /* (eq (minus A B) C) -> (eq A (plus B C)) or |
10730 | (eq B (minus A C)), whichever simplifies. We can only do | |
10731 | this for equality comparisons due to pathological cases involving | |
10732 | overflows. */ | |
10733 | if (equality_comparison_p | |
10734 | && 0 != (tem = simplify_binary_operation (PLUS, mode, | |
10735 | XEXP (op0, 1), op1))) | |
10736 | { | |
10737 | op0 = XEXP (op0, 0); | |
10738 | op1 = tem; | |
10739 | continue; | |
10740 | } | |
10741 | ||
10742 | if (equality_comparison_p | |
10743 | && 0 != (tem = simplify_binary_operation (MINUS, mode, | |
10744 | XEXP (op0, 0), op1))) | |
10745 | { | |
10746 | op0 = XEXP (op0, 1); | |
10747 | op1 = tem; | |
10748 | continue; | |
10749 | } | |
10750 | ||
10751 | /* The sign bit of (minus (ashiftrt X C) X), where C is the number |
10752 | of bits in X minus 1, is one iff X > 0. */ | |
10753 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT | |
10754 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
10755 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1)) |
10756 | == mode_width - 1 | |
10757 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) |
10758 | { | |
10759 | op0 = XEXP (op0, 1); | |
10760 | code = (code == GE ? LE : GT); | |
10761 | continue; | |
10762 | } | |
10763 | break; | |
10764 | ||
10765 | case XOR: | |
10766 | /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification | |
10767 | if C is zero or B is a constant. */ | |
10768 | if (equality_comparison_p | |
10769 | && 0 != (tem = simplify_binary_operation (XOR, mode, | |
10770 | XEXP (op0, 1), op1))) | |
10771 | { | |
10772 | op0 = XEXP (op0, 0); | |
10773 | op1 = tem; | |
10774 | continue; | |
10775 | } | |
10776 | break; | |
10777 | ||
10778 | case EQ: case NE: | |
10779 | case UNEQ: case LTGT: |
10780 | case LT: case LTU: case UNLT: case LE: case LEU: case UNLE: | |
10781 | case GT: case GTU: case UNGT: case GE: case GEU: case UNGE: | |
10782 | case UNORDERED: case ORDERED: | |
10783 | /* We can't do anything if OP0 is a condition code value, rather |
10784 | than an actual data value. */ | |
10785 | if (const_op != 0 | |
10786 | #ifdef HAVE_cc0 | |
10787 | || XEXP (op0, 0) == cc0_rtx | |
10788 | #endif | |
10789 | || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC) | |
10790 | break; | |
10791 | ||
10792 | /* Get the two operands being compared. */ | |
10793 | if (GET_CODE (XEXP (op0, 0)) == COMPARE) | |
10794 | tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1); | |
10795 | else | |
10796 | tem = XEXP (op0, 0), tem1 = XEXP (op0, 1); | |
10797 | ||
10798 | /* Check for the cases where we simply want the result of the | |
10799 | earlier test or the opposite of that result. */ | |
9a915772 | 10800 | if (code == NE || code == EQ |
5f4f0e22 | 10801 | || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT |
3f508eca | 10802 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT |
230d793d | 10803 | && (STORE_FLAG_VALUE |
10804 | & (((HOST_WIDE_INT) 1 |
10805 | << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1)))) | |
aa6683f7 | 10806 | && (code == LT || code == GE))) |
230d793d | 10807 | { |
10808 | enum rtx_code new_code; |
10809 | if (code == LT || code == NE) | |
10810 | new_code = GET_CODE (op0); | |
10811 | else | |
10812 | new_code = combine_reversed_comparison_code (op0); | |
23190837 | 10813 | |
aa6683f7 | 10814 | if (new_code != UNKNOWN) |
9a915772 | 10815 | { |
10816 | code = new_code; |
10817 | op0 = tem; | |
10818 | op1 = tem1; | |
10819 | continue; |
10820 | } | |
10821 | } |
10822 | break; | |
10823 | ||
10824 | case IOR: | |
da7d8304 | 10825 | /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero |
10826 | iff X <= 0. */ |
10827 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS | |
10828 | && XEXP (XEXP (op0, 0), 1) == constm1_rtx | |
10829 | && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1))) | |
10830 | { | |
10831 | op0 = XEXP (op0, 1); | |
10832 | code = (code == GE ? GT : LE); | |
10833 | continue; | |
10834 | } | |
10835 | break; | |
10836 | ||
10837 | case AND: | |
10838 | /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This | |
10839 | will be converted to a ZERO_EXTRACT later. */ | |
10840 | if (const_op == 0 && equality_comparison_p | |
45620ed4 | 10841 | && GET_CODE (XEXP (op0, 0)) == ASHIFT |
10842 | && XEXP (XEXP (op0, 0), 0) == const1_rtx) |
10843 | { | |
10844 | op0 = simplify_and_const_int | |
10845 | (op0, mode, gen_rtx_LSHIFTRT (mode, |
10846 | XEXP (op0, 1), | |
10847 | XEXP (XEXP (op0, 0), 1)), | |
5f4f0e22 | 10848 | (HOST_WIDE_INT) 1); |
10849 | continue; |
10850 | } | |
10851 | ||
10852 | /* If we are comparing (and (lshiftrt X C1) C2) for equality with | |
10853 | zero and X is a comparison and C1 and C2 describe only bits set | |
10854 | in STORE_FLAG_VALUE, we can compare with X. */ | |
10855 | if (const_op == 0 && equality_comparison_p | |
5f4f0e22 | 10856 | && mode_width <= HOST_BITS_PER_WIDE_INT |
10857 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
10858 | && GET_CODE (XEXP (op0, 0)) == LSHIFTRT | |
10859 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
10860 | && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0 | |
5f4f0e22 | 10861 | && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT) |
10862 | { |
10863 | mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) | |
10864 | << INTVAL (XEXP (XEXP (op0, 0), 1))); | |
663522cb | 10865 | if ((~STORE_FLAG_VALUE & mask) == 0 |
10866 | && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<' |
10867 | || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0 | |
10868 | && GET_RTX_CLASS (GET_CODE (tem)) == '<'))) | |
10869 | { | |
10870 | op0 = XEXP (XEXP (op0, 0), 0); | |
10871 | continue; | |
10872 | } | |
10873 | } | |
10874 | ||
10875 | /* If we are doing an equality comparison of an AND of a bit equal | |
10876 | to the sign bit, replace this with a LT or GE comparison of | |
10877 | the underlying value. */ | |
10878 | if (equality_comparison_p | |
10879 | && const_op == 0 | |
10880 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
5f4f0e22 | 10881 | && mode_width <= HOST_BITS_PER_WIDE_INT |
230d793d | 10882 | && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode)) |
e51712db | 10883 | == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) |
10884 | { |
10885 | op0 = XEXP (op0, 0); | |
10886 | code = (code == EQ ? GE : LT); | |
10887 | continue; | |
10888 | } | |
10889 | ||
10890 | /* If this AND operation is really a ZERO_EXTEND from a narrower | |
10891 | mode, the constant fits within that mode, and this is either an | |
10892 | equality or unsigned comparison, try to do this comparison in | |
10893 | the narrower mode. */ | |
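	  /* E.g. (ltu (and X (const_int 255)) (const_int 32)) can be tested
	     as a QImode comparison of the low byte of X against 32, since
	     the AND acts as a zero-extension from QImode; the constants
	     here are illustrative.  */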
10894 | if ((equality_comparison_p || unsigned_comparison_p) | |
10895 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10896 | && (i = exact_log2 ((INTVAL (XEXP (op0, 1)) | |
10897 | & GET_MODE_MASK (mode)) | |
10898 | + 1)) >= 0 | |
10899 | && const_op >> i == 0 | |
10900 | && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode) | |
10901 | { | |
10902 | op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0)); | |
10903 | continue; | |
10904 | } | |
10905 | |
10906 | /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits | |
10907 | in both M1 and M2 and the SUBREG is either paradoxical or | |
10908 | represents the low part, permute the SUBREG and the AND and | |
10909 | try again. */ | |
10910 | if (GET_CODE (XEXP (op0, 0)) == SUBREG | |
c5c76735 | 10911 | && (0 |
9ec36da5 | 10912 | #ifdef WORD_REGISTER_OPERATIONS |
10913 | || ((mode_width |
10914 | > (GET_MODE_BITSIZE | |
10915 | (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))) | |
10916 | && mode_width <= BITS_PER_WORD) | |
9ec36da5 | 10917 | #endif |
10918 | || ((mode_width |
10919 | <= (GET_MODE_BITSIZE | |
10920 | (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))) | |
10921 | && subreg_lowpart_p (XEXP (op0, 0)))) | |
10922 | #ifndef WORD_REGISTER_OPERATIONS |
10923 | /* It is unsafe to commute the AND into the SUBREG if the SUBREG | |
10924 | is paradoxical and WORD_REGISTER_OPERATIONS is not defined. | |
10925 | As originally written the upper bits have a defined value | |
10926 | due to the AND operation. However, if we commute the AND | |
10927 | inside the SUBREG then they no longer have defined values | |
10928 | and the meaning of the code has been changed. */ | |
10929 | && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0))) | |
10930 | <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))) | |
10931 | #endif | |
10932 | && GET_CODE (XEXP (op0, 1)) == CONST_INT |
10933 | && mode_width <= HOST_BITS_PER_WIDE_INT | |
10934 | && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))) | |
10935 | <= HOST_BITS_PER_WIDE_INT) | |
10936 | && (INTVAL (XEXP (op0, 1)) & ~mask) == 0 |
10937 | && 0 == (~GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0)))) | |
9ec36da5 | 10938 | & INTVAL (XEXP (op0, 1))) |
10939 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask |
10940 | && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) | |
9ec36da5 | 10941 | != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))) |
663522cb | 10942 | |
10943 | { |
10944 | op0 | |
10945 | = gen_lowpart_for_combine | |
10946 | (mode, | |
10947 | gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))), | |
10948 | SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1))); | |
10949 | continue; | |
10950 | } | |
10951 | ||
10952 | /* Convert (ne (and (lshiftrt (not X)) 1) 0) to |
10953 | (eq (and (lshiftrt X) 1) 0). */ | |
10954 | if (const_op == 0 && equality_comparison_p | |
10955 | && XEXP (op0, 1) == const1_rtx | |
10956 | && GET_CODE (XEXP (op0, 0)) == LSHIFTRT | |
10957 | && GET_CODE (XEXP (XEXP (op0, 0), 0)) == NOT) | |
10958 | { | |
10959 | op0 = simplify_and_const_int | |
10960 | (op0, mode, |
10961 | gen_rtx_LSHIFTRT (mode, XEXP (XEXP (XEXP (op0, 0), 0), 0), | |
10962 | XEXP (XEXP (op0, 0), 1)), | |
10963 | (HOST_WIDE_INT) 1); |
10964 | code = (code == NE ? EQ : NE); | |
10965 | continue; | |
10966 | } | |
10967 | break; |
10968 | ||
10969 | case ASHIFT: | |
45620ed4 | 10970 | /* If we have (compare (ashift FOO N) (const_int C)) and |
230d793d | 10971 | the high order N bits of FOO (N+1 if an inequality comparison) |
951553af | 10972 | are known to be zero, we can do this by comparing FOO with C |
10973 | shifted right N bits so long as the low-order N bits of C are |
10974 | zero. */ | |
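	  /* E.g. (eq (ashift X 2) (const_int 20)) becomes
	     (eq X (const_int 5)) when the two high-order bits of X are
	     known to be zero; the constants are illustrative.  */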
10975 | if (GET_CODE (XEXP (op0, 1)) == CONST_INT | |
10976 | && INTVAL (XEXP (op0, 1)) >= 0 | |
10977 | && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p) | |
10978 | < HOST_BITS_PER_WIDE_INT) |
10979 | && ((const_op | |
34785d05 | 10980 | & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0) |
5f4f0e22 | 10981 | && mode_width <= HOST_BITS_PER_WIDE_INT |
951553af | 10982 | && (nonzero_bits (XEXP (op0, 0), mode) |
10983 | & ~(mask >> (INTVAL (XEXP (op0, 1)) |
10984 | + ! equality_comparison_p))) == 0) | |
230d793d | 10985 | { |
10986 | /* We must perform a logical shift, not an arithmetic one, |
10987 | as we want the top N bits of C to be zero. */ | |
aaaec114 | 10988 | unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode); |
663522cb | 10989 | |
7ce787fe | 10990 | temp >>= INTVAL (XEXP (op0, 1)); |
2496c7bd | 10991 | op1 = gen_int_mode (temp, mode); |
10992 | op0 = XEXP (op0, 0); |
10993 | continue; | |
10994 | } | |
10995 | ||
dfbe1b2f | 10996 | /* If we are doing a sign bit comparison, it means we are testing |
230d793d | 10997 | a particular bit. Convert it to the appropriate AND. */ |
dfbe1b2f | 10998 | if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT |
5f4f0e22 | 10999 | && mode_width <= HOST_BITS_PER_WIDE_INT) |
230d793d | 11000 | { |
11001 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
11002 | ((HOST_WIDE_INT) 1 | |
11003 | << (mode_width - 1 | |
11004 | - INTVAL (XEXP (op0, 1))))); | |
11005 | code = (code == LT ? NE : EQ); |
11006 | continue; | |
11007 | } | |
11008 | |
11009 | /* If this is an equality comparison with zero and we are shifting |
11010 | the low bit to the sign bit, we can convert this to an AND of the | |
11011 | low-order bit. */ | |
11012 | if (const_op == 0 && equality_comparison_p | |
11013 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
11014 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) |
11015 | == mode_width - 1) | |
dfbe1b2f | 11016 | { |
11017 | op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0), |
11018 | (HOST_WIDE_INT) 1); | |
11019 | continue; |
11020 | } | |
11021 | break; |
11022 | ||
11023 | case ASHIFTRT: | |
11024 | /* If this is an equality comparison with zero, we can do this |
11025 | as a logical shift, which might be much simpler. */ | |
11026 | if (equality_comparison_p && const_op == 0 | |
11027 | && GET_CODE (XEXP (op0, 1)) == CONST_INT) | |
11028 | { | |
11029 | op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, | |
11030 | XEXP (op0, 0), | |
11031 | INTVAL (XEXP (op0, 1))); | |
11032 | continue; | |
11033 | } | |
11034 | ||
11035 | /* If OP0 is a sign extension and CODE is not an unsigned comparison, |
11036 | do the comparison in a narrower mode. */ | |
11037 | if (! unsigned_comparison_p | |
11038 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
11039 | && GET_CODE (XEXP (op0, 0)) == ASHIFT | |
11040 | && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1) | |
11041 | && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | |
22331794 | 11042 | MODE_INT, 1)) != BLKmode |
11043 | && (((unsigned HOST_WIDE_INT) const_op |
11044 | + (GET_MODE_MASK (tmode) >> 1) + 1) | |
11045 | <= GET_MODE_MASK (tmode))) | |
11046 | { |
11047 | op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0)); | |
11048 | continue; | |
11049 | } | |
11050 | ||
11051 | /* Likewise if OP0 is a PLUS of a sign extension with a |
11052 | constant, which is usually represented with the PLUS | |
11053 | between the shifts. */ | |
11054 | if (! unsigned_comparison_p | |
11055 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
11056 | && GET_CODE (XEXP (op0, 0)) == PLUS | |
11057 | && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT | |
11058 | && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT | |
11059 | && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1) | |
11060 | && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)), | |
11061 | MODE_INT, 1)) != BLKmode | |
11062 | && (((unsigned HOST_WIDE_INT) const_op |
11063 | + (GET_MODE_MASK (tmode) >> 1) + 1) | |
11064 | <= GET_MODE_MASK (tmode))) | |
11065 | { |
11066 | rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0); | |
11067 | rtx add_const = XEXP (XEXP (op0, 0), 1); | |
11068 | rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const, | |
11069 | XEXP (op0, 1)); | |
11070 | ||
11071 | op0 = gen_binary (PLUS, tmode, | |
11072 | gen_lowpart_for_combine (tmode, inner), | |
11073 | new_const); | |
11074 | continue; | |
11075 | } | |
11076 | ||
0f41302f | 11077 | /* ... fall through ... */ |
11078 | case LSHIFTRT: |
11079 | /* If we have (compare (xshiftrt FOO N) (const_int C)) and | |
951553af | 11080 | the low order N bits of FOO are known to be zero, we can do this |
11081 | by comparing FOO with C shifted left N bits so long as no |
11082 | overflow occurs. */ | |
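	  /* E.g. (eq (lshiftrt X 2) (const_int 5)) becomes
	     (eq X (const_int 20)) when the two low-order bits of X are
	     known to be zero; the constants are illustrative.  */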
11083 | if (GET_CODE (XEXP (op0, 1)) == CONST_INT | |
11084 | && INTVAL (XEXP (op0, 1)) >= 0 | |
11085 | && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT |
11086 | && mode_width <= HOST_BITS_PER_WIDE_INT | |
951553af | 11087 | && (nonzero_bits (XEXP (op0, 0), mode) |
5f4f0e22 | 11088 | & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0 |
11089 | && (((unsigned HOST_WIDE_INT) const_op |
11090 | + (GET_CODE (op0) != LSHIFTRT | |
11091 | ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1) | |
11092 | + 1) | |
11093 | : 0)) | |
11094 | <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)))) | |
230d793d | 11095 | { |
11096 | /* If the shift was logical, then we must make the condition |
11097 | unsigned. */ | |
11098 | if (GET_CODE (op0) == LSHIFTRT) | |
11099 | code = unsigned_condition (code); | |
11100 | ||
230d793d | 11101 | const_op <<= INTVAL (XEXP (op0, 1)); |
5f4f0e22 | 11102 | op1 = GEN_INT (const_op); |
11103 | op0 = XEXP (op0, 0); |
11104 | continue; | |
11105 | } | |
11106 | ||
11107 | /* If we are using this shift to extract just the sign bit, we | |
11108 | can replace this with an LT or GE comparison. */ | |
11109 | if (const_op == 0 | |
11110 | && (equality_comparison_p || sign_bit_comparison_p) | |
11111 | && GET_CODE (XEXP (op0, 1)) == CONST_INT | |
11112 | && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) |
11113 | == mode_width - 1) | |
11114 | { |
11115 | op0 = XEXP (op0, 0); | |
11116 | code = (code == NE || code == GT ? LT : GE); | |
11117 | continue; | |
11118 | } | |
11119 | break; | |
663522cb | 11120 | |
11121 | default: |
11122 | break; | |
11123 | } |
11124 | ||
11125 | break; | |
11126 | } | |
11127 | ||
11128 | /* Now make any compound operations involved in this comparison. Then, | |
76d31c63 | 11129 | check for an outermost SUBREG on OP0 that is not doing anything or is |
11130 | paradoxical. The latter transformation must only be performed when |
11131 | it is known that the "extra" bits will be the same in op0 and op1 or | |
11132 | that they don't matter. There are three cases to consider: | |
11133 | ||
11134 | 1. SUBREG_REG (op0) is a register. In this case the bits are don't | |
11135 | care bits and we can assume they have any convenient value. So | |
11136 | making the transformation is safe. | |
11137 | ||
11138 | 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined. | |
11139 | In this case the upper bits of op0 are undefined. We should not make | |
11140 | the simplification in that case as we do not know the contents of | |
11141 | those bits. | |
11142 | ||
11143 | 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not | |
11144 | NIL. In that case we know those bits are zeros or ones. We must | |
11145 | also be sure that they are the same as the upper bits of op1. | |
11146 | ||
11147 | We can never remove a SUBREG for a non-equality comparison because | |
11148 | the sign bit is in a different place in the underlying object. */ | |
11149 | |
11150 | op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET); | |
11151 | op1 = make_compound_operation (op1, SET); | |
11152 | ||
11153 | if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0) | |
11154 | /* Case 3 above, to sometimes allow (subreg (mem x)), isn't |
11155 | implemented. */ | |
11156 | && GET_CODE (SUBREG_REG (op0)) == REG | |
230d793d | 11157 | && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT |
fa4e13e0 | 11158 | && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT |
5add6d1a | 11159 | && (code == NE || code == EQ)) |
230d793d | 11160 | { |
11161 | if (GET_MODE_SIZE (GET_MODE (op0)) |
11162 | > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0)))) | |
11163 | { | |
11164 | op0 = SUBREG_REG (op0); | |
11165 | op1 = gen_lowpart_for_combine (GET_MODE (op0), op1); | |
11166 | } | |
11167 | else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) | |
11168 | <= HOST_BITS_PER_WIDE_INT) | |
11169 | && (nonzero_bits (SUBREG_REG (op0), | |
11170 | GET_MODE (SUBREG_REG (op0))) | |
11171 | & ~GET_MODE_MASK (GET_MODE (op0))) == 0) | |
11172 | { | |
11173 | tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)), op1); | |
230d793d | 11174 | |
11175 | if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0))) |
11176 | & ~GET_MODE_MASK (GET_MODE (op0))) == 0) | |
11177 | op0 = SUBREG_REG (op0), op1 = tem; | |
11178 | } | |
11179 | } | |
11180 | |
11181 | /* We now do the opposite procedure: Some machines don't have compare | |
11182 | insns in all modes. If OP0's mode is an integer mode smaller than a | |
11183 | word and we can't do a compare in that mode, see if there is a larger | |
11184 | mode for which we can do the compare. There are a number of cases in |
11185 | which we can use the wider mode. */ | |
11186 | |
11187 | mode = GET_MODE (op0); | |
11188 | if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT | |
11189 | && GET_MODE_SIZE (mode) < UNITS_PER_WORD | |
ef89d648 | 11190 | && ! have_insn_for (COMPARE, mode)) |
230d793d | 11191 | for (tmode = GET_MODE_WIDER_MODE (mode); |
11192 | (tmode != VOIDmode |
11193 | && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT); | |
230d793d | 11194 | tmode = GET_MODE_WIDER_MODE (tmode)) |
ef89d648 | 11195 | if (have_insn_for (COMPARE, tmode)) |
230d793d | 11196 | { |
11197 | int zero_extended; |
11198 | ||
951553af | 11199 | /* If the only nonzero bits in OP0 and OP1 are those in the |
11200 | narrower mode and this is an equality or unsigned comparison, |
11201 | we can use the wider mode. Similarly for sign-extended | |
7e4dc511 | 11202 | values, in which case it is true for all comparisons. */ |
11203 | zero_extended = ((code == EQ || code == NE |
11204 | || code == GEU || code == GTU | |
11205 | || code == LEU || code == LTU) | |
11206 | && (nonzero_bits (op0, tmode) | |
11207 | & ~GET_MODE_MASK (mode)) == 0 | |
11208 | && ((GET_CODE (op1) == CONST_INT | |
11209 | || (nonzero_bits (op1, tmode) | |
11210 | & ~GET_MODE_MASK (mode)) == 0))); | |
11211 | ||
11212 | if (zero_extended | |
7e4dc511 | 11213 | || ((num_sign_bit_copies (op0, tmode) |
11214 | > (unsigned int) (GET_MODE_BITSIZE (tmode) |
11215 | - GET_MODE_BITSIZE (mode))) | |
a687e897 | 11216 | && (num_sign_bit_copies (op1, tmode) |
11217 | > (unsigned int) (GET_MODE_BITSIZE (tmode) |
11218 | - GET_MODE_BITSIZE (mode))))) | |
a687e897 | 11219 | { |
11220 | /* If OP0 is an AND and we don't have an AND in MODE either, |
11221 | make a new AND in the proper mode. */ | |
11222 | if (GET_CODE (op0) == AND | |
ef89d648 | 11223 | && !have_insn_for (AND, mode)) |
11224 | op0 = gen_binary (AND, tmode, |
11225 | gen_lowpart_for_combine (tmode, | |
11226 | XEXP (op0, 0)), | |
11227 | gen_lowpart_for_combine (tmode, | |
11228 | XEXP (op0, 1))); | |
11229 | ||
a687e897 | 11230 | op0 = gen_lowpart_for_combine (tmode, op0); |
11231 | if (zero_extended && GET_CODE (op1) == CONST_INT) |
11232 | op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (mode)); | |
11233 | op1 = gen_lowpart_for_combine (tmode, op1); |
11234 | break; | |
11235 | } | |
230d793d | 11236 | |
11237 | /* If this is a test for negative, we can make an explicit |
11238 | test of the sign bit. */ | |
11239 | ||
11240 | if (op1 == const0_rtx && (code == LT || code == GE) | |
11241 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
230d793d | 11242 | { |
11243 | op0 = gen_binary (AND, tmode, |
11244 | gen_lowpart_for_combine (tmode, op0), | |
11245 | GEN_INT ((HOST_WIDE_INT) 1 |
11246 | << (GET_MODE_BITSIZE (mode) - 1))); | |
230d793d | 11247 | code = (code == LT) ? NE : EQ; |
a687e897 | 11248 | break; |
230d793d | 11249 | } |
11250 | } |
11251 | ||
11252 | #ifdef CANONICALIZE_COMPARISON |
11253 | /* If this machine only supports a subset of valid comparisons, see if we | |
11254 | can convert an unsupported one into a supported one. */ | |
11255 | CANONICALIZE_COMPARISON (code, op0, op1); | |
11256 | #endif | |
11257 | ||
11258 | *pop0 = op0; |
11259 | *pop1 = op1; | |
11260 | ||
11261 | return code; | |
11262 | } | |
11263 | \f | |
11264 | /* Like jump.c's reversed_comparison_code, but use combine infrastructure for |
11265 | searching backward. */ | |
c3ffea50 | 11266 | static enum rtx_code |
11267 | combine_reversed_comparison_code (exp) |
11268 | rtx exp; | |
230d793d | 11269 | { |
11270 | enum rtx_code code1 = reversed_comparison_code (exp, NULL); |
11271 | rtx x; | |
11272 | ||
11273 | if (code1 != UNKNOWN | |
11274 | || GET_MODE_CLASS (GET_MODE (XEXP (exp, 0))) != MODE_CC) | |
11275 | return code1; | |
11276 | /* Otherwise try and find where the condition codes were last set and | |
11277 | use that. */ | |
11278 | x = get_last_value (XEXP (exp, 0)); | |
11279 | if (!x || GET_CODE (x) != COMPARE) | |
11280 | return UNKNOWN; | |
11281 | return reversed_comparison_code_parts (GET_CODE (exp), | |
11282 | XEXP (x, 0), XEXP (x, 1), NULL); | |
11283 | } |
11284 | /* Return comparison with reversed code of EXP and operands OP0 and OP1. | |
11285 | Return NULL_RTX in case we fail to do the reversal. */ | |
11286 | static rtx | |
11287 | reversed_comparison (exp, mode, op0, op1) | |
11288 | rtx exp, op0, op1; | |
11289 | enum machine_mode mode; | |
11290 | { | |
11291 | enum rtx_code reversed_code = combine_reversed_comparison_code (exp); | |
11292 | if (reversed_code == UNKNOWN) | |
11293 | return NULL_RTX; | |
11294 | else | |
11295 | return gen_binary (reversed_code, mode, op0, op1); | |
11296 | } |
11297 | \f | |
11298 | /* Utility function for following routine. Called when X is part of a value | |
11299 | being stored into reg_last_set_value. Sets reg_last_set_table_tick | |
11300 | for each register mentioned. Similar to mention_regs in cse.c */ | |
11301 | ||
11302 | static void | |
11303 | update_table_tick (x) | |
11304 | rtx x; | |
11305 | { | |
11306 | enum rtx_code code = GET_CODE (x); |
11307 | const char *fmt = GET_RTX_FORMAT (code); | |
11308 | int i; | |
11309 | |
11310 | if (code == REG) | |
11311 | { | |
11312 | unsigned int regno = REGNO (x); |
11313 | unsigned int endregno | |
11314 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
11315 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); | |
11316 | unsigned int r; | |
230d793d | 11317 | |
11318 | for (r = regno; r < endregno; r++) |
11319 | reg_last_set_table_tick[r] = label_tick; | |
11320 | |
11321 | return; | |
11322 | } | |
663522cb | 11323 | |
11324 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) |
11325 | /* Note that we can't have an "E" in values stored; see | |
11326 | get_last_value_validate. */ | |
11327 | if (fmt[i] == 'e') | |
11328 | update_table_tick (XEXP (x, i)); | |
11329 | } | |
11330 | ||
11331 | /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we | |
11332 | are saying that the register is clobbered and we no longer know its | |
11333 | value. If INSN is zero, don't update reg_last_set; this is only permitted |
11334 | with VALUE also zero and is used to invalidate the register. */ | |
11335 | |
11336 | static void | |
11337 | record_value_for_reg (reg, insn, value) | |
11338 | rtx reg; | |
11339 | rtx insn; | |
11340 | rtx value; | |
11341 | { | |
11342 | unsigned int regno = REGNO (reg); |
11343 | unsigned int endregno | |
11344 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
11345 | ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1); | |
11346 | unsigned int i; | |
11347 | |
11348 | /* If VALUE contains REG and we have a previous value for REG, substitute | |
11349 | the previous value. */ | |
11350 | if (value && insn && reg_overlap_mentioned_p (reg, value)) | |
11351 | { | |
11352 | rtx tem; | |
11353 | ||
11354 | /* Set things up so get_last_value is allowed to see anything set up to | |
11355 | our insn. */ | |
11356 | subst_low_cuid = INSN_CUID (insn); | |
663522cb | 11357 | tem = get_last_value (reg); |
230d793d | 11358 | |
11359 | /* If TEM is simply a binary operation with two CLOBBERs as operands, |
11360 | it isn't going to be useful and will take a lot of time to process, | |
11361 | so just use the CLOBBER. */ | |
11362 | ||
230d793d | 11363 | if (tem) |
11364 | { |
11365 | if ((GET_RTX_CLASS (GET_CODE (tem)) == '2' | |
11366 | || GET_RTX_CLASS (GET_CODE (tem)) == 'c') | |
11367 | && GET_CODE (XEXP (tem, 0)) == CLOBBER | |
11368 | && GET_CODE (XEXP (tem, 1)) == CLOBBER) | |
11369 | tem = XEXP (tem, 0); | |
11370 | ||
11371 | value = replace_rtx (copy_rtx (value), reg, tem); | |
11372 | } | |
11373 | } |
11374 | ||
11375 | /* For each register modified, show we don't know its value, that | |
11376 | we don't know about its bitwise content, that its value has been |
11377 | updated, and that we don't know the location of the death of the | |
11378 | register. */ | |
770ae6cc | 11379 | for (i = regno; i < endregno; i++) |
11380 | { |
11381 | if (insn) | |
11382 | reg_last_set[i] = insn; | |
770ae6cc | 11383 | |
230d793d | 11384 | reg_last_set_value[i] = 0; |
11385 | reg_last_set_mode[i] = 0; |
11386 | reg_last_set_nonzero_bits[i] = 0; | |
11387 | reg_last_set_sign_bit_copies[i] = 0; | |
11388 | reg_last_death[i] = 0; |
11389 | } | |
11390 | ||
11391 | /* Mark registers that are being referenced in this value. */ | |
11392 | if (value) | |
11393 | update_table_tick (value); | |
11394 | ||
11395 | /* Now update the status of each register being set. | |
11396 | If someone is using this register in this block, set this register | |
11397 | to invalid since we will get confused between the two lives in this | |
11398 | basic block. This makes using this register always invalid. In cse, we | |
11399 | scan the table to invalidate all entries using this register, but this | |
11400 | is too much work for us. */ | |
11401 | ||
11402 | for (i = regno; i < endregno; i++) | |
11403 | { | |
11404 | reg_last_set_label[i] = label_tick; | |
11405 | if (value && reg_last_set_table_tick[i] == label_tick) | |
11406 | reg_last_set_invalid[i] = 1; | |
11407 | else | |
11408 | reg_last_set_invalid[i] = 0; | |
11409 | } | |
11410 | ||
11411 | /* The value being assigned might refer to X (like in "x++;"). In that | |
11412 | case, we must replace it with (clobber (const_int 0)) to prevent | |
11413 | infinite loops. */ | |
9a893315 | 11414 | if (value && ! get_last_value_validate (&value, insn, |
11415 | reg_last_set_label[regno], 0)) |
11416 | { | |
11417 | value = copy_rtx (value); | |
11418 | if (! get_last_value_validate (&value, insn, |
11419 | reg_last_set_label[regno], 1)) | |
11420 | value = 0; |
11421 | } | |
11422 | ||
11423 | /* For the main register being modified, update the value, the mode, the |
11424 | nonzero bits, and the number of sign bit copies. */ | |
11425 | ||
11426 | reg_last_set_value[regno] = value; |
11427 | ||
11428 | if (value) |
11429 | { | |
0a0440c9 | 11430 | enum machine_mode mode = GET_MODE (reg); |
2afabb48 | 11431 | subst_low_cuid = INSN_CUID (insn); |
11432 | reg_last_set_mode[regno] = mode; |
11433 | if (GET_MODE_CLASS (mode) == MODE_INT | |
11434 | && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT) | |
11435 | mode = nonzero_bits_mode; | |
11436 | reg_last_set_nonzero_bits[regno] = nonzero_bits (value, mode); | |
11437 | reg_last_set_sign_bit_copies[regno] |
11438 | = num_sign_bit_copies (value, GET_MODE (reg)); | |
11439 | } | |
11440 | } |
11441 | ||
230d793d | 11442 | /* Called via note_stores from record_dead_and_set_regs to handle one |
84832317 MM |
11443 | SET or CLOBBER in an insn. DATA is the instruction in which the |
11444 | set is occurring. */ | |
230d793d RS |
11445 | |
11446 | static void | |
84832317 | 11447 | record_dead_and_set_regs_1 (dest, setter, data) |
230d793d | 11448 | rtx dest, setter; |
84832317 | 11449 | void *data; |
230d793d | 11450 | { |
84832317 MM |
11451 | rtx record_dead_insn = (rtx) data; |
11452 | ||
ca89d290 RK |
11453 | if (GET_CODE (dest) == SUBREG) |
11454 | dest = SUBREG_REG (dest); | |
11455 | ||
230d793d RS |
11456 | if (GET_CODE (dest) == REG) |
11457 | { | |
11458 | /* If we are setting the whole register, we know its value. Otherwise | |
11459 | show that we don't know the value. We can handle SUBREG in | |
11460 | some cases. */ | |
11461 | if (GET_CODE (setter) == SET && dest == SET_DEST (setter)) | |
11462 | record_value_for_reg (dest, record_dead_insn, SET_SRC (setter)); | |
11463 | else if (GET_CODE (setter) == SET | |
11464 | && GET_CODE (SET_DEST (setter)) == SUBREG | |
11465 | && SUBREG_REG (SET_DEST (setter)) == dest | |
90bf8081 | 11466 | && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD |
230d793d | 11467 | && subreg_lowpart_p (SET_DEST (setter))) |
d0ab8cd3 RK |
11468 | record_value_for_reg (dest, record_dead_insn, |
11469 | gen_lowpart_for_combine (GET_MODE (dest), | |
11470 | SET_SRC (setter))); | |
230d793d | 11471 | else |
5f4f0e22 | 11472 | record_value_for_reg (dest, record_dead_insn, NULL_RTX); |
230d793d RS |
11473 | } |
11474 | else if (GET_CODE (dest) == MEM | |
11475 | /* Ignore pushes, they clobber nothing. */ | |
11476 | && ! push_operand (dest, GET_MODE (dest))) | |
11477 | mem_last_set = INSN_CUID (record_dead_insn); | |
11478 | } | |
11479 | ||
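/* Editorial sketch of the note_stores idiom used above.  note_stores
   walks every SET and CLOBBER in a pattern and invokes the callback
   with the destination, the enclosing SET/CLOBBER rtx, and the opaque
   DATA pointer.  A minimal callback that merely counts register
   stores (a hypothetical helper, shown only for illustration) might
   look like this.  */
#if 0
static void
count_reg_stores_1 (dest, setter, data)
     rtx dest;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  if (GET_CODE (dest) == REG)
    (*(int *) data)++;
}

/* Usage:
     int n = 0;
     note_stores (PATTERN (insn), count_reg_stores_1, &n);  */
#endif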
11480 | /* Update the records of when each REG was most recently set or killed | |
11481 | for the things done by INSN. This is the last thing done in processing | |
11482 | INSN in the combiner loop. | |
11483 | ||
ef026f91 RS |
11484 | We update reg_last_set, reg_last_set_value, reg_last_set_mode, |
11485 | reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death, | |
11486 | and also the similar information mem_last_set (which insn most recently | |
11487 | modified memory) and last_call_cuid (which insn was the most recent | |
11488 | subroutine call). */ | |
230d793d RS |
11489 | |
11490 | static void | |
11491 | record_dead_and_set_regs (insn) | |
11492 | rtx insn; | |
11493 | { | |
b3694847 | 11494 | rtx link; |
770ae6cc | 11495 | unsigned int i; |
55310dad | 11496 | |
230d793d RS |
11497 | for (link = REG_NOTES (insn); link; link = XEXP (link, 1)) |
11498 | { | |
dbc131f3 RK |
11499 | if (REG_NOTE_KIND (link) == REG_DEAD |
11500 | && GET_CODE (XEXP (link, 0)) == REG) | |
11501 | { | |
770ae6cc RK |
11502 | unsigned int regno = REGNO (XEXP (link, 0)); |
11503 | unsigned int endregno | |
dbc131f3 RK |
11504 | = regno + (regno < FIRST_PSEUDO_REGISTER |
11505 | ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0))) | |
11506 | : 1); | |
dbc131f3 RK |
11507 | |
11508 | for (i = regno; i < endregno; i++) | |
11509 | reg_last_death[i] = insn; | |
11510 | } | |
230d793d | 11511 | else if (REG_NOTE_KIND (link) == REG_INC) |
5f4f0e22 | 11512 | record_value_for_reg (XEXP (link, 0), insn, NULL_RTX); |
230d793d RS |
11513 | } |
11514 | ||
11515 | if (GET_CODE (insn) == CALL_INSN) | |
55310dad RK |
11516 | { |
11517 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | |
29655d3d | 11518 | if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)) |
55310dad RK |
11519 | { |
11520 | reg_last_set_value[i] = 0; | |
ef026f91 RS |
11521 | reg_last_set_mode[i] = 0; |
11522 | reg_last_set_nonzero_bits[i] = 0; | |
11523 | reg_last_set_sign_bit_copies[i] = 0; | |
55310dad RK |
11524 | reg_last_death[i] = 0; |
11525 | } | |
11526 | ||
11527 | last_call_cuid = mem_last_set = INSN_CUID (insn); | |
29655d3d ZW |
11528 | |
11529 | /* Don't bother recording what this insn does. It might set the | |
11530 | return value register, but we can't combine into a call | |
11531 | pattern anyway, so there's no point trying (and it may cause | |
11532 | a crash, if e.g. we wind up asking for last_set_value of a | |
11533 | SUBREG of the return value register). */ | |
11534 | return; | |
55310dad | 11535 | } |
230d793d | 11536 | |
84832317 | 11537 | note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn); |
230d793d | 11538 | } |
732f2ac9 | 11539 | |
732f2ac9 JJ |
11540 | /* If a SUBREG has the promoted bit set, it is in fact a property of the |
11541 | register present in the SUBREG, so for each such SUBREG go back and | |
11542 | adjust nonzero and sign bit information of the registers that are | |
11543 | known to have some zero/sign bits set. | |
11544 | ||
11545 | This is needed because when combine blows the SUBREGs away, the | |
11546 | information on zero/sign bits is lost and further combines can be | |
11547 | missed because of that. */ | |
11548 | ||
11549 | static void | |
11550 | record_promoted_value (insn, subreg) | |
663522cb KH |
11551 | rtx insn; |
11552 | rtx subreg; | |
732f2ac9 | 11553 | { |
4a71b24f | 11554 | rtx links, set; |
770ae6cc | 11555 | unsigned int regno = REGNO (SUBREG_REG (subreg)); |
732f2ac9 JJ |
11556 | enum machine_mode mode = GET_MODE (subreg); |
11557 | ||
25af74a0 | 11558 | if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT) |
732f2ac9 JJ |
11559 | return; |
11560 | ||
663522cb | 11561 | for (links = LOG_LINKS (insn); links;) |
732f2ac9 JJ |
11562 | { |
11563 | insn = XEXP (links, 0); | |
11564 | set = single_set (insn); | |
11565 | ||
11566 | if (! set || GET_CODE (SET_DEST (set)) != REG | |
11567 | || REGNO (SET_DEST (set)) != regno | |
11568 | || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg))) | |
11569 | { | |
11570 | links = XEXP (links, 1); | |
11571 | continue; | |
11572 | } | |
11573 | ||
663522cb KH |
11574 | if (reg_last_set[regno] == insn) |
11575 | { | |
7879b81e | 11576 | if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0) |
663522cb KH |
11577 | reg_last_set_nonzero_bits[regno] &= GET_MODE_MASK (mode); |
11578 | } | |
732f2ac9 JJ |
11579 | |
11580 | if (GET_CODE (SET_SRC (set)) == REG) | |
11581 | { | |
11582 | regno = REGNO (SET_SRC (set)); | |
11583 | links = LOG_LINKS (insn); | |
11584 | } | |
11585 | else | |
11586 | break; | |
11587 | } | |
11588 | } | |
11589 | ||
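/* Editorial example (hypothetical register number): on a target that
   promotes QImode values to SImode registers, a promoted access may
   appear as

       (subreg:QI (reg:SI 100) 0)

   with SUBREG_PROMOTED_VAR_P set.  If the promotion was unsigned, the
   walk above may conclude that (reg:SI 100) has no nonzero bits
   outside GET_MODE_MASK (QImode), i.e. outside 0xff, and narrows
   reg_last_set_nonzero_bits accordingly.  */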
11590 | /* Scan X for promoted SUBREGs. For each one found, | |
11591 | note what it implies to the registers used in it. */ | |
11592 | ||
11593 | static void | |
11594 | check_promoted_subreg (insn, x) | |
663522cb KH |
11595 | rtx insn; |
11596 | rtx x; | |
732f2ac9 JJ |
11597 | { |
11598 | if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x) | |
11599 | && GET_CODE (SUBREG_REG (x)) == REG) | |
11600 | record_promoted_value (insn, x); | |
11601 | else | |
11602 | { | |
11603 | const char *format = GET_RTX_FORMAT (GET_CODE (x)); | |
11604 | int i, j; | |
11605 | ||
11606 | for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++) | |
663522cb | 11607 | switch (format[i]) |
732f2ac9 JJ |
11608 | { |
11609 | case 'e': | |
11610 | check_promoted_subreg (insn, XEXP (x, i)); | |
11611 | break; | |
11612 | case 'V': | |
11613 | case 'E': | |
11614 | if (XVEC (x, i) != 0) | |
11615 | for (j = 0; j < XVECLEN (x, i); j++) | |
11616 | check_promoted_subreg (insn, XVECEXP (x, i, j)); | |
11617 | break; | |
11618 | } | |
11619 | } | |
11620 | } | |
230d793d RS |
11621 | \f |
11622 | /* Utility routine for the following function. Verify that all the registers | |
11623 | mentioned in *LOC are valid when *LOC was part of a value set when | |
11624 | label_tick == TICK. Return 0 if some are not. | |
11625 | ||
da7d8304 | 11626 | If REPLACE is nonzero, replace the invalid reference with |
230d793d RS |
11627 | (clobber (const_int 0)) and return 1. This replacement is useful because |
11628 | we often can get useful information about the form of a value (e.g., if | |
11629 | it was produced by a shift that always produces -1 or 0) even though | |
11630 | we don't know exactly what registers it was produced from. */ | |
11631 | ||
11632 | static int | |
9a893315 | 11633 | get_last_value_validate (loc, insn, tick, replace) |
230d793d | 11634 | rtx *loc; |
9a893315 | 11635 | rtx insn; |
230d793d RS |
11636 | int tick; |
11637 | int replace; | |
11638 | { | |
11639 | rtx x = *loc; | |
6f7d635c | 11640 | const char *fmt = GET_RTX_FORMAT (GET_CODE (x)); |
230d793d RS |
11641 | int len = GET_RTX_LENGTH (GET_CODE (x)); |
11642 | int i; | |
11643 | ||
11644 | if (GET_CODE (x) == REG) | |
11645 | { | |
770ae6cc RK |
11646 | unsigned int regno = REGNO (x); |
11647 | unsigned int endregno | |
11648 | = regno + (regno < FIRST_PSEUDO_REGISTER | |
11649 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); | |
11650 | unsigned int j; | |
230d793d RS |
11651 | |
11652 | for (j = regno; j < endregno; j++) | |
11653 | if (reg_last_set_invalid[j] | |
57cf50a4 GRK |
11654 | /* If this is a pseudo-register that was only set once and not |
11655 | live at the beginning of the function, it is always valid. */ | |
663522cb | 11656 | || (! (regno >= FIRST_PSEUDO_REGISTER |
57cf50a4 | 11657 | && REG_N_SETS (regno) == 1 |
770ae6cc | 11658 | && (! REGNO_REG_SET_P |
f6366fc7 | 11659 | (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno))) |
230d793d RS |
11660 | && reg_last_set_label[j] > tick)) |
11661 | { | |
11662 | if (replace) | |
38a448ca | 11663 | *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
230d793d RS |
11664 | return replace; |
11665 | } | |
11666 | ||
11667 | return 1; | |
11668 | } | |
9a893315 JW |
11669 | /* If this is a memory reference, make sure that there were |
11670 | no stores after it that might have clobbered the value. We don't | |
11671 | have alias info, so we assume any store invalidates it. */ | |
11672 | else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x) | |
11673 | && INSN_CUID (insn) <= mem_last_set) | |
11674 | { | |
11675 | if (replace) | |
38a448ca | 11676 | *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx); |
9a893315 JW |
11677 | return replace; |
11678 | } | |
230d793d RS |
11679 | |
11680 | for (i = 0; i < len; i++) | |
11681 | if ((fmt[i] == 'e' | |
9a893315 | 11682 | && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0) |
230d793d RS |
11683 | /* Don't bother with these. They shouldn't occur anyway. */ |
11684 | || fmt[i] == 'E') | |
11685 | return 0; | |
11686 | ||
11687 | /* If we haven't found a reason for it to be invalid, it is valid. */ | |
11688 | return 1; | |
11689 | } | |
11690 | ||
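/* Editorial example of the REPLACE behavior: suppose the recorded
   value for some register was

       (ashiftrt:SI (reg:SI 101) (const_int 31))

   and (reg:SI 101) has since been set again.  With REPLACE nonzero
   the stale reference is rewritten, giving

       (ashiftrt:SI (clobber:SI (const_int 0)) (const_int 31))

   which still shows that the value must be 0 or -1, even though the
   operand itself is no longer known.  Register 101 is hypothetical.  */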
11691 | /* Get the last value assigned to X, if known. Some registers | |
11692 | in the value may be replaced with (clobber (const_int 0)) if their value | |
11693 | is no longer known reliably. */ | |
11694 | ||
11695 | static rtx | |
11696 | get_last_value (x) | |
11697 | rtx x; | |
11698 | { | |
770ae6cc | 11699 | unsigned int regno; |
230d793d RS |
11700 | rtx value; |
11701 | ||
11702 | /* If this is a non-paradoxical SUBREG, get the value of its operand and | |
11703 | then convert it to the desired mode. If this is a paradoxical SUBREG, | |
0f41302f | 11704 | we cannot predict what values the "extra" bits might have. */ |
230d793d RS |
11705 | if (GET_CODE (x) == SUBREG |
11706 | && subreg_lowpart_p (x) | |
11707 | && (GET_MODE_SIZE (GET_MODE (x)) | |
11708 | <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))) | |
11709 | && (value = get_last_value (SUBREG_REG (x))) != 0) | |
11710 | return gen_lowpart_for_combine (GET_MODE (x), value); | |
11711 | ||
11712 | if (GET_CODE (x) != REG) | |
11713 | return 0; | |
11714 | ||
11715 | regno = REGNO (x); | |
11716 | value = reg_last_set_value[regno]; | |
11717 | ||
57cf50a4 GRK |
11718 | /* If we don't have a value, or if it isn't for this basic block and |
11719 | it's either a hard register, set more than once, or it's live | |
663522cb | 11720 | at the beginning of the function, return 0. |
57cf50a4 | 11721 | |
eaec9b3d | 11722 | Because if it's not live at the beginning of the function then the reg |
57cf50a4 GRK |
11723 | is always set before being used (is never used without being set). |
11724 | And, if it's set only once, and it's always set before use, then all | |
11725 | uses must have the same last value, even if it's not from this basic | |
11726 | block. */ | |
230d793d RS |
11727 | |
11728 | if (value == 0 | |
57cf50a4 GRK |
11729 | || (reg_last_set_label[regno] != label_tick |
11730 | && (regno < FIRST_PSEUDO_REGISTER | |
11731 | || REG_N_SETS (regno) != 1 | |
770ae6cc | 11732 | || (REGNO_REG_SET_P |
f6366fc7 | 11733 | (ENTRY_BLOCK_PTR->next_bb->global_live_at_start, regno))))) |
230d793d RS |
11734 | return 0; |
11735 | ||
4255220d | 11736 | /* If the value was set in a later insn than the ones we are processing, |
ca4cd906 | 11737 | we can't use it even if the register was only set once. */ |
bcd49eb7 | 11738 | if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid) |
ca4cd906 | 11739 | return 0; |
d0ab8cd3 RK |
11740 | |
11741 | /* If the value has all its registers valid, return it. */ | |
9a893315 JW |
11742 | if (get_last_value_validate (&value, reg_last_set[regno], |
11743 | reg_last_set_label[regno], 0)) | |
230d793d RS |
11744 | return value; |
11745 | ||
11746 | /* Otherwise, make a copy and replace any invalid register with | |
11747 | (clobber (const_int 0)). If that fails for some reason, return 0. */ | |
11748 | ||
11749 | value = copy_rtx (value); | |
9a893315 JW |
11750 | if (get_last_value_validate (&value, reg_last_set[regno], |
11751 | reg_last_set_label[regno], 1)) | |
230d793d RS |
11752 | return value; |
11753 | ||
11754 | return 0; | |
11755 | } | |
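/* Editorial sketch of a typical call (X is assumed to be a REG seen
   during simplification):  */
#if 0
{
  rtx val = get_last_value (x);

  if (val != 0 && GET_CODE (val) == CONST_INT)
    /* X is known to hold this constant here; a caller could
       substitute VAL for X and try to simplify further.  */
    x = val;
}
#endif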
11756 | \f | |
11757 | /* Return nonzero if expression X refers to a REG or to memory | |
11758 | that is set in an instruction more recent than FROM_CUID. */ | |
11759 | ||
11760 | static int | |
11761 | use_crosses_set_p (x, from_cuid) | |
b3694847 | 11762 | rtx x; |
230d793d RS |
11763 | int from_cuid; |
11764 | { | |
b3694847 SS |
11765 | const char *fmt; |
11766 | int i; | |
11767 | enum rtx_code code = GET_CODE (x); | |
230d793d RS |
11768 | |
11769 | if (code == REG) | |
11770 | { | |
770ae6cc RK |
11771 | unsigned int regno = REGNO (x); |
11772 | unsigned endreg = regno + (regno < FIRST_PSEUDO_REGISTER | |
663522cb KH |
11773 | ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1); |
11774 | ||
230d793d RS |
11775 | #ifdef PUSH_ROUNDING |
11776 | /* Don't allow uses of the stack pointer to be moved, | |
11777 | because we don't know whether the move crosses a push insn. */ | |
f73ad30e | 11778 | if (regno == STACK_POINTER_REGNUM && PUSH_ARGS) |
230d793d RS |
11779 | return 1; |
11780 | #endif | |
770ae6cc | 11781 | for (; regno < endreg; regno++) |
e28f5732 RK |
11782 | if (reg_last_set[regno] |
11783 | && INSN_CUID (reg_last_set[regno]) > from_cuid) | |
11784 | return 1; | |
11785 | return 0; | |
230d793d RS |
11786 | } |
11787 | ||
11788 | if (code == MEM && mem_last_set > from_cuid) | |
11789 | return 1; | |
11790 | ||
11791 | fmt = GET_RTX_FORMAT (code); | |
11792 | ||
11793 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
11794 | { | |
11795 | if (fmt[i] == 'E') | |
11796 | { | |
b3694847 | 11797 | int j; |
230d793d RS |
11798 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
11799 | if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid)) | |
11800 | return 1; | |
11801 | } | |
11802 | else if (fmt[i] == 'e' | |
11803 | && use_crosses_set_p (XEXP (x, i), from_cuid)) | |
11804 | return 1; | |
11805 | } | |
11806 | return 0; | |
11807 | } | |
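/* Editorial example (hypothetical insns and registers): given

       insn with cuid 10:  (set (reg:SI 100) (reg:SI 101))
       insn with cuid 11:  (set (reg:SI 101) ...)

   use_crosses_set_p on (reg:SI 101) with FROM_CUID 10 returns 1,
   because moving the use of register 101 below cuid 11 would read the
   new value rather than the old one.  */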
11808 | \f | |
11809 | /* Define three variables used for communication between the following | |
11810 | routines. */ | |
11811 | ||
770ae6cc | 11812 | static unsigned int reg_dead_regno, reg_dead_endregno; |
230d793d RS |
11813 | static int reg_dead_flag; |
11814 | ||
11815 | /* Function called via note_stores from reg_dead_at_p. | |
11816 | ||
663522cb | 11817 | If DEST is within [reg_dead_regno, reg_dead_endregno), set |
230d793d RS |
11818 | reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */ |
11819 | ||
11820 | static void | |
84832317 | 11821 | reg_dead_at_p_1 (dest, x, data) |
230d793d RS |
11822 | rtx dest; |
11823 | rtx x; | |
84832317 | 11824 | void *data ATTRIBUTE_UNUSED; |
230d793d | 11825 | { |
770ae6cc | 11826 | unsigned int regno, endregno; |
230d793d RS |
11827 | |
11828 | if (GET_CODE (dest) != REG) | |
11829 | return; | |
11830 | ||
11831 | regno = REGNO (dest); | |
663522cb | 11832 | endregno = regno + (regno < FIRST_PSEUDO_REGISTER |
230d793d RS |
11833 | ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1); |
11834 | ||
11835 | if (reg_dead_endregno > regno && reg_dead_regno < endregno) | |
11836 | reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1; | |
11837 | } | |
11838 | ||
da7d8304 | 11839 | /* Return nonzero if REG is known to be dead at INSN. |
230d793d RS |
11840 | |
11841 | We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER | |
11842 | referencing REG, it is dead. If we hit a SET referencing REG, it is | |
11843 | live. Otherwise, see if it is live or dead at the start of the basic | |
6e25d159 RK |
11844 | block we are in. Hard regs marked as being live in NEWPAT_USED_REGS |
11845 | must be assumed to be always live. */ | |
230d793d RS |
11846 | |
11847 | static int | |
11848 | reg_dead_at_p (reg, insn) | |
11849 | rtx reg; | |
11850 | rtx insn; | |
11851 | { | |
e0082a72 | 11852 | basic_block block; |
770ae6cc | 11853 | unsigned int i; |
230d793d RS |
11854 | |
11855 | /* Set variables for reg_dead_at_p_1. */ | |
11856 | reg_dead_regno = REGNO (reg); | |
11857 | reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER | |
11858 | ? HARD_REGNO_NREGS (reg_dead_regno, | |
11859 | GET_MODE (reg)) | |
11860 | : 1); | |
11861 | ||
11862 | reg_dead_flag = 0; | |
11863 | ||
6e25d159 RK |
11864 | /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */ |
11865 | if (reg_dead_regno < FIRST_PSEUDO_REGISTER) | |
11866 | { | |
11867 | for (i = reg_dead_regno; i < reg_dead_endregno; i++) | |
11868 | if (TEST_HARD_REG_BIT (newpat_used_regs, i)) | |
11869 | return 0; | |
11870 | } | |
11871 | ||
230d793d RS |
11872 | /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or |
11873 | beginning of function. */ | |
60715d0b | 11874 | for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER; |
230d793d RS |
11875 | insn = prev_nonnote_insn (insn)) |
11876 | { | |
84832317 | 11877 | note_stores (PATTERN (insn), reg_dead_at_p_1, NULL); |
230d793d RS |
11878 | if (reg_dead_flag) |
11879 | return reg_dead_flag == 1 ? 1 : 0; | |
11880 | ||
11881 | if (find_regno_note (insn, REG_DEAD, reg_dead_regno)) | |
11882 | return 1; | |
11883 | } | |
11884 | ||
e0082a72 | 11885 | /* Get the basic block that we were in. */ |
230d793d | 11886 | if (insn == 0) |
e0082a72 | 11887 | block = ENTRY_BLOCK_PTR->next_bb; |
230d793d RS |
11888 | else |
11889 | { | |
e0082a72 ZD |
11890 | FOR_EACH_BB (block) |
11891 | if (insn == block->head) | |
230d793d RS |
11892 | break; |
11893 | ||
e0082a72 | 11894 | if (block == EXIT_BLOCK_PTR) |
230d793d RS |
11895 | return 0; |
11896 | } | |
11897 | ||
11898 | for (i = reg_dead_regno; i < reg_dead_endregno; i++) | |
e0082a72 | 11899 | if (REGNO_REG_SET_P (block->global_live_at_start, i)) |
230d793d RS |
11900 | return 0; |
11901 | ||
11902 | return 1; | |
11903 | } | |
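/* Editorial sketch: a caller that wants to reuse a hard register
   could guard the transformation roughly as follows (REG and INSN are
   assumed to be in scope):  */
#if 0
if (reg_dead_at_p (reg, insn))
  /* No SET or live use was found scanning backwards, and the register
     is not live at the start of the block, so clobbering it here is
     safe.  */
  emit_insn_before (gen_rtx_CLOBBER (GET_MODE (reg), reg), insn);
#endif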
6e25d159 RK |
11904 | \f |
11905 | /* Note hard registers in X that are used. This code is similar to | |
11906 | that in flow.c, but much simpler since we don't care about pseudos. */ | |
11907 | ||
11908 | static void | |
11909 | mark_used_regs_combine (x) | |
11910 | rtx x; | |
11911 | { | |
770ae6cc RK |
11912 | RTX_CODE code = GET_CODE (x); |
11913 | unsigned int regno; | |
6e25d159 RK |
11914 | int i; |
11915 | ||
11916 | switch (code) | |
11917 | { | |
11918 | case LABEL_REF: | |
11919 | case SYMBOL_REF: | |
11920 | case CONST_INT: | |
11921 | case CONST: | |
11922 | case CONST_DOUBLE: | |
69ef87e2 | 11923 | case CONST_VECTOR: |
6e25d159 RK |
11924 | case PC: |
11925 | case ADDR_VEC: | |
11926 | case ADDR_DIFF_VEC: | |
11927 | case ASM_INPUT: | |
11928 | #ifdef HAVE_cc0 | |
11929 | /* CC0 must die in the insn after it is set, so we don't need to take | |
11930 | special note of it here. */ | |
11931 | case CC0: | |
11932 | #endif | |
11933 | return; | |
11934 | ||
11935 | case CLOBBER: | |
11936 | /* If we are clobbering a MEM, mark any hard registers inside the | |
11937 | address as used. */ | |
11938 | if (GET_CODE (XEXP (x, 0)) == MEM) | |
11939 | mark_used_regs_combine (XEXP (XEXP (x, 0), 0)); | |
11940 | return; | |
11941 | ||
11942 | case REG: | |
11943 | regno = REGNO (x); | |
11944 | /* A hard reg in a wide mode may really be multiple registers. | |
11945 | If so, mark all of them just like the first. */ | |
11946 | if (regno < FIRST_PSEUDO_REGISTER) | |
11947 | { | |
770ae6cc RK |
11948 | unsigned int endregno, r; |
11949 | ||
3eae4643 | 11950 | /* None of this applies to the stack, frame or arg pointers. */ |
6e25d159 RK |
11951 | if (regno == STACK_POINTER_REGNUM |
11952 | #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM | |
11953 | || regno == HARD_FRAME_POINTER_REGNUM | |
11954 | #endif | |
11955 | #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM | |
11956 | || (regno == ARG_POINTER_REGNUM && fixed_regs[regno]) | |
11957 | #endif | |
11958 | || regno == FRAME_POINTER_REGNUM) | |
11959 | return; | |
11960 | ||
770ae6cc RK |
11961 | endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); |
11962 | for (r = regno; r < endregno; r++) | |
11963 | SET_HARD_REG_BIT (newpat_used_regs, r); | |
6e25d159 RK |
11964 | } |
11965 | return; | |
11966 | ||
11967 | case SET: | |
11968 | { | |
11969 | /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in | |
11970 | the address. */ | |
b3694847 | 11971 | rtx testreg = SET_DEST (x); |
6e25d159 | 11972 | |
e048778f RK |
11973 | while (GET_CODE (testreg) == SUBREG |
11974 | || GET_CODE (testreg) == ZERO_EXTRACT | |
11975 | || GET_CODE (testreg) == SIGN_EXTRACT | |
11976 | || GET_CODE (testreg) == STRICT_LOW_PART) | |
6e25d159 RK |
11977 | testreg = XEXP (testreg, 0); |
11978 | ||
11979 | if (GET_CODE (testreg) == MEM) | |
11980 | mark_used_regs_combine (XEXP (testreg, 0)); | |
11981 | ||
11982 | mark_used_regs_combine (SET_SRC (x)); | |
6e25d159 | 11983 | } |
e9a25f70 JL |
11984 | return; |
11985 | ||
11986 | default: | |
11987 | break; | |
6e25d159 RK |
11988 | } |
11989 | ||
11990 | /* Recursively scan the operands of this expression. */ | |
11991 | ||
11992 | { | |
b3694847 | 11993 | const char *fmt = GET_RTX_FORMAT (code); |
6e25d159 RK |
11994 | |
11995 | for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
11996 | { | |
663522cb | 11997 | if (fmt[i] == 'e') |
6e25d159 | 11998 | mark_used_regs_combine (XEXP (x, i)); |
663522cb KH |
11999 | else if (fmt[i] == 'E') |
12000 | { | |
b3694847 | 12001 | int j; |
6e25d159 | 12002 | |
663522cb KH |
12003 | for (j = 0; j < XVECLEN (x, i); j++) |
12004 | mark_used_regs_combine (XVECEXP (x, i, j)); | |
12005 | } | |
6e25d159 RK |
12006 | } |
12007 | } | |
12008 | } | |
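/* Editorial note on the 'e'/'E' dispatch above: GET_RTX_FORMAT yields
   one letter per operand -- 'e' for a sub-expression, 'E' (or 'V')
   for a vector of sub-expressions; other letters denote integers or
   strings and need no walk.  For a PLUS the format is "ee", so both
   XEXP (x, 0) and XEXP (x, 1) are visited.  */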
230d793d RS |
12009 | \f |
12010 | /* Remove register number REGNO from the dead registers list of INSN. | |
12011 | ||
12012 | Return the note used to record the death, if there was one. */ | |
12013 | ||
12014 | rtx | |
12015 | remove_death (regno, insn) | |
770ae6cc | 12016 | unsigned int regno; |
230d793d RS |
12017 | rtx insn; |
12018 | { | |
b3694847 | 12019 | rtx note = find_regno_note (insn, REG_DEAD, regno); |
230d793d RS |
12020 | |
12021 | if (note) | |
1a26b032 | 12022 | { |
b1f21e0a | 12023 | REG_N_DEATHS (regno)--; |
1a26b032 RK |
12024 | remove_note (insn, note); |
12025 | } | |
230d793d RS |
12026 | |
12027 | return note; | |
12028 | } | |
12029 | ||
12030 | /* For each register (hardware or pseudo) used within expression X, if its | |
12031 | death is in an instruction with cuid between FROM_CUID (inclusive) and | |
12032 | TO_INSN (exclusive), put a REG_DEAD note for that register in the | |
663522cb | 12033 | list headed by PNOTES. |
230d793d | 12034 | |
6eb12cef RK |
12035 | That said, don't move registers killed by maybe_kill_insn. |
12036 | ||
230d793d RS |
12037 | This is done when X is being merged by combination into TO_INSN. These |
12038 | notes will then be distributed as needed. */ | |
12039 | ||
12040 | static void | |
6eb12cef | 12041 | move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes) |
230d793d | 12042 | rtx x; |
6eb12cef | 12043 | rtx maybe_kill_insn; |
230d793d RS |
12044 | int from_cuid; |
12045 | rtx to_insn; | |
12046 | rtx *pnotes; | |
12047 | { | |
b3694847 SS |
12048 | const char *fmt; |
12049 | int len, i; | |
12050 | enum rtx_code code = GET_CODE (x); | |
230d793d RS |
12051 | |
12052 | if (code == REG) | |
12053 | { | |
770ae6cc | 12054 | unsigned int regno = REGNO (x); |
b3694847 SS |
12055 | rtx where_dead = reg_last_death[regno]; |
12056 | rtx before_dead, after_dead; | |
e340018d | 12057 | |
3eae4643 | 12058 | /* Don't move the register if it gets killed in between from and to. */ |
6eb12cef | 12059 | if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn) |
770ae6cc | 12060 | && ! reg_referenced_p (x, maybe_kill_insn)) |
6eb12cef RK |
12061 | return; |
12062 | ||
e340018d JW |
12063 | /* WHERE_DEAD could be a USE insn made by combine, so first we |
12064 | make sure that we have insns with valid INSN_CUID values. */ | |
12065 | before_dead = where_dead; | |
12066 | while (before_dead && INSN_UID (before_dead) > max_uid_cuid) | |
12067 | before_dead = PREV_INSN (before_dead); | |
770ae6cc | 12068 | |
e340018d JW |
12069 | after_dead = where_dead; |
12070 | while (after_dead && INSN_UID (after_dead) > max_uid_cuid) | |
12071 | after_dead = NEXT_INSN (after_dead); | |
12072 | ||
12073 | if (before_dead && after_dead | |
12074 | && INSN_CUID (before_dead) >= from_cuid | |
12075 | && (INSN_CUID (after_dead) < INSN_CUID (to_insn) | |
12076 | || (where_dead != after_dead | |
12077 | && INSN_CUID (after_dead) == INSN_CUID (to_insn)))) | |
230d793d | 12078 | { |
dbc131f3 | 12079 | rtx note = remove_death (regno, where_dead); |
230d793d RS |
12080 | |
12081 | /* It is possible for the call above to return 0. This can occur | |
12082 | when reg_last_death points to I2 or I1 that we combined with. | |
dbc131f3 RK |
12083 | In that case make a new note. |
12084 | ||
12085 | We must also check for the case where X is a hard register | |
12086 | and NOTE is a death note for a range of hard registers | |
12087 | including X. In that case, we must put REG_DEAD notes for | |
12088 | the remaining registers in place of NOTE. */ | |
12089 | ||
12090 | if (note != 0 && regno < FIRST_PSEUDO_REGISTER | |
12091 | && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0))) | |
24e46fc4 | 12092 | > GET_MODE_SIZE (GET_MODE (x)))) |
dbc131f3 | 12093 | { |
770ae6cc RK |
12094 | unsigned int deadregno = REGNO (XEXP (note, 0)); |
12095 | unsigned int deadend | |
dbc131f3 RK |
12096 | = (deadregno + HARD_REGNO_NREGS (deadregno, |
12097 | GET_MODE (XEXP (note, 0)))); | |
770ae6cc RK |
12098 | unsigned int ourend |
12099 | = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
12100 | unsigned int i; | |
dbc131f3 RK |
12101 | |
12102 | for (i = deadregno; i < deadend; i++) | |
12103 | if (i < regno || i >= ourend) | |
12104 | REG_NOTES (where_dead) | |
38a448ca | 12105 | = gen_rtx_EXPR_LIST (REG_DEAD, |
e50126e8 | 12106 | regno_reg_rtx[i], |
38a448ca | 12107 | REG_NOTES (where_dead)); |
dbc131f3 | 12108 | } |
770ae6cc | 12109 | |
24e46fc4 JW |
12110 | /* If we didn't find any note, or if we found a REG_DEAD note that |
12111 | covers only part of the given reg, and we have a multi-reg hard | |
fabd69e8 RK |
12112 | register, then to be safe we must check for REG_DEAD notes |
12113 | for each register other than the first. They could have | |
12114 | their own REG_DEAD notes lying around. */ | |
24e46fc4 JW |
12115 | else if ((note == 0 |
12116 | || (note != 0 | |
12117 | && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0))) | |
12118 | < GET_MODE_SIZE (GET_MODE (x))))) | |
12119 | && regno < FIRST_PSEUDO_REGISTER | |
fabd69e8 RK |
12120 | && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1) |
12121 | { | |
770ae6cc RK |
12122 | unsigned int ourend |
12123 | = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
12124 | unsigned int i, offset; | |
fabd69e8 RK |
12125 | rtx oldnotes = 0; |
12126 | ||
24e46fc4 JW |
12127 | if (note) |
12128 | offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))); | |
12129 | else | |
12130 | offset = 1; | |
12131 | ||
12132 | for (i = regno + offset; i < ourend; i++) | |
e50126e8 | 12133 | move_deaths (regno_reg_rtx[i], |
6eb12cef | 12134 | maybe_kill_insn, from_cuid, to_insn, &oldnotes); |
fabd69e8 | 12135 | } |
230d793d | 12136 | |
dbc131f3 | 12137 | if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x)) |
230d793d RS |
12138 | { |
12139 | XEXP (note, 1) = *pnotes; | |
12140 | *pnotes = note; | |
12141 | } | |
12142 | else | |
38a448ca | 12143 | *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes); |
1a26b032 | 12144 | |
b1f21e0a | 12145 | REG_N_DEATHS (regno)++; |
230d793d RS |
12146 | } |
12147 | ||
12148 | return; | |
12149 | } | |
12150 | ||
12151 | else if (GET_CODE (x) == SET) | |
12152 | { | |
12153 | rtx dest = SET_DEST (x); | |
12154 | ||
6eb12cef | 12155 | move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes); |
230d793d | 12156 | |
a7c99304 RK |
12157 | /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG |
12158 | that accesses one word of a multi-word item, some | |
12159 | piece of every register in the expression is used by | |
12160 | this insn, so remove any old death. */ | |
ddef6bc7 | 12161 | /* ??? So why do we test for equality of the sizes? */ |
a7c99304 RK |
12162 | |
12163 | if (GET_CODE (dest) == ZERO_EXTRACT | |
12164 | || GET_CODE (dest) == STRICT_LOW_PART | |
12165 | || (GET_CODE (dest) == SUBREG | |
12166 | && (((GET_MODE_SIZE (GET_MODE (dest)) | |
12167 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD) | |
12168 | == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) | |
12169 | + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))) | |
230d793d | 12170 | { |
6eb12cef | 12171 | move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes); |
a7c99304 | 12172 | return; |
230d793d RS |
12173 | } |
12174 | ||
a7c99304 RK |
12175 | /* If this is some other SUBREG, we know it replaces the entire |
12176 | value, so use that as the destination. */ | |
12177 | if (GET_CODE (dest) == SUBREG) | |
12178 | dest = SUBREG_REG (dest); | |
12179 | ||
12180 | /* If this is a MEM, adjust deaths of anything used in the address. | |
12181 | For a REG (the only other possibility), the entire value is | |
12182 | being replaced so the old value is not used in this insn. */ | |
230d793d RS |
12183 | |
12184 | if (GET_CODE (dest) == MEM) | |
6eb12cef RK |
12185 | move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid, |
12186 | to_insn, pnotes); | |
230d793d RS |
12187 | return; |
12188 | } | |
12189 | ||
12190 | else if (GET_CODE (x) == CLOBBER) | |
12191 | return; | |
12192 | ||
12193 | len = GET_RTX_LENGTH (code); | |
12194 | fmt = GET_RTX_FORMAT (code); | |
12195 | ||
12196 | for (i = 0; i < len; i++) | |
12197 | { | |
12198 | if (fmt[i] == 'E') | |
12199 | { | |
b3694847 | 12200 | int j; |
230d793d | 12201 | for (j = XVECLEN (x, i) - 1; j >= 0; j--) |
6eb12cef RK |
12202 | move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid, |
12203 | to_insn, pnotes); | |
230d793d RS |
12204 | } |
12205 | else if (fmt[i] == 'e') | |
6eb12cef | 12206 | move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes); |
230d793d RS |
12207 | } |
12208 | } | |
12209 | \f | |
a7c99304 RK |
12210 | /* Return 1 if X is the target of a bit-field assignment in BODY, the |
12211 | pattern of an insn. X must be a REG. */ | |
230d793d RS |
12212 | |
12213 | static int | |
a7c99304 RK |
12214 | reg_bitfield_target_p (x, body) |
12215 | rtx x; | |
230d793d RS |
12216 | rtx body; |
12217 | { | |
12218 | int i; | |
12219 | ||
12220 | if (GET_CODE (body) == SET) | |
a7c99304 RK |
12221 | { |
12222 | rtx dest = SET_DEST (body); | |
12223 | rtx target; | |
770ae6cc | 12224 | unsigned int regno, tregno, endregno, endtregno; |
a7c99304 RK |
12225 | |
12226 | if (GET_CODE (dest) == ZERO_EXTRACT) | |
12227 | target = XEXP (dest, 0); | |
12228 | else if (GET_CODE (dest) == STRICT_LOW_PART) | |
12229 | target = SUBREG_REG (XEXP (dest, 0)); | |
12230 | else | |
12231 | return 0; | |
12232 | ||
12233 | if (GET_CODE (target) == SUBREG) | |
12234 | target = SUBREG_REG (target); | |
12235 | ||
12236 | if (GET_CODE (target) != REG) | |
12237 | return 0; | |
12238 | ||
12239 | tregno = REGNO (target), regno = REGNO (x); | |
12240 | if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER) | |
12241 | return target == x; | |
12242 | ||
12243 | endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target)); | |
12244 | endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x)); | |
12245 | ||
12246 | return endregno > tregno && regno < endtregno; | |
12247 | } | |
230d793d RS |
12248 | |
12249 | else if (GET_CODE (body) == PARALLEL) | |
12250 | for (i = XVECLEN (body, 0) - 1; i >= 0; i--) | |
a7c99304 | 12251 | if (reg_bitfield_target_p (x, XVECEXP (body, 0, i))) |
230d793d RS |
12252 | return 1; |
12253 | ||
12254 | return 0; | |
663522cb | 12255 | } |
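/* Editorial example (hypothetical register numbers): for the body

       (set (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 0))
            (reg:SI 101))

   reg_bitfield_target_p ((reg:SI 100), body) returns 1: only a
   bit-field of register 100 is written and the rest of its old value
   survives, so a death note for it must not be placed here.  */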
230d793d RS |
12256 | \f |
12257 | /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them | |
12258 | as appropriate. I3 and I2 are the insns resulting from the combination | |
12259 | insns including FROM (I2 may be zero). | |
12260 | ||
12261 | ELIM_I2 and ELIM_I1 are either zero or registers that we know will | |
12262 | not need REG_DEAD notes because they are being substituted for. This | |
12263 | saves searching in the most common cases. | |
12264 | ||
12265 | Each note in the list is either ignored or placed on some insns, depending | |
12266 | on the type of note. */ | |
12267 | ||
12268 | static void | |
12269 | distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1) | |
12270 | rtx notes; | |
12271 | rtx from_insn; | |
12272 | rtx i3, i2; | |
12273 | rtx elim_i2, elim_i1; | |
12274 | { | |
12275 | rtx note, next_note; | |
12276 | rtx tem; | |
12277 | ||
12278 | for (note = notes; note; note = next_note) | |
12279 | { | |
12280 | rtx place = 0, place2 = 0; | |
12281 | ||
12282 | /* If this NOTE references a pseudo register, ensure it references | |
12283 | the latest copy of that register. */ | |
12284 | if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG | |
12285 | && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER) | |
12286 | XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))]; | |
12287 | ||
12288 | next_note = XEXP (note, 1); | |
12289 | switch (REG_NOTE_KIND (note)) | |
12290 | { | |
c9903b44 | 12291 | case REG_BR_PROB: |
4db384c9 | 12292 | case REG_BR_PRED: |
c9903b44 DE |
12293 | case REG_EXEC_COUNT: |
12294 | /* Doesn't matter much where we put this, as long as it's somewhere. | |
12295 | It is preferable to keep these notes on branches, which is most | |
12296 | likely to be i3. */ | |
4a8d0c9c RH |
12297 | place = i3; |
12298 | break; | |
12299 | ||
12300 | case REG_VTABLE_REF: | |
12301 | /* ??? Should remain with *a particular* memory load. Given the | |
12302 | nature of vtable data, the last insn seems relatively safe. */ | |
c9903b44 DE |
12303 | place = i3; |
12304 | break; | |
12305 | ||
f7cfa78d GS |
12306 | case REG_NON_LOCAL_GOTO: |
12307 | if (GET_CODE (i3) == JUMP_INSN) | |
12308 | place = i3; | |
12309 | else if (i2 && GET_CODE (i2) == JUMP_INSN) | |
12310 | place = i2; | |
12311 | else | |
505ddab6 | 12312 | abort (); |
f7cfa78d GS |
12313 | break; |
12314 | ||
4b7c585f | 12315 | case REG_EH_REGION: |
662795a8 RH |
12316 | /* These notes must remain with the call or trapping instruction. */ |
12317 | if (GET_CODE (i3) == CALL_INSN) | |
12318 | place = i3; | |
12319 | else if (i2 && GET_CODE (i2) == CALL_INSN) | |
12320 | place = i2; | |
12321 | else if (flag_non_call_exceptions) | |
12322 | { | |
12323 | if (may_trap_p (i3)) | |
12324 | place = i3; | |
12325 | else if (i2 && may_trap_p (i2)) | |
12326 | place = i2; | |
12327 | /* ??? Otherwise assume we've combined things such that we | |
12328 | can now prove that the instructions can't trap. Drop the | |
12329 | note in this case. */ | |
12330 | } | |
12331 | else | |
12332 | abort (); | |
12333 | break; | |
12334 | ||
ca3920ad | 12335 | case REG_NORETURN: |
ab61c93f | 12336 | case REG_SETJMP: |
0e403ec3 AS |
12337 | /* These notes must remain with the call. It should not be |
12338 | possible for both I2 and I3 to be a call. */ | |
663522cb | 12339 | if (GET_CODE (i3) == CALL_INSN) |
4b7c585f JL |
12340 | place = i3; |
12341 | else if (i2 && GET_CODE (i2) == CALL_INSN) | |
12342 | place = i2; | |
12343 | else | |
12344 | abort (); | |
12345 | break; | |
12346 | ||
230d793d | 12347 | case REG_UNUSED: |
07d0cbdd | 12348 | /* Any clobbers for i3 may still exist, and so we must process |
176c9e6b JW |
12349 | REG_UNUSED notes from that insn. |
12350 | ||
12351 | Any clobbers from i2 or i1 can only exist if they were added by | |
12352 | recog_for_combine. In that case, recog_for_combine created the | |
12353 | necessary REG_UNUSED notes. Trying to keep any original | |
12354 | REG_UNUSED notes from these insns can cause incorrect output | |
12355 | if it is for the same register as the original i3 dest. | |
12356 | In that case, we will notice that the register is set in i3, | |
12357 | and then add a REG_UNUSED note for the destination of i3, which | |
07d0cbdd JW |
12358 | is wrong. However, it is possible to have REG_UNUSED notes from |
12359 | i2 or i1 for register which were both used and clobbered, so | |
12360 | we keep notes from i2 or i1 if they will turn into REG_DEAD | |
12361 | notes. */ | |
176c9e6b | 12362 | |
230d793d RS |
12363 | /* If this register is set or clobbered in I3, put the note there |
12364 | unless there is one already. */ | |
07d0cbdd | 12365 | if (reg_set_p (XEXP (note, 0), PATTERN (i3))) |
230d793d | 12366 | { |
07d0cbdd JW |
12367 | if (from_insn != i3) |
12368 | break; | |
12369 | ||
230d793d RS |
12370 | if (! (GET_CODE (XEXP (note, 0)) == REG |
12371 | ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0))) | |
12372 | : find_reg_note (i3, REG_UNUSED, XEXP (note, 0)))) | |
12373 | place = i3; | |
12374 | } | |
12375 | /* Otherwise, if this register is used by I3, then this register | |
12376 | now dies here, so we must put a REG_DEAD note here unless there | |
12377 | is one already. */ | |
12378 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)) | |
12379 | && ! (GET_CODE (XEXP (note, 0)) == REG | |
770ae6cc RK |
12380 | ? find_regno_note (i3, REG_DEAD, |
12381 | REGNO (XEXP (note, 0))) | |
230d793d RS |
12382 | : find_reg_note (i3, REG_DEAD, XEXP (note, 0)))) |
12383 | { | |
12384 | PUT_REG_NOTE_KIND (note, REG_DEAD); | |
12385 | place = i3; | |
12386 | } | |
12387 | break; | |
12388 | ||
12389 | case REG_EQUAL: | |
12390 | case REG_EQUIV: | |
9ae8ffe7 | 12391 | case REG_NOALIAS: |
230d793d RS |
12392 | /* These notes say something about results of an insn. We can |
12393 | only support them if they used to be on I3 in which case they | |
a687e897 RK |
12394 | remain on I3. Otherwise they are ignored. |
12395 | ||
12396 | If the note refers to an expression that is not a constant, we | |
12397 | must also ignore the note since we cannot tell whether the | |
12398 | equivalence is still true. It might be possible to do | |
12399 | slightly better than this (we only have a problem if I2DEST | |
12400 | or I1DEST is present in the expression), but it doesn't | |
12401 | seem worth the trouble. */ | |
12402 | ||
12403 | if (from_insn == i3 | |
12404 | && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0)))) | |
230d793d RS |
12405 | place = i3; |
12406 | break; | |
12407 | ||
12408 | case REG_INC: | |
12409 | case REG_NO_CONFLICT: | |
230d793d RS |
12410 | /* These notes say something about how a register is used. They must |
12411 | be present on any use of the register in I2 or I3. */ | |
12412 | if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))) | |
12413 | place = i3; | |
12414 | ||
12415 | if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2))) | |
12416 | { | |
12417 | if (place) | |
12418 | place2 = i2; | |
12419 | else | |
12420 | place = i2; | |
12421 | } | |
12422 | break; | |
12423 | ||
e55b4486 RH |
12424 | case REG_LABEL: |
12425 | /* This can show up in several ways -- either directly in the | |
12426 | pattern, or hidden off in the constant pool with (or without?) | |
12427 | a REG_EQUAL note. */ | |
12428 | /* ??? Ignore the without-reg_equal-note problem for now. */ | |
12429 | if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)) | |
12430 | || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX)) | |
12431 | && GET_CODE (XEXP (tem, 0)) == LABEL_REF | |
12432 | && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))) | |
12433 | place = i3; | |
12434 | ||
12435 | if (i2 | |
12436 | && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2)) | |
663522cb | 12437 | || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX)) |
e55b4486 RH |
12438 | && GET_CODE (XEXP (tem, 0)) == LABEL_REF |
12439 | && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))) | |
12440 | { | |
12441 | if (place) | |
12442 | place2 = i2; | |
12443 | else | |
12444 | place = i2; | |
12445 | } | |
2a3b43b6 JJ |
12446 | |
12447 | /* Don't attach REG_LABEL note to a JUMP_INSN which has | |
12448 | JUMP_LABEL already. Instead, decrement LABEL_NUSES. */ | |
12449 | if (place && GET_CODE (place) == JUMP_INSN && JUMP_LABEL (place)) | |
12450 | { | |
12451 | if (JUMP_LABEL (place) != XEXP (note, 0)) | |
12452 | abort (); | |
12453 | if (GET_CODE (JUMP_LABEL (place)) == CODE_LABEL) | |
12454 | LABEL_NUSES (JUMP_LABEL (place))--; | |
12455 | place = 0; | |
12456 | } | |
12457 | if (place2 && GET_CODE (place2) == JUMP_INSN && JUMP_LABEL (place2)) | |
12458 | { | |
12459 | if (JUMP_LABEL (place2) != XEXP (note, 0)) | |
12460 | abort (); | |
12461 | if (GET_CODE (JUMP_LABEL (place2)) == CODE_LABEL) | |
12462 | LABEL_NUSES (JUMP_LABEL (place2))--; | |
12463 | place2 = 0; | |
12464 | } | |
e55b4486 RH |
12465 | break; |
12466 | ||
c1194d74 | 12467 | case REG_NONNEG: |
230d793d | 12468 | case REG_WAS_0: |
c1194d74 JW |
12469 | /* These notes say something about the value of a register prior |
12470 | to the execution of an insn. It is too much trouble to see | |
12471 | if the note is still correct in all situations. It is better | |
12472 | to simply delete it. */ | |
230d793d RS |
12473 | break; |
12474 | ||
12475 | case REG_RETVAL: | |
12476 | /* If the insn previously containing this note still exists, | |
12477 | put it back where it was. Otherwise move it to the previous | |
12478 | insn. Adjust the corresponding REG_LIBCALL note. */ | |
12479 | if (GET_CODE (from_insn) != NOTE) | |
12480 | place = from_insn; | |
12481 | else | |
12482 | { | |
5f4f0e22 | 12483 | tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX); |
230d793d RS |
12484 | place = prev_real_insn (from_insn); |
12485 | if (tem && place) | |
12486 | XEXP (tem, 0) = place; | |
c71e1201 AO |
12487 | /* If we're deleting the last remaining instruction of a |
12488 | libcall sequence, don't add the notes. */ | |
12489 | else if (XEXP (note, 0) == from_insn) | |
12490 | tem = place = 0; | |
230d793d RS |
12491 | } |
12492 | break; | |
12493 | ||
12494 | case REG_LIBCALL: | |
12495 | /* This is handled similarly to REG_RETVAL. */ | |
12496 | if (GET_CODE (from_insn) != NOTE) | |
12497 | place = from_insn; | |
12498 | else | |
12499 | { | |
5f4f0e22 | 12500 | tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX); |
230d793d RS |
12501 | place = next_real_insn (from_insn); |
12502 | if (tem && place) | |
12503 | XEXP (tem, 0) = place; | |
c71e1201 AO |
12504 | /* If we're deleting the last remaining instruction of a |
12505 | libcall sequence, don't add the notes. */ | |
12506 | else if (XEXP (note, 0) == from_insn) | |
12507 | tem = place = 0; | |
230d793d RS |
12508 | } |
12509 | break; | |
12510 | ||
12511 | case REG_DEAD: | |
12512 | /* If the register is used as an input in I3, it dies there. | |
da7d8304 | 12513 | Similarly for I2, if it is nonzero and adjacent to I3. |
230d793d RS |
12514 | |
12515 | If the register is not used as an input in either I3 or I2 | |
12516 | and it is not one of the registers we were supposed to eliminate, | |
12517 | there are two possibilities. We might have a non-adjacent I2 | |
12518 | or we might have somehow eliminated an additional register | |
12519 | from a computation. For example, we might have had A & B where | |
12520 | we discover that B will always be zero. In this case we will | |
12521 | eliminate the reference to A. | |
12522 | ||
12523 | In both cases, we must search to see if we can find a previous | |
12524 | use of A and put the death note there. */ | |
12525 | ||
6e2d1486 RK |
12526 | if (from_insn |
12527 | && GET_CODE (from_insn) == CALL_INSN | |
663522cb | 12528 | && find_reg_fusage (from_insn, USE, XEXP (note, 0))) |
6e2d1486 RK |
12529 | place = from_insn; |
12530 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))) | |
230d793d RS |
12531 | place = i3; |
12532 | else if (i2 != 0 && next_nonnote_insn (i2) == i3 | |
12533 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) | |
12534 | place = i2; | |
12535 | ||
03afaf36 R |
12536 | if (rtx_equal_p (XEXP (note, 0), elim_i2) |
12537 | || rtx_equal_p (XEXP (note, 0), elim_i1)) | |
230d793d RS |
12538 | break; |
12539 | ||
12540 | if (place == 0) | |
38d8473f | 12541 | { |
f6366fc7 | 12542 | basic_block bb = this_basic_block; |
d3a923ee RH |
12543 | |
12544 | for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem)) | |
38d8473f | 12545 | { |
2c3c49de | 12546 | if (! INSN_P (tem)) |
d3a923ee RH |
12547 | { |
12548 | if (tem == bb->head) | |
12549 | break; | |
12550 | continue; | |
12551 | } | |
12552 | ||
38d8473f RK |
12553 | /* If the register is being set at TEM, see if that is all |
12554 | TEM is doing. If so, delete TEM. Otherwise, make this | |
12555 | into a REG_UNUSED note instead. */ | |
12556 | if (reg_set_p (XEXP (note, 0), PATTERN (tem))) | |
12557 | { | |
12558 | rtx set = single_set (tem); | |
e5e809f4 | 12559 | rtx inner_dest = 0; |
e51712db | 12560 | #ifdef HAVE_cc0 |
f5c97640 | 12561 | rtx cc0_setter = NULL_RTX; |
e51712db | 12562 | #endif |
e5e809f4 JL |
12563 | |
12564 | if (set != 0) | |
12565 | for (inner_dest = SET_DEST (set); | |
663522cb KH |
12566 | (GET_CODE (inner_dest) == STRICT_LOW_PART |
12567 | || GET_CODE (inner_dest) == SUBREG | |
12568 | || GET_CODE (inner_dest) == ZERO_EXTRACT); | |
e5e809f4 JL |
12569 | inner_dest = XEXP (inner_dest, 0)) |
12570 | ; | |
38d8473f RK |
12571 | |
12572 | /* Verify that it was the set, and not a clobber that | |
663522cb | 12573 | modified the register. |
f5c97640 RH |
12574 | |
12575 | CC0 targets must be careful to maintain setter/user | |
12576 | pairs. If we cannot delete the setter due to side | |
12577 | effects, mark the user with an UNUSED note instead | |
12578 | of deleting it. */ | |
38d8473f RK |
12579 | |
12580 | if (set != 0 && ! side_effects_p (SET_SRC (set)) | |
f5c97640 RH |
12581 | && rtx_equal_p (XEXP (note, 0), inner_dest) |
12582 | #ifdef HAVE_cc0 | |
12583 | && (! reg_mentioned_p (cc0_rtx, SET_SRC (set)) | |
12584 | || ((cc0_setter = prev_cc0_setter (tem)) != NULL | |
12585 | && sets_cc0_p (PATTERN (cc0_setter)) > 0)) | |
12586 | #endif | |
12587 | ) | |
38d8473f RK |
12588 | { |
12589 | /* Move the notes and links of TEM elsewhere. | |
663522cb | 12590 | This might delete other dead insns recursively. |
38d8473f RK |
12591 | First set the pattern to something that won't use |
12592 | any register. */ | |
12593 | ||
12594 | PATTERN (tem) = pc_rtx; | |
12595 | ||
12596 | distribute_notes (REG_NOTES (tem), tem, tem, | |
12597 | NULL_RTX, NULL_RTX, NULL_RTX); | |
12598 | distribute_links (LOG_LINKS (tem)); | |
12599 | ||
12600 | PUT_CODE (tem, NOTE); | |
12601 | NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED; | |
12602 | NOTE_SOURCE_FILE (tem) = 0; | |
f5c97640 RH |
12603 | |
12604 | #ifdef HAVE_cc0 | |
12605 | /* Delete the setter too. */ | |
12606 | if (cc0_setter) | |
12607 | { | |
12608 | PATTERN (cc0_setter) = pc_rtx; | |
12609 | ||
12610 | distribute_notes (REG_NOTES (cc0_setter), | |
12611 | cc0_setter, cc0_setter, | |
12612 | NULL_RTX, NULL_RTX, NULL_RTX); | |
12613 | distribute_links (LOG_LINKS (cc0_setter)); | |
12614 | ||
12615 | PUT_CODE (cc0_setter, NOTE); | |
d3a923ee RH |
12616 | NOTE_LINE_NUMBER (cc0_setter) |
12617 | = NOTE_INSN_DELETED; | |
f5c97640 RH |
12618 | NOTE_SOURCE_FILE (cc0_setter) = 0; |
12619 | } | |
12620 | #endif | |
38d8473f | 12621 | } |
e5e809f4 JL |
12622 | /* If the register is both set and used here, put the |
12623 | REG_DEAD note here, but place a REG_UNUSED note | |
12624 | here too unless there already is one. */ | |
12625 | else if (reg_referenced_p (XEXP (note, 0), | |
12626 | PATTERN (tem))) | |
12627 | { | |
12628 | place = tem; | |
12629 | ||
12630 | if (! find_regno_note (tem, REG_UNUSED, | |
12631 | REGNO (XEXP (note, 0)))) | |
12632 | REG_NOTES (tem) | |
c5c76735 | 12633 | = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0), |
9e6a5703 | 12634 | REG_NOTES (tem)); |
e5e809f4 | 12635 | } |
38d8473f RK |
12636 | else |
12637 | { | |
12638 | PUT_REG_NOTE_KIND (note, REG_UNUSED); | |
663522cb | 12639 | |
38d8473f RK |
12640 | /* If there isn't already a REG_UNUSED note, put one |
12641 | here. */ | |
12642 | if (! find_regno_note (tem, REG_UNUSED, | |
12643 | REGNO (XEXP (note, 0)))) | |
12644 | place = tem; | |
12645 | break; | |
d3a923ee RH |
12646 | } |
12647 | } | |
12648 | else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)) | |
12649 | || (GET_CODE (tem) == CALL_INSN | |
12650 | && find_reg_fusage (tem, USE, XEXP (note, 0)))) | |
12651 | { | |
12652 | place = tem; | |
12653 | ||
12654 | /* If we are doing a 3->2 combination, and we have a | |
12655 | register which formerly died in i3 and was not used | |
12656 | by i2, which now no longer dies in i3 and is used in | |
12657 | i2 but does not die in i2, and place is between i2 | |
12658 | and i3, then we may need to move a link from place to | |
12659 | i2. */ | |
12660 | if (i2 && INSN_UID (place) <= max_uid_cuid | |
12661 | && INSN_CUID (place) > INSN_CUID (i2) | |
663522cb KH |
12662 | && from_insn |
12663 | && INSN_CUID (from_insn) > INSN_CUID (i2) | |
d3a923ee RH |
12664 | && reg_referenced_p (XEXP (note, 0), PATTERN (i2))) |
12665 | { | |
12666 | rtx links = LOG_LINKS (place); | |
12667 | LOG_LINKS (place) = 0; | |
12668 | distribute_links (links); | |
12669 | } | |
12670 | break; | |
12671 | } | |
12672 | ||
12673 | if (tem == bb->head) | |
230d793d | 12674 | break; |
38d8473f | 12675 | } |
663522cb | 12676 | |
d3a923ee RH |
12677 | /* We haven't found an insn for the death note and it |
12678 | is still a REG_DEAD note, but we have hit the beginning | |
12679 | of the block. If the existing life info says the reg | |
715e7fbc | 12680 | was dead, there's nothing left to do. Otherwise, we'll |
e7139885 RH |
12681 | need to do a global life update after combine. */ |
12682 | if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 | |
12683 | && REGNO_REG_SET_P (bb->global_live_at_start, | |
12684 | REGNO (XEXP (note, 0)))) | |
4977bab6 | 12685 | SET_BIT (refresh_blocks, this_basic_block->index); |
38d8473f | 12686 | } |
230d793d RS |
12687 | |
12688 | /* If the register is set or already dead at PLACE, we needn't do | |
e5e809f4 JL |
12689 | anything with this note if it is still a REG_DEAD note. |
12690 | We can get here if it is set at all, not only if it is totally replaced, | |
12691 | which is what `dead_or_set_p' checks, so also check for it being | |
12692 | set partially. */ | |
12693 | ||
230d793d RS |
12694 | if (place && REG_NOTE_KIND (note) == REG_DEAD) |
12695 | { | |
770ae6cc | 12696 | unsigned int regno = REGNO (XEXP (note, 0)); |
230d793d | 12697 | |
e7139885 RH |
12698 | /* Similarly, if the instruction on which we want to place |
12699 | the note is a noop, we'll need to do a global live update | |
12700 | after we remove them in delete_noop_moves. */ | |
12701 | if (noop_move_p (place)) | |
4977bab6 | 12702 | SET_BIT (refresh_blocks, this_basic_block->index); |
e7139885 | 12703 | |
230d793d RS |
12704 | if (dead_or_set_p (place, XEXP (note, 0)) |
12705 | || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place))) | |
12706 | { | |
12707 | /* Unless the register previously died in PLACE, clear | |
12708 | reg_last_death. [I no longer understand why this is | |
12709 | being done.] */ | |
12710 | if (reg_last_death[regno] != place) | |
12711 | reg_last_death[regno] = 0; | |
12712 | place = 0; | |
12713 | } | |
12714 | else | |
12715 | reg_last_death[regno] = place; | |
12716 | ||
12717 | /* If this is a death note for a hard reg that is occupying | |
12718 | multiple registers, ensure that we are still using all | |
12719 | parts of the object. If we find a piece of the object | |
03afaf36 R |
12720 | that is unused, we must arrange for an appropriate REG_DEAD |
12721 | note to be added for it. However, we can't just emit a USE | |
12722 | and tag the note to it, since the register might actually | |
12723 | be dead; so we recourse, and the recursive call then finds | |
12724 | the previous insn that used this register. */ | |
230d793d RS |
12725 | |
12726 | if (place && regno < FIRST_PSEUDO_REGISTER | |
12727 | && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1) | |
12728 | { | |
770ae6cc | 12729 | unsigned int endregno |
230d793d RS |
12730 | = regno + HARD_REGNO_NREGS (regno, |
12731 | GET_MODE (XEXP (note, 0))); | |
12732 | int all_used = 1; | |
770ae6cc | 12733 | unsigned int i; |
230d793d RS |
12734 | |
12735 | for (i = regno; i < endregno; i++) | |
03afaf36 R |
12736 | if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0) |
12737 | && ! find_regno_fusage (place, USE, i)) | |
12738 | || dead_or_set_regno_p (place, i)) | |
12739 | all_used = 0; | |
a394b17b | 12740 | |
230d793d RS |
12741 | if (! all_used) |
12742 | { | |
12743 | /* Put only REG_DEAD notes for pieces that are | |
03afaf36 | 12744 | not already dead or set. */ |
230d793d | 12745 | |
03afaf36 R |
12746 | for (i = regno; i < endregno; |
12747 | i += HARD_REGNO_NREGS (i, reg_raw_mode[i])) | |
230d793d | 12748 | { |
e50126e8 | 12749 | rtx piece = regno_reg_rtx[i]; |
f6366fc7 | 12750 | basic_block bb = this_basic_block; |
230d793d | 12751 | |
03afaf36 | 12752 | if (! dead_or_set_p (place, piece) |
230d793d RS |
12753 | && ! reg_bitfield_target_p (piece, |
12754 | PATTERN (place))) | |
03afaf36 R |
12755 | { |
12756 | rtx new_note | |
12757 | = gen_rtx_EXPR_LIST (REG_DEAD, piece, NULL_RTX); | |
12758 | ||
12759 | distribute_notes (new_note, place, place, | |
12760 | NULL_RTX, NULL_RTX, NULL_RTX); | |
12761 | } | |
c762163e R |
12762 | else if (! refers_to_regno_p (i, i + 1, |
12763 | PATTERN (place), 0) | |
12764 | && ! find_regno_fusage (place, USE, i)) | |
12765 | for (tem = PREV_INSN (place); ; | |
12766 | tem = PREV_INSN (tem)) | |
12767 | { | |
12768 | if (! INSN_P (tem)) | |
12769 | { | |
12770 | if (tem == bb->head) | |
12771 | { | |
12772 | SET_BIT (refresh_blocks, | |
f6366fc7 | 12773 | this_basic_block->index); |
c762163e R |
12774 | break; |
12775 | } | |
12776 | continue; | |
12777 | } | |
12778 | if (dead_or_set_p (tem, piece) | |
12779 | || reg_bitfield_target_p (piece, | |
12780 | PATTERN (tem))) | |
12781 | { | |
12782 | REG_NOTES (tem) | |
71fd5a51 | 12783 | = gen_rtx_EXPR_LIST (REG_UNUSED, piece, |
c762163e R |
12784 | REG_NOTES (tem)); |
12785 | break; | |
12786 | } | |
12787 | } | |
12788 | ||
230d793d RS |
12789 | } |
12790 | ||
12791 | place = 0; | |
12792 | } | |
12793 | } | |
12794 | } | |
12795 | break; | |
12796 | ||
12797 | default: | |
12798 | /* Any other notes should not be present at this point in the | |
12799 | compilation. */ | |
12800 | abort (); | |
12801 | } | |
12802 | ||
12803 | if (place) | |
12804 | { | |
12805 | XEXP (note, 1) = REG_NOTES (place); | |
12806 | REG_NOTES (place) = note; | |
12807 | } | |
1a26b032 RK |
12808 | else if ((REG_NOTE_KIND (note) == REG_DEAD |
12809 | || REG_NOTE_KIND (note) == REG_UNUSED) | |
12810 | && GET_CODE (XEXP (note, 0)) == REG) | |
b1f21e0a | 12811 | REG_N_DEATHS (REGNO (XEXP (note, 0)))--; |
230d793d RS |
12812 | |
12813 | if (place2) | |
1a26b032 RK |
12814 | { |
12815 | if ((REG_NOTE_KIND (note) == REG_DEAD | |
12816 | || REG_NOTE_KIND (note) == REG_UNUSED) | |
12817 | && GET_CODE (XEXP (note, 0)) == REG) | |
b1f21e0a | 12818 | REG_N_DEATHS (REGNO (XEXP (note, 0)))++; |
1a26b032 | 12819 | |
38a448ca RH |
12820 | REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note), |
12821 | REG_NOTE_KIND (note), | |
12822 | XEXP (note, 0), | |
12823 | REG_NOTES (place2)); | |
1a26b032 | 12824 | } |
230d793d RS |
12825 | } |
12826 | } | |
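/* Editorial sketch (not part of combine.c): once distribute_notes has
   settled on PLACE, attaching the note is a constant-time push onto the
   front of a singly linked chain, i.e. the two assignments
   "XEXP (note, 1) = REG_NOTES (place); REG_NOTES (place) = note;" above.
   The stand-alone program below models that list surgery with
   hypothetical stand-in types; nothing here is real GCC API.  */
#if 0
#include <stdio.h>

struct note { int kind; int regno; struct note *next; };
struct insn { struct note *reg_notes; };

/* Analogue of the two assignments above: O(1) cons onto the chain.  */
static void
attach_note (struct insn *place, struct note *n)
{
  n->next = place->reg_notes;
  place->reg_notes = n;
}

int
main (void)
{
  struct insn place = { 0 };
  struct note dead = { 1 /* REG_DEAD-like kind */, 4, 0 };

  attach_note (&place, &dead);
  printf ("first note: kind %d, reg %d\n",
          place.reg_notes->kind, place.reg_notes->regno);
  return 0;
}
#endif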
12827 | \f | |
12828 | /* As above, distribute the LOG_LINKS that used to be present on | |
5089e22e RS |
12829 | I3, I2, and I1 to new locations. This is also called in one case to |
12830 | add a link pointing at I3 when I3's destination is changed. */ | |
230d793d RS |
12831 | |
12832 | static void | |
12833 | distribute_links (links) | |
12834 | rtx links; | |
12835 | { | |
12836 | rtx link, next_link; | |
12837 | ||
12838 | for (link = links; link; link = next_link) | |
12839 | { | |
12840 | rtx place = 0; | |
12841 | rtx insn; | |
12842 | rtx set, reg; | |
12843 | ||
12844 | next_link = XEXP (link, 1); | |
12845 | ||
12846 | /* If the insn that this link points to is a NOTE or isn't a single | |
12847 | set, ignore it. In the latter case, it isn't clear what we | |
663522cb | 12848 | can do other than ignore the link, since we can't tell which |
230d793d RS |
12849 | register it was for. Such links wouldn't be used by combine |
12850 | anyway. | |
12851 | ||
12852 | It is not possible for the destination of the target of the link to | |
12853 | have been changed by combine. The only way this could happen is if we | |
12854 | replace I3, I2, and I1 by I3 and I2. But in that case the | |
12855 | destination of I2 also remains unchanged. */ | |
12856 | ||
12857 | if (GET_CODE (XEXP (link, 0)) == NOTE | |
12858 | || (set = single_set (XEXP (link, 0))) == 0) | |
12859 | continue; | |
12860 | ||
12861 | reg = SET_DEST (set); | |
12862 | while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT | |
12863 | || GET_CODE (reg) == SIGN_EXTRACT | |
12864 | || GET_CODE (reg) == STRICT_LOW_PART) | |
12865 | reg = XEXP (reg, 0); | |
12866 | ||
12867 | /* A LOG_LINK is defined as being placed on the first insn that uses | |
12868 | a register and points to the insn that sets the register. Start | |
12869 | searching at the next insn after the target of the link and stop | |
12870 | when we reach a set of the register or the end of the basic block. | |
12871 | ||
12872 | Note that this correctly handles the link that used to point from | |
5089e22e | 12873 | I3 to I2. Also note that not much searching is typically done here |
230d793d RS |
12874 | since most links don't point very far away. */ |
12875 | ||
12876 | for (insn = NEXT_INSN (XEXP (link, 0)); | |
f6366fc7 ZD |
12877 | (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR |
12878 | || this_basic_block->next_bb->head != insn)); | |
230d793d | 12879 | insn = NEXT_INSN (insn)) |
2c3c49de | 12880 | if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn))) |
230d793d RS |
12881 | { |
12882 | if (reg_referenced_p (reg, PATTERN (insn))) | |
12883 | place = insn; | |
12884 | break; | |
12885 | } | |
6e2d1486 | 12886 | else if (GET_CODE (insn) == CALL_INSN |
663522cb | 12887 | && find_reg_fusage (insn, USE, reg)) |
6e2d1486 RK |
12888 | { |
12889 | place = insn; | |
12890 | break; | |
12891 | } | |
230d793d RS |
12892 | |
12893 | /* If we found a place to put the link, place it there unless there | |
12894 | is already a link to the same insn as LINK at that point. */ | |
12895 | ||
12896 | if (place) | |
12897 | { | |
12898 | rtx link2; | |
12899 | ||
12900 | for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1)) | |
12901 | if (XEXP (link2, 0) == XEXP (link, 0)) | |
12902 | break; | |
12903 | ||
12904 | if (link2 == 0) | |
12905 | { | |
12906 | XEXP (link, 1) = LOG_LINKS (place); | |
12907 | LOG_LINKS (place) = link; | |
abe6e52f RK |
12908 | |
12909 | /* Set added_links_insn to the earliest insn we added a | |
12910 | link to. */ | |
663522cb | 12911 | if (added_links_insn == 0 |
abe6e52f RK |
12912 | || INSN_CUID (added_links_insn) > INSN_CUID (place)) |
12913 | added_links_insn = place; | |
230d793d RS |
12914 | } |
12915 | } | |
12916 | } | |
12917 | } | |
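/* Editorial sketch (not part of combine.c): the placement loop above
   walks forward from the insn after the link's target to the first insn
   that mentions the register, then pushes the link there unless a
   duplicate already exists.  The stand-alone program below models that
   search with bitmasks standing in for reg_overlap_mentioned_p, and it
   deliberately omits the set-vs-use distinction the real loop makes;
   all names are hypothetical.  */
#if 0
#include <stdio.h>

#define NINSNS 5

int
main (void)
{
  /* uses[i]: bitmask of registers mentioned by insn i in one block.
     Insn 0 is the setter of the register tracked by bit 0x4.  */
  unsigned uses[NINSNS] = { 0x4, 0x0, 0x1, 0x4, 0x4 };
  unsigned reg = 0x4;
  int place = -1;
  int i;

  /* Analogue of the NEXT_INSN loop: stop at the first mention; later
     mentions (insn 4 here) are deliberately never considered.  */
  for (i = 1; i < NINSNS; i++)
    if (uses[i] & reg)
      {
        place = i;
        break;
      }

  if (place >= 0)
    printf ("link for the setter lands on insn %d\n", place); /* 3 */
  return 0;
}
#endif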
12918 | \f | |
1427d6d2 RK |
12919 | /* Compute INSN_CUID for INSN, which is an insn made by combine. */ |
12920 | ||
12921 | static int | |
12922 | insn_cuid (insn) | |
12923 | rtx insn; | |
12924 | { | |
12925 | while (insn != 0 && INSN_UID (insn) > max_uid_cuid | |
12926 | && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE) | |
12927 | insn = NEXT_INSN (insn); | |
12928 | ||
12929 | if (INSN_UID (insn) > max_uid_cuid) | |
12930 | abort (); | |
12931 | ||
12932 | return INSN_CUID (insn); | |
12933 | } | |
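/* Editorial sketch (not part of combine.c): insn_cuid compensates for
   insns that combine created after the uid -> cuid table was built;
   those have UIDs above max_uid_cuid and no table entry, so the walk
   skips ahead to an insn the table does know about.  The stand-alone
   model below simplifies the USE-insn test to a bound check and uses an
   array index as a stand-in for NEXT_INSN; names are hypothetical.  */
#if 0
#include <assert.h>
#include <stdio.h>

#define MAX_UID_CUID 100

static int uid_cuid[MAX_UID_CUID + 1];  /* uid -> sequence number  */

static int
sketch_insn_cuid (const int *uids, int n, int i)
{
  /* Skip combine-made insns, whose uids exceed the table bound.  */
  while (i < n && uids[i] > MAX_UID_CUID)
    i++;

  assert (i < n);               /* mirrors the abort () above  */
  return uid_cuid[uids[i]];
}

int
main (void)
{
  int uids[4] = { 7, 101, 102, 9 };  /* 101 and 102: made by combine  */

  uid_cuid[7] = 1;
  uid_cuid[9] = 2;

  printf ("cuid = %d\n", sketch_insn_cuid (uids, 4, 1)); /* prints 2 */
  return 0;
}
#endif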
12934 | \f | |
230d793d RS |
12935 | void |
12936 | dump_combine_stats (file) | |
12937 | FILE *file; | |
12938 | { | |
ab87f8c8 | 12939 | fnotice |
230d793d RS |
12940 | (file, |
12941 | ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n", | |
12942 | combine_attempts, combine_merges, combine_extras, combine_successes); | |
12943 | } | |
12944 | ||
12945 | void | |
12946 | dump_combine_total_stats (file) | |
12947 | FILE *file; | |
12948 | { | |
ab87f8c8 | 12949 | fnotice |
230d793d RS |
12950 | (file, |
12951 | "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n", | |
12952 | total_attempts, total_merges, total_extras, total_successes); | |
12953 | } |
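/* Editorial note: with the format strings above, a dump file would end
   with output of this shape (the counts shown are invented):

     ;; Combiner statistics: 120 attempts, 40 substitutions (3 requiring new space),
     ;; 25 successes.

     ;; Combiner totals: 3456 attempts, 1200 substitutions (90 requiring new space),
     ;; 800 successes.  */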