/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information isn't
   completely updated (however this is only a local issue since it is
   regenerated before the next pass that uses it):

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

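/* An illustrative sketch of a successful combination (register and insn
   numbers here are hypothetical, not taken from any real dump): given
   the linked pair

	(set (reg 100) (ashift (reg 99) (const_int 2)))
	(set (reg 101) (plus (reg 100) (reg 98)))

   where reg 100 dies in the second insn, substitution produces

	(set (reg 101) (plus (ashift (reg 99) (const_int 2)) (reg 98)))

   which is kept only if the machine description recognizes the combined
   pattern, e.g. as a shift-and-add instruction.  */
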
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"
#include "target.h"
#include "optabs.h"
#include "insn-codes.h"
#include "rtlhooks-def.h"
/* Include output.h for dump_file.  */
#include "output.h"
#include "params.h"
#include "timevar.h"
#include "tree-pass.h"
#include "df.h"
#include "cgraph.h"

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* combine_instructions may try to replace the right hand side of the
   second instruction with the value of an associated REG_EQUAL note
   before throwing it at try_combine.  That is problematic when there
   is a REG_DEAD note for a register used in the old right hand side
   and can cause distribute_notes to do wrong things.  This is the
   second instruction if it has been so modified, null otherwise.  */

static rtx i2mod;

/* When I2MOD is nonnull, this is a copy of the old right hand side.  */

static rtx i2mod_old_rhs;

/* When I2MOD is nonnull, this is a copy of the new right hand side.  */

static rtx i2mod_new_rhs;

typedef struct reg_stat_struct {
  /* Record last point of death of (hard or pseudo) register n.  */
  rtx last_death;

  /* Record last point of modification of (hard or pseudo) register n.  */
  rtx last_set;

  /* The next group of fields allows the recording of the last value assigned
     to (hard or pseudo) register n.  We use this information to see if an
     operation being processed is redundant given a prior operation performed
     on the register.  For example, an `and' with a constant is redundant if
     all the zero bits are already known to be turned off.

     We use an approach similar to that used by cse, but change it in the
     following ways:

     (1) We do not want to reinitialize at each label.
     (2) It is useful, but not critical, to know the actual value assigned
         to a register.  Often just its form is helpful.

     Therefore, we maintain the following fields:

     last_set_value             the last value assigned
     last_set_label             records the value of label_tick when the
                                register was assigned
     last_set_table_tick        records the value of label_tick when a
                                value using the register is assigned
     last_set_invalid           set to nonzero when it is not valid
                                to use the value of this register in some
                                register's value

     To understand the usage of these tables, it is important to understand
     the distinction between the value in last_set_value being valid and
     the register being validly contained in some other expression in the
     table.

     (The next two parameters are out of date).

     reg_stat[i].last_set_value is valid if it is nonzero, and either
     reg_n_sets[i] is 1 or reg_stat[i].last_set_label == label_tick.

     Register I may validly appear in any expression returned for the value
     of another register if reg_n_sets[i] is 1.  It may also appear in the
     value for register J if reg_stat[j].last_set_invalid is zero, or
     reg_stat[i].last_set_label < reg_stat[j].last_set_label.

     If an expression is found in the table containing a register which may
     not validly appear in an expression, the register is replaced by
     something that won't match, (clobber (const_int 0)).  */

  /* Record last value assigned to (hard or pseudo) register n.  */

  rtx last_set_value;

  /* Record the value of label_tick when an expression involving register n
     is placed in last_set_value.  */

  int last_set_table_tick;

  /* Record the value of label_tick when the value for register n is placed in
     last_set_value.  */

  int last_set_label;

  /* These fields are maintained in parallel with last_set_value and are
     used to store the mode in which the register was last set, the bits
     that were known to be zero when it was last set, and the number of
     sign bit copies it was known to have when it was last set.  */

  unsigned HOST_WIDE_INT last_set_nonzero_bits;
  char last_set_sign_bit_copies;
  ENUM_BITFIELD(machine_mode) last_set_mode : 8;

  /* Set nonzero if references to register n in expressions should not be
     used.  last_set_invalid is set nonzero when this register is being
     assigned to and last_set_table_tick == label_tick.  */

  char last_set_invalid;

  /* Some registers that are set more than once and used in more than one
     basic block are nevertheless always set in similar ways.  For example,
     a QImode register may be loaded from memory in two places on a machine
     where byte loads zero extend.

     We record in the following fields if a register has some leading bits
     that are always equal to the sign bit, and what we know about the
     nonzero bits of a register, specifically which bits are known to be
     zero.

     If an entry is zero, it means that we don't know anything special.  */

  unsigned char sign_bit_copies;

  unsigned HOST_WIDE_INT nonzero_bits;

  /* Record the value of the label_tick when the last truncation
     happened.  The field truncated_to_mode is only valid if
     truncation_label == label_tick.  */

  int truncation_label;

  /* Record the last truncation seen for this register.  If truncation
     is not a nop to this mode we might be able to save an explicit
     truncation if we know that value already contains a truncated
     value.  */

  ENUM_BITFIELD(machine_mode) truncated_to_mode : 8;
} reg_stat_type;

DEF_VEC_O(reg_stat_type);
DEF_VEC_ALLOC_O(reg_stat_type,heap);

static VEC(reg_stat_type,heap) *reg_stat;

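/* An illustrative sketch of how the two summary fields above are consumed
   (register number hypothetical): if reg_stat[100].nonzero_bits is 0xff,
   only the low byte of (reg 100) can ever be nonzero, so a later
   (and (reg 100) (const_int 255)) is redundant and simplifies to
   (reg 100).  */
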
/* Record the luid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the luid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_luid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest LUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this LUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_luid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block in which we are performing combines.  */
static basic_block this_basic_block;
static bool optimize_this_for_speed_p;

/* Length of the currently allocated uid_insn_cost array.  */

static int max_uid_known;

/* The following array records the insn_rtx_cost for every insn
   in the instruction stream.  */

static int *uid_insn_cost;

/* The following array records the LOG_LINKS for every insn in the
   instruction stream as an INSN_LIST rtx.  */

static rtx *uid_log_links;

#define INSN_COST(INSN) (uid_insn_cost[INSN_UID (INSN)])
#define LOG_LINKS(INSN) (uid_log_links[INSN_UID (INSN)])

/* Incremented for each basic block.  */

static int label_tick;

/* Reset to label_tick for each extended basic block in scanning order.  */

static int label_tick_ebb_start;

/* Mode used to compute significance in reg_stat[].nonzero_bits.  It is the
   largest integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero when reg_stat[].nonzero_bits and reg_stat[].sign_bit_copies can
   be safely used.  It is zero while computing them and after combine has
   completed.  This former test prevents propagating values based on
   previously set values, which can be incorrect if a variable is modified
   in a loop.  */

static int nonzero_sign_valid;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.  */

enum undo_kind { UNDO_RTX, UNDO_INT, UNDO_MODE };

struct undo
{
  struct undo *next;
  enum undo_kind kind;
  union { rtx r; int i; enum machine_mode m; } old_contents;
  union { rtx *r; int *i; } where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

struct undobuf
{
  struct undo *undos;
  struct undo *frees;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static rtx reg_nonzero_bits_for_combine (const_rtx, enum machine_mode, const_rtx,
                                         enum machine_mode,
                                         unsigned HOST_WIDE_INT,
                                         unsigned HOST_WIDE_INT *);
static rtx reg_num_sign_bit_copies_for_combine (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int, unsigned int *);
static void do_SUBST (rtx *, rtx);
static void do_SUBST_INT (int *, int);
static void init_reg_last (void);
static void setup_incoming_promotions (rtx);
static void set_nonzero_bits_and_sign_copies (rtx, const_rtx, void *);
static int cant_combine_insn_p (rtx);
static int can_combine_p (rtx, rtx, rtx, rtx, rtx *, rtx *);
static int combinable_i3pat (rtx, rtx *, rtx, rtx, int, rtx *);
static int contains_muldiv (rtx);
static rtx try_combine (rtx, rtx, rtx, int *);
static void undo_all (void);
static void undo_commit (void);
static rtx *find_split_point (rtx *, rtx);
static rtx subst (rtx, rtx, rtx, int, int);
static rtx combine_simplify_rtx (rtx, enum machine_mode, int);
static rtx simplify_if_then_else (rtx);
static rtx simplify_set (rtx);
static rtx simplify_logical (rtx);
static rtx expand_compound_operation (rtx);
static const_rtx expand_field_assignment (const_rtx);
static rtx make_extraction (enum machine_mode, rtx, HOST_WIDE_INT,
                            rtx, unsigned HOST_WIDE_INT, int, int, int);
static rtx extract_left_shift (rtx, int);
static rtx make_compound_operation (rtx, enum rtx_code);
static int get_pos_from_mask (unsigned HOST_WIDE_INT,
                              unsigned HOST_WIDE_INT *);
static rtx canon_reg_for_combine (rtx, rtx);
static rtx force_to_mode (rtx, enum machine_mode,
                          unsigned HOST_WIDE_INT, int);
static rtx if_then_else_cond (rtx, rtx *, rtx *);
static rtx known_cond (rtx, enum rtx_code, rtx, rtx);
static int rtx_equal_for_field_assignment_p (rtx, rtx);
static rtx make_field_assignment (rtx);
static rtx apply_distributive_law (rtx);
static rtx distribute_and_simplify_rtx (rtx, int);
static rtx simplify_and_const_int_1 (enum machine_mode, rtx,
                                     unsigned HOST_WIDE_INT);
static rtx simplify_and_const_int (rtx, enum machine_mode, rtx,
                                   unsigned HOST_WIDE_INT);
static int merge_outer_ops (enum rtx_code *, HOST_WIDE_INT *, enum rtx_code,
                            HOST_WIDE_INT, enum machine_mode, int *);
static rtx simplify_shift_const_1 (enum rtx_code, enum machine_mode, rtx, int);
static rtx simplify_shift_const (rtx, enum rtx_code, enum machine_mode, rtx,
                                 int);
static int recog_for_combine (rtx *, rtx, rtx *);
static rtx gen_lowpart_for_combine (enum machine_mode, rtx);
static enum rtx_code simplify_comparison (enum rtx_code, rtx *, rtx *);
static void update_table_tick (rtx);
static void record_value_for_reg (rtx, rtx, rtx);
static void check_promoted_subreg (rtx, rtx);
static void record_dead_and_set_regs_1 (rtx, const_rtx, void *);
static void record_dead_and_set_regs (rtx);
static int get_last_value_validate (rtx *, rtx, int, int);
static rtx get_last_value (const_rtx);
static int use_crosses_set_p (const_rtx, int);
static void reg_dead_at_p_1 (rtx, const_rtx, void *);
static int reg_dead_at_p (rtx, rtx);
static void move_deaths (rtx, rtx, int, rtx, rtx *);
static int reg_bitfield_target_p (rtx, rtx);
static void distribute_notes (rtx, rtx, rtx, rtx, rtx, rtx);
static void distribute_links (rtx);
static void mark_used_regs_combine (rtx);
static void record_promoted_value (rtx, rtx);
static int unmentioned_reg_p_1 (rtx *, void *);
static bool unmentioned_reg_p (rtx, rtx);
static int record_truncated_value (rtx *, void *);
static void record_truncated_values (rtx *, void *);
static bool reg_truncated_to_mode (enum machine_mode, const_rtx);
static rtx gen_lowpart_or_truncate (enum machine_mode, rtx);

/* It is not safe to use ordinary gen_lowpart in combine.
   See comments in gen_lowpart_for_combine.  */
#undef RTL_HOOKS_GEN_LOWPART
#define RTL_HOOKS_GEN_LOWPART              gen_lowpart_for_combine

/* Our implementation of gen_lowpart never emits a new pseudo.  */
#undef RTL_HOOKS_GEN_LOWPART_NO_EMIT
#define RTL_HOOKS_GEN_LOWPART_NO_EMIT      gen_lowpart_for_combine

#undef RTL_HOOKS_REG_NONZERO_REG_BITS
#define RTL_HOOKS_REG_NONZERO_REG_BITS     reg_nonzero_bits_for_combine

#undef RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES
#define RTL_HOOKS_REG_NUM_SIGN_BIT_COPIES  reg_num_sign_bit_copies_for_combine

#undef RTL_HOOKS_REG_TRUNCATED_TO_MODE
#define RTL_HOOKS_REG_TRUNCATED_TO_MODE    reg_truncated_to_mode

static const struct rtl_hooks combine_rtl_hooks = RTL_HOOKS_INITIALIZER;

/* Try to split PATTERN found in INSN.  This returns NULL_RTX if
   PATTERN can not be split.  Otherwise, it returns an insn sequence.
   This is a wrapper around split_insns which ensures that the
   reg_stat vector is made larger if the splitter creates a new
   register.  */

static rtx
combine_split_insns (rtx pattern, rtx insn)
{
  rtx ret;
  unsigned int nregs;

  ret = split_insns (pattern, insn);
  nregs = max_reg_num ();
  if (nregs > VEC_length (reg_stat_type, reg_stat))
    VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
  return ret;
}

/* This is used by find_single_use to locate an rtx in LOC that
   contains exactly one use of DEST, which is typically either a REG
   or CC0.  It returns a pointer to the innermost rtx expression
   containing DEST.  Appearances of DEST that are being used to
   totally replace it are not counted.  */

static rtx *
find_single_use_1 (rtx dest, rtx *loc)
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = NULL;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (REG_P (dest) && REG_P (XEXP (x, i))
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == NULL)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return NULL;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (REG_P (dest)
                      && REG_P (XVECEXP (x, i, j))
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == NULL)
                result = this_result;
              else if (this_result)
                return NULL;
            }
        }
    }

  return result;
}

/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is nonzero, *PLOC is set to the insn containing the single use.

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

static rtx *
find_single_use (rtx dest, rtx insn, rtx *ploc)
{
  basic_block bb;
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (!NONJUMP_INSN_P (next) && !JUMP_P (next)))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (!REG_P (dest))
    return 0;

  bb = BLOCK_FOR_INSN (insn);
  for (next = NEXT_INSN (insn);
       next && BLOCK_FOR_INSN (next) == bb;
       next = NEXT_INSN (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (rtx *into, rtx newval)
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  /* We'd like to catch as many invalid transformations here as
     possible.  Unfortunately, there are way too many mode changes
     that are perfectly valid, so we'd waste too much effort for
     little gain doing the checks here.  Focus on catching invalid
     transformations involving integer constants.  */
  if (GET_MODE_CLASS (GET_MODE (oldval)) == MODE_INT
      && CONST_INT_P (newval))
    {
      /* Sanity check that we're replacing oldval with a CONST_INT
         that is a valid sign-extension for the original mode.  */
      gcc_assert (INTVAL (newval)
                  == trunc_int_for_mode (INTVAL (newval), GET_MODE (oldval)));

      /* Replacing the operand of a SUBREG or a ZERO_EXTEND with a
         CONST_INT is not valid, because after the replacement, the
         original mode would be gone.  Unfortunately, we can't tell
         when do_SUBST is called to replace the operand thereof, so we
         perform this test on oldval instead, checking whether an
         invalid replacement took place before we got here.  */
      gcc_assert (!(GET_CODE (oldval) == SUBREG
                    && CONST_INT_P (SUBREG_REG (oldval))));
      gcc_assert (!(GET_CODE (oldval) == ZERO_EXTEND
                    && CONST_INT_P (XEXP (oldval, 0))));
    }

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_RTX;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL) do_SUBST(&(INTO), (NEWVAL))

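/* A usage sketch (hypothetical operand position): when subst decides to
   replace operand 0 of X with NEW_RTX, it writes

	SUBST (XEXP (x, 0), new_rtx);

   so that the old operand is saved on undobuf.undos and the whole
   attempted combination can be reverted by undo_all if it does not
   pan out.  */
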
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (int *into, int newval)
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_INT;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL) do_SUBST_INT(&(INTO), (NEWVAL))

/* Similar to SUBST, but just substitute the mode.  This is used when
   changing the mode of a pseudo-register, so that any other
   references to the entry in the regno_reg_rtx array will change as
   well.  */

static void
do_SUBST_MODE (rtx *into, enum machine_mode newval)
{
  struct undo *buf;
  enum machine_mode oldval = GET_MODE (*into);

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = XNEW (struct undo);

  buf->kind = UNDO_MODE;
  buf->where.r = into;
  buf->old_contents.m = oldval;
  adjust_reg_mode (*into, newval);

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_MODE(INTO, NEWVAL) do_SUBST_MODE(&(INTO), (NEWVAL))

/* Subroutine of try_combine.  Determine whether the combine replacement
   patterns NEWPAT, NEWI2PAT and NEWOTHERPAT are cheaper according to
   insn_rtx_cost than the original instruction sequence I1, I2, I3 and
   undobuf.other_insn.  Note that I1 and/or NEWI2PAT may be NULL_RTX.
   NEWOTHERPAT and undobuf.other_insn may also both be NULL_RTX.  This
   function returns false if the costs of all instructions can be
   estimated, and the replacements are more expensive than the original
   sequence.  */

static bool
combine_validate_cost (rtx i1, rtx i2, rtx i3, rtx newpat, rtx newi2pat,
                       rtx newotherpat)
{
  int i1_cost, i2_cost, i3_cost;
  int new_i2_cost, new_i3_cost;
  int old_cost, new_cost;

  /* Lookup the original insn_rtx_costs.  */
  i2_cost = INSN_COST (i2);
  i3_cost = INSN_COST (i3);

  if (i1)
    {
      i1_cost = INSN_COST (i1);
      old_cost = (i1_cost > 0 && i2_cost > 0 && i3_cost > 0)
                 ? i1_cost + i2_cost + i3_cost : 0;
    }
  else
    {
      old_cost = (i2_cost > 0 && i3_cost > 0) ? i2_cost + i3_cost : 0;
      i1_cost = 0;
    }

  /* Calculate the replacement insn_rtx_costs.  */
  new_i3_cost = insn_rtx_cost (newpat, optimize_this_for_speed_p);
  if (newi2pat)
    {
      new_i2_cost = insn_rtx_cost (newi2pat, optimize_this_for_speed_p);
      new_cost = (new_i2_cost > 0 && new_i3_cost > 0)
                 ? new_i2_cost + new_i3_cost : 0;
    }
  else
    {
      new_cost = new_i3_cost;
      new_i2_cost = 0;
    }

  if (undobuf.other_insn)
    {
      int old_other_cost, new_other_cost;

      old_other_cost = INSN_COST (undobuf.other_insn);
      new_other_cost = insn_rtx_cost (newotherpat, optimize_this_for_speed_p);
      if (old_other_cost > 0 && new_other_cost > 0)
        {
          old_cost += old_other_cost;
          new_cost += new_other_cost;
        }
      else
        old_cost = 0;
    }

  /* Disallow this recombination if both new_cost and old_cost are
     greater than zero, and new_cost is greater than old cost.  */
  if (old_cost > 0
      && new_cost > old_cost)
    {
      if (dump_file)
        {
          if (i1)
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d, %d and %d\n",
                       INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d + %d = %d\n",
                       i1_cost, i2_cost, i3_cost, old_cost);
            }
          else
            {
              fprintf (dump_file,
                       "rejecting combination of insns %d and %d\n",
                       INSN_UID (i2), INSN_UID (i3));
              fprintf (dump_file, "original costs %d + %d = %d\n",
                       i2_cost, i3_cost, old_cost);
            }

          if (newi2pat)
            {
              fprintf (dump_file, "replacement costs %d + %d = %d\n",
                       new_i2_cost, new_i3_cost, new_cost);
            }
          else
            fprintf (dump_file, "replacement cost %d\n", new_cost);
        }

      return false;
    }

  /* Update the uid_insn_cost array with the replacement costs.  */
  INSN_COST (i2) = new_i2_cost;
  INSN_COST (i3) = new_i3_cost;
  if (i1)
    INSN_COST (i1) = 0;

  return true;
}
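
/* For instance (made-up insn numbers and costs), a rejected two-insn
   combination is logged by the code above as:

	rejecting combination of insns 10 and 14
	original costs 4 + 4 = 8
	replacement cost 12  */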

/* Delete any insns that copy a register to itself.  */

static void
delete_noop_moves (void)
{
  rtx insn, next;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb)); insn = next)
        {
          next = NEXT_INSN (insn);
          if (INSN_P (insn) && noop_move_p (insn))
            {
              if (dump_file)
                fprintf (dump_file, "deleting noop move %d\n", INSN_UID (insn));

              delete_insn_and_edges (insn);
            }
        }
    }
}

/* Fill in log links field for all insns.  */

static void
create_log_links (void)
{
  basic_block bb;
  rtx *next_use, insn;
  df_ref *def_vec, *use_vec;

  next_use = XCNEWVEC (rtx, max_reg_num ());

  /* Pass through each block from the end, recording the uses of each
     register and establishing log links when def is encountered.
     Note that we do not clear next_use array in order to save time,
     so we have to test whether the use is in the same basic block as def.

     There are a few cases below when we do not consider the definition or
     usage -- these are carried over from what the original flow.c did.
     Don't ask me why it is done this way; I don't know and if it works,
     I don't want to know.  */

  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS_REVERSE (bb, insn)
        {
          if (!NONDEBUG_INSN_P (insn))
            continue;

          /* Log links are created only once.  */
          gcc_assert (!LOG_LINKS (insn));

          for (def_vec = DF_INSN_DEFS (insn); *def_vec; def_vec++)
            {
              df_ref def = *def_vec;
              int regno = DF_REF_REGNO (def);
              rtx use_insn;

              if (!next_use[regno])
                continue;

              /* Do not consider if it is pre/post modification in MEM.  */
              if (DF_REF_FLAGS (def) & DF_REF_PRE_POST_MODIFY)
                continue;

              /* Do not make the log link for frame pointer.  */
              if ((regno == FRAME_POINTER_REGNUM
                   && (! reload_completed || frame_pointer_needed))
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
                  || (regno == HARD_FRAME_POINTER_REGNUM
                      && (! reload_completed || frame_pointer_needed))
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
                  || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
                  )
                continue;

              use_insn = next_use[regno];
              if (BLOCK_FOR_INSN (use_insn) == bb)
                {
                  /* flow.c claimed:

                     We don't build a LOG_LINK for hard registers contained
                     in ASM_OPERANDs.  If these registers get replaced,
                     we might wind up changing the semantics of the insn,
                     even if reload can make what appear to be valid
                     assignments later.  */
                  if (regno >= FIRST_PSEUDO_REGISTER
                      || asm_noperands (PATTERN (use_insn)) < 0)
                    {
                      /* Don't add duplicate links between instructions.  */
                      rtx links;
                      for (links = LOG_LINKS (use_insn); links;
                           links = XEXP (links, 1))
                        if (insn == XEXP (links, 0))
                          break;

                      if (!links)
                        LOG_LINKS (use_insn) =
                          alloc_INSN_LIST (insn, LOG_LINKS (use_insn));
                    }
                }
              next_use[regno] = NULL_RTX;
            }

          for (use_vec = DF_INSN_USES (insn); *use_vec; use_vec++)
            {
              df_ref use = *use_vec;
              int regno = DF_REF_REGNO (use);

              /* Do not consider the usage of the stack pointer
                 by function call.  */
              if (DF_REF_FLAGS (use) & DF_REF_CALL_STACK_USAGE)
                continue;

              next_use[regno] = insn;
            }
        }
    }

  free (next_use);
}

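/* A sketch of the resulting links (insn UIDs hypothetical): if insn 10
   is the most recent def of (reg 99) in a block and insn 14 is the next
   insn in that block to use (reg 99), the loop above records insn 10 in
   LOG_LINKS (insn 14), which is exactly the candidate pair try_combine
   will later consider.  */
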
/* Clear LOG_LINKS fields of insns.  */

static void
clear_log_links (void)
{
  rtx insn;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      free_INSN_LIST_list (&LOG_LINKS (insn));
}

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Return nonzero if the combiner has turned an indirect jump
   instruction into a direct jump.  */
static int
combine_instructions (rtx f, unsigned int nregs)
{
  rtx insn, next;
#ifdef HAVE_cc0
  rtx prev;
#endif
  rtx links, nextlinks;
  rtx first;
  basic_block last_bb;

  int new_direct_jump_p = 0;

  for (first = f; first && !INSN_P (first); )
    first = NEXT_INSN (first);
  if (!first)
    return 0;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  rtl_hooks = combine_rtl_hooks;

  VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);

  init_recog_no_volatile ();

  /* Allocate array for insn info.  */
  max_uid_known = get_max_uid ();
  uid_log_links = XCNEWVEC (rtx, max_uid_known + 1);
  uid_insn_cost = XCNEWVEC (int, max_uid_known + 1);

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_stat[].nonzero_bits when computing it.  This can cause
     problems when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;
  label_tick = label_tick_ebb_start = 1;

  /* Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  setup_incoming_promotions (first);
  /* Allow the entry block and the first block to fall into the same EBB.
     Conceptually the incoming promotions are assigned to the entry block.  */
  last_bb = ENTRY_BLOCK_PTR;

  create_log_links ();
  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      FOR_BB_INSNS (this_basic_block, insn)
        if (INSN_P (insn) && BLOCK_FOR_INSN (insn))
          {
            subst_low_luid = DF_INSN_LUID (insn);
            subst_insn = insn;

            note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
                         insn);
            record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
            for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
              if (REG_NOTE_KIND (links) == REG_INC)
                set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
                                                  insn);
#endif

            /* Record the current insn_rtx_cost of this instruction.  */
            if (NONJUMP_INSN_P (insn))
              INSN_COST (insn) = insn_rtx_cost (PATTERN (insn),
                                                optimize_this_for_speed_p);
            if (dump_file)
              fprintf (dump_file, "insn_cost %d: %d\n",
                       INSN_UID (insn), INSN_COST (insn));
          }
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */
  label_tick = label_tick_ebb_start = 1;
  init_reg_last ();
  setup_incoming_promotions (first);
  last_bb = ENTRY_BLOCK_PTR;

  FOR_EACH_BB (this_basic_block)
    {
      optimize_this_for_speed_p = optimize_bb_for_speed_p (this_basic_block);
      last_call_luid = 0;
      mem_last_set = -1;

      label_tick++;
      if (!single_pred_p (this_basic_block)
          || single_pred (this_basic_block) != last_bb)
        label_tick_ebb_start = label_tick;
      last_bb = this_basic_block;

      rtl_profile_for_bb (this_basic_block);
      for (insn = BB_HEAD (this_basic_block);
           insn != NEXT_INSN (BB_END (this_basic_block));
           insn = next ? next : NEXT_INSN (insn))
        {
          next = 0;
          if (NONDEBUG_INSN_P (insn))
            {
              /* See if we know about function return values before this
                 insn based upon SUBREG flags.  */
              check_promoted_subreg (insn, PATTERN (insn));

              /* See if we can find hardregs and subreg of pseudos in
                 narrower modes.  This could help turning TRUNCATEs
                 into SUBREGs.  */
              note_uses (&PATTERN (insn), record_truncated_values, NULL);

              /* Try this insn with each insn it links back to.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if ((next = try_combine (insn, XEXP (links, 0),
                                         NULL_RTX, &new_direct_jump_p)) != 0)
                  goto retry;

              /* Try each sequence of three linked insns ending with this one.  */

              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx link = XEXP (links, 0);

                  /* If the linked insn has been replaced by a note, then there
                     is no point in pursuing this chain any further.  */
                  if (NOTE_P (link))
                    continue;

                  for (nextlinks = LOG_LINKS (link);
                       nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, link,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

#ifdef HAVE_cc0
              /* Try to combine a jump insn that uses CC0
                 with a preceding insn that sets CC0, and maybe with its
                 logical predecessor as well.
                 This is how we make decrement-and-branch insns.
                 We need this special code because data flow connections
                 via CC0 do not get entered in LOG_LINKS.  */

              if (JUMP_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev)))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Do the same for an insn that explicitly references CC0.  */
              if (NONJUMP_INSN_P (insn)
                  && (prev = prev_nonnote_insn (insn)) != 0
                  && NONJUMP_INSN_P (prev)
                  && sets_cc0_p (PATTERN (prev))
                  && GET_CODE (PATTERN (insn)) == SET
                  && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
                {
                  if ((next = try_combine (insn, prev,
                                           NULL_RTX, &new_direct_jump_p)) != 0)
                    goto retry;

                  for (nextlinks = LOG_LINKS (prev); nextlinks;
                       nextlinks = XEXP (nextlinks, 1))
                    if ((next = try_combine (insn, prev,
                                             XEXP (nextlinks, 0),
                                             &new_direct_jump_p)) != 0)
                      goto retry;
                }

              /* Finally, see if any of the insns that this insn links to
                 explicitly references CC0.  If so, try this insn, that insn,
                 and its predecessor if it sets CC0.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                if (NONJUMP_INSN_P (XEXP (links, 0))
                    && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                    && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                    && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                    && NONJUMP_INSN_P (prev)
                    && sets_cc0_p (PATTERN (prev))
                    && (next = try_combine (insn, XEXP (links, 0),
                                            prev, &new_direct_jump_p)) != 0)
                  goto retry;
#endif

              /* Try combining an insn with two different insns whose results it
                 uses.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                for (nextlinks = XEXP (links, 1); nextlinks;
                     nextlinks = XEXP (nextlinks, 1))
                  if ((next = try_combine (insn, XEXP (links, 0),
                                           XEXP (nextlinks, 0),
                                           &new_direct_jump_p)) != 0)
                    goto retry;

              /* Try this insn with each REG_EQUAL note it links back to.  */
              for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
                {
                  rtx set, note;
                  rtx temp = XEXP (links, 0);
                  if ((set = single_set (temp)) != 0
                      && (note = find_reg_equal_equiv_note (temp)) != 0
                      && (note = XEXP (note, 0), GET_CODE (note)) != EXPR_LIST
                      /* Avoid using a register that may have already been
                         marked dead by an earlier instruction.  */
                      && ! unmentioned_reg_p (note, SET_SRC (set))
                      && (GET_MODE (note) == VOIDmode
                          ? SCALAR_INT_MODE_P (GET_MODE (SET_DEST (set)))
                          : GET_MODE (SET_DEST (set)) == GET_MODE (note)))
                    {
                      /* Temporarily replace the set's source with the
                         contents of the REG_EQUAL note.  The insn will
                         be deleted or recognized by try_combine.  */
                      rtx orig = SET_SRC (set);
                      SET_SRC (set) = note;
                      i2mod = temp;
                      i2mod_old_rhs = copy_rtx (orig);
                      i2mod_new_rhs = copy_rtx (note);
                      next = try_combine (insn, i2mod, NULL_RTX,
                                          &new_direct_jump_p);
                      i2mod = NULL_RTX;
                      if (next)
                        goto retry;
                      SET_SRC (set) = orig;
                    }
                }

              if (!NOTE_P (insn))
                record_dead_and_set_regs (insn);

            retry:
              ;
            }
        }
    }

  default_rtl_profile ();
  clear_log_links ();
  clear_bb_flags ();
  new_direct_jump_p |= purge_all_dead_edges ();
  delete_noop_moves ();

  /* Clean up.  */
  free (uid_log_links);
  free (uid_insn_cost);
  VEC_free (reg_stat_type, heap, reg_stat);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
        next = undo->next;
        free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
  rtl_hooks = general_rtl_hooks;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();

  return new_direct_jump_p;
}

/* Wipe the last_xxx fields of reg_stat in preparation for another pass.  */

static void
init_reg_last (void)
{
  unsigned int i;
  reg_stat_type *p;

  for (i = 0; VEC_iterate (reg_stat_type, reg_stat, i, p); ++i)
    memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
}

/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions (rtx first)
{
  tree arg;
  bool strictly_local = false;

  for (arg = DECL_ARGUMENTS (current_function_decl); arg;
       arg = TREE_CHAIN (arg))
    {
      rtx x, reg = DECL_INCOMING_RTL (arg);
      int uns1, uns3;
      enum machine_mode mode1, mode2, mode3, mode4;

      /* Only continue if the incoming argument is in a register.  */
      if (!REG_P (reg))
        continue;

      /* Determine, if possible, whether all call sites of the current
         function lie within the current compilation unit.  (This does
         take into account the exporting of a function via taking its
         address, and so forth.)  */
      strictly_local = cgraph_local_info (current_function_decl)->local;

      /* The mode and signedness of the argument before any promotions happen
         (equal to the mode of the pseudo holding it at that stage).  */
      mode1 = TYPE_MODE (TREE_TYPE (arg));
      uns1 = TYPE_UNSIGNED (TREE_TYPE (arg));

      /* The mode and signedness of the argument after any source language and
         TARGET_PROMOTE_PROTOTYPES-driven promotions.  */
      mode2 = TYPE_MODE (DECL_ARG_TYPE (arg));
      uns3 = TYPE_UNSIGNED (DECL_ARG_TYPE (arg));

      /* The mode and signedness of the argument as it is actually passed,
         after any TARGET_PROMOTE_FUNCTION_ARGS-driven ABI promotions.  */
      mode3 = promote_function_mode (DECL_ARG_TYPE (arg), mode2, &uns3,
                                     TREE_TYPE (cfun->decl), 0);

      /* The mode of the register in which the argument is being passed.  */
      mode4 = GET_MODE (reg);

      /* Eliminate sign extensions in the callee when:
         (a) A mode promotion has occurred;  */
      if (mode1 == mode3)
        continue;
      /* (b) The mode of the register is the same as the mode of
             the argument as it is passed;  */
      if (mode3 != mode4)
        continue;
      /* (c) There's no language level extension;  */
      if (mode1 == mode2)
        ;
      /* (c.1) All callers are from the current compilation unit.  If that's
         the case we don't have to rely on an ABI, we only have to know
         what we're generating right now, and we know that we will do the
         mode1 to mode2 promotion with the given sign.  */
      else if (!strictly_local)
        continue;
      /* (c.2) The combination of the two promotions is useful.  This is
         true when the signs match, or if the first promotion is unsigned.
         In the latter case, (sign_extend (zero_extend x)) is the same as
         (zero_extend (zero_extend x)), so make sure to force UNS3 true.  */
      else if (uns1)
        uns3 = true;
      else if (uns3)
        continue;

      /* Record that the value was promoted from mode1 to mode3,
         so that any sign extension at the head of the current
         function may be eliminated.  */
      x = gen_rtx_CLOBBER (mode1, const0_rtx);
      x = gen_rtx_fmt_e ((uns3 ? ZERO_EXTEND : SIGN_EXTEND), mode3, x);
      record_value_for_reg (reg, first, x);
    }
}

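/* An illustrative case (a sketch; details vary by target): an unsigned
   QImode argument that the ABI zero-extends into an SImode register is
   recorded as

	(zero_extend:SI (clobber:QI (const_int 0)))

   telling the rest of combine that the incoming register already holds
   a zero-extended value, so a matching extension at the head of the
   function can be deleted.  */
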
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
{
  rtx insn = (rtx) data;
  unsigned int num;

  if (REG_P (x)
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the function, we can't
         say what its contents were.  */
      && ! REGNO_REG_SET_P
           (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));

      if (set == 0 || GET_CODE (set) == CLOBBER)
        {
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          rsp->sign_bit_copies = 1;
          return;
        }

      /* If this register is being initialized using itself, and the
         register is uninitialized in this basic block, and there are
         no LOG_LINKS which set the register, then part of the
         register is uninitialized.  In that case we can't assume
         anything about the number of nonzero bits.

         ??? We could do better if we checked this in
         reg_{nonzero_bits,num_sign_bit_copies}_for_combine.  Then we
         could avoid making assumptions about the insn which initially
         sets the register, while still using the information in other
         insns.  We would have to be careful to check every insn
         involved in the combination.  */

      if (insn
          && reg_referenced_p (x, PATTERN (insn))
          && !REGNO_REG_SET_P (DF_LR_IN (BLOCK_FOR_INSN (insn)),
                               REGNO (x)))
        {
          rtx link;

          for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
            {
              if (dead_or_set_p (XEXP (link, 0), x))
                break;
            }
          if (!link)
            {
              rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
              rsp->sign_bit_copies = 1;
              return;
            }
        }

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
         set what we know about X.  */

      if (SET_DEST (set) == x
          || (GET_CODE (SET_DEST (set)) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
              && SUBREG_REG (SET_DEST (set)) == x))
        {
          rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than a word and SRC is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_stat[].nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.

             ??? For 2.5, try to tighten up the MD files in this regard
             instead of this kludge.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
              && CONST_INT_P (src)
              && INTVAL (src) > 0
              && 0 != (INTVAL (src)
                       & ((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
            src = GEN_INT (INTVAL (src)
                           | ((HOST_WIDE_INT) (-1)
                              << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

          /* Don't call nonzero_bits if it cannot change anything.  */
          if (rsp->nonzero_bits != ~(unsigned HOST_WIDE_INT) 0)
            rsp->nonzero_bits |= nonzero_bits (src, nonzero_bits_mode);
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
          if (rsp->sign_bit_copies == 0
              || rsp->sign_bit_copies > num)
            rsp->sign_bit_copies = num;
        }
      else
        {
          rsp->nonzero_bits = GET_MODE_MASK (GET_MODE (x));
          rsp->sign_bit_copies = 1;
        }
    }
}
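
/* An illustrative sketch (hypothetical register): if every set of
   (reg:SI 100) is a byte load on a machine where byte loads zero extend,
   each call to this function ORs 0xff into the register's nonzero_bits,
   and the pass thereafter knows that bits 8..31 of (reg 100) are always
   zero.  */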
1531\f
1532/* See if INSN can be combined into I3. PRED and SUCC are optionally
1533 insns that were previously combined into I3 or that will be combined
1534 into the merger of INSN and I3.
1535
1536 Return 0 if the combination is not allowed for any reason.
1537
663522cb 1538 If the combination is allowed, *PDEST will be set to the single
230d793d
RS
1539 destination of INSN and *PSRC to the single source, and this function
1540 will return 1. */

static int
can_combine_p (rtx insn, rtx i3, rtx pred ATTRIBUTE_UNUSED, rtx succ,
	       rtx *pdest, rtx *psrc)
{
  int i;
  const_rtx set = 0;
  rtx src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USES of pseudo registers with any other USE.  */
	      if (REG_P (XEXP (elt, 0))
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  unsigned int regno = REGNO (XEXP (elt, 0));

		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);

		      if (GET_CODE (i3elt) == USE
			  && REG_P (XEXP (i3elt, 0))
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && insn_nothrow_p (insn)
		  && !side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a LIBCALL sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge an ASM_OPERANDS.  */
      || GET_CODE (src) == ASM_OPERANDS
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (CALL_P (i3)
	  && (find_reg_fusage (i3, USE, dest)
	      || (REG_P (dest)
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't substitute into a non-local goto, this confuses CFG.  */
      || (JUMP_P (i3) && find_reg_note (i3, REG_NON_LOCAL_GOTO, NULL_RTX))
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((!MEM_P (src)
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, DF_INSN_LUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that as a special case.  */
      || (DF_INSN_LUID (insn) < last_call_luid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (REG_P (dest))
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (REG_P (src)
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER)
	{
	  /* Don't substitute for a register intended as a clobberable
	     operand.  */
	  rtx reg = XEXP (XVECEXP (PATTERN (i3), 0, i), 0);
	  if (rtx_equal_p (reg, dest))
	    return 0;

	  /* If the clobber represents an earlyclobber operand, we must not
	     substitute an expression containing the clobbered register.
	     As we do not analyze the constraint strings here, we have to
	     make the conservative assumption.  However, if the register is
	     a fixed hard reg, the clobber cannot represent any operand;
	     we leave it up to the machine description to either accept or
	     reject use-and-clobber patterns.  */
	  if (!REG_P (reg)
	      || REGNO (reg) >= FIRST_PSEUDO_REGISTER
	      || !fixed_regs[REGNO (reg)])
	    if (reg_overlap_mentioned_p (reg, src))
	      return 0;
	}

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (INSN_P (p) && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (INSN_P (p) && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN contains an autoincrement or autodecrement, make sure that
     register is not used between there and I3, and not already used in
     I3 either.  Neither must it be used in PRED or SUCC, if they exist.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (JUMP_P (i3)
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || (pred != NULL_RTX
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (pred)))
	    || (succ != NULL_RTX
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (succ)))
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && NONJUMP_INSN_P (p) && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is nonzero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is nonzero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (rtx i3, rtx *loc, rtx i2dest, rtx i1dest,
		  int i1_not_in_src, rtx *pi3dest_killed)
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = x;
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;
      rtx subdest;

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* Check for the case where I3 modifies its output, as discussed
	 above.  We don't want to prevent pseudos from being combined
	 into the address of a MEM, so only prevent the combination if
	 i1 or i2 set the same MEM.  */
      if ((inner_dest != dest
	   && (!MEM_P (inner_dest)
	       || rtx_equal_p (i2dest, inner_dest)
	       || (i1dest && rtx_equal_p (i1dest, inner_dest)))
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except we can't test
	     all_adjacent; we don't have to, since this instruction will stay
	     in place, thus we are not considering increasing the lifetime of
	     INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (REG_P (inner_dest)
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn, so
	 record that for later.  We have to consider paradoxical
	 subregs here, since they kill the whole register, but we
	 ignore partial subregs, STRICT_LOW_PART, etc.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      subdest = dest;
      if (GET_CODE (subdest) == SUBREG
	  && (GET_MODE_SIZE (GET_MODE (subdest))
	      >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (subdest)))))
	subdest = SUBREG_REG (subdest);
      if (pi3dest_killed
	  && REG_P (subdest)
	  && reg_referenced_p (subdest, PATTERN (i3))
	  && REGNO (subdest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (subdest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (subdest) != ARG_POINTER_REGNUM
	      || ! fixed_regs[REGNO (subdest)])
#endif
	  && REGNO (subdest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = subdest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */
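
/* For instance (an added illustration, not from the original sources):
   (mult:SI (reg:SI 100) (const_int 8)) does not count, since multiplying
   by a power of two is really a shift, while
   (div:SI (reg:SI 100) (reg:SI 101)) does.  */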

static int
contains_muldiv (rtx x)
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (CONST_INT_P (XEXP (x, 1))
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      if (BINARY_P (x))
	return contains_muldiv (XEXP (x, 0))
	       || contains_muldiv (XEXP (x, 1));

      if (UNARY_P (x))
	return contains_muldiv (XEXP (x, 0));

      return 0;
    }
}
\f
/* Determine whether INSN can be used in a combination.  Return nonzero if
   not.  This is used in try_combine to detect early some cases where we
   can't perform combinations.  */

static int
cant_combine_insn_p (rtx insn)
{
  rtx set;
  rtx src, dest;

  /* If this isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  */
  if (! INSN_P (insn))
    return 1;

  /* Never combine loads and stores involving hard regs that are likely
     to be spilled.  The register allocator can usually handle such
     reg-reg moves by tying.  If we allow the combiner to make
     substitutions of likely-spilled regs, reload might die.
     As an exception, we allow combinations involving fixed regs; these are
     not available to the register allocator so there's no risk involved.  */

  set = single_set (insn);
  if (! set)
    return 0;
  src = SET_SRC (set);
  dest = SET_DEST (set);
  if (GET_CODE (src) == SUBREG)
    src = SUBREG_REG (src);
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);
  if (REG_P (src) && REG_P (dest)
      && ((REGNO (src) < FIRST_PSEUDO_REGISTER
	   && ! fixed_regs[REGNO (src)]
	   && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (src))))
	  || (REGNO (dest) < FIRST_PSEUDO_REGISTER
	      && ! fixed_regs[REGNO (dest)]
	      && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (dest))))))
    return 1;

  return 0;
}

struct likely_spilled_retval_info
{
  unsigned regno, nregs;
  unsigned mask;
};

/* Called via note_stores by likely_spilled_retval_p.  Remove from info->mask
   hard registers that are known to be written to / clobbered in full.  */
static void
likely_spilled_retval_1 (rtx x, const_rtx set, void *data)
{
  struct likely_spilled_retval_info *const info =
    (struct likely_spilled_retval_info *) data;
  unsigned regno, nregs;
  unsigned new_mask;

  if (!REG_P (XEXP (set, 0)))
    return;
  regno = REGNO (x);
  if (regno >= info->regno + info->nregs)
    return;
  nregs = hard_regno_nregs[regno][GET_MODE (x)];
  if (regno + nregs <= info->regno)
    return;
  new_mask = (2U << (nregs - 1)) - 1;
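  /* Note (added for clarity): this yields one mask bit per hard register,
     e.g. nregs == 3 gives 0b111.  Writing (2U << (nregs - 1)) - 1 instead
     of (1U << nregs) - 1 presumably avoids an undefined full-width shift
     when nregs equals the bit width of unsigned int.  */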
  if (regno < info->regno)
    new_mask >>= info->regno - regno;
  else
    new_mask <<= regno - info->regno;
  info->mask &= ~new_mask;
}

/* Return nonzero iff part of the return value is live during INSN, and
   it is likely spilled.  This can happen when more than one insn is needed
   to copy the return value, e.g. when we consider combining into the
   second copy insn for a complex value.  */
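
/* Illustrative case (added; not from the original sources): a complex
   double value returned in two hard registers and copied out with two
   moves; when combine considers the second move, the other half of the
   return value is still live in a likely-spilled hard register.  */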

static int
likely_spilled_retval_p (rtx insn)
{
  rtx use = BB_END (this_basic_block);
  rtx reg, p;
  unsigned regno, nregs;
  /* We assume here that no machine mode needs more than
     32 hard registers when the value overlaps with a register
     for which FUNCTION_VALUE_REGNO_P is true.  */
  unsigned mask;
  struct likely_spilled_retval_info info;

  if (!NONJUMP_INSN_P (use) || GET_CODE (PATTERN (use)) != USE || insn == use)
    return 0;
  reg = XEXP (PATTERN (use), 0);
  if (!REG_P (reg) || !FUNCTION_VALUE_REGNO_P (REGNO (reg)))
    return 0;
  regno = REGNO (reg);
  nregs = hard_regno_nregs[regno][GET_MODE (reg)];
  if (nregs == 1)
    return 0;
  mask = (2U << (nregs - 1)) - 1;

  /* Disregard parts of the return value that are set later.  */
  info.regno = regno;
  info.nregs = nregs;
  info.mask = mask;
  for (p = PREV_INSN (use); info.mask && p != insn; p = PREV_INSN (p))
    if (INSN_P (p))
      note_stores (PATTERN (p), likely_spilled_retval_1, &info);
  mask = info.mask;

  /* Check if any of the (probably) live return value registers is
     likely spilled.  */
  nregs--;
  do
    {
      if ((mask & 1 << nregs)
	  && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (regno + nregs)))
	return 1;
    } while (nregs--);
  return 0;
}

/* Adjust INSN after we made a change to its destination.

   Changing the destination can invalidate notes that say something about
   the results of the insn and a LOG_LINK pointing to the insn.  */

static void
adjust_for_new_dest (rtx insn)
{
  /* For notes, be conservative and simply remove them.  */
  remove_reg_equal_equiv_notes (insn);

  /* The new insn will have a destination that was previously the destination
     of an insn just above it.  Call distribute_links to make a LOG_LINK from
     the next use of that destination.  */
  distribute_links (gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX));

  df_insn_rescan (insn);
}

/* Return TRUE if combine can reuse reg X in mode MODE.
   ADDED_SETS is nonzero if the original set is still required.  */
static bool
can_change_dest_mode (rtx x, int added_sets, enum machine_mode mode)
{
  unsigned int regno;

  if (!REG_P (x))
    return false;

  regno = REGNO (x);
  /* Allow hard registers if the new mode is legal, and occupies no more
     registers than the old mode.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    return (HARD_REGNO_MODE_OK (regno, mode)
	    && (hard_regno_nregs[regno][GET_MODE (x)]
		>= hard_regno_nregs[regno][mode]));

  /* Or a pseudo that is only used once.  */
  return (REG_N_SETS (regno) == 1 && !added_sets
	  && !REG_USERVAR_P (x));
}

/* Check whether X, the destination of a set, refers to part of
   the register specified by REG.  */
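
/* For example (an added illustration): with REG = (reg:SI 100), both
   (subreg:HI (reg:SI 100) 0) and
   (strict_low_part (subreg:HI (reg:SI 100) 0)) refer to part of the
   register, while (reg:SI 100) itself does not.  */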

static bool
reg_subword_p (rtx x, rtx reg)
{
  /* Check that reg is an integer mode register.  */
  if (!REG_P (reg) || GET_MODE_CLASS (GET_MODE (reg)) != MODE_INT)
    return false;

  if (GET_CODE (x) == STRICT_LOW_PART
      || GET_CODE (x) == ZERO_EXTRACT)
    x = XEXP (x, 0);

  return GET_CODE (x) == SUBREG
	 && SUBREG_REG (x) == reg
	 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT;
}

#ifdef AUTO_INC_DEC
/* Replace auto-increment addressing modes with explicit operations to
   access the same addresses without modifying the corresponding
   registers.  If AFTER holds, SRC is meant to be reused after the
   side effect, otherwise it is to be reused before that.  */
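
/* For instance (an added illustration): in
   (mem:SI (post_inc:SI (reg:SI 100))) the address reused before the side
   effect is just (reg:SI 100), while in
   (mem:SI (pre_inc:SI (reg:SI 100))) the address reused before the side
   effect becomes (plus:SI (reg:SI 100) (const_int 4)).  */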

static rtx
cleanup_auto_inc_dec (rtx src, bool after, enum machine_mode mem_mode)
{
  rtx x = src;
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCHes must be shared because they represent distinct values.  */
      return x;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return x;
      break;

    case CONST:
      if (shared_const_p (x))
	return x;
      break;

    case MEM:
      mem_mode = GET_MODE (x);
      break;

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
      gcc_assert (mem_mode != VOIDmode && mem_mode != BLKmode);
      if (after == (code == PRE_INC || code == PRE_DEC))
	x = cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode);
      else
	x = gen_rtx_PLUS (GET_MODE (x),
			  cleanup_auto_inc_dec (XEXP (x, 0), after, mem_mode),
			  GEN_INT ((code == PRE_INC || code == POST_INC)
				   ? GET_MODE_SIZE (mem_mode)
				   : -GET_MODE_SIZE (mem_mode)));
      return x;

    case PRE_MODIFY:
    case POST_MODIFY:
      if (after == (code == PRE_MODIFY))
	x = XEXP (x, 0);
      else
	x = XEXP (x, 1);
      return cleanup_auto_inc_dec (x, after, mem_mode);

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  x = shallow_copy_rtx (x);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (x, used) = 0;

  /* We do not copy FRAME_RELATED for INSNs.  */
  if (INSN_P (x))
    RTX_FLAG (x, frame_related) = 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      XEXP (x, i) = cleanup_auto_inc_dec (XEXP (x, i), after, mem_mode);
    else if (fmt[i] == 'E' || fmt[i] == 'V')
      {
	int j;
	XVEC (x, i) = rtvec_alloc (XVECLEN (x, i));
	for (j = 0; j < XVECLEN (x, i); j++)
	  XVECEXP (x, i, j)
	    = cleanup_auto_inc_dec (XVECEXP (src, i, j), after, mem_mode);
      }

  return x;
}

/* Auxiliary data structure for propagate_for_debug_stmt.  */

struct rtx_subst_pair
{
  rtx to;
  bool adjusted;
  bool after;
};

/* DATA points to an rtx_subst_pair.  Return the value that should be
   substituted.  */

static rtx
propagate_for_debug_subst (rtx from ATTRIBUTE_UNUSED, void *data)
{
  struct rtx_subst_pair *pair = (struct rtx_subst_pair *)data;

  if (!pair->adjusted)
    {
      pair->adjusted = true;
      pair->to = cleanup_auto_inc_dec (pair->to, pair->after, VOIDmode);
      return pair->to;
    }
  return copy_rtx (pair->to);
}
#endif

/* Replace occurrences of DEST with SRC in DEBUG_INSNs between INSN
   and LAST.  If MOVE holds, debug insns must also be moved past
   LAST.  */

static void
propagate_for_debug (rtx insn, rtx last, rtx dest, rtx src, bool move)
{
  rtx next, move_pos = move ? last : NULL_RTX, loc;

#ifdef AUTO_INC_DEC
  struct rtx_subst_pair p;
  p.to = src;
  p.adjusted = false;
  p.after = move;
#endif

  next = NEXT_INSN (insn);
  while (next != last)
    {
      insn = next;
      next = NEXT_INSN (insn);
      if (DEBUG_INSN_P (insn))
	{
#ifdef AUTO_INC_DEC
	  loc = simplify_replace_fn_rtx (INSN_VAR_LOCATION_LOC (insn),
					 dest, propagate_for_debug_subst, &p);
#else
	  loc = simplify_replace_rtx (INSN_VAR_LOCATION_LOC (insn), dest, src);
#endif
	  if (loc == INSN_VAR_LOCATION_LOC (insn))
	    continue;
	  INSN_VAR_LOCATION_LOC (insn) = loc;
	  if (move_pos)
	    {
	      remove_insn (insn);
	      PREV_INSN (insn) = NEXT_INSN (insn) = NULL_RTX;
	      move_pos = emit_debug_insn_after (insn, move_pos);
	    }
	  else
	    df_insn_rescan (insn);
	}
    }
}

/* Delete the unconditional jump INSN and adjust the CFG correspondingly.
   Note that the INSN should be deleted *after* removing dead edges, so
   that the kept edge is the fallthrough edge for a (set (pc) (pc))
   but not for a (set (pc) (label_ref FOO)).  */

static void
update_cfg_for_uncondjump (rtx insn)
{
  basic_block bb = BLOCK_FOR_INSN (insn);
  bool at_end = (BB_END (bb) == insn);

  if (at_end)
    purge_dead_edges (bb);

  delete_insn (insn);
  if (at_end && EDGE_COUNT (bb->succs) == 1)
    single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
}


/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.

   Set NEW_DIRECT_JUMP_P to a nonzero value if try_combine creates a
   new direct jump instruction.  */

static rtx
try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p)
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  rtvec newpat_vec_with_clobbers = 0;
  int substed_i2 = 0, substed_i1 = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number = 0, other_code_number = 0;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest = 0, i2src = 0, i1dest = 0, i1src = 0;
  /* Set if I2DEST was reused as a scratch register.  */
  bool i2scratch = false;
  /* PATTERN (I1) and PATTERN (I2), or a copy of it in certain cases.  */
  rtx i1pat = 0, i2pat = 0;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i2dest_killed = 0, i1dest_killed = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  int swap_i2i3 = 0;
  int changed_i3_dest = 0;

  int maxreg;
  rtx temp;
  rtx link;
  rtx other_pat = 0;
  rtx new_other_notes;
  int i;

  /* Exit early if one of the insns involved can't be used for
     combinations.  */
  if (cant_combine_insn_p (i3)
      || cant_combine_insn_p (i2)
      || (i1 && cant_combine_insn_p (i1))
      || likely_spilled_retval_p (i3))
    return 0;

  combine_attempts++;
  undobuf.other_insn = 0;

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (i1)
	fprintf (dump_file, "\nTrying %d, %d -> %d:\n",
		 INSN_UID (i1), INSN_UID (i2), INSN_UID (i3));
      else
	fprintf (dump_file, "\nTrying %d -> %d:\n",
		 INSN_UID (i2), INSN_UID (i3));
    }

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && DF_INSN_LUID (i1) > DF_INSN_LUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 is a PARALLEL
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     Note that this case handles both multiple sets in I2 and also
     cases where I2 has a number of CLOBBERs or PARALLELs.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
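
  /* The shape handled here looks like this (an added illustration, not
     from the original sources):

	i2: (parallel [(set (reg:SI 100) (div:SI (reg:SI 98) (reg:SI 99)))
		       (set (reg:SI 101) (mod:SI (reg:SI 98) (reg:SI 99)))])
	i3: (set (mem:SI (reg:SI 102)) (reg:SI 101))

     where (reg:SI 101) dies in I3; the remainder is then computed
     directly into I3's memory destination.  */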

  if (i1 == 0 && NONJUMP_INSN_P (i3) && GET_CODE (PATTERN (i3)) == SET
      && REG_P (SET_SRC (PATTERN (i3)))
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_active_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_luid = DF_INSN_LUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2src = SET_DEST (PATTERN (i3));
	      i2dest = SET_SRC (PATTERN (i3));
	      i2dest_killed = dead_or_set_p (i2, i2dest);

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }

  /* If I2 is setting a pseudo to a constant and I3 is setting some
     sub-part of it to another constant, merge them by making a new
     constant.  */
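
  /* Worked example (added for illustration): if I2 is
	(set (reg:SI 100) (const_int 0x12345678))
     and I3 is
	(set (subreg:HI (reg:SI 100) 0) (const_int 0xabcd))
     on a target where that SUBREG is the low part, the code below
     rewrites I2 as
	(set (reg:SI 100) (const_int 0x1234abcd)).  */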
  if (i1 == 0
      && (temp = single_set (i2)) != 0
      && (CONST_INT_P (SET_SRC (temp))
	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
      && GET_CODE (PATTERN (i3)) == SET
      && (CONST_INT_P (SET_SRC (PATTERN (i3)))
	  || GET_CODE (SET_SRC (PATTERN (i3))) == CONST_DOUBLE)
      && reg_subword_p (SET_DEST (PATTERN (i3)), SET_DEST (temp)))
    {
      rtx dest = SET_DEST (PATTERN (i3));
      int offset = -1;
      int width = 0;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	{
	  if (CONST_INT_P (XEXP (dest, 1))
	      && CONST_INT_P (XEXP (dest, 2)))
	    {
	      width = INTVAL (XEXP (dest, 1));
	      offset = INTVAL (XEXP (dest, 2));
	      dest = XEXP (dest, 0);
	      if (BITS_BIG_ENDIAN)
		offset = GET_MODE_BITSIZE (GET_MODE (dest)) - width - offset;
	    }
	}
      else
	{
	  if (GET_CODE (dest) == STRICT_LOW_PART)
	    dest = XEXP (dest, 0);
	  width = GET_MODE_BITSIZE (GET_MODE (dest));
	  offset = 0;
	}

      if (offset >= 0)
	{
	  /* If this is the low part, we're done.  */
	  if (subreg_lowpart_p (dest))
	    ;
	  /* Handle the case where inner is twice the size of outer.  */
	  else if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
		   == 2 * GET_MODE_BITSIZE (GET_MODE (dest)))
	    offset += GET_MODE_BITSIZE (GET_MODE (dest));
	  /* Otherwise give up for now.  */
	  else
	    offset = -1;
	}

      if (offset >= 0
	  && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (temp)))
	      <= HOST_BITS_PER_WIDE_INT * 2))
	{
	  HOST_WIDE_INT mhi, ohi, ihi;
	  HOST_WIDE_INT mlo, olo, ilo;
	  rtx inner = SET_SRC (PATTERN (i3));
	  rtx outer = SET_SRC (temp);

	  if (CONST_INT_P (outer))
	    {
	      olo = INTVAL (outer);
	      ohi = olo < 0 ? -1 : 0;
	    }
	  else
	    {
	      olo = CONST_DOUBLE_LOW (outer);
	      ohi = CONST_DOUBLE_HIGH (outer);
	    }

	  if (CONST_INT_P (inner))
	    {
	      ilo = INTVAL (inner);
	      ihi = ilo < 0 ? -1 : 0;
	    }
	  else
	    {
	      ilo = CONST_DOUBLE_LOW (inner);
	      ihi = CONST_DOUBLE_HIGH (inner);
	    }

	  if (width < HOST_BITS_PER_WIDE_INT)
	    {
	      mlo = ((unsigned HOST_WIDE_INT) 1 << width) - 1;
	      mhi = 0;
	    }
	  else if (width < HOST_BITS_PER_WIDE_INT * 2)
	    {
	      mhi = ((unsigned HOST_WIDE_INT) 1
		     << (width - HOST_BITS_PER_WIDE_INT)) - 1;
	      mlo = -1;
	    }
	  else
	    {
	      mlo = -1;
	      mhi = -1;
	    }

	  ilo &= mlo;
	  ihi &= mhi;

	  if (offset >= HOST_BITS_PER_WIDE_INT)
	    {
	      mhi = mlo << (offset - HOST_BITS_PER_WIDE_INT);
	      mlo = 0;
	      ihi = ilo << (offset - HOST_BITS_PER_WIDE_INT);
	      ilo = 0;
	    }
	  else if (offset > 0)
	    {
	      mhi = (mhi << offset) | ((unsigned HOST_WIDE_INT) mlo
				       >> (HOST_BITS_PER_WIDE_INT - offset));
	      mlo = mlo << offset;
	      ihi = (ihi << offset) | ((unsigned HOST_WIDE_INT) ilo
				       >> (HOST_BITS_PER_WIDE_INT - offset));
	      ilo = ilo << offset;
	    }

	  olo = (olo & ~mlo) | ilo;
	  ohi = (ohi & ~mhi) | ihi;

	  combine_merges++;
	  subst_insn = i3;
	  subst_low_luid = DF_INSN_LUID (i2);
	  added_sets_2 = added_sets_1 = 0;
	  i2dest = SET_DEST (temp);
	  i2dest_killed = dead_or_set_p (i2, i2dest);

	  SUBST (SET_SRC (temp),
		 immed_double_const (olo, ohi, GET_MODE (SET_DEST (temp))));

	  newpat = PATTERN (i2);
	  goto validate_replacement;
	}
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, 1)))
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same DF_INSN_LUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
			     BLOCK_FOR_INSN (i2), INSN_LOCATOR (i2),
			     XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
  i2dest_killed = dead_or_set_p (i2, i2dest);
  i1dest_killed = i1 && dead_or_set_p (i1, i1dest);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& REG_P (SET_SRC (PATTERN (i3)))
	&& MEM_P (SET_DEST (PATTERN (i3)))
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	    || (i1 != 0
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
      {
	undo_all ();
	return 0;
      }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  if (added_sets_2)
    {
      if (GET_CODE (PATTERN (i2)) == PARALLEL)
	i2pat = gen_rtx_SET (VOIDmode, i2dest, copy_rtx (i2src));
      else
	i2pat = copy_rtx (PATTERN (i2));
    }

  if (added_sets_1)
    {
      if (GET_CODE (PATTERN (i1)) == PARALLEL)
	i1pat = gen_rtx_SET (VOIDmode, i1dest, copy_rtx (i1src));
      else
	i1pat = copy_rtx (PATTERN (i1));
    }

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
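
  /* Such a PARALLEL typically looks like this (an added illustration):

	(parallel [(set (reg:CC 17)
			(compare:CC (plus:SI (reg:SI 100) (reg:SI 101))
				    (const_int 0)))
		   (set (reg:SI 102)
			(plus:SI (reg:SI 100) (reg:SI 101)))])  */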

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef SELECT_CC_MODE
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef SELECT_CC_MODE
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  if (can_change_dest_mode (SET_DEST (newpat), added_sets_2,
				    compare_mode))
	    {
	      unsigned int regno = REGNO (SET_DEST (newpat));
	      rtx new_dest;

	      if (regno < FIRST_PSEUDO_REGISTER)
		new_dest = gen_rtx_REG (compare_mode, regno);
	      else
		{
		  SUBST_MODE (regno_reg_rtx[regno], compare_mode);
		  new_dest = regno_reg_rtx[regno];
		}

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_COMPARE (compare_mode, i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      /* It is possible that the source of I2 or I1 may be performing
	 an unneeded operation, such as a ZERO_EXTEND of something
	 that is known to have the high part zero.  Handle that case
	 by letting subst look at the innermost one of them.

	 Another way to do this would be to have a function that tries
	 to simplify a single insn instead of merging two or more
	 insns.  We don't do this because of the potential of infinite
	 loops and because of the potential extra memory required.
	 However, doing it the way we are is a bit of a kludge and
	 doesn't catch all cases.

	 But only do this if -fexpensive-optimizations since it slows
	 things down and doesn't usually win.

	 This is not done in the COMPARE case above because the
	 unmodified I2PAT is used in the PARALLEL and so a pattern
	 with a modified I2SRC would not match.  */

      if (flag_expensive_optimizations)
	{
	  /* Pass pc_rtx so no substitutions are done, just
	     simplifications.  */
	  if (i1)
	    {
	      subst_low_luid = DF_INSN_LUID (i1);
	      i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	    }
	  else
	    {
	      subst_low_luid = DF_INSN_LUID (i2);
	      i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	    }
	}

      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_luid = DF_INSN_LUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      substed_i2 = 1;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Check that an autoincrement side-effect on I1 has not been lost.
	 This happens if I1DEST is mentioned in I2 and dies there, and
	 has disappeared from the new pattern.  */
      if ((FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	   && !i1_feeds_i3
	   && dead_or_set_p (i2, i1dest)
	   && !reg_overlap_mentioned_p (i1dest, newpat))
	  /* Before we can do this substitution, we must redo the test done
	     above (see detailed comments there) that ensures that I1DEST
	     isn't mentioned in any SETs in NEWPAT that are field assignments.  */
	  || !combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX, 0, 0))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_luid = DF_INSN_LUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      substed_i1 = 1;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register.  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
	 at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
	  && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
	  memcpy (XVEC (newpat, 0)->elem, &old->elem[0],
		  sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets) = i1pat;

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* If recog_for_combine fails, it strips existing clobbers.  If we'll
     consider splitting this pattern, we might need these clobbers.  */
  if (i1 && GET_CODE (newpat) == PARALLEL
      && GET_CODE (XVECEXP (newpat, 0, XVECLEN (newpat, 0) - 1)) == CLOBBER)
    {
      int len = XVECLEN (newpat, 0);

      newpat_vec_with_clobbers = rtvec_alloc (len);
      for (i = 0; i < len; i++)
	RTVEC_ELT (newpat_vec_with_clobbers, i) = XVECEXP (newpat, 0, i);
    }

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused and isn't
     marked as an instruction that might trap in an EH region.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.

     It's pointless doing this if we originally had two sets, one from
     i3, and one from i2.  Combining then splitting the parallel results
     in the original i2 again plus an invalid insn (which we delete).
     The net effect is only to move instructions around, which makes
     debug info less accurate.

     Also check the case where the first SET's destination is unused.
     That would not cause incorrect code, but does cause an unneeded
     insn to remain.  */

  if (insn_code_number < 0
      && !(added_sets_2 && i1 == 0)
      && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx set0 = XVECEXP (newpat, 0, 0);
      rtx set1 = XVECEXP (newpat, 0, 1);

      if (((REG_P (SET_DEST (set1))
	    && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)))
	   || (GET_CODE (SET_DEST (set1)) == SUBREG
	       && find_reg_note (i3, REG_UNUSED, SUBREG_REG (SET_DEST (set1)))))
	  && insn_nothrow_p (i3)
	  && !side_effects_p (SET_SRC (set1)))
	{
	  newpat = set0;
	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
	}

      else if (((REG_P (SET_DEST (set0))
		 && find_reg_note (i3, REG_UNUSED, SET_DEST (set0)))
		|| (GET_CODE (SET_DEST (set0)) == SUBREG
		    && find_reg_note (i3, REG_UNUSED,
				      SUBREG_REG (SET_DEST (set0)))))
	       && insn_nothrow_p (i3)
	       && !side_effects_p (SET_SRC (set0)))
	{
	  newpat = set1;
	  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

	  if (insn_code_number >= 0)
	    changed_i3_dest = 1;
	}
    }
3118
3119 /* If we were combining three insns and the result is a simple SET
3120 with no ASM_OPERANDS that wasn't recognized, try to split it into two
663522cb 3121 insns. There are two ways to do this. It can be split using a
916f14f1
RK
3122 machine-specific method (like when you have an addition of a large
3123 constant) or by combine in the function find_split_point. */
3124
230d793d
RS
3125 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
3126 && asm_noperands (newpat) < 0)
3127 {
829f8ff7 3128 rtx parallel, m_split, *split;
916f14f1
RK
3129
3130 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
3131 use I2DEST as a scratch register will help. In the latter case,
3132 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1 3133
829f8ff7 3134 m_split = combine_split_insns (newpat, i3);
a70c61d9
JW
3135
3136 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
3137 inputs of NEWPAT. */
3138
3139 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
3140 possible to try that as a scratch reg. This would require adding
3141 more code to make it work though. */
3142
abcb0cdc 3143 if (m_split == 0 && ! reg_overlap_mentioned_p (i2dest, newpat))
42495ca0 3144 {
abcb0cdc 3145 enum machine_mode new_mode = GET_MODE (SET_DEST (newpat));
c5c76735 3146
abcb0cdc
ILT
3147 /* First try to split using the original register as a
3148 scratch register. */
829f8ff7
ILT
3149 parallel = gen_rtx_PARALLEL (VOIDmode,
3150 gen_rtvec (2, newpat,
3151 gen_rtx_CLOBBER (VOIDmode,
3152 i2dest)));
3153 m_split = combine_split_insns (parallel, i3);
abcb0cdc
ILT
3154
3155 /* If that didn't work, try changing the mode of I2DEST if
3156 we can. */
3157 if (m_split == 0
3158 && new_mode != GET_MODE (i2dest)
3159 && new_mode != VOIDmode
3160 && can_change_dest_mode (i2dest, added_sets_2, new_mode))
c7ca5912 3161 {
abcb0cdc
ILT
3162 enum machine_mode old_mode = GET_MODE (i2dest);
3163 rtx ni2dest;
3164
3165 if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3166 ni2dest = gen_rtx_REG (new_mode, REGNO (i2dest));
3167 else
3168 {
3169 SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], new_mode);
3170 ni2dest = regno_reg_rtx[REGNO (i2dest)];
3171 }
3172
829f8ff7
ILT
3173 parallel = (gen_rtx_PARALLEL
3174 (VOIDmode,
3175 gen_rtvec (2, newpat,
3176 gen_rtx_CLOBBER (VOIDmode,
3177 ni2dest))));
3178 m_split = combine_split_insns (parallel, i3);
abcb0cdc
ILT
3179
3180 if (m_split == 0
3181 && REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
3182 {
3183 struct undo *buf;
3184
38ae7651 3185 adjust_reg_mode (regno_reg_rtx[REGNO (i2dest)], old_mode);
abcb0cdc
ILT
3186 buf = undobuf.undos;
3187 undobuf.undos = buf->next;
3188 buf->next = undobuf.frees;
3189 undobuf.frees = buf;
3190 }
c7ca5912 3191 }
b5b8b0ac
AO
3192
3193 i2scratch = m_split != 0;
42495ca0 3194 }
916f14f1 3195
9b12dc4f
R
3196 /* If recog_for_combine has discarded clobbers, try to use them
3197 again for the split. */
3198 if (m_split == 0 && newpat_vec_with_clobbers)
829f8ff7
ILT
3199 {
3200 parallel = gen_rtx_PARALLEL (VOIDmode, newpat_vec_with_clobbers);
3201 m_split = combine_split_insns (parallel, i3);
3202 }
9b12dc4f 3203
2f937369 3204 if (m_split && NEXT_INSN (m_split) == NULL_RTX)
d340408c 3205 {
2f937369 3206 m_split = PATTERN (m_split);
d340408c
RH
3207 insn_code_number = recog_for_combine (&m_split, i3, &new_i3_notes);
3208 if (insn_code_number >= 0)
3209 newpat = m_split;
23190837 3210 }
2f937369 3211 else if (m_split && NEXT_INSN (NEXT_INSN (m_split)) == NULL_RTX
d340408c 3212 && (next_real_insn (i2) == i3
6fb5fa3c 3213 || ! use_crosses_set_p (PATTERN (m_split), DF_INSN_LUID (i2))))
916f14f1 3214 {
1a26b032 3215 rtx i2set, i3set;
2f937369
DM
3216 rtx newi3pat = PATTERN (NEXT_INSN (m_split));
3217 newi2pat = PATTERN (m_split);
916f14f1 3218
2f937369
DM
3219 i3set = single_set (NEXT_INSN (m_split));
3220 i2set = single_set (m_split);
1a26b032 3221
8e2f6e35 3222 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
3223
3224 /* If I2 or I3 has multiple SETs, we won't know how to track
9cc96794
RK
3225 register status, so don't use these insns. If I2's destination
3226 is used between I2 and I3, we also can't use these insns. */
1a26b032 3227
9cc96794
RK
3228 if (i2_code_number >= 0 && i2set && i3set
3229 && (next_real_insn (i2) == i3
3230 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
8e2f6e35
BS
3231 insn_code_number = recog_for_combine (&newi3pat, i3,
3232 &new_i3_notes);
d0ab8cd3
RK
3233 if (insn_code_number >= 0)
3234 newpat = newi3pat;
3235
c767f54b 3236 /* It is possible that both insns now set the destination of I3.
22609cbf 3237 If so, we must show an extra use of it. */
c767f54b 3238
393de53f
RK
3239 if (insn_code_number >= 0)
3240 {
3241 rtx new_i3_dest = SET_DEST (i3set);
3242 rtx new_i2_dest = SET_DEST (i2set);
3243
3244 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
3245 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
3246 || GET_CODE (new_i3_dest) == SUBREG)
3247 new_i3_dest = XEXP (new_i3_dest, 0);
3248
d4096689
RK
3249 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
3250 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
3251 || GET_CODE (new_i2_dest) == SUBREG)
3252 new_i2_dest = XEXP (new_i2_dest, 0);
3253
f8cfc6aa
JQ
3254 if (REG_P (new_i3_dest)
3255 && REG_P (new_i2_dest)
393de53f 3256 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
6fb5fa3c 3257 INC_REG_N_SETS (REGNO (new_i2_dest), 1);
393de53f 3258 }
916f14f1 3259 }
230d793d
RS
3260
3261 /* If we can split it and use I2DEST, go ahead and see if that
3262 helps things be recognized. Verify that none of the registers
3263 are set between I2 and I3. */
d0ab8cd3 3264 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d 3265#ifdef HAVE_cc0
f8cfc6aa 3266 && REG_P (i2dest)
230d793d
RS
3267#endif
3268 /* We need I2DEST in the proper mode. If it is a hard register
1ad93fbf
BS
3269 or the only use of a pseudo, we can change its mode.
3270 Make sure we don't change a hard register to have a mode that
3271 isn't valid for it, or change the number of registers. */
230d793d
RS
3272 && (GET_MODE (*split) == GET_MODE (i2dest)
3273 || GET_MODE (*split) == VOIDmode
4164b2fb
PB
3274 || can_change_dest_mode (i2dest, added_sets_2,
3275 GET_MODE (*split)))
230d793d 3276 && (next_real_insn (i2) == i3
6fb5fa3c 3277 || ! use_crosses_set_p (*split, DF_INSN_LUID (i2)))
230d793d
RS
3278 /* We can't overwrite I2DEST if its value is still used by
3279 NEWPAT. */
3280 && ! reg_referenced_p (i2dest, newpat))
3281 {
3282 rtx newdest = i2dest;
df7d75de
RK
3283 enum rtx_code split_code = GET_CODE (*split);
3284 enum machine_mode split_mode = GET_MODE (*split);
c2c22cd6
RS
3285 bool subst_done = false;
3286 newi2pat = NULL_RTX;
230d793d 3287
b5b8b0ac
AO
3288 i2scratch = true;
3289
230d793d
RS
3290 /* Get NEWDEST as a register in the proper mode. We have already
3291 validated that we can do this. */
df7d75de 3292 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 3293 {
abcb0cdc
ILT
3294 if (REGNO (i2dest) < FIRST_PSEUDO_REGISTER)
3295 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
3296 else
3297 {
3298 SUBST_MODE (regno_reg_rtx[REGNO (i2dest)], split_mode);
3299 newdest = regno_reg_rtx[REGNO (i2dest)];
3300 }
230d793d
RS
3301 }
3302
3303 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
3304 an ASHIFT. This can occur if it was inside a PLUS and hence
3305 appeared to be a memory address. This is a kludge. */
df7d75de 3306 if (split_code == MULT
481683e1 3307 && CONST_INT_P (XEXP (*split, 1))
1568d79b 3308 && INTVAL (XEXP (*split, 1)) > 0
230d793d 3309 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1dc8a823 3310 {
f1c6ba8b
RK
3311 SUBST (*split, gen_rtx_ASHIFT (split_mode,
3312 XEXP (*split, 0), GEN_INT (i)));
1dc8a823
JW
3313 /* Update split_code because we may not have a multiply
3314 anymore. */
3315 split_code = GET_CODE (*split);
3316 }
230d793d
RS
3317
3318#ifdef INSN_SCHEDULING
3319 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
3320 be written as a ZERO_EXTEND. */
3c0cb5de 3321 if (split_code == SUBREG && MEM_P (SUBREG_REG (*split)))
25c25947
R
3322 {
3323#ifdef LOAD_EXTEND_OP
3324 /* Or as a SIGN_EXTEND if LOAD_EXTEND_OP says that that's
3325 what it really is. */
3326 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (*split)))
3327 == SIGN_EXTEND)
3328 SUBST (*split, gen_rtx_SIGN_EXTEND (split_mode,
3329 SUBREG_REG (*split)));
3330 else
3331#endif
3332 SUBST (*split, gen_rtx_ZERO_EXTEND (split_mode,
3333 SUBREG_REG (*split)));
3334 }
230d793d
RS
3335#endif
3336
c2c22cd6
RS
3337 /* Attempt to split binary operators using arithmetic identities. */
3338 if (BINARY_P (SET_SRC (newpat))
3339 && split_mode == GET_MODE (SET_SRC (newpat))
3340 && ! side_effects_p (SET_SRC (newpat)))
3341 {
3342 rtx setsrc = SET_SRC (newpat);
3343 enum machine_mode mode = GET_MODE (setsrc);
3344 enum rtx_code code = GET_CODE (setsrc);
3345 rtx src_op0 = XEXP (setsrc, 0);
3346 rtx src_op1 = XEXP (setsrc, 1);
3347
3348 /* Split "X = Y op Y" as "Z = Y; X = Z op Z". */
3349 if (rtx_equal_p (src_op0, src_op1))
3350 {
3351 newi2pat = gen_rtx_SET (VOIDmode, newdest, src_op0);
3352 SUBST (XEXP (setsrc, 0), newdest);
3353 SUBST (XEXP (setsrc, 1), newdest);
3354 subst_done = true;
3355 }
3356 /* Split "((P op Q) op R) op S" where op is PLUS or MULT. */
3357 else if ((code == PLUS || code == MULT)
3358 && GET_CODE (src_op0) == code
3359 && GET_CODE (XEXP (src_op0, 0)) == code
3360 && (INTEGRAL_MODE_P (mode)
3361 || (FLOAT_MODE_P (mode)
3362 && flag_unsafe_math_optimizations)))
3363 {
3364 rtx p = XEXP (XEXP (src_op0, 0), 0);
3365 rtx q = XEXP (XEXP (src_op0, 0), 1);
3366 rtx r = XEXP (src_op0, 1);
3367 rtx s = src_op1;
3368
3369 /* Split both "((X op Y) op X) op Y" and
3370 "((X op Y) op Y) op X" as "T op T" where T is
3371 "X op Y". */
3372 if ((rtx_equal_p (p,r) && rtx_equal_p (q,s))
3373 || (rtx_equal_p (p,s) && rtx_equal_p (q,r)))
3374 {
3375 newi2pat = gen_rtx_SET (VOIDmode, newdest,
3376 XEXP (src_op0, 0));
3377 SUBST (XEXP (setsrc, 0), newdest);
3378 SUBST (XEXP (setsrc, 1), newdest);
3379 subst_done = true;
3380 }
3381 /* Split "((X op X) op Y) op Y)" as "T op T" where
3382 T is "X op Y". */
3383 else if (rtx_equal_p (p,q) && rtx_equal_p (r,s))
3384 {
3385 rtx tmp = simplify_gen_binary (code, mode, p, r);
3386 newi2pat = gen_rtx_SET (VOIDmode, newdest, tmp);
3387 SUBST (XEXP (setsrc, 0), newdest);
3388 SUBST (XEXP (setsrc, 1), newdest);
3389 subst_done = true;
3390 }
3391 }
3392 }
3393
3394 if (!subst_done)
3395 {
3396 newi2pat = gen_rtx_SET (VOIDmode, newdest, *split);
3397 SUBST (*split, newdest);
3398 }
3399
8e2f6e35 3400 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
df7d75de 3401
985f2d8f
JJ
3402 /* recog_for_combine might have added CLOBBERs to newi2pat.
3403 Make sure NEWPAT does not depend on the clobbered regs. */
3404 if (GET_CODE (newi2pat) == PARALLEL)
3405 for (i = XVECLEN (newi2pat, 0) - 1; i >= 0; i--)
3406 if (GET_CODE (XVECEXP (newi2pat, 0, i)) == CLOBBER)
3407 {
3408 rtx reg = XEXP (XVECEXP (newi2pat, 0, i), 0);
3409 if (reg_overlap_mentioned_p (reg, newpat))
3410 {
3411 undo_all ();
3412 return 0;
3413 }
3414 }
3415
df7d75de
RK
3416 /* If the split point was a MULT and we didn't have one before,
3417 don't use one now. */
3418 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
8e2f6e35 3419 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
3420 }
3421 }
3422
3423 /* Check for a case where we loaded from memory in a narrow mode and
3424 then sign extended it, but we need both registers. In that case,
3425 we have a PARALLEL with both loads from the same memory location.
3426 We can split this into a load from memory followed by a register-register
3427 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
3428 eliminate the copy.
3429
a9b2f059
JW
3430 We cannot do this if the destination of the first assignment is a
3431 condition code register or cc0. We eliminate this case by making sure
3432 the SET_DEST and SET_SRC have the same mode.
3433
f0343c74
RK
3434 We cannot do this if the destination of the second assignment is
3435 a register that we have already assumed is zero-extended. Similarly
3436 for a SUBREG of such a register. */
230d793d
RS
3437
3438 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3439 && GET_CODE (newpat) == PARALLEL
3440 && XVECLEN (newpat, 0) == 2
3441 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3442 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
a9b2f059
JW
3443 && (GET_MODE (SET_DEST (XVECEXP (newpat, 0, 0)))
3444 == GET_MODE (SET_SRC (XVECEXP (newpat, 0, 0))))
230d793d
RS
3445 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3446 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
3447 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
3448 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
6fb5fa3c 3449 DF_INSN_LUID (i2))
230d793d
RS
3450 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3451 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74 3452 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
f8cfc6aa 3453 (REG_P (temp)
829f8ff7
ILT
3454 && VEC_index (reg_stat_type, reg_stat,
3455 REGNO (temp))->nonzero_bits != 0
f0343c74
RK
3456 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
3457 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
829f8ff7
ILT
3458 && (VEC_index (reg_stat_type, reg_stat,
3459 REGNO (temp))->nonzero_bits
f0343c74
RK
3460 != GET_MODE_MASK (word_mode))))
3461 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
3462 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
f8cfc6aa 3463 (REG_P (temp)
829f8ff7
ILT
3464 && VEC_index (reg_stat_type, reg_stat,
3465 REGNO (temp))->nonzero_bits != 0
f0343c74
RK
3466 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
3467 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
829f8ff7
ILT
3468 && (VEC_index (reg_stat_type, reg_stat,
3469 REGNO (temp))->nonzero_bits
f0343c74 3470 != GET_MODE_MASK (word_mode)))))
230d793d
RS
3471 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3472 SET_SRC (XVECEXP (newpat, 0, 1)))
3473 && ! find_reg_note (i3, REG_UNUSED,
3474 SET_DEST (XVECEXP (newpat, 0, 0))))
3475 {
472fbdd1
RK
3476 rtx ni2dest;
3477
230d793d 3478 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 3479 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
3480 newpat = XVECEXP (newpat, 0, 1);
3481 SUBST (SET_SRC (newpat),
4de249d9 3482 gen_lowpart (GET_MODE (SET_SRC (newpat)), ni2dest));
8e2f6e35 3483 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 3484
230d793d 3485 if (i2_code_number >= 0)
8e2f6e35 3486 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
3487
3488 if (insn_code_number >= 0)
9e42ab3e 3489 swap_i2i3 = 1;
230d793d 3490 }
663522cb 3491
230d793d
RS
3492 /* Similarly, check for a case where we have a PARALLEL of two independent
3493 SETs but we started with three insns. In this case, we can do the sets
3494 as two separate insns. This case occurs when some SET allows two
3495 other insns to combine, but the destination of that SET is still live. */
3496
3497 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
3498 && GET_CODE (newpat) == PARALLEL
3499 && XVECLEN (newpat, 0) == 2
3500 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
3501 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
3502 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
3503 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
3504 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
3505 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
3506 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
6fb5fa3c 3507 DF_INSN_LUID (i2))
230d793d
RS
3508 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
3509 XVECEXP (newpat, 0, 0))
3510 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
14a774a9
RK
3511 XVECEXP (newpat, 0, 1))
3512 && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
c8983342
KH
3513 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1))))
3514#ifdef HAVE_cc0
3515 /* We cannot split the parallel into two sets if both sets
3516 reference cc0. */
3517 && ! (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0))
3518 && reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 1)))
3519#endif
3520 )
230d793d 3521 {
e9a25f70
JL
3522 /* Normally, it doesn't matter which of the two is done first,
3523 but it does if one references cc0. In that case, it has to
3524 be first. */
3525#ifdef HAVE_cc0
3526 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
3527 {
3528 newi2pat = XVECEXP (newpat, 0, 0);
3529 newpat = XVECEXP (newpat, 0, 1);
3530 }
3531 else
3532#endif
3533 {
3534 newi2pat = XVECEXP (newpat, 0, 1);
3535 newpat = XVECEXP (newpat, 0, 0);
3536 }
230d793d 3537
8e2f6e35 3538 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 3539
230d793d 3540 if (i2_code_number >= 0)
8e2f6e35 3541 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
3542 }
3543
3544 /* If it still isn't recognized, fail and change things back the way they
3545 were. */
3546 if ((insn_code_number < 0
3547 /* Is the result a reasonable ASM_OPERANDS? */
3548 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
3549 {
3550 undo_all ();
3551 return 0;
3552 }
3553
3554 /* If we had to change another insn, make sure it is valid also. */
3555 if (undobuf.other_insn)
3556 {
6e25d159
RK
3557 CLEAR_HARD_REG_SET (newpat_used_regs);
3558
9d35384d 3559 other_pat = PATTERN (undobuf.other_insn);
8e2f6e35
BS
3560 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
3561 &new_other_notes);
230d793d
RS
3562
3563 if (other_code_number < 0 && ! check_asm_operands (other_pat))
3564 {
3565 undo_all ();
3566 return 0;
3567 }
230d793d 3568 }
9d35384d 3569
5ef17dd2 3570#ifdef HAVE_cc0
1f52178b 3571 /* If I2 is the CC0 setter and I3 is the CC0 user then check whether
ec5c56db 3572 they are adjacent to each other or not. */
5ef17dd2
CC
3573 {
3574 rtx p = prev_nonnote_insn (i3);
4b4bf941 3575 if (p && p != i2 && NONJUMP_INSN_P (p) && newi2pat
663522cb 3576 && sets_cc0_p (newi2pat))
5ef17dd2 3577 {
663522cb
KH
3578 undo_all ();
3579 return 0;
5ef17dd2 3580 }
663522cb
KH
3581 }
3582#endif
230d793d 3583
6fd21094 3584 /* Only allow this combination if insn_rtx_costs reports that the
64b8935d 3585 replacement instructions are cheaper than the originals. */
9d35384d 3586 if (!combine_validate_cost (i1, i2, i3, newpat, newi2pat, other_pat))
64b8935d
RS
3587 {
3588 undo_all ();
3589 return 0;
3590 }
3591
b5b8b0ac
AO
3592 if (MAY_HAVE_DEBUG_INSNS)
3593 {
3594 struct undo *undo;
3595
3596 for (undo = undobuf.undos; undo; undo = undo->next)
3597 if (undo->kind == UNDO_MODE)
3598 {
3599 rtx reg = *undo->where.r;
3600 enum machine_mode new_mode = GET_MODE (reg);
3601 enum machine_mode old_mode = undo->old_contents.m;
3602
3603 /* Temporarily revert mode back. */
3604 adjust_reg_mode (reg, old_mode);
3605
3606 if (reg == i2dest && i2scratch)
3607 {
3608 /* If we used i2dest as a scratch register with a
3609 different mode, substitute it for the original
3610 i2src while its original mode is temporarily
3611 restored, and then clear i2scratch so that we don't
3612 do it again later. */
3613 propagate_for_debug (i2, i3, reg, i2src, false);
3614 i2scratch = false;
3615 /* Put back the new mode. */
3616 adjust_reg_mode (reg, new_mode);
3617 }
3618 else
3619 {
3620 rtx tempreg = gen_raw_REG (old_mode, REGNO (reg));
3621 rtx first, last;
3622
3623 if (reg == i2dest)
3624 {
3625 first = i2;
3626 last = i3;
3627 }
3628 else
3629 {
3630 first = i3;
3631 last = undobuf.other_insn;
3632 gcc_assert (last);
3633 }
3634
3635 /* We're dealing with a reg that changed mode but not
3636 meaning, so we want to turn it into a subreg for
3637 the new mode. However, because of REG sharing and
3638 because its mode had already changed, we have to do
3639 it in two steps. First, replace any debug uses of
3640 reg, with its original mode temporarily restored,
3641 with this copy we have created; then, replace the
3642 copy with the SUBREG of the original shared reg,
3643 once again changed to the new mode. */
3644 propagate_for_debug (first, last, reg, tempreg, false);
3645 adjust_reg_mode (reg, new_mode);
3646 propagate_for_debug (first, last, tempreg,
3647 lowpart_subreg (old_mode, reg, new_mode),
3648 false);
3649 }
3650 }
3651 }
3652
7ad7809b
JJ
3653 /* If we will be able to accept this, we have made a
3654 change to the destination of I3. This requires us to
3655 do a few adjustments. */
3656
3657 if (changed_i3_dest)
3658 {
3659 PATTERN (i3) = newpat;
3660 adjust_for_new_dest (i3);
3661 }
3662
663522cb 3663 /* We now know that we can do this combination. Merge the insns and
230d793d
RS
3664 update the status of registers and LOG_LINKS. */
3665
9d35384d
RIL
3666 if (undobuf.other_insn)
3667 {
3668 rtx note, next;
3669
3670 PATTERN (undobuf.other_insn) = other_pat;
3671
3672 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
3673 are still valid. Then add any non-duplicate notes added by
3674 recog_for_combine. */
3675 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
3676 {
3677 next = XEXP (note, 1);
3678
3679 if (REG_NOTE_KIND (note) == REG_UNUSED
3680 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
3681 remove_note (undobuf.other_insn, note);
3682 }
3683
3684 distribute_notes (new_other_notes, undobuf.other_insn,
3685 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
3686 }
3687
9e42ab3e
RZ
3688 if (swap_i2i3)
3689 {
3690 rtx insn;
3691 rtx link;
3692 rtx ni2dest;
3693
3694 /* I3 now uses what used to be its destination and which is now
c22cacf3 3695 I2's destination. This requires us to do a few adjustments. */
9e42ab3e
RZ
3696 PATTERN (i3) = newpat;
3697 adjust_for_new_dest (i3);
3698
3699 /* We need a LOG_LINK from I3 to I2. But we used to have one,
c22cacf3 3700 so we still will.
9e42ab3e
RZ
3701
3702 However, some later insn might be using I2's dest and have
3703 a LOG_LINK pointing at I3. We must remove this link.
3704 The simplest way to remove the link is to point it at I1,
3705 which we know will be a NOTE. */
3706
0b21d1dc
UW
3707 /* newi2pat is usually a SET here; however, recog_for_combine might
3708 have added some clobbers. */
3709 if (GET_CODE (newi2pat) == PARALLEL)
3710 ni2dest = SET_DEST (XVECEXP (newi2pat, 0, 0));
3711 else
3712 ni2dest = SET_DEST (newi2pat);
3713
9e42ab3e
RZ
3714 for (insn = NEXT_INSN (i3);
3715 insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
3716 || insn != BB_HEAD (this_basic_block->next_bb));
3717 insn = NEXT_INSN (insn))
3718 {
3719 if (INSN_P (insn) && reg_referenced_p (ni2dest, PATTERN (insn)))
3720 {
3721 for (link = LOG_LINKS (insn); link;
3722 link = XEXP (link, 1))
3723 if (XEXP (link, 0) == i3)
3724 XEXP (link, 0) = i1;
3725
3726 break;
3727 }
3728 }
3729 }
3730
230d793d
RS
3731 {
3732 rtx i3notes, i2notes, i1notes = 0;
3733 rtx i3links, i2links, i1links = 0;
3734 rtx midnotes = 0;
770ae6cc 3735 unsigned int regno;
4bbae09f
ILT
3736 /* Compute which registers we expect to eliminate. newi2pat may be setting
3737 either i3dest or i2dest, so we must check it. Also, i1dest may be the
3738 same as i3dest, in which case newi2pat may be setting i1dest. */
3739 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
3740 || i2dest_in_i2src || i2dest_in_i1src
3741 || !i2dest_killed
3742 ? 0 : i2dest);
3743 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
3744 || (newi2pat && reg_set_p (i1dest, newi2pat))
3745 || !i1dest_killed
3746 ? 0 : i1dest);
230d793d
RS
3747
3748 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
3749 clear them. */
3750 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
3751 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
3752 if (i1)
3753 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
3754
3755 /* Ensure that we do not have something that should not be shared but
3756 occurs multiple times in the new insns. Check this by first
5089e22e 3757 resetting all the `used' flags and then copying anything is shared. */
230d793d
RS
3758
3759 reset_used_flags (i3notes);
3760 reset_used_flags (i2notes);
3761 reset_used_flags (i1notes);
3762 reset_used_flags (newpat);
3763 reset_used_flags (newi2pat);
3764 if (undobuf.other_insn)
3765 reset_used_flags (PATTERN (undobuf.other_insn));
3766
3767 i3notes = copy_rtx_if_shared (i3notes);
3768 i2notes = copy_rtx_if_shared (i2notes);
3769 i1notes = copy_rtx_if_shared (i1notes);
3770 newpat = copy_rtx_if_shared (newpat);
3771 newi2pat = copy_rtx_if_shared (newi2pat);
3772 if (undobuf.other_insn)
3773 reset_used_flags (PATTERN (undobuf.other_insn));
3774
3775 INSN_CODE (i3) = insn_code_number;
3776 PATTERN (i3) = newpat;
cddd8b72 3777
4b4bf941 3778 if (CALL_P (i3) && CALL_INSN_FUNCTION_USAGE (i3))
cddd8b72
AO
3779 {
3780 rtx call_usage = CALL_INSN_FUNCTION_USAGE (i3);
3781
3782 reset_used_flags (call_usage);
3783 call_usage = copy_rtx (call_usage);
3784
3785 if (substed_i2)
3786 replace_rtx (call_usage, i2dest, i2src);
3787
3788 if (substed_i1)
3789 replace_rtx (call_usage, i1dest, i1src);
3790
3791 CALL_INSN_FUNCTION_USAGE (i3) = call_usage;
3792 }
3793
230d793d
RS
3794 if (undobuf.other_insn)
3795 INSN_CODE (undobuf.other_insn) = other_code_number;
3796
3797 /* We had one special case above where I2 had more than one set and
3798 we replaced a destination of one of those sets with the destination
3799 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
3800 in this basic block. Note that this (expensive) case is rare.
3801
3802 Also, in this case, we must pretend that all REG_NOTEs for I2
3803 actually came from I3, so that REG_UNUSED notes from I2 will be
3804 properly handled. */
3805
c7be4f66 3806 if (i3_subst_into_i2)
176c9e6b 3807 {
1786009e 3808 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
676cb929
RS
3809 if ((GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == SET
3810 || GET_CODE (XVECEXP (PATTERN (i2), 0, i)) == CLOBBER)
f8cfc6aa 3811 && REG_P (SET_DEST (XVECEXP (PATTERN (i2), 0, i)))
1786009e
ZW
3812 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
3813 && ! find_reg_note (i2, REG_UNUSED,
3814 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
3815 for (temp = NEXT_INSN (i2);
f6366fc7 3816 temp && (this_basic_block->next_bb == EXIT_BLOCK_PTR
a813c111 3817 || BB_HEAD (this_basic_block) != temp);
1786009e
ZW
3818 temp = NEXT_INSN (temp))
3819 if (temp != i3 && INSN_P (temp))
3820 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
3821 if (XEXP (link, 0) == i2)
3822 XEXP (link, 0) = i3;
176c9e6b
JW
3823
3824 if (i3notes)
3825 {
3826 rtx link = i3notes;
3827 while (XEXP (link, 1))
3828 link = XEXP (link, 1);
3829 XEXP (link, 1) = i2notes;
3830 }
3831 else
3832 i3notes = i2notes;
3833 i2notes = 0;
3834 }
230d793d
RS
3835
3836 LOG_LINKS (i3) = 0;
3837 REG_NOTES (i3) = 0;
3838 LOG_LINKS (i2) = 0;
3839 REG_NOTES (i2) = 0;
3840
3841 if (newi2pat)
3842 {
b5b8b0ac
AO
3843 if (MAY_HAVE_DEBUG_INSNS && i2scratch)
3844 propagate_for_debug (i2, i3, i2dest, i2src, false);
230d793d
RS
3845 INSN_CODE (i2) = i2_code_number;
3846 PATTERN (i2) = newi2pat;
3847 }
3848 else
b5b8b0ac
AO
3849 {
3850 if (MAY_HAVE_DEBUG_INSNS && i2src)
3851 propagate_for_debug (i2, i3, i2dest, i2src, i3_subst_into_i2);
3852 SET_INSN_DELETED (i2);
3853 }
230d793d
RS
3854
3855 if (i1)
3856 {
3857 LOG_LINKS (i1) = 0;
3858 REG_NOTES (i1) = 0;
b5b8b0ac
AO
3859 if (MAY_HAVE_DEBUG_INSNS)
3860 propagate_for_debug (i1, i3, i1dest, i1src, false);
6773e15f 3861 SET_INSN_DELETED (i1);
230d793d
RS
3862 }
3863
3864 /* Get death notes for everything that is now used in either I3 or
663522cb 3865 I2 and used to die in a previous insn. If we built two new
6eb12cef
RK
3866 patterns, move from I1 to I2 then I2 to I3 so that we get the
3867 proper movement on registers that I2 modifies. */
230d793d 3868
230d793d 3869 if (newi2pat)
6eb12cef 3870 {
6fb5fa3c
DB
3871 move_deaths (newi2pat, NULL_RTX, DF_INSN_LUID (i1), i2, &midnotes);
3872 move_deaths (newpat, newi2pat, DF_INSN_LUID (i1), i3, &midnotes);
6eb12cef
RK
3873 }
3874 else
6fb5fa3c 3875 move_deaths (newpat, NULL_RTX, i1 ? DF_INSN_LUID (i1) : DF_INSN_LUID (i2),
6eb12cef 3876 i3, &midnotes);
230d793d
RS
3877
3878 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
3879 if (i3notes)
4bbae09f
ILT
3880 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
3881 elim_i2, elim_i1);
230d793d 3882 if (i2notes)
4bbae09f
ILT
3883 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
3884 elim_i2, elim_i1);
230d793d 3885 if (i1notes)
4bbae09f
ILT
3886 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
3887 elim_i2, elim_i1);
230d793d 3888 if (midnotes)
4bbae09f
ILT
3889 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3890 elim_i2, elim_i1);
230d793d
RS
3891
3892 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
3893 know these are REG_UNUSED and want them to go to the desired insn,
6fb5fa3c 3894 so we always pass it as i3. */
1a26b032 3895
230d793d 3896 if (newi2pat && new_i2_notes)
6fb5fa3c 3897 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
b8698a0f 3898
230d793d 3899 if (new_i3_notes)
6fb5fa3c 3900 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
3901
3902 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
e9a25f70
JL
3903 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
3904 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
3905 in that case, it might delete I2. Similarly for I2 and I1.
1a26b032
RK
3906 Show an additional death due to the REG_DEAD note we make here. If
3907 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 3908
230d793d 3909 if (i3dest_killed)
1a26b032 3910 {
e9a25f70 3911 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
efc0b2bd
ILT
3912 distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
3913 NULL_RTX),
4bbae09f 3914 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
e9a25f70 3915 else
efc0b2bd
ILT
3916 distribute_notes (alloc_reg_note (REG_DEAD, i3dest_killed,
3917 NULL_RTX),
4bbae09f
ILT
3918 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3919 elim_i2, elim_i1);
1a26b032 3920 }
58c8c593 3921
230d793d 3922 if (i2dest_in_i2src)
58c8c593
RK
3923 {
3924 if (newi2pat && reg_set_p (i2dest, newi2pat))
efc0b2bd 3925 distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
4bbae09f 3926 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
58c8c593 3927 else
efc0b2bd 3928 distribute_notes (alloc_reg_note (REG_DEAD, i2dest, NULL_RTX),
4bbae09f
ILT
3929 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3930 NULL_RTX, NULL_RTX);
58c8c593
RK
3931 }
3932
230d793d 3933 if (i1dest_in_i1src)
58c8c593
RK
3934 {
3935 if (newi2pat && reg_set_p (i1dest, newi2pat))
efc0b2bd 3936 distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
4bbae09f 3937 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
58c8c593 3938 else
efc0b2bd 3939 distribute_notes (alloc_reg_note (REG_DEAD, i1dest, NULL_RTX),
4bbae09f
ILT
3940 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
3941 NULL_RTX, NULL_RTX);
58c8c593 3942 }
230d793d
RS
3943
3944 distribute_links (i3links);
3945 distribute_links (i2links);
3946 distribute_links (i1links);
3947
f8cfc6aa 3948 if (REG_P (i2dest))
230d793d 3949 {
d0ab8cd3
RK
3950 rtx link;
3951 rtx i2_insn = 0, i2_val = 0, set;
3952
3953 /* The insn that used to set this register doesn't exist, and
3954 this life of the register may not exist either. See if one of
663522cb 3955 I3's links points to an insn that sets I2DEST. If it does,
d0ab8cd3
RK
3956 that is now the last known value for I2DEST. If we don't update
3957 this and I2 set the register to a value that depended on its old
230d793d
RS
3958 contents, we will get confused. If this insn is used, thing
3959 will be set correctly in combine_instructions. */
d0ab8cd3
RK
3960
3961 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3962 if ((set = single_set (XEXP (link, 0))) != 0
3963 && rtx_equal_p (i2dest, SET_DEST (set)))
3964 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
3965
3966 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
3967
3968 /* If the reg formerly set in I2 died only once and that was in I3,
3969 zero its use count so it won't make `reload' do any work. */
538fe8cd
ILT
3970 if (! added_sets_2
3971 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
3972 && ! i2dest_in_i2src)
230d793d
RS
3973 {
3974 regno = REGNO (i2dest);
6fb5fa3c 3975 INC_REG_N_SETS (regno, -1);
230d793d
RS
3976 }
3977 }
3978
f8cfc6aa 3979 if (i1 && REG_P (i1dest))
230d793d 3980 {
d0ab8cd3
RK
3981 rtx link;
3982 rtx i1_insn = 0, i1_val = 0, set;
3983
3984 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
3985 if ((set = single_set (XEXP (link, 0))) != 0
3986 && rtx_equal_p (i1dest, SET_DEST (set)))
3987 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
3988
3989 record_value_for_reg (i1dest, i1_insn, i1_val);
3990
230d793d 3991 regno = REGNO (i1dest);
5af91171 3992 if (! added_sets_1 && ! i1dest_in_i1src)
6fb5fa3c 3993 INC_REG_N_SETS (regno, -1);
230d793d
RS
3994 }
3995
5eaad481
PB
3996 /* Update reg_stat[].nonzero_bits et al for any changes that may have
3997 been made to this insn. The order of
3998 set_nonzero_bits_and_sign_copies() is important. Because newi2pat
3999 can affect nonzero_bits of newpat */
22609cbf 4000 if (newi2pat)
84832317 4001 note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);
5fb7c247 4002 note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
230d793d 4003 }
d25aa7ab 4004
6fb5fa3c
DB
4005 if (undobuf.other_insn != NULL_RTX)
4006 {
4007 if (dump_file)
4008 {
4009 fprintf (dump_file, "modifying other_insn ");
4010 dump_insn_slim (dump_file, undobuf.other_insn);
4011 }
4012 df_insn_rescan (undobuf.other_insn);
4013 }
4014
4015 if (i1 && !(NOTE_P(i1) && (NOTE_KIND (i1) == NOTE_INSN_DELETED)))
4016 {
4017 if (dump_file)
4018 {
4019 fprintf (dump_file, "modifying insn i1 ");
4020 dump_insn_slim (dump_file, i1);
4021 }
4022 df_insn_rescan (i1);
4023 }
230d793d 4024
6fb5fa3c
DB
4025 if (i2 && !(NOTE_P(i2) && (NOTE_KIND (i2) == NOTE_INSN_DELETED)))
4026 {
4027 if (dump_file)
4028 {
4029 fprintf (dump_file, "modifying insn i2 ");
4030 dump_insn_slim (dump_file, i2);
4031 }
4032 df_insn_rescan (i2);
4033 }
4034
4035 if (i3 && !(NOTE_P(i3) && (NOTE_KIND (i3) == NOTE_INSN_DELETED)))
4036 {
4037 if (dump_file)
4038 {
4039 fprintf (dump_file, "modifying insn i3 ");
4040 dump_insn_slim (dump_file, i3);
4041 }
4042 df_insn_rescan (i3);
4043 }
b8698a0f 4044
d25aa7ab
PB
4045 /* Set new_direct_jump_p if a new return or simple jump instruction
4046 has been created. Adjust the CFG accordingly. */
4047
4048 if (returnjump_p (i3) || any_uncondjump_p (i3))
4049 {
4050 *new_direct_jump_p = 1;
4051 mark_jump_label (PATTERN (i3), i3, 0);
4052 update_cfg_for_uncondjump (i3);
4053 }
4054
4055 if (undobuf.other_insn != NULL_RTX
4056 && (returnjump_p (undobuf.other_insn)
4057 || any_uncondjump_p (undobuf.other_insn)))
4058 {
4059 *new_direct_jump_p = 1;
4060 update_cfg_for_uncondjump (undobuf.other_insn);
4061 }
4062
4063 /* A noop might also need cleaning up of CFG, if it comes from the
4064 simplification of a jump. */
4065 if (GET_CODE (newpat) == SET
4066 && SET_SRC (newpat) == pc_rtx
4067 && SET_DEST (newpat) == pc_rtx)
4068 {
4069 *new_direct_jump_p = 1;
4070 update_cfg_for_uncondjump (i3);
4071 }
b8698a0f 4072
230d793d 4073 combine_successes++;
e7749837 4074 undo_commit ();
230d793d 4075
abe6e52f 4076 if (added_links_insn
6fb5fa3c
DB
4077 && (newi2pat == 0 || DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i2))
4078 && DF_INSN_LUID (added_links_insn) < DF_INSN_LUID (i3))
abe6e52f
RK
4079 return added_links_insn;
4080 else
4081 return newi2pat ? i2 : i3;
230d793d
RS
4082}
4083\f
4084/* Undo all the modifications recorded in undobuf. */
4085
4086static void
79a490a9 4087undo_all (void)
230d793d 4088{
241cea85
RK
4089 struct undo *undo, *next;
4090
4091 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 4092 {
241cea85 4093 next = undo->next;
abcb0cdc
ILT
4094 switch (undo->kind)
4095 {
4096 case UNDO_RTX:
4097 *undo->where.r = undo->old_contents.r;
4098 break;
4099 case UNDO_INT:
4100 *undo->where.i = undo->old_contents.i;
4101 break;
4102 case UNDO_MODE:
38ae7651 4103 adjust_reg_mode (*undo->where.r, undo->old_contents.m);
abcb0cdc
ILT
4104 break;
4105 default:
4106 gcc_unreachable ();
4107 }
241cea85
RK
4108
4109 undo->next = undobuf.frees;
4110 undobuf.frees = undo;
7c046e4e 4111 }
230d793d 4112
f1c6ba8b 4113 undobuf.undos = 0;
230d793d 4114}
e7749837
RH
4115
4116/* We've committed to accepting the changes we made. Move all
4117 of the undos to the free list. */
4118
4119static void
79a490a9 4120undo_commit (void)
e7749837
RH
4121{
4122 struct undo *undo, *next;
4123
4124 for (undo = undobuf.undos; undo; undo = next)
4125 {
4126 next = undo->next;
4127 undo->next = undobuf.frees;
4128 undobuf.frees = undo;
4129 }
f1c6ba8b 4130 undobuf.undos = 0;
e7749837 4131}
230d793d
RS
4132\f
4133/* Find the innermost point within the rtx at LOC, possibly LOC itself,
d0ab8cd3
RK
4134 where we have an arithmetic expression and return that point. LOC will
4135 be inside INSN.
230d793d
RS
4136
4137 try_combine will call this function to see if an insn can be split into
4138 two insns. */
4139
4140static rtx *
79a490a9 4141find_split_point (rtx *loc, rtx insn)
230d793d
RS
4142{
4143 rtx x = *loc;
4144 enum rtx_code code = GET_CODE (x);
4145 rtx *split;
770ae6cc
RK
4146 unsigned HOST_WIDE_INT len = 0;
4147 HOST_WIDE_INT pos = 0;
4148 int unsignedp = 0;
6a651371 4149 rtx inner = NULL_RTX;
230d793d
RS
4150
4151 /* First special-case some codes. */
4152 switch (code)
4153 {
4154 case SUBREG:
4155#ifdef INSN_SCHEDULING
4156 /* If we are making a paradoxical SUBREG invalid, it becomes a split
4157 point. */
3c0cb5de 4158 if (MEM_P (SUBREG_REG (x)))
230d793d
RS
4159 return loc;
4160#endif
d0ab8cd3 4161 return find_split_point (&SUBREG_REG (x), insn);
230d793d 4162
230d793d 4163 case MEM:
916f14f1 4164#ifdef HAVE_lo_sum
230d793d
RS
4165 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
4166 using LO_SUM and HIGH. */
4167 if (GET_CODE (XEXP (x, 0)) == CONST
4168 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
4169 {
d4ebfa65
BE
4170 enum machine_mode address_mode
4171 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
4172
230d793d 4173 SUBST (XEXP (x, 0),
d4ebfa65
BE
4174 gen_rtx_LO_SUM (address_mode,
4175 gen_rtx_HIGH (address_mode, XEXP (x, 0)),
f1c6ba8b 4176 XEXP (x, 0)));
230d793d
RS
4177 return &XEXP (XEXP (x, 0), 0);
4178 }
230d793d
RS
4179#endif
4180
916f14f1
RK
4181 /* If we have a PLUS whose second operand is a constant and the
4182 address is not valid, perhaps will can split it up using
4183 the machine-specific way to split large constants. We use
ddd5a7c1 4184 the first pseudo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
4185 it will not remain in the result. */
4186 if (GET_CODE (XEXP (x, 0)) == PLUS
481683e1 4187 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
09e881c9
BE
4188 && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4189 MEM_ADDR_SPACE (x)))
916f14f1
RK
4190 {
4191 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
829f8ff7
ILT
4192 rtx seq = combine_split_insns (gen_rtx_SET (VOIDmode, reg,
4193 XEXP (x, 0)),
4194 subst_insn);
916f14f1
RK
4195
4196 /* This should have produced two insns, each of which sets our
4197 placeholder. If the source of the second is a valid address,
4198 we can make put both sources together and make a split point
4199 in the middle. */
4200
2f937369
DM
4201 if (seq
4202 && NEXT_INSN (seq) != NULL_RTX
4203 && NEXT_INSN (NEXT_INSN (seq)) == NULL_RTX
4b4bf941 4204 && NONJUMP_INSN_P (seq)
2f937369
DM
4205 && GET_CODE (PATTERN (seq)) == SET
4206 && SET_DEST (PATTERN (seq)) == reg
916f14f1 4207 && ! reg_mentioned_p (reg,
2f937369 4208 SET_SRC (PATTERN (seq)))
4b4bf941 4209 && NONJUMP_INSN_P (NEXT_INSN (seq))
2f937369
DM
4210 && GET_CODE (PATTERN (NEXT_INSN (seq))) == SET
4211 && SET_DEST (PATTERN (NEXT_INSN (seq))) == reg
09e881c9
BE
4212 && memory_address_addr_space_p
4213 (GET_MODE (x), SET_SRC (PATTERN (NEXT_INSN (seq))),
4214 MEM_ADDR_SPACE (x)))
916f14f1 4215 {
2f937369
DM
4216 rtx src1 = SET_SRC (PATTERN (seq));
4217 rtx src2 = SET_SRC (PATTERN (NEXT_INSN (seq)));
916f14f1
RK
4218
4219 /* Replace the placeholder in SRC2 with SRC1. If we can
4220 find where in SRC2 it was placed, that can become our
4221 split point and we can replace this address with SRC2.
4222 Just try two obvious places. */
4223
4224 src2 = replace_rtx (src2, reg, src1);
4225 split = 0;
4226 if (XEXP (src2, 0) == src1)
4227 split = &XEXP (src2, 0);
4228 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
4229 && XEXP (XEXP (src2, 0), 0) == src1)
4230 split = &XEXP (XEXP (src2, 0), 0);
4231
4232 if (split)
4233 {
4234 SUBST (XEXP (x, 0), src2);
4235 return split;
4236 }
4237 }
663522cb 4238
1a26b032
RK
4239 /* If that didn't work, perhaps the first operand is complex and
4240 needs to be computed separately, so make a split point there.
4241 This will occur on machines that just support REG + CONST
4242 and have a constant moved through some previous computation. */
4243
ec8e098d 4244 else if (!OBJECT_P (XEXP (XEXP (x, 0), 0))
1a26b032 4245 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
ec8e098d 4246 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
1a26b032 4247 return &XEXP (XEXP (x, 0), 0);
916f14f1 4248 }
acfb3980
RA
4249
4250 /* If we have a PLUS whose first operand is complex, try computing it
4251 separately by making a split there. */
4252 if (GET_CODE (XEXP (x, 0)) == PLUS
09e881c9
BE
4253 && ! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
4254 MEM_ADDR_SPACE (x))
acfb3980
RA
4255 && ! OBJECT_P (XEXP (XEXP (x, 0), 0))
4256 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
4257 && OBJECT_P (SUBREG_REG (XEXP (XEXP (x, 0), 0)))))
4258 return &XEXP (XEXP (x, 0), 0);
916f14f1
RK
4259 break;
4260
230d793d
RS
4261 case SET:
4262#ifdef HAVE_cc0
4263 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
4264 ZERO_EXTRACT, the most likely reason why this doesn't match is that
4265 we need to put the operand into a register. So split at that
4266 point. */
4267
4268 if (SET_DEST (x) == cc0_rtx
4269 && GET_CODE (SET_SRC (x)) != COMPARE
4270 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
ec8e098d 4271 && !OBJECT_P (SET_SRC (x))
230d793d 4272 && ! (GET_CODE (SET_SRC (x)) == SUBREG
ec8e098d 4273 && OBJECT_P (SUBREG_REG (SET_SRC (x)))))
230d793d
RS
4274 return &SET_SRC (x);
4275#endif
4276
4277 /* See if we can split SET_SRC as it stands. */
d0ab8cd3 4278 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
4279 if (split && split != &SET_SRC (x))
4280 return split;
4281
041d7180
JL
4282 /* See if we can split SET_DEST as it stands. */
4283 split = find_split_point (&SET_DEST (x), insn);
4284 if (split && split != &SET_DEST (x))
4285 return split;
4286
230d793d
RS
4287 /* See if this is a bitfield assignment with everything constant. If
4288 so, this is an IOR of an AND, so split it into that. */
4289 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4290 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
5f4f0e22 4291 <= HOST_BITS_PER_WIDE_INT)
481683e1
SZ
4292 && CONST_INT_P (XEXP (SET_DEST (x), 1))
4293 && CONST_INT_P (XEXP (SET_DEST (x), 2))
4294 && CONST_INT_P (SET_SRC (x))
230d793d 4295 && ((INTVAL (XEXP (SET_DEST (x), 1))
cf0d9408 4296 + INTVAL (XEXP (SET_DEST (x), 2)))
230d793d
RS
4297 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
4298 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
4299 {
770ae6cc
RK
4300 HOST_WIDE_INT pos = INTVAL (XEXP (SET_DEST (x), 2));
4301 unsigned HOST_WIDE_INT len = INTVAL (XEXP (SET_DEST (x), 1));
4302 unsigned HOST_WIDE_INT src = INTVAL (SET_SRC (x));
230d793d
RS
4303 rtx dest = XEXP (SET_DEST (x), 0);
4304 enum machine_mode mode = GET_MODE (dest);
5f4f0e22 4305 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
f6173932 4306 rtx or_mask;
230d793d 4307
f76b9db2
ILT
4308 if (BITS_BIG_ENDIAN)
4309 pos = GET_MODE_BITSIZE (mode) - len - pos;
230d793d 4310
f6173932 4311 or_mask = gen_int_mode (src << pos, mode);
770ae6cc 4312 if (src == mask)
230d793d 4313 SUBST (SET_SRC (x),
f6173932 4314 simplify_gen_binary (IOR, mode, dest, or_mask));
230d793d 4315 else
bcb34aa3
PB
4316 {
4317 rtx negmask = gen_int_mode (~(mask << pos), mode);
4318 SUBST (SET_SRC (x),
4319 simplify_gen_binary (IOR, mode,
c22cacf3 4320 simplify_gen_binary (AND, mode,
bcb34aa3 4321 dest, negmask),
f6173932 4322 or_mask));
bcb34aa3 4323 }
230d793d
RS
4324
4325 SUBST (SET_DEST (x), dest);
4326
d0ab8cd3 4327 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
4328 if (split && split != &SET_SRC (x))
4329 return split;
4330 }
4331
4332 /* Otherwise, see if this is an operation that we can split into two.
4333 If so, try to split that. */
4334 code = GET_CODE (SET_SRC (x));
4335
4336 switch (code)
4337 {
d0ab8cd3
RK
4338 case AND:
4339 /* If we are AND'ing with a large constant that is only a single
4340 bit and the result is only being used in a context where we
da7d8304 4341 need to know if it is zero or nonzero, replace it with a bit
d0ab8cd3
RK
4342 extraction. This will avoid the large constant, which might
4343 have taken more than one insn to make. If the constant were
4344 not a valid argument to the AND but took only one insn to make,
4345 this is no worse, but if it took more than one insn, it will
4346 be better. */
4347
481683e1 4348 if (CONST_INT_P (XEXP (SET_SRC (x), 1))
f8cfc6aa 4349 && REG_P (XEXP (SET_SRC (x), 0))
d0ab8cd3 4350 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
f8cfc6aa 4351 && REG_P (SET_DEST (x))
cf0d9408 4352 && (split = find_single_use (SET_DEST (x), insn, (rtx*) 0)) != 0
d0ab8cd3
RK
4353 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
4354 && XEXP (*split, 0) == SET_DEST (x)
4355 && XEXP (*split, 1) == const0_rtx)
4356 {
76184def
DE
4357 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
4358 XEXP (SET_SRC (x), 0),
4359 pos, NULL_RTX, 1, 1, 0, 0);
4360 if (extraction != 0)
4361 {
4362 SUBST (SET_SRC (x), extraction);
4363 return find_split_point (loc, insn);
4364 }
d0ab8cd3
RK
4365 }
4366 break;
4367
1a6ec070 4368 case NE:
938d968e 4369 /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
ec5c56db 4370 is known to be on, this can be converted into a NEG of a shift. */
1a6ec070
RK
4371 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
4372 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
4eb2cb10 4373 && 1 <= (pos = exact_log2
1a6ec070
RK
4374 (nonzero_bits (XEXP (SET_SRC (x), 0),
4375 GET_MODE (XEXP (SET_SRC (x), 0))))))
4376 {
4377 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
4378
4379 SUBST (SET_SRC (x),
f1c6ba8b
RK
4380 gen_rtx_NEG (mode,
4381 gen_rtx_LSHIFTRT (mode,
4382 XEXP (SET_SRC (x), 0),
4383 GEN_INT (pos))));
1a6ec070
RK
4384
4385 split = find_split_point (&SET_SRC (x), insn);
4386 if (split && split != &SET_SRC (x))
4387 return split;
4388 }
4389 break;
4390
230d793d
RS
4391 case SIGN_EXTEND:
4392 inner = XEXP (SET_SRC (x), 0);
101c1a3d
JL
4393
4394 /* We can't optimize if either mode is a partial integer
4395 mode as we don't know how many bits are significant
4396 in those modes. */
4397 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
4398 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
4399 break;
4400
230d793d
RS
4401 pos = 0;
4402 len = GET_MODE_BITSIZE (GET_MODE (inner));
4403 unsignedp = 0;
4404 break;
4405
4406 case SIGN_EXTRACT:
4407 case ZERO_EXTRACT:
481683e1
SZ
4408 if (CONST_INT_P (XEXP (SET_SRC (x), 1))
4409 && CONST_INT_P (XEXP (SET_SRC (x), 2)))
230d793d
RS
4410 {
4411 inner = XEXP (SET_SRC (x), 0);
4412 len = INTVAL (XEXP (SET_SRC (x), 1));
4413 pos = INTVAL (XEXP (SET_SRC (x), 2));
4414
f76b9db2
ILT
4415 if (BITS_BIG_ENDIAN)
4416 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
230d793d
RS
4417 unsignedp = (code == ZERO_EXTRACT);
4418 }
4419 break;
e9a25f70
JL
4420
4421 default:
4422 break;
230d793d
RS
4423 }
4424
4425 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
4426 {
4427 enum machine_mode mode = GET_MODE (SET_SRC (x));
4428
d0ab8cd3
RK
4429 /* For unsigned, we have a choice of a shift followed by an
4430 AND or two shifts. Use two shifts for field sizes where the
4431 constant might be too large. We assume here that we can
4432 always at least get 8-bit constants in an AND insn, which is
4433 true for every current RISC. */
4434
4435 if (unsignedp && len <= 8)
230d793d
RS
4436 {
4437 SUBST (SET_SRC (x),
f1c6ba8b
RK
4438 gen_rtx_AND (mode,
4439 gen_rtx_LSHIFTRT
4de249d9 4440 (mode, gen_lowpart (mode, inner),
f1c6ba8b
RK
4441 GEN_INT (pos)),
4442 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 4443
d0ab8cd3 4444 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
4445 if (split && split != &SET_SRC (x))
4446 return split;
4447 }
4448 else
4449 {
4450 SUBST (SET_SRC (x),
f1c6ba8b 4451 gen_rtx_fmt_ee
d0ab8cd3 4452 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
f1c6ba8b 4453 gen_rtx_ASHIFT (mode,
4de249d9 4454 gen_lowpart (mode, inner),
f1c6ba8b
RK
4455 GEN_INT (GET_MODE_BITSIZE (mode)
4456 - len - pos)),
5f4f0e22 4457 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 4458
d0ab8cd3 4459 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
4460 if (split && split != &SET_SRC (x))
4461 return split;
4462 }
4463 }
4464
4465 /* See if this is a simple operation with a constant as the second
4466 operand. It might be that this constant is out of range and hence
4467 could be used as a split point. */
ec8e098d 4468 if (BINARY_P (SET_SRC (x))
230d793d 4469 && CONSTANT_P (XEXP (SET_SRC (x), 1))
ec8e098d 4470 && (OBJECT_P (XEXP (SET_SRC (x), 0))
230d793d 4471 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
ec8e098d 4472 && OBJECT_P (SUBREG_REG (XEXP (SET_SRC (x), 0))))))
230d793d
RS
4473 return &XEXP (SET_SRC (x), 1);
4474
4475 /* Finally, see if this is a simple operation with its first operand
4476 not in a register. The operation might require this operand in a
4477 register, so return it as a split point. We can always do this
4478 because if the first operand were another operation, we would have
4479 already found it as a split point. */
ec8e098d 4480 if ((BINARY_P (SET_SRC (x)) || UNARY_P (SET_SRC (x)))
230d793d
RS
4481 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
4482 return &XEXP (SET_SRC (x), 0);
4483
4484 return 0;
4485
4486 case AND:
4487 case IOR:
4488 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
4489 it is better to write this as (not (ior A B)) so we can split it.
4490 Similarly for IOR. */
4491 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
4492 {
4493 SUBST (*loc,
f1c6ba8b
RK
4494 gen_rtx_NOT (GET_MODE (x),
4495 gen_rtx_fmt_ee (code == IOR ? AND : IOR,
4496 GET_MODE (x),
4497 XEXP (XEXP (x, 0), 0),
4498 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 4499 return find_split_point (loc, insn);
230d793d
RS
4500 }
4501
4502 /* Many RISC machines have a large set of logical insns. If the
4503 second operand is a NOT, put it first so we will try to split the
4504 other operand first. */
4505 if (GET_CODE (XEXP (x, 1)) == NOT)
4506 {
4507 rtx tem = XEXP (x, 0);
4508 SUBST (XEXP (x, 0), XEXP (x, 1));
4509 SUBST (XEXP (x, 1), tem);
4510 }
4511 break;
e9a25f70
JL
4512
4513 default:
4514 break;
230d793d
RS
4515 }
4516
4517 /* Otherwise, select our actions depending on our rtx class. */
4518 switch (GET_RTX_CLASS (code))
4519 {
ec8e098d
PB
4520 case RTX_BITFIELD_OPS: /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
4521 case RTX_TERNARY:
d0ab8cd3 4522 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
4523 if (split)
4524 return split;
0f41302f 4525 /* ... fall through ... */
ec8e098d
PB
4526 case RTX_BIN_ARITH:
4527 case RTX_COMM_ARITH:
4528 case RTX_COMPARE:
4529 case RTX_COMM_COMPARE:
d0ab8cd3 4530 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
4531 if (split)
4532 return split;
0f41302f 4533 /* ... fall through ... */
ec8e098d 4534 case RTX_UNARY:
230d793d
RS
4535 /* Some machines have (and (shift ...) ...) insns. If X is not
4536 an AND, but XEXP (X, 0) is, use it as our split point. */
4537 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
4538 return &XEXP (x, 0);
4539
d0ab8cd3 4540 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
4541 if (split)
4542 return split;
4543 return loc;
230d793d 4544
ec8e098d
PB
4545 default:
4546 /* Otherwise, we don't have a split point. */
4547 return 0;
4548 }
230d793d
RS
4549}
4550\f
4551/* Throughout X, replace FROM with TO, and return the result.
4552 The result is TO if X is FROM;
4553 otherwise the result is X, but its contents may have been modified.
4554 If they were modified, a record was made in undobuf so that
4555 undo_all will (among other things) return X to its original state.
4556
4557 If the number of changes necessary is too much to record to undo,
4558 the excess changes are not made, so the result is invalid.
4559 The changes already made can still be undone.
4560 undobuf.num_undo is incremented for such changes, so by testing that
4561 the caller can tell whether the result is valid.
4562
4563 `n_occurrences' is incremented each time FROM is replaced.
663522cb 4564
da7d8304 4565 IN_DEST is nonzero if we are processing the SET_DEST of a SET.
230d793d 4566
da7d8304
KH
4567 UNIQUE_COPY is nonzero if each substitution must be unique. We do this
4568 by copying if `n_occurrences' is nonzero. */
230d793d
RS
4569
4570static rtx
79a490a9 4571subst (rtx x, rtx from, rtx to, int in_dest, int unique_copy)
230d793d 4572{
b3694847 4573 enum rtx_code code = GET_CODE (x);
230d793d 4574 enum machine_mode op0_mode = VOIDmode;
b3694847
SS
4575 const char *fmt;
4576 int len, i;
32e9fa48 4577 rtx new_rtx;
230d793d
RS
4578
4579/* Two expressions are equal if they are identical copies of a shared
4580 RTX or if they are both registers with the same register number
4581 and mode. */
4582
4583#define COMBINE_RTX_EQUAL_P(X,Y) \
4584 ((X) == (Y) \
f8cfc6aa 4585 || (REG_P (X) && REG_P (Y) \
230d793d
RS
4586 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
4587
4588 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
4589 {
4590 n_occurrences++;
4591 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
4592 }
4593
6fb5fa3c
DB
4594 /* If X and FROM are the same register but different modes, they
4595 will not have been seen as equal above. However, the log links code
4596 will make a LOG_LINKS entry for that case. If we do nothing, we
4597 will try to rerecognize our original insn and, when it succeeds,
4598 we will delete the feeding insn, which is incorrect.
230d793d
RS
4599
4600 So force this insn not to match in this (rare) case. */
f8cfc6aa 4601 if (! in_dest && code == REG && REG_P (from)
467f79e8 4602 && reg_overlap_mentioned_p (x, from))
38a448ca 4603 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
4604
4605 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
4606 of which may contain things that can be combined. */
ec8e098d 4607 if (code != MEM && code != LO_SUM && OBJECT_P (x))
230d793d
RS
4608 return x;
4609
4610 /* It is possible to have a subexpression appear twice in the insn.
4611 Suppose that FROM is a register that appears within TO.
4612 Then, after that subexpression has been scanned once by `subst',
4613 the second time it is scanned, TO may be found. If we were
4614 to scan TO here, we would find FROM within it and create a
4615 self-referent rtl structure which is completely wrong. */
4616 if (COMBINE_RTX_EQUAL_P (x, to))
4617 return to;
4618
4f4b3679
RH
4619 /* Parallel asm_operands need special attention because all of the
4620 inputs are shared across the arms. Furthermore, unsharing the
4621 rtl results in recognition failures. Failure to handle this case
4622 specially can result in circular rtl.
4623
4624 Solve this by doing a normal pass across the first entry of the
4625 parallel, and only processing the SET_DESTs of the subsequent
4626 entries. Ug. */
4627
4628 if (code == PARALLEL
4629 && GET_CODE (XVECEXP (x, 0, 0)) == SET
4630 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
230d793d 4631 {
32e9fa48 4632 new_rtx = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
4f4b3679
RH
4633
4634 /* If this substitution failed, this whole thing fails. */
32e9fa48
KG
4635 if (GET_CODE (new_rtx) == CLOBBER
4636 && XEXP (new_rtx, 0) == const0_rtx)
4637 return new_rtx;
4f4b3679 4638
32e9fa48 4639 SUBST (XVECEXP (x, 0, 0), new_rtx);
4f4b3679
RH
4640
4641 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
230d793d 4642 {
4f4b3679 4643 rtx dest = SET_DEST (XVECEXP (x, 0, i));
663522cb 4644
f8cfc6aa 4645 if (!REG_P (dest)
4f4b3679
RH
4646 && GET_CODE (dest) != CC0
4647 && GET_CODE (dest) != PC)
230d793d 4648 {
32e9fa48 4649 new_rtx = subst (dest, from, to, 0, unique_copy);
230d793d 4650
4f4b3679 4651 /* If this substitution failed, this whole thing fails. */
32e9fa48
KG
4652 if (GET_CODE (new_rtx) == CLOBBER
4653 && XEXP (new_rtx, 0) == const0_rtx)
4654 return new_rtx;
230d793d 4655
32e9fa48 4656 SUBST (SET_DEST (XVECEXP (x, 0, i)), new_rtx);
230d793d
RS
4657 }
4658 }
4f4b3679
RH
4659 }
4660 else
4661 {
4662 len = GET_RTX_LENGTH (code);
4663 fmt = GET_RTX_FORMAT (code);
4664
4665 /* We don't need to process a SET_DEST that is a register, CC0,
4666 or PC, so set up to skip this common case. All other cases
4667 where we want to suppress replacing something inside a
4668 SET_SRC are handled via the IN_DEST operand. */
4669 if (code == SET
f8cfc6aa 4670 && (REG_P (SET_DEST (x))
4f4b3679
RH
4671 || GET_CODE (SET_DEST (x)) == CC0
4672 || GET_CODE (SET_DEST (x)) == PC))
4673 fmt = "ie";
4674
4675 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
4676 constant. */
4677 if (fmt[0] == 'e')
4678 op0_mode = GET_MODE (XEXP (x, 0));
4679
4680 for (i = 0; i < len; i++)
230d793d 4681 {
4f4b3679 4682 if (fmt[i] == 'E')
230d793d 4683 {
b3694847 4684 int j;
4f4b3679
RH
4685 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4686 {
4687 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
4688 {
32e9fa48 4689 new_rtx = (unique_copy && n_occurrences
4f4b3679
RH
4690 ? copy_rtx (to) : to);
4691 n_occurrences++;
4692 }
4693 else
4694 {
32e9fa48 4695 new_rtx = subst (XVECEXP (x, i, j), from, to, 0,
4f4b3679
RH
4696 unique_copy);
4697
4698 /* If this substitution failed, this whole thing
4699 fails. */
32e9fa48
KG
4700 if (GET_CODE (new_rtx) == CLOBBER
4701 && XEXP (new_rtx, 0) == const0_rtx)
4702 return new_rtx;
4f4b3679
RH
4703 }
4704
32e9fa48 4705 SUBST (XVECEXP (x, i, j), new_rtx);
4f4b3679
RH
4706 }
4707 }
4708 else if (fmt[i] == 'e')
4709 {
0a33d11e 4710 /* If this is a register being set, ignore it. */
32e9fa48 4711 new_rtx = XEXP (x, i);
0a33d11e 4712 if (in_dest
0a33d11e 4713 && i == 0
b78b8bd8 4714 && (((code == SUBREG || code == ZERO_EXTRACT)
32e9fa48 4715 && REG_P (new_rtx))
b78b8bd8 4716 || code == STRICT_LOW_PART))
0a33d11e
RH
4717 ;
4718
4719 else if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
4f4b3679
RH
4720 {
4721 /* In general, don't install a subreg involving two
4722 modes not tieable. It can worsen register
4723 allocation, and can even make invalid reload
4724 insns, since the reg inside may need to be copied
4725 from in the outside mode, and that may be invalid
4726 if it is an fp reg copied in integer mode.
4727
4728 We allow two exceptions to this: It is valid if
4729 it is inside another SUBREG and the mode of that
4730 SUBREG and the mode of the inside of TO is
4731 tieable and it is valid if X is a SET that copies
4732 FROM to CC0. */
4733
4734 if (GET_CODE (to) == SUBREG
4735 && ! MODES_TIEABLE_P (GET_MODE (to),
4736 GET_MODE (SUBREG_REG (to)))
4737 && ! (code == SUBREG
4738 && MODES_TIEABLE_P (GET_MODE (x),
4739 GET_MODE (SUBREG_REG (to))))
42301240 4740#ifdef HAVE_cc0
4f4b3679 4741 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
42301240 4742#endif
4f4b3679
RH
4743 )
4744 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
42301240 4745
cff9f8d5 4746#ifdef CANNOT_CHANGE_MODE_CLASS
ed8afe3a 4747 if (code == SUBREG
f8cfc6aa 4748 && REG_P (to)
ed8afe3a 4749 && REGNO (to) < FIRST_PSEUDO_REGISTER
cff9f8d5
AH
4750 && REG_CANNOT_CHANGE_MODE_P (REGNO (to),
4751 GET_MODE (to),
4752 GET_MODE (x)))
ed8afe3a
GK
4753 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
4754#endif
4755
32e9fa48 4756 new_rtx = (unique_copy && n_occurrences ? copy_rtx (to) : to);
4f4b3679
RH
4757 n_occurrences++;
4758 }
4759 else
4760 /* If we are in a SET_DEST, suppress most cases unless we
4761 have gone inside a MEM, in which case we want to
4762 simplify the address. We assume here that things that
4763 are actually part of the destination have their inner
663522cb 4764 parts in the first expression. This is true for SUBREG,
4f4b3679
RH
4765 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
4766 things aside from REG and MEM that should appear in a
4767 SET_DEST. */
32e9fa48 4768 new_rtx = subst (XEXP (x, i), from, to,
4f4b3679
RH
4769 (((in_dest
4770 && (code == SUBREG || code == STRICT_LOW_PART
4771 || code == ZERO_EXTRACT))
4772 || code == SET)
4773 && i == 0), unique_copy);
4774
4775 /* If we found that we will have to reject this combination,
4776 indicate that by returning the CLOBBER ourselves, rather than
4777 an expression containing it. This will speed things up as
4778 well as prevent accidents where two CLOBBERs are considered
4779 to be equal, thus producing an incorrect simplification. */
4780
32e9fa48
KG
4781 if (GET_CODE (new_rtx) == CLOBBER && XEXP (new_rtx, 0) == const0_rtx)
4782 return new_rtx;
4f4b3679 4783
cc8c96fd 4784 if (GET_CODE (x) == SUBREG
481683e1 4785 && (CONST_INT_P (new_rtx)
32e9fa48 4786 || GET_CODE (new_rtx) == CONST_DOUBLE))
4161da12 4787 {
b0dd4808 4788 enum machine_mode mode = GET_MODE (x);
2e676d78 4789
32e9fa48 4790 x = simplify_subreg (GET_MODE (x), new_rtx,
4161da12
AO
4791 GET_MODE (SUBREG_REG (x)),
4792 SUBREG_BYTE (x));
4793 if (! x)
b0dd4808 4794 x = gen_rtx_CLOBBER (mode, const0_rtx);
4161da12 4795 }
481683e1 4796 else if (CONST_INT_P (new_rtx)
4161da12
AO
4797 && GET_CODE (x) == ZERO_EXTEND)
4798 {
4799 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
32e9fa48 4800 new_rtx, GET_MODE (XEXP (x, 0)));
341c100f 4801 gcc_assert (x);
4161da12
AO
4802 }
4803 else
32e9fa48 4804 SUBST (XEXP (x, i), new_rtx);
230d793d 4805 }
230d793d
RS
4806 }
4807 }
4808
95afbcac
UB
4809 /* Check if we are loading something from the constant pool via float
4810 extension; in this case we would undo compress_float_constant
4811 optimization and degenerate constant load to an immediate value. */
4812 if (GET_CODE (x) == FLOAT_EXTEND
4813 && MEM_P (XEXP (x, 0))
4814 && MEM_READONLY_P (XEXP (x, 0)))
4815 {
4816 rtx tmp = avoid_constant_pool_reference (x);
4817 if (x != tmp)
4818 return x;
4819 }
4820
8079805d
RK
4821 /* Try to simplify X. If the simplification changed the code, it is likely
4822 that further simplification will help, so loop, but limit the number
4823 of repetitions that will be performed. */
4824
4825 for (i = 0; i < 4; i++)
4826 {
4827 /* If X is sufficiently simple, don't bother trying to do anything
4828 with it. */
4829 if (code != CONST_INT && code != REG && code != CLOBBER)
6621d78e 4830 x = combine_simplify_rtx (x, op0_mode, in_dest);
d0ab8cd3 4831
8079805d
RK
4832 if (GET_CODE (x) == code)
4833 break;
d0ab8cd3 4834
8079805d 4835 code = GET_CODE (x);
eeb43d32 4836
8079805d
RK
4837 /* We no longer know the original mode of operand 0 since we
4838 have changed the form of X) */
4839 op0_mode = VOIDmode;
4840 }
eeb43d32 4841
8079805d
RK
4842 return x;
4843}
4844\f
4845/* Simplify X, a piece of RTL. We just operate on the expression at the
4846 outer level; call `subst' to simplify recursively. Return the new
4847 expression.
4848
6621d78e
PB
4849 OP0_MODE is the original mode of XEXP (x, 0). IN_DEST is nonzero
4850 if we are inside a SET_DEST. */
eeb43d32 4851
8079805d 4852static rtx
6621d78e 4853combine_simplify_rtx (rtx x, enum machine_mode op0_mode, int in_dest)
8079805d
RK
4854{
4855 enum rtx_code code = GET_CODE (x);
4856 enum machine_mode mode = GET_MODE (x);
4857 rtx temp;
4858 int i;
d0ab8cd3 4859
230d793d
RS
4860 /* If this is a commutative operation, put a constant last and a complex
4861 expression first. We don't need to do this for comparisons here. */
ec8e098d 4862 if (COMMUTATIVE_ARITH_P (x)
e5c56fd9 4863 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
230d793d
RS
4864 {
4865 temp = XEXP (x, 0);
4866 SUBST (XEXP (x, 0), XEXP (x, 1));
4867 SUBST (XEXP (x, 1), temp);
4868 }
4869
663522cb 4870 /* If this is a simple operation applied to an IF_THEN_ELSE, try
d0ab8cd3 4871 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
4872 things. Check for cases where both arms are testing the same
4873 condition.
4874
4875 Don't do anything if all operands are very simple. */
4876
ec8e098d
PB
4877 if ((BINARY_P (x)
4878 && ((!OBJECT_P (XEXP (x, 0))
abe6e52f 4879 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
ec8e098d
PB
4880 && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))
4881 || (!OBJECT_P (XEXP (x, 1))
abe6e52f 4882 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
ec8e098d
PB
4883 && OBJECT_P (SUBREG_REG (XEXP (x, 1)))))))
4884 || (UNARY_P (x)
c22cacf3 4885 && (!OBJECT_P (XEXP (x, 0))
abe6e52f 4886 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
ec8e098d 4887 && OBJECT_P (SUBREG_REG (XEXP (x, 0)))))))
d0ab8cd3 4888 {
d6edb99e 4889 rtx cond, true_rtx, false_rtx;
abe6e52f 4890
d6edb99e 4891 cond = if_then_else_cond (x, &true_rtx, &false_rtx);
0802d516
RK
4892 if (cond != 0
4893 /* If everything is a comparison, what we have is highly unlikely
4894 to be simpler, so don't use it. */
ec8e098d
PB
4895 && ! (COMPARISON_P (x)
4896 && (COMPARISON_P (true_rtx) || COMPARISON_P (false_rtx))))
abe6e52f
RK
4897 {
4898 rtx cop1 = const0_rtx;
4899 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
4900
ec8e098d 4901 if (cond_code == NE && COMPARISON_P (cond))
15448afc
RK
4902 return x;
4903
663522cb 4904 /* Simplify the alternative arms; this may collapse the true and
c6279378
UW
4905 false arms to store-flag values. Be careful to use copy_rtx
4906 here since true_rtx or false_rtx might share RTL with x as a
4907 result of the if_then_else_cond call above. */
4908 true_rtx = subst (copy_rtx (true_rtx), pc_rtx, pc_rtx, 0, 0);
4909 false_rtx = subst (copy_rtx (false_rtx), pc_rtx, pc_rtx, 0, 0);
9210df58 4910
d6edb99e 4911 /* If true_rtx and false_rtx are not general_operands, an if_then_else
085f1714 4912 is unlikely to be simpler. */
d6edb99e
ZW
4913 if (general_operand (true_rtx, VOIDmode)
4914 && general_operand (false_rtx, VOIDmode))
085f1714 4915 {
434c87d4
JH
4916 enum rtx_code reversed;
4917
085f1714
RH
4918 /* Restarting if we generate a store-flag expression will cause
4919 us to loop. Just drop through in this case. */
4920
4921 /* If the result values are STORE_FLAG_VALUE and zero, we can
4922 just make the comparison operation. */
d6edb99e 4923 if (true_rtx == const_true_rtx && false_rtx == const0_rtx)
bcb34aa3
PB
4924 x = simplify_gen_relational (cond_code, mode, VOIDmode,
4925 cond, cop1);
fa4e13e0 4926 else if (true_rtx == const0_rtx && false_rtx == const_true_rtx
434c87d4 4927 && ((reversed = reversed_comparison_code_parts
79a490a9 4928 (cond_code, cond, cop1, NULL))
c22cacf3 4929 != UNKNOWN))
bcb34aa3
PB
4930 x = simplify_gen_relational (reversed, mode, VOIDmode,
4931 cond, cop1);
085f1714
RH
4932
4933 /* Likewise, we can make the negate of a comparison operation
4934 if the result values are - STORE_FLAG_VALUE and zero. */
481683e1 4935 else if (CONST_INT_P (true_rtx)
d6edb99e
ZW
4936 && INTVAL (true_rtx) == - STORE_FLAG_VALUE
4937 && false_rtx == const0_rtx)
f1c6ba8b 4938 x = simplify_gen_unary (NEG, mode,
bcb34aa3
PB
4939 simplify_gen_relational (cond_code,
4940 mode, VOIDmode,
4941 cond, cop1),
f1c6ba8b 4942 mode);
481683e1 4943 else if (CONST_INT_P (false_rtx)
d6edb99e 4944 && INTVAL (false_rtx) == - STORE_FLAG_VALUE
434c87d4
JH
4945 && true_rtx == const0_rtx
4946 && ((reversed = reversed_comparison_code_parts
79a490a9 4947 (cond_code, cond, cop1, NULL))
c22cacf3 4948 != UNKNOWN))
f1c6ba8b 4949 x = simplify_gen_unary (NEG, mode,
bcb34aa3
PB
4950 simplify_gen_relational (reversed,
4951 mode, VOIDmode,
4952 cond, cop1),
f1c6ba8b 4953 mode);
085f1714
RH
4954 else
4955 return gen_rtx_IF_THEN_ELSE (mode,
bcb34aa3
PB
4956 simplify_gen_relational (cond_code,
4957 mode,
4958 VOIDmode,
4959 cond,
4960 cop1),
d6edb99e 4961 true_rtx, false_rtx);
5109d49f 4962
085f1714
RH
4963 code = GET_CODE (x);
4964 op0_mode = VOIDmode;
4965 }
abe6e52f 4966 }
d0ab8cd3
RK
4967 }
4968
230d793d
RS
4969 /* Try to fold this expression in case we have constants that weren't
4970 present before. */
4971 temp = 0;
4972 switch (GET_RTX_CLASS (code))
4973 {
ec8e098d 4974 case RTX_UNARY:
c0657872
RS
4975 if (op0_mode == VOIDmode)
4976 op0_mode = GET_MODE (XEXP (x, 0));
230d793d
RS
4977 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
4978 break;
ec8e098d
PB
4979 case RTX_COMPARE:
4980 case RTX_COMM_COMPARE:
47b1e19b
JH
4981 {
4982 enum machine_mode cmp_mode = GET_MODE (XEXP (x, 0));
4983 if (cmp_mode == VOIDmode)
1cac8785
DD
4984 {
4985 cmp_mode = GET_MODE (XEXP (x, 1));
4986 if (cmp_mode == VOIDmode)
4987 cmp_mode = op0_mode;
4988 }
7ce3e360 4989 temp = simplify_relational_operation (code, mode, cmp_mode,
47b1e19b
JH
4990 XEXP (x, 0), XEXP (x, 1));
4991 }
230d793d 4992 break;
ec8e098d
PB
4993 case RTX_COMM_ARITH:
4994 case RTX_BIN_ARITH:
230d793d
RS
4995 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
4996 break;
ec8e098d
PB
4997 case RTX_BITFIELD_OPS:
4998 case RTX_TERNARY:
230d793d
RS
4999 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
5000 XEXP (x, 1), XEXP (x, 2));
5001 break;
ec8e098d
PB
5002 default:
5003 break;
230d793d
RS
5004 }
5005
5006 if (temp)
4531c1c7
DN
5007 {
5008 x = temp;
5009 code = GET_CODE (temp);
5010 op0_mode = VOIDmode;
5011 mode = GET_MODE (temp);
5012 }
230d793d 5013
230d793d 5014 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
5015 if (code == PLUS || code == MINUS
5016 || code == AND || code == IOR || code == XOR)
230d793d
RS
5017 {
5018 x = apply_distributive_law (x);
5019 code = GET_CODE (x);
6e20204f 5020 op0_mode = VOIDmode;
230d793d
RS
5021 }
5022
5023 /* If CODE is an associative operation not otherwise handled, see if we
5024 can associate some operands. This can win if they are constants or
e0e08ac2 5025 if they are logically related (i.e. (a & b) & a). */
493efd37
TM
5026 if ((code == PLUS || code == MINUS || code == MULT || code == DIV
5027 || code == AND || code == IOR || code == XOR
230d793d 5028 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
493efd37 5029 && ((INTEGRAL_MODE_P (mode) && code != DIV)
a1a82611 5030 || (flag_associative_math && FLOAT_MODE_P (mode))))
230d793d
RS
5031 {
5032 if (GET_CODE (XEXP (x, 0)) == code)
5033 {
5034 rtx other = XEXP (XEXP (x, 0), 0);
5035 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
5036 rtx inner_op1 = XEXP (x, 1);
5037 rtx inner;
663522cb 5038
230d793d
RS
5039 /* Make sure we pass the constant operand if any as the second
5040 one if this is a commutative operation. */
ec8e098d 5041 if (CONSTANT_P (inner_op0) && COMMUTATIVE_ARITH_P (x))
230d793d
RS
5042 {
5043 rtx tem = inner_op0;
5044 inner_op0 = inner_op1;
5045 inner_op1 = tem;
5046 }
5047 inner = simplify_binary_operation (code == MINUS ? PLUS
5048 : code == DIV ? MULT
230d793d
RS
5049 : code,
5050 mode, inner_op0, inner_op1);
5051
5052 /* For commutative operations, try the other pair if that one
5053 didn't simplify. */
ec8e098d 5054 if (inner == 0 && COMMUTATIVE_ARITH_P (x))
230d793d
RS
5055 {
5056 other = XEXP (XEXP (x, 0), 1);
5057 inner = simplify_binary_operation (code, mode,
5058 XEXP (XEXP (x, 0), 0),
5059 XEXP (x, 1));
5060 }
5061
5062 if (inner)
bcb34aa3 5063 return simplify_gen_binary (code, mode, other, inner);
230d793d
RS
5064 }
5065 }
5066
5067 /* A little bit of algebraic simplification here. */
5068 switch (code)
5069 {
5070 case MEM:
5071 /* Ensure that our address has any ASHIFTs converted to MULT in case
5072 address-recognizing predicates are called later. */
5073 temp = make_compound_operation (XEXP (x, 0), MEM);
5074 SUBST (XEXP (x, 0), temp);
5075 break;
5076
5077 case SUBREG:
eea50aa0
JH
5078 if (op0_mode == VOIDmode)
5079 op0_mode = GET_MODE (SUBREG_REG (x));
230d793d 5080
4de249d9 5081 /* See if this can be moved to simplify_subreg. */
3c99d5ff 5082 if (CONSTANT_P (SUBREG_REG (x))
156755ac 5083 && subreg_lowpart_offset (mode, op0_mode) == SUBREG_BYTE (x)
4de249d9 5084 /* Don't call gen_lowpart if the inner mode
156755ac
JJ
5085 is VOIDmode and we cannot simplify it, as SUBREG without
5086 inner mode is invalid. */
5087 && (GET_MODE (SUBREG_REG (x)) != VOIDmode
5088 || gen_lowpart_common (mode, SUBREG_REG (x))))
4de249d9 5089 return gen_lowpart (mode, SUBREG_REG (x));
230d793d 5090
a13287e1 5091 if (GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_CC)
c22cacf3 5092 break;
eea50aa0
JH
5093 {
5094 rtx temp;
5095 temp = simplify_subreg (mode, SUBREG_REG (x), op0_mode,
23190837 5096 SUBREG_BYTE (x));
eea50aa0
JH
5097 if (temp)
5098 return temp;
5099 }
b65c1b5b 5100
30984c57 5101 /* Don't change the mode of the MEM if that would change the meaning
3eacd71f 5102 of the address. */
3c0cb5de 5103 if (MEM_P (SUBREG_REG (x))
30984c57 5104 && (MEM_VOLATILE_P (SUBREG_REG (x))
3eacd71f 5105 || mode_dependent_address_p (XEXP (SUBREG_REG (x), 0))))
30984c57
JJ
5106 return gen_rtx_CLOBBER (mode, const0_rtx);
5107
87e3e0c1
RK
5108 /* Note that we cannot do any narrowing for non-constants since
5109 we might have been counting on using the fact that some bits were
5110 zero. We now do this in the SET. */
5111
230d793d
RS
5112 break;
5113
230d793d 5114 case NEG:
230d793d
RS
5115 temp = expand_compound_operation (XEXP (x, 0));
5116
5117 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
23190837 5118 replaced by (lshiftrt X C). This will convert
230d793d
RS
5119 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
5120
5121 if (GET_CODE (temp) == ASHIFTRT
481683e1 5122 && CONST_INT_P (XEXP (temp, 1))
230d793d 5123 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
13991abb 5124 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (temp, 0),
8079805d 5125 INTVAL (XEXP (temp, 1)));
230d793d 5126
951553af 5127 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
5128 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
5129 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
5130 (sign_extract X 1 Y). But only do this if TEMP isn't a register
5131 or a SUBREG of one since we'd be making the expression more
5132 complex if it was just a register. */
5133
f8cfc6aa 5134 if (!REG_P (temp)
230d793d 5135 && ! (GET_CODE (temp) == SUBREG
f8cfc6aa 5136 && REG_P (SUBREG_REG (temp)))
951553af 5137 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
5138 {
5139 rtx temp1 = simplify_shift_const
5f4f0e22
CH
5140 (NULL_RTX, ASHIFTRT, mode,
5141 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
5142 GET_MODE_BITSIZE (mode) - 1 - i),
5143 GET_MODE_BITSIZE (mode) - 1 - i);
5144
5145 /* If all we did was surround TEMP with the two shifts, we
5146 haven't improved anything, so don't use it. Otherwise,
5147 we are better off with TEMP1. */
5148 if (GET_CODE (temp1) != ASHIFTRT
5149 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
5150 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 5151 return temp1;
230d793d
RS
5152 }
5153 break;
5154
2ca9ae17 5155 case TRUNCATE:
e30fb98f
JL
5156 /* We can't handle truncation to a partial integer mode here
5157 because we don't know the real bitsize of the partial
5158 integer mode. */
5159 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
5160 break;
5161
cfb8805e 5162 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2ca9ae17
JW
5163 SUBST (XEXP (x, 0),
5164 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
b1257407 5165 GET_MODE_MASK (mode), 0));
0f13a422 5166
d0e4968c
DD
5167 /* We can truncate a constant value and return it. */
5168 if (CONST_INT_P (XEXP (x, 0)))
5169 return gen_int_mode (INTVAL (XEXP (x, 0)), mode);
5170
bd1ef757
PB
5171 /* Similarly to what we do in simplify-rtx.c, a truncate of a register
5172 whose value is a comparison can be replaced with a subreg if
5173 STORE_FLAG_VALUE permits. */
0f13a422 5174 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
663522cb 5175 && ((HOST_WIDE_INT) STORE_FLAG_VALUE & ~GET_MODE_MASK (mode)) == 0
0f13a422 5176 && (temp = get_last_value (XEXP (x, 0)))
ec8e098d 5177 && COMPARISON_P (temp))
4de249d9 5178 return gen_lowpart (mode, XEXP (x, 0));
2ca9ae17
JW
5179 break;
5180
230d793d
RS
5181 case CONST:
5182 /* (const (const X)) can become (const X). Do it this way rather than
5183 returning the inner CONST since CONST can be shared with a
5184 REG_EQUAL note. */
5185 if (GET_CODE (XEXP (x, 0)) == CONST)
5186 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
5187 break;
5188
5189#ifdef HAVE_lo_sum
5190 case LO_SUM:
5191 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
5192 can add in an offset. find_split_point will split this address up
5193 again if it doesn't match. */
5194 if (GET_CODE (XEXP (x, 0)) == HIGH
5195 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
5196 return XEXP (x, 1);
5197 break;
5198#endif
5199
5200 case PLUS:
230d793d
RS
5201 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
5202 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
5203 bit-field and can be replaced by either a sign_extend or a
e6380233
JL
5204 sign_extract. The `and' may be a zero_extend and the two
5205 <c>, -<c> constants may be reversed. */
230d793d 5206 if (GET_CODE (XEXP (x, 0)) == XOR
481683e1
SZ
5207 && CONST_INT_P (XEXP (x, 1))
5208 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
663522cb 5209 && INTVAL (XEXP (x, 1)) == -INTVAL (XEXP (XEXP (x, 0), 1))
e6380233
JL
5210 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5211 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5f4f0e22 5212 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d 5213 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
481683e1 5214 && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
230d793d 5215 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 5216 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
5217 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
5218 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
770ae6cc 5219 == (unsigned int) i + 1))))
8079805d
RK
5220 return simplify_shift_const
5221 (NULL_RTX, ASHIFTRT, mode,
5222 simplify_shift_const (NULL_RTX, ASHIFT, mode,
5223 XEXP (XEXP (XEXP (x, 0), 0), 0),
5224 GET_MODE_BITSIZE (mode) - (i + 1)),
5225 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 5226
bc0776c6 5227 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
5228 can become (ashiftrt (ashift (xor x 1) C) C) where C is
5229 the bitsize of the mode - 1. This allows simplification of
5230 "a = (b & 8) == 0;" */
5231 if (XEXP (x, 1) == constm1_rtx
f8cfc6aa 5232 && !REG_P (XEXP (x, 0))
e869aa39 5233 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
f8cfc6aa 5234 && REG_P (SUBREG_REG (XEXP (x, 0))))
951553af 5235 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
5236 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
5237 simplify_shift_const (NULL_RTX, ASHIFT, mode,
f1c6ba8b 5238 gen_rtx_XOR (mode, XEXP (x, 0), const1_rtx),
8079805d
RK
5239 GET_MODE_BITSIZE (mode) - 1),
5240 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
5241
5242 /* If we are adding two things that have no bits in common, convert
5243 the addition into an IOR. This will often be further simplified,
5244 for example in cases like ((a & 1) + (a & 2)), which can
5245 become a & 3. */
5246
ac49a949 5247 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
5248 && (nonzero_bits (XEXP (x, 0), mode)
5249 & nonzero_bits (XEXP (x, 1), mode)) == 0)
085f1714
RH
5250 {
5251 /* Try to simplify the expression further. */
bcb34aa3 5252 rtx tor = simplify_gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
6621d78e 5253 temp = combine_simplify_rtx (tor, mode, in_dest);
085f1714
RH
5254
5255 /* If we could, great. If not, do not go ahead with the IOR
5256 replacement, since PLUS appears in many special purpose
5257 address arithmetic instructions. */
5258 if (GET_CODE (temp) != CLOBBER && temp != tor)
5259 return temp;
5260 }
230d793d
RS
5261 break;
5262
5263 case MINUS:
5264 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
5265 (and <foo> (const_int pow2-1)) */
5266 if (GET_CODE (XEXP (x, 1)) == AND
481683e1 5267 && CONST_INT_P (XEXP (XEXP (x, 1), 1))
663522cb 5268 && exact_log2 (-INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
230d793d 5269 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d 5270 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
663522cb 5271 -INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
230d793d
RS
5272 break;
5273
5274 case MULT:
5275 /* If we have (mult (plus A B) C), apply the distributive law and then
5276 the inverse distributive law to see if things simplify. This
5277 occurs mostly in addresses, often when unrolling loops. */
5278
5279 if (GET_CODE (XEXP (x, 0)) == PLUS)
5280 {
bcb34aa3
PB
5281 rtx result = distribute_and_simplify_rtx (x, 0);
5282 if (result)
5283 return result;
230d793d 5284 }
bcb34aa3 5285
4ba5f925 5286 /* Try simplify a*(b/c) as (a*b)/c. */
b8698a0f 5287 if (FLOAT_MODE_P (mode) && flag_associative_math
4ba5f925
JH
5288 && GET_CODE (XEXP (x, 0)) == DIV)
5289 {
5290 rtx tem = simplify_binary_operation (MULT, mode,
5291 XEXP (XEXP (x, 0), 0),
5292 XEXP (x, 1));
5293 if (tem)
bcb34aa3 5294 return simplify_gen_binary (DIV, mode, tem, XEXP (XEXP (x, 0), 1));
4ba5f925 5295 }
230d793d
RS
5296 break;
5297
5298 case UDIV:
5299 /* If this is a divide by a power of two, treat it as a shift if
5300 its first operand is a shift. */
481683e1 5301 if (CONST_INT_P (XEXP (x, 1))
230d793d
RS
5302 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
5303 && (GET_CODE (XEXP (x, 0)) == ASHIFT
5304 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
5305 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
5306 || GET_CODE (XEXP (x, 0)) == ROTATE
5307 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 5308 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
5309 break;
5310
5311 case EQ: case NE:
5312 case GT: case GTU: case GE: case GEU:
5313 case LT: case LTU: case LE: case LEU:
69bc0a1f 5314 case UNEQ: case LTGT:
23190837
AJ
5315 case UNGT: case UNGE:
5316 case UNLT: case UNLE:
69bc0a1f 5317 case UNORDERED: case ORDERED:
230d793d
RS
5318 /* If the first operand is a condition code, we can't do anything
5319 with it. */
5320 if (GET_CODE (XEXP (x, 0)) == COMPARE
5321 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
8beccec8 5322 && ! CC0_P (XEXP (x, 0))))
230d793d
RS
5323 {
5324 rtx op0 = XEXP (x, 0);
5325 rtx op1 = XEXP (x, 1);
5326 enum rtx_code new_code;
5327
5328 if (GET_CODE (op0) == COMPARE)
5329 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5330
5331 /* Simplify our comparison, if possible. */
5332 new_code = simplify_comparison (code, &op0, &op1);
5333
230d793d 5334 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 5335 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
5336 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
5337 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
5338 known to be either 0 or -1, NE becomes a NEG and EQ becomes
5339 (plus X 1).
5340
5341 Remove any ZERO_EXTRACT we made when thinking this was a
5342 comparison. It may now be simpler to use, e.g., an AND. If a
5343 ZERO_EXTRACT is indeed appropriate, it will be placed back by
5344 the call to make_compound_operation in the SET case. */
5345
0802d516
RK
5346 if (STORE_FLAG_VALUE == 1
5347 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
a191f0ee
RH
5348 && op1 == const0_rtx
5349 && mode == GET_MODE (op0)
5350 && nonzero_bits (op0, mode) == 1)
4de249d9
PB
5351 return gen_lowpart (mode,
5352 expand_compound_operation (op0));
5109d49f 5353
0802d516
RK
5354 else if (STORE_FLAG_VALUE == 1
5355 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 5356 && op1 == const0_rtx
a191f0ee 5357 && mode == GET_MODE (op0)
5109d49f
RK
5358 && (num_sign_bit_copies (op0, mode)
5359 == GET_MODE_BITSIZE (mode)))
5360 {
5361 op0 = expand_compound_operation (op0);
f1c6ba8b 5362 return simplify_gen_unary (NEG, mode,
4de249d9 5363 gen_lowpart (mode, op0),
f1c6ba8b 5364 mode);
5109d49f
RK
5365 }
5366
0802d516
RK
5367 else if (STORE_FLAG_VALUE == 1
5368 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 5369 && op1 == const0_rtx
a191f0ee 5370 && mode == GET_MODE (op0)
5109d49f 5371 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
5372 {
5373 op0 = expand_compound_operation (op0);
bcb34aa3
PB
5374 return simplify_gen_binary (XOR, mode,
5375 gen_lowpart (mode, op0),
5376 const1_rtx);
5109d49f 5377 }
818b11b9 5378
0802d516
RK
5379 else if (STORE_FLAG_VALUE == 1
5380 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 5381 && op1 == const0_rtx
a191f0ee 5382 && mode == GET_MODE (op0)
5109d49f
RK
5383 && (num_sign_bit_copies (op0, mode)
5384 == GET_MODE_BITSIZE (mode)))
5385 {
5386 op0 = expand_compound_operation (op0);
4de249d9 5387 return plus_constant (gen_lowpart (mode, op0), 1);
818b11b9 5388 }
230d793d 5389
5109d49f
RK
5390 /* If STORE_FLAG_VALUE is -1, we have cases similar to
5391 those above. */
0802d516
RK
5392 if (STORE_FLAG_VALUE == -1
5393 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 5394 && op1 == const0_rtx
5109d49f
RK
5395 && (num_sign_bit_copies (op0, mode)
5396 == GET_MODE_BITSIZE (mode)))
4de249d9
PB
5397 return gen_lowpart (mode,
5398 expand_compound_operation (op0));
5109d49f 5399
0802d516
RK
5400 else if (STORE_FLAG_VALUE == -1
5401 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 5402 && op1 == const0_rtx
a191f0ee 5403 && mode == GET_MODE (op0)
5109d49f
RK
5404 && nonzero_bits (op0, mode) == 1)
5405 {
5406 op0 = expand_compound_operation (op0);
f1c6ba8b 5407 return simplify_gen_unary (NEG, mode,
4de249d9 5408 gen_lowpart (mode, op0),
f1c6ba8b 5409 mode);
5109d49f
RK
5410 }
5411
0802d516
RK
5412 else if (STORE_FLAG_VALUE == -1
5413 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 5414 && op1 == const0_rtx
a191f0ee 5415 && mode == GET_MODE (op0)
5109d49f
RK
5416 && (num_sign_bit_copies (op0, mode)
5417 == GET_MODE_BITSIZE (mode)))
230d793d 5418 {
818b11b9 5419 op0 = expand_compound_operation (op0);
f1c6ba8b 5420 return simplify_gen_unary (NOT, mode,
4de249d9 5421 gen_lowpart (mode, op0),
f1c6ba8b 5422 mode);
5109d49f
RK
5423 }
5424
5425 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
5426 else if (STORE_FLAG_VALUE == -1
5427 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f 5428 && op1 == const0_rtx
a191f0ee 5429 && mode == GET_MODE (op0)
5109d49f
RK
5430 && nonzero_bits (op0, mode) == 1)
5431 {
5432 op0 = expand_compound_operation (op0);
4de249d9 5433 return plus_constant (gen_lowpart (mode, op0), -1);
230d793d 5434 }
230d793d
RS
5435
5436 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
5437 one bit that might be nonzero, we can convert (ne x 0) to
5438 (ashift x c) where C puts the bit in the sign bit. Remove any
5439 AND with STORE_FLAG_VALUE when we are done, since we are only
5440 going to test the sign bit. */
3f508eca 5441 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 5442 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 5443 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e869aa39 5444 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
230d793d
RS
5445 && op1 == const0_rtx
5446 && mode == GET_MODE (op0)
5109d49f 5447 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 5448 {
818b11b9
RK
5449 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
5450 expand_compound_operation (op0),
230d793d
RS
5451 GET_MODE_BITSIZE (mode) - 1 - i);
5452 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
5453 return XEXP (x, 0);
5454 else
5455 return x;
5456 }
5457
5458 /* If the code changed, return a whole new comparison. */
5459 if (new_code != code)
f1c6ba8b 5460 return gen_rtx_fmt_ee (new_code, mode, op0, op1);
230d793d 5461
663522cb 5462 /* Otherwise, keep this operation, but maybe change its operands.
230d793d
RS
5463 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
5464 SUBST (XEXP (x, 0), op0);
5465 SUBST (XEXP (x, 1), op1);
5466 }
5467 break;
663522cb 5468
230d793d 5469 case IF_THEN_ELSE:
8079805d 5470 return simplify_if_then_else (x);
9210df58 5471
8079805d
RK
5472 case ZERO_EXTRACT:
5473 case SIGN_EXTRACT:
5474 case ZERO_EXTEND:
5475 case SIGN_EXTEND:
0f41302f 5476 /* If we are processing SET_DEST, we are done. */
8079805d
RK
5477 if (in_dest)
5478 return x;
d0ab8cd3 5479
8079805d 5480 return expand_compound_operation (x);
d0ab8cd3 5481
8079805d
RK
5482 case SET:
5483 return simplify_set (x);
1a26b032 5484
8079805d
RK
5485 case AND:
5486 case IOR:
6621d78e 5487 return simplify_logical (x);
d0ab8cd3 5488
8079805d
RK
5489 case ASHIFT:
5490 case LSHIFTRT:
5491 case ASHIFTRT:
5492 case ROTATE:
5493 case ROTATERT:
5494 /* If this is a shift by a constant amount, simplify it. */
481683e1 5495 if (CONST_INT_P (XEXP (x, 1)))
663522cb 5496 return simplify_shift_const (x, code, mode, XEXP (x, 0),
8079805d
RK
5497 INTVAL (XEXP (x, 1)));
5498
f8cfc6aa 5499 else if (SHIFT_COUNT_TRUNCATED && !REG_P (XEXP (x, 1)))
8079805d 5500 SUBST (XEXP (x, 1),
f1b1186f 5501 force_to_mode (XEXP (x, 1), GET_MODE (XEXP (x, 1)),
663522cb 5502 ((HOST_WIDE_INT) 1
8079805d
RK
5503 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
5504 - 1,
b1257407 5505 0));
8079805d 5506 break;
e9a25f70
JL
5507
5508 default:
5509 break;
8079805d
RK
5510 }
5511
5512 return x;
5513}
5514\f
5515/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 5516
8079805d 5517static rtx
79a490a9 5518simplify_if_then_else (rtx x)
8079805d
RK
5519{
5520 enum machine_mode mode = GET_MODE (x);
5521 rtx cond = XEXP (x, 0);
d6edb99e
ZW
5522 rtx true_rtx = XEXP (x, 1);
5523 rtx false_rtx = XEXP (x, 2);
8079805d 5524 enum rtx_code true_code = GET_CODE (cond);
ec8e098d 5525 int comparison_p = COMPARISON_P (cond);
8079805d
RK
5526 rtx temp;
5527 int i;
9a915772
JH
5528 enum rtx_code false_code;
5529 rtx reversed;
8079805d 5530
0f41302f 5531 /* Simplify storing of the truth value. */
d6edb99e 5532 if (comparison_p && true_rtx == const_true_rtx && false_rtx == const0_rtx)
bcb34aa3
PB
5533 return simplify_gen_relational (true_code, mode, VOIDmode,
5534 XEXP (cond, 0), XEXP (cond, 1));
663522cb 5535
0f41302f 5536 /* Also when the truth value has to be reversed. */
9a915772 5537 if (comparison_p
d6edb99e 5538 && true_rtx == const0_rtx && false_rtx == const_true_rtx
14f02e73 5539 && (reversed = reversed_comparison (cond, mode)))
9a915772 5540 return reversed;
8079805d
RK
5541
5542 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
5543 in it is being compared against certain values. Get the true and false
5544 comparisons and see if that says anything about the value of each arm. */
5545
9a915772 5546 if (comparison_p
14f02e73 5547 && ((false_code = reversed_comparison_code (cond, NULL))
9a915772 5548 != UNKNOWN)
f8cfc6aa 5549 && REG_P (XEXP (cond, 0)))
8079805d
RK
5550 {
5551 HOST_WIDE_INT nzb;
5552 rtx from = XEXP (cond, 0);
8079805d
RK
5553 rtx true_val = XEXP (cond, 1);
5554 rtx false_val = true_val;
5555 int swapped = 0;
9210df58 5556
8079805d 5557 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 5558
8079805d 5559 if (false_code == EQ)
1a26b032 5560 {
8079805d 5561 swapped = 1, true_code = EQ, false_code = NE;
d6edb99e 5562 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
8079805d 5563 }
5109d49f 5564
8079805d
RK
5565 /* If we are comparing against zero and the expression being tested has
5566 only a single bit that might be nonzero, that is its value when it is
5567 not equal to zero. Similarly if it is known to be -1 or 0. */
5568
5569 if (true_code == EQ && true_val == const0_rtx
5570 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
7f482dfe
UB
5571 {
5572 false_code = EQ;
5573 false_val = GEN_INT (trunc_int_for_mode (nzb, GET_MODE (from)));
5574 }
8079805d
RK
5575 else if (true_code == EQ && true_val == const0_rtx
5576 && (num_sign_bit_copies (from, GET_MODE (from))
5577 == GET_MODE_BITSIZE (GET_MODE (from))))
7f482dfe
UB
5578 {
5579 false_code = EQ;
5580 false_val = constm1_rtx;
5581 }
8079805d
RK
5582
5583 /* Now simplify an arm if we know the value of the register in the
5584 branch and it is used in the arm. Be careful due to the potential
5585 of locally-shared RTL. */
5586
d6edb99e
ZW
5587 if (reg_mentioned_p (from, true_rtx))
5588 true_rtx = subst (known_cond (copy_rtx (true_rtx), true_code,
5589 from, true_val),
8079805d 5590 pc_rtx, pc_rtx, 0, 0);
d6edb99e
ZW
5591 if (reg_mentioned_p (from, false_rtx))
5592 false_rtx = subst (known_cond (copy_rtx (false_rtx), false_code,
8079805d
RK
5593 from, false_val),
5594 pc_rtx, pc_rtx, 0, 0);
5595
d6edb99e
ZW
5596 SUBST (XEXP (x, 1), swapped ? false_rtx : true_rtx);
5597 SUBST (XEXP (x, 2), swapped ? true_rtx : false_rtx);
8079805d 5598
d6edb99e
ZW
5599 true_rtx = XEXP (x, 1);
5600 false_rtx = XEXP (x, 2);
5601 true_code = GET_CODE (cond);
8079805d 5602 }
5109d49f 5603
8079805d
RK
5604 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
5605 reversed, do so to avoid needing two sets of patterns for
5606 subtract-and-branch insns. Similarly if we have a constant in the true
5607 arm, the false arm is the same as the first operand of the comparison, or
5608 the false arm is more complicated than the true arm. */
5609
9a915772 5610 if (comparison_p
14f02e73 5611 && reversed_comparison_code (cond, NULL) != UNKNOWN
d6edb99e
ZW
5612 && (true_rtx == pc_rtx
5613 || (CONSTANT_P (true_rtx)
481683e1 5614 && !CONST_INT_P (false_rtx) && false_rtx != pc_rtx)
d6edb99e 5615 || true_rtx == const0_rtx
ec8e098d
PB
5616 || (OBJECT_P (true_rtx) && !OBJECT_P (false_rtx))
5617 || (GET_CODE (true_rtx) == SUBREG && OBJECT_P (SUBREG_REG (true_rtx))
5618 && !OBJECT_P (false_rtx))
d6edb99e
ZW
5619 || reg_mentioned_p (true_rtx, false_rtx)
5620 || rtx_equal_p (false_rtx, XEXP (cond, 0))))
8079805d 5621 {
9a915772 5622 true_code = reversed_comparison_code (cond, NULL);
14f02e73 5623 SUBST (XEXP (x, 0), reversed_comparison (cond, GET_MODE (cond)));
d6edb99e
ZW
5624 SUBST (XEXP (x, 1), false_rtx);
5625 SUBST (XEXP (x, 2), true_rtx);
1a26b032 5626
d6edb99e
ZW
5627 temp = true_rtx, true_rtx = false_rtx, false_rtx = temp;
5628 cond = XEXP (x, 0);
bb821298 5629
0f41302f 5630 /* It is possible that the conditional has been simplified out. */
bb821298 5631 true_code = GET_CODE (cond);
ec8e098d 5632 comparison_p = COMPARISON_P (cond);
8079805d 5633 }
abe6e52f 5634
8079805d 5635 /* If the two arms are identical, we don't need the comparison. */
1a26b032 5636
d6edb99e
ZW
5637 if (rtx_equal_p (true_rtx, false_rtx) && ! side_effects_p (cond))
5638 return true_rtx;
1a26b032 5639
5be669c7
RK
5640 /* Convert a == b ? b : a to "a". */
5641 if (true_code == EQ && ! side_effects_p (cond)
73e42cf3 5642 && !HONOR_NANS (mode)
d6edb99e
ZW
5643 && rtx_equal_p (XEXP (cond, 0), false_rtx)
5644 && rtx_equal_p (XEXP (cond, 1), true_rtx))
5645 return false_rtx;
5be669c7 5646 else if (true_code == NE && ! side_effects_p (cond)
73e42cf3 5647 && !HONOR_NANS (mode)
d6edb99e
ZW
5648 && rtx_equal_p (XEXP (cond, 0), true_rtx)
5649 && rtx_equal_p (XEXP (cond, 1), false_rtx))
5650 return true_rtx;
5be669c7 5651
8079805d
RK
5652 /* Look for cases where we have (abs x) or (neg (abs X)). */
5653
5654 if (GET_MODE_CLASS (mode) == MODE_INT
e37f6a49
SB
5655 && comparison_p
5656 && XEXP (cond, 1) == const0_rtx
d6edb99e
ZW
5657 && GET_CODE (false_rtx) == NEG
5658 && rtx_equal_p (true_rtx, XEXP (false_rtx, 0))
d6edb99e
ZW
5659 && rtx_equal_p (true_rtx, XEXP (cond, 0))
5660 && ! side_effects_p (true_rtx))
8079805d
RK
5661 switch (true_code)
5662 {
5663 case GT:
5664 case GE:
f1c6ba8b 5665 return simplify_gen_unary (ABS, mode, true_rtx, mode);
8079805d
RK
5666 case LT:
5667 case LE:
f1c6ba8b
RK
5668 return
5669 simplify_gen_unary (NEG, mode,
5670 simplify_gen_unary (ABS, mode, true_rtx, mode),
5671 mode);
cf0d9408
KH
5672 default:
5673 break;
8079805d
RK
5674 }
5675
5676 /* Look for MIN or MAX. */
5677
de6c5979 5678 if ((! FLOAT_MODE_P (mode) || flag_unsafe_math_optimizations)
8079805d 5679 && comparison_p
d6edb99e
ZW
5680 && rtx_equal_p (XEXP (cond, 0), true_rtx)
5681 && rtx_equal_p (XEXP (cond, 1), false_rtx)
8079805d
RK
5682 && ! side_effects_p (cond))
5683 switch (true_code)
5684 {
5685 case GE:
5686 case GT:
bcb34aa3 5687 return simplify_gen_binary (SMAX, mode, true_rtx, false_rtx);
8079805d
RK
5688 case LE:
5689 case LT:
bcb34aa3 5690 return simplify_gen_binary (SMIN, mode, true_rtx, false_rtx);
8079805d
RK
5691 case GEU:
5692 case GTU:
bcb34aa3 5693 return simplify_gen_binary (UMAX, mode, true_rtx, false_rtx);
8079805d
RK
5694 case LEU:
5695 case LTU:
bcb34aa3 5696 return simplify_gen_binary (UMIN, mode, true_rtx, false_rtx);
e9a25f70
JL
5697 default:
5698 break;
8079805d 5699 }
663522cb 5700
8079805d
RK
5701 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
5702 second operand is zero, this can be done as (OP Z (mult COND C2)) where
5703 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
5704 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
5705 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 5706 neither 1 or -1, but it isn't worth checking for. */
8079805d 5707
0802d516 5708 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
02484af9
EB
5709 && comparison_p
5710 && GET_MODE_CLASS (mode) == MODE_INT
5711 && ! side_effects_p (x))
8079805d 5712 {
d6edb99e
ZW
5713 rtx t = make_compound_operation (true_rtx, SET);
5714 rtx f = make_compound_operation (false_rtx, SET);
8079805d
RK
5715 rtx cond_op0 = XEXP (cond, 0);
5716 rtx cond_op1 = XEXP (cond, 1);
f822d252 5717 enum rtx_code op = UNKNOWN, extend_op = UNKNOWN;
8079805d 5718 enum machine_mode m = mode;
6a651371 5719 rtx z = 0, c1 = NULL_RTX;
8079805d 5720
8079805d
RK
5721 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
5722 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
5723 || GET_CODE (t) == ASHIFT
5724 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
5725 && rtx_equal_p (XEXP (t, 0), f))
5726 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
5727
5728 /* If an identity-zero op is commutative, check whether there
0f41302f 5729 would be a match if we swapped the operands. */
8079805d
RK
5730 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
5731 || GET_CODE (t) == XOR)
5732 && rtx_equal_p (XEXP (t, 1), f))
5733 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
5734 else if (GET_CODE (t) == SIGN_EXTEND
5735 && (GET_CODE (XEXP (t, 0)) == PLUS
5736 || GET_CODE (XEXP (t, 0)) == MINUS
5737 || GET_CODE (XEXP (t, 0)) == IOR
5738 || GET_CODE (XEXP (t, 0)) == XOR
5739 || GET_CODE (XEXP (t, 0)) == ASHIFT
5740 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5741 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5742 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5743 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5744 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5745 && (num_sign_bit_copies (f, GET_MODE (f))
26c34780
RS
5746 > (unsigned int)
5747 (GET_MODE_BITSIZE (mode)
8079805d
RK
5748 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
5749 {
5750 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5751 extend_op = SIGN_EXTEND;
5752 m = GET_MODE (XEXP (t, 0));
1a26b032 5753 }
8079805d
RK
5754 else if (GET_CODE (t) == SIGN_EXTEND
5755 && (GET_CODE (XEXP (t, 0)) == PLUS
5756 || GET_CODE (XEXP (t, 0)) == IOR
5757 || GET_CODE (XEXP (t, 0)) == XOR)
5758 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5759 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5760 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5761 && (num_sign_bit_copies (f, GET_MODE (f))
26c34780
RS
5762 > (unsigned int)
5763 (GET_MODE_BITSIZE (mode)
8079805d
RK
5764 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
5765 {
5766 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5767 extend_op = SIGN_EXTEND;
5768 m = GET_MODE (XEXP (t, 0));
5769 }
5770 else if (GET_CODE (t) == ZERO_EXTEND
5771 && (GET_CODE (XEXP (t, 0)) == PLUS
5772 || GET_CODE (XEXP (t, 0)) == MINUS
5773 || GET_CODE (XEXP (t, 0)) == IOR
5774 || GET_CODE (XEXP (t, 0)) == XOR
5775 || GET_CODE (XEXP (t, 0)) == ASHIFT
5776 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
5777 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
5778 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
5779 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5780 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
5781 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
5782 && ((nonzero_bits (f, GET_MODE (f))
663522cb 5783 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
8079805d
RK
5784 == 0))
5785 {
5786 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
5787 extend_op = ZERO_EXTEND;
5788 m = GET_MODE (XEXP (t, 0));
5789 }
5790 else if (GET_CODE (t) == ZERO_EXTEND
5791 && (GET_CODE (XEXP (t, 0)) == PLUS
5792 || GET_CODE (XEXP (t, 0)) == IOR
5793 || GET_CODE (XEXP (t, 0)) == XOR)
5794 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
5795 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
5796 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
5797 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
5798 && ((nonzero_bits (f, GET_MODE (f))
663522cb 5799 & ~GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
8079805d
RK
5800 == 0))
5801 {
5802 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
5803 extend_op = ZERO_EXTEND;
5804 m = GET_MODE (XEXP (t, 0));
5805 }
663522cb 5806
8079805d
RK
5807 if (z)
5808 {
bcb34aa3
PB
5809 temp = subst (simplify_gen_relational (true_code, m, VOIDmode,
5810 cond_op0, cond_op1),
8079805d 5811 pc_rtx, pc_rtx, 0, 0);
bcb34aa3
PB
5812 temp = simplify_gen_binary (MULT, m, temp,
5813 simplify_gen_binary (MULT, m, c1,
5814 const_true_rtx));
8079805d 5815 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
bcb34aa3 5816 temp = simplify_gen_binary (op, m, gen_lowpart (m, z), temp);
8079805d 5817
f822d252 5818 if (extend_op != UNKNOWN)
f1c6ba8b 5819 temp = simplify_gen_unary (extend_op, mode, temp, m);
8079805d
RK
5820
5821 return temp;
5822 }
5823 }
224eeff2 5824
8079805d
RK
5825 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
5826 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
5827 negation of a single bit, we can convert this operation to a shift. We
5828 can actually do this more generally, but it doesn't seem worth it. */
5829
5830 if (true_code == NE && XEXP (cond, 1) == const0_rtx
481683e1 5831 && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
8079805d 5832 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
d6edb99e 5833 && (i = exact_log2 (INTVAL (true_rtx))) >= 0)
8079805d
RK
5834 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
5835 == GET_MODE_BITSIZE (mode))
d6edb99e 5836 && (i = exact_log2 (-INTVAL (true_rtx))) >= 0)))
8079805d
RK
5837 return
5838 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4de249d9 5839 gen_lowpart (mode, XEXP (cond, 0)), i);
230d793d 5840
83588a9d
JH
5841 /* (IF_THEN_ELSE (NE REG 0) (0) (8)) is REG for nonzero_bits (REG) == 8. */
5842 if (true_code == NE && XEXP (cond, 1) == const0_rtx
481683e1 5843 && false_rtx == const0_rtx && CONST_INT_P (true_rtx)
db33236e 5844 && GET_MODE (XEXP (cond, 0)) == mode
83588a9d
JH
5845 && (INTVAL (true_rtx) & GET_MODE_MASK (mode))
5846 == nonzero_bits (XEXP (cond, 0), mode)
5847 && (i = exact_log2 (INTVAL (true_rtx) & GET_MODE_MASK (mode))) >= 0)
5848 return XEXP (cond, 0);
5849
8079805d
RK
5850 return x;
5851}
5852\f
5853/* Simplify X, a SET expression. Return the new expression. */
230d793d 5854
8079805d 5855static rtx
79a490a9 5856simplify_set (rtx x)
8079805d
RK
5857{
5858 rtx src = SET_SRC (x);
5859 rtx dest = SET_DEST (x);
5860 enum machine_mode mode
5861 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
5862 rtx other_insn;
5863 rtx *cc_use;
5864
5865 /* (set (pc) (return)) gets written as (return). */
5866 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
5867 return src;
230d793d 5868
87e3e0c1
RK
5869 /* Now that we know for sure which bits of SRC we are using, see if we can
5870 simplify the expression for the object knowing that we only need the
5871 low-order bits. */
5872
855c3a2e
IS
5873 if (GET_MODE_CLASS (mode) == MODE_INT
5874 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
c5c76735 5875 {
b1257407 5876 src = force_to_mode (src, mode, ~(HOST_WIDE_INT) 0, 0);
c5c76735
JL
5877 SUBST (SET_SRC (x), src);
5878 }
87e3e0c1 5879
8079805d
RK
5880 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
5881 the comparison result and try to simplify it unless we already have used
5882 undobuf.other_insn. */
dbf4f1a2
RS
5883 if ((GET_MODE_CLASS (mode) == MODE_CC
5884 || GET_CODE (src) == COMPARE
5885 || CC0_P (dest))
8079805d
RK
5886 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
5887 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
ec8e098d 5888 && COMPARISON_P (*cc_use)
c0d3ac4d 5889 && rtx_equal_p (XEXP (*cc_use, 0), dest))
8079805d
RK
5890 {
5891 enum rtx_code old_code = GET_CODE (*cc_use);
5892 enum rtx_code new_code;
f40f4c8e 5893 rtx op0, op1, tmp;
8079805d
RK
5894 int other_changed = 0;
5895 enum machine_mode compare_mode = GET_MODE (dest);
5896
5897 if (GET_CODE (src) == COMPARE)
5898 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
5899 else
8abcb0f7 5900 op0 = src, op1 = CONST0_RTX (GET_MODE (src));
230d793d 5901
c6fb08ad
PB
5902 tmp = simplify_relational_operation (old_code, compare_mode, VOIDmode,
5903 op0, op1);
5904 if (!tmp)
c22cacf3 5905 new_code = old_code;
c6fb08ad 5906 else if (!CONSTANT_P (tmp))
c22cacf3
MS
5907 {
5908 new_code = GET_CODE (tmp);
5909 op0 = XEXP (tmp, 0);
5910 op1 = XEXP (tmp, 1);
5911 }
f40f4c8e 5912 else
f40f4c8e
RS
5913 {
5914 rtx pat = PATTERN (other_insn);
5915 undobuf.other_insn = other_insn;
5916 SUBST (*cc_use, tmp);
5917
5918 /* Attempt to simplify CC user. */
5919 if (GET_CODE (pat) == SET)
5920 {
32e9fa48
KG
5921 rtx new_rtx = simplify_rtx (SET_SRC (pat));
5922 if (new_rtx != NULL_RTX)
5923 SUBST (SET_SRC (pat), new_rtx);
f40f4c8e
RS
5924 }
5925
5926 /* Convert X into a no-op move. */
5927 SUBST (SET_DEST (x), pc_rtx);
5928 SUBST (SET_SRC (x), pc_rtx);
5929 return x;
5930 }
5931
8079805d 5932 /* Simplify our comparison, if possible. */
c6fb08ad 5933 new_code = simplify_comparison (new_code, &op0, &op1);
230d793d 5934
94134f42 5935#ifdef SELECT_CC_MODE
8079805d
RK
5936 /* If this machine has CC modes other than CCmode, check to see if we
5937 need to use a different CC mode here. */
c6fb08ad
PB
5938 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5939 compare_mode = GET_MODE (op0);
5940 else
5941 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
230d793d 5942
94134f42 5943#ifndef HAVE_cc0
8079805d
RK
5944 /* If the mode changed, we have to change SET_DEST, the mode in the
5945 compare, and the mode in the place SET_DEST is used. If SET_DEST is
5946 a hard register, just build new versions with the proper mode. If it
5947 is a pseudo, we lose unless it is only time we set the pseudo, in
5948 which case we can safely change its mode. */
5949 if (compare_mode != GET_MODE (dest))
5950 {
4164b2fb 5951 if (can_change_dest_mode (dest, 0, compare_mode))
230d793d 5952 {
4164b2fb 5953 unsigned int regno = REGNO (dest);
abcb0cdc 5954 rtx new_dest;
4164b2fb 5955
abcb0cdc
ILT
5956 if (regno < FIRST_PSEUDO_REGISTER)
5957 new_dest = gen_rtx_REG (compare_mode, regno);
5958 else
5959 {
5960 SUBST_MODE (regno_reg_rtx[regno], compare_mode);
5961 new_dest = regno_reg_rtx[regno];
5962 }
230d793d 5963
8079805d
RK
5964 SUBST (SET_DEST (x), new_dest);
5965 SUBST (XEXP (*cc_use, 0), new_dest);
5966 other_changed = 1;
230d793d 5967
8079805d 5968 dest = new_dest;
230d793d 5969 }
8079805d 5970 }
94134f42
ZW
5971#endif /* cc0 */
5972#endif /* SELECT_CC_MODE */
230d793d 5973
8079805d
RK
5974 /* If the code changed, we have to build a new comparison in
5975 undobuf.other_insn. */
5976 if (new_code != old_code)
5977 {
2051c897 5978 int other_changed_previously = other_changed;
8079805d 5979 unsigned HOST_WIDE_INT mask;
d3302fd3 5980 rtx old_cc_use = *cc_use;
8079805d 5981
f1c6ba8b
RK
5982 SUBST (*cc_use, gen_rtx_fmt_ee (new_code, GET_MODE (*cc_use),
5983 dest, const0_rtx));
2051c897 5984 other_changed = 1;
8079805d
RK
5985
5986 /* If the only change we made was to change an EQ into an NE or
5987 vice versa, OP0 has only one bit that might be nonzero, and OP1
5988 is zero, check if changing the user of the condition code will
5989 produce a valid insn. If it won't, we can keep the original code
5990 in that insn by surrounding our operation with an XOR. */
5991
5992 if (((old_code == NE && new_code == EQ)
5993 || (old_code == EQ && new_code == NE))
2051c897 5994 && ! other_changed_previously && op1 == const0_rtx
8079805d
RK
5995 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
5996 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 5997 {
8079805d 5998 rtx pat = PATTERN (other_insn), note = 0;
230d793d 5999
8e2f6e35 6000 if ((recog_for_combine (&pat, other_insn, &note) < 0
8079805d
RK
6001 && ! check_asm_operands (pat)))
6002 {
d3302fd3 6003 *cc_use = old_cc_use;
2051c897 6004 other_changed = 0;
230d793d 6005
bcb34aa3
PB
6006 op0 = simplify_gen_binary (XOR, GET_MODE (op0),
6007 op0, GEN_INT (mask));
230d793d 6008 }
230d793d 6009 }
8079805d
RK
6010 }
6011
6012 if (other_changed)
6013 undobuf.other_insn = other_insn;
230d793d 6014
8079805d
RK
6015 /* Otherwise, if we didn't previously have a COMPARE in the
6016 correct mode, we need one. */
6017 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
6018 {
f1c6ba8b 6019 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
8079805d 6020 src = SET_SRC (x);
230d793d 6021 }
8c98e9d0
FJ
6022 else if (GET_MODE (op0) == compare_mode && op1 == const0_rtx)
6023 {
4eff8045 6024 SUBST (SET_SRC (x), op0);
8c98e9d0 6025 src = SET_SRC (x);
c22cacf3 6026 }
4eff8045
EB
6027 /* Otherwise, update the COMPARE if needed. */
6028 else if (XEXP (src, 0) != op0 || XEXP (src, 1) != op1)
230d793d 6029 {
4eff8045
EB
6030 SUBST (SET_SRC (x), gen_rtx_COMPARE (compare_mode, op0, op1));
6031 src = SET_SRC (x);
230d793d 6032 }
8079805d
RK
6033 }
6034 else
6035 {
6036 /* Get SET_SRC in a form where we have placed back any
6037 compound expressions. Then do the checks below. */
6038 src = make_compound_operation (src, SET);
6039 SUBST (SET_SRC (x), src);
6040 }
230d793d 6041
8079805d
RK
6042 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
6043 and X being a REG or (subreg (reg)), we may be able to convert this to
663522cb 6044 (set (subreg:m2 x) (op)).
df62f951 6045
5c881655
KH
6046 We can always do this if M1 is narrower than M2 because that means that
6047 we only care about the low bits of the result.
df62f951 6048
5c881655
KH
6049 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
6050 perform a narrower operation than requested since the high-order bits will
6051 be undefined. On machines where it is defined, this transformation is safe
6052 as long as M1 and M2 have the same number of words. */
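  /* For example, (set R:QI (subreg:QI (plus:SI A B) 0)) can become
     (set (subreg:SI R 0) (plus:SI A B)); the PLUS is then done in SImode
     and only the low byte of R is meaningful afterwards.  */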
663522cb 6053
8079805d 6054 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
ec8e098d 6055 && !OBJECT_P (SUBREG_REG (src))
8079805d
RK
6056 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
6057 / UNITS_PER_WORD)
6058 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
6059 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
5c881655
KH
6060#ifndef WORD_REGISTER_OPERATIONS
6061 && (GET_MODE_SIZE (GET_MODE (src))
c22cacf3 6062 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
5c881655 6063#endif
cff9f8d5 6064#ifdef CANNOT_CHANGE_MODE_CLASS
f8cfc6aa 6065 && ! (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER
cff9f8d5 6066 && REG_CANNOT_CHANGE_MODE_P (REGNO (dest),
73a39fc4 6067 GET_MODE (SUBREG_REG (src)),
b0c42aed 6068 GET_MODE (src)))
663522cb 6069#endif
f8cfc6aa 6070 && (REG_P (dest)
8079805d 6071 || (GET_CODE (dest) == SUBREG
f8cfc6aa 6072 && REG_P (SUBREG_REG (dest)))))
8079805d
RK
6073 {
6074 SUBST (SET_DEST (x),
4de249d9 6075 gen_lowpart (GET_MODE (SUBREG_REG (src)),
8079805d
RK
6076 dest));
6077 SUBST (SET_SRC (x), SUBREG_REG (src));
6078
6079 src = SET_SRC (x), dest = SET_DEST (x);
6080 }
df62f951 6081
8c1d52a3
KH
6082#ifdef HAVE_cc0
6083 /* If we have (set (cc0) (subreg ...)), we try to remove the subreg
6084 in SRC. */
6085 if (dest == cc0_rtx
6086 && GET_CODE (src) == SUBREG
6087 && subreg_lowpart_p (src)
6088 && (GET_MODE_BITSIZE (GET_MODE (src))
6089 < GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (src)))))
6090 {
6091 rtx inner = SUBREG_REG (src);
6092 enum machine_mode inner_mode = GET_MODE (inner);
6093
6094 /* Here we make sure that we don't have a sign bit on. */
6095 if (GET_MODE_BITSIZE (inner_mode) <= HOST_BITS_PER_WIDE_INT
6096 && (nonzero_bits (inner, inner_mode)
6097 < ((unsigned HOST_WIDE_INT) 1
ff076520 6098 << (GET_MODE_BITSIZE (GET_MODE (src)) - 1))))
8c1d52a3
KH
6099 {
6100 SUBST (SET_SRC (x), inner);
6101 src = SET_SRC (x);
6102 }
6103 }
6104#endif
6105
8baf60bb 6106#ifdef LOAD_EXTEND_OP
8079805d
RK
6107 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
6108 would require a paradoxical subreg. Replace the subreg with a
0f41302f 6109 zero_extend to avoid the reload that would otherwise be required. */
8079805d
RK
6110
6111 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
f27017d1 6112 && INTEGRAL_MODE_P (GET_MODE (SUBREG_REG (src)))
f822d252 6113 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != UNKNOWN
ddef6bc7 6114 && SUBREG_BYTE (src) == 0
8079805d
RK
6115 && (GET_MODE_SIZE (GET_MODE (src))
6116 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
3c0cb5de 6117 && MEM_P (SUBREG_REG (src)))
8079805d
RK
6118 {
6119 SUBST (SET_SRC (x),
2fb00d7f
KH
6120 gen_rtx_fmt_e (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
6121 GET_MODE (src), SUBREG_REG (src)));
8079805d
RK
6122
6123 src = SET_SRC (x);
6124 }
230d793d
RS
6125#endif
6126
8079805d
RK
6127 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
6128 are comparing an item known to be 0 or -1 against 0, use a logical
6129 operation instead. Check for one of the arms being an IOR of the other
6130 arm with some value. We compute three terms to be IOR'ed together. In
6131 practice, at most two will be nonzero. Then we do the IOR's. */
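  /* For instance, with C known to be 0 or -1,
       (if_then_else (ne C 0) A B)
     becomes
       (ior (and C A) (and (not C) B))
     since ANDing with an all-ones or all-zeros mask selects an arm.  */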
6132
6133 if (GET_CODE (dest) != PC
6134 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 6135 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
6136 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
6137 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 6138 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
6139#ifdef HAVE_conditional_move
6140 && ! can_conditionally_move_p (GET_MODE (src))
6141#endif
8079805d
RK
6142 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
6143 GET_MODE (XEXP (XEXP (src, 0), 0)))
6144 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
6145 && ! side_effects_p (src))
6146 {
d6edb99e 6147 rtx true_rtx = (GET_CODE (XEXP (src, 0)) == NE
8079805d 6148 ? XEXP (src, 1) : XEXP (src, 2));
d6edb99e 6149 rtx false_rtx = (GET_CODE (XEXP (src, 0)) == NE
8079805d
RK
6150 ? XEXP (src, 2) : XEXP (src, 1));
6151 rtx term1 = const0_rtx, term2, term3;
6152
d6edb99e
ZW
6153 if (GET_CODE (true_rtx) == IOR
6154 && rtx_equal_p (XEXP (true_rtx, 0), false_rtx))
e869aa39 6155 term1 = false_rtx, true_rtx = XEXP (true_rtx, 1), false_rtx = const0_rtx;
d6edb99e
ZW
6156 else if (GET_CODE (true_rtx) == IOR
6157 && rtx_equal_p (XEXP (true_rtx, 1), false_rtx))
e869aa39 6158 term1 = false_rtx, true_rtx = XEXP (true_rtx, 0), false_rtx = const0_rtx;
d6edb99e
ZW
6159 else if (GET_CODE (false_rtx) == IOR
6160 && rtx_equal_p (XEXP (false_rtx, 0), true_rtx))
e869aa39 6161 term1 = true_rtx, false_rtx = XEXP (false_rtx, 1), true_rtx = const0_rtx;
d6edb99e
ZW
6162 else if (GET_CODE (false_rtx) == IOR
6163 && rtx_equal_p (XEXP (false_rtx, 1), true_rtx))
e869aa39 6164 term1 = true_rtx, false_rtx = XEXP (false_rtx, 0), true_rtx = const0_rtx;
d6edb99e 6165
bcb34aa3
PB
6166 term2 = simplify_gen_binary (AND, GET_MODE (src),
6167 XEXP (XEXP (src, 0), 0), true_rtx);
6168 term3 = simplify_gen_binary (AND, GET_MODE (src),
6169 simplify_gen_unary (NOT, GET_MODE (src),
6170 XEXP (XEXP (src, 0), 0),
6171 GET_MODE (src)),
6172 false_rtx);
8079805d
RK
6173
6174 SUBST (SET_SRC (x),
bcb34aa3
PB
6175 simplify_gen_binary (IOR, GET_MODE (src),
6176 simplify_gen_binary (IOR, GET_MODE (src),
6177 term1, term2),
6178 term3));
8079805d
RK
6179
6180 src = SET_SRC (x);
6181 }
230d793d 6182
246e00f2
RK
6183 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
6184 whole thing fail. */
6185 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
6186 return src;
6187 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
6188 return dest;
6189 else
6190 /* Convert this into a field assignment operation, if possible. */
6191 return make_field_assignment (x);
8079805d
RK
6192}
6193\f
6194/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
6621d78e 6195 result. */
8079805d
RK
6196
6197static rtx
6621d78e 6198simplify_logical (rtx x)
8079805d
RK
6199{
6200 enum machine_mode mode = GET_MODE (x);
6201 rtx op0 = XEXP (x, 0);
6202 rtx op1 = XEXP (x, 1);
6203
6204 switch (GET_CODE (x))
6205 {
230d793d 6206 case AND:
2e8f9abf
DM
6207 /* We can call simplify_and_const_int only if we don't lose
6208 any (sign) bits when converting INTVAL (op1) to
6209 "unsigned HOST_WIDE_INT". */
481683e1 6210 if (CONST_INT_P (op1)
2e8f9abf
DM
6211 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6212 || INTVAL (op1) > 0))
230d793d 6213 {
8079805d 6214 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d 6215 if (GET_CODE (x) != AND)
8079805d 6216 return x;
0e32506c 6217
ec8e098d
PB
6218 op0 = XEXP (x, 0);
6219 op1 = XEXP (x, 1);
230d793d
RS
6220 }
6221
bcb34aa3
PB
6222 /* If we have any of (and (ior A B) C) or (and (xor A B) C),
6223 apply the distributive law and then the inverse distributive
6224 law to see if things simplify. */
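      /* E.g. (and (ior A B) C) distributes to (ior (and A C) (and B C)),
	 which can then collapse, say to (and B C) when A and C have no
	 nonzero bits in common.  */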
1999435c 6225 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d 6226 {
bcb34aa3
PB
6227 rtx result = distribute_and_simplify_rtx (x, 0);
6228 if (result)
6229 return result;
230d793d 6230 }
1999435c 6231 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
bcb34aa3
PB
6232 {
6233 rtx result = distribute_and_simplify_rtx (x, 1);
6234 if (result)
6235 return result;
6236 }
230d793d
RS
6237 break;
6238
6239 case IOR:
230d793d
RS
6240 /* If we have (ior (and A B) C), apply the distributive law and then
6241 the inverse distributive law to see if things simplify. */
6242
1999435c
PB
6243 if (GET_CODE (op0) == AND)
6244 {
bcb34aa3
PB
6245 rtx result = distribute_and_simplify_rtx (x, 0);
6246 if (result)
6247 return result;
1999435c
PB
6248 }
6249
6250 if (GET_CODE (op1) == AND)
230d793d 6251 {
bcb34aa3
PB
6252 rtx result = distribute_and_simplify_rtx (x, 1);
6253 if (result)
6254 return result;
230d793d 6255 }
230d793d 6256 break;
e9a25f70
JL
6257
6258 default:
341c100f 6259 gcc_unreachable ();
230d793d
RS
6260 }
6261
6262 return x;
6263}
6264\f
6265/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
6266 operations" because they can be replaced with two more basic operations.
6267 ZERO_EXTEND is also considered "compound" because it can be replaced with
6268 an AND operation, which is simpler, though only one operation.
6269
6270 The function expand_compound_operation is called with an rtx expression
663522cb 6271 and will convert it to the appropriate shifts and AND operations,
230d793d
RS
6272 simplifying at each stage.
6273
6274 The function make_compound_operation is called to convert an expression
6275 consisting of shifts and ANDs into the equivalent compound expression.
6276 It is the inverse of this function, loosely speaking. */
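/* For example, (sign_extend:SI (reg:QI R)) expands to the pair
   (ashiftrt:SI (ashift:SI R' 24) 24), while (zero_extend:SI (reg:QI R))
   can instead become (and:SI R' 255), where R' stands for R viewed in
   SImode.  */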
6277
6278static rtx
79a490a9 6279expand_compound_operation (rtx x)
230d793d 6280{
770ae6cc 6281 unsigned HOST_WIDE_INT pos = 0, len;
230d793d 6282 int unsignedp = 0;
770ae6cc 6283 unsigned int modewidth;
230d793d
RS
6284 rtx tem;
6285
6286 switch (GET_CODE (x))
6287 {
6288 case ZERO_EXTEND:
6289 unsignedp = 1;
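      /* ... fall through ... */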
6290 case SIGN_EXTEND:
75473182
RS
6291 /* We can't necessarily use a const_int for a multiword mode;
6292 it depends on implicitly extending the value.
6293 Since we don't know the right way to extend it,
6294 we can't tell whether the implicit way is right.
6295
6296 Even for a mode that is no wider than a const_int,
6297 we can't win, because we need to sign extend one of its bits through
6298 the rest of it, and we don't know which bit. */
481683e1 6299 if (CONST_INT_P (XEXP (x, 0)))
75473182 6300 return x;
230d793d 6301
8079805d
RK
6302 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
6303 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
6304 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
6305 reloaded. If not for that, MEM's would very rarely be safe.
6306
6307 Reject MODEs bigger than a word, because we might not be able
6308 to reference a two-register group starting with an arbitrary register
6309 (and currently gen_lowpart might crash for a SUBREG). */
663522cb 6310
8079805d 6311 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
6312 return x;
6313
71012d97
GK
6314 /* Reject MODEs that aren't scalar integers because turning vector
6315 or complex modes into shifts causes problems. */
6316
6317 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
6318 return x;
6319
230d793d
RS
6320 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
6321 /* If the inner object has VOIDmode (the only way this can happen
e0a2f705 6322 is if it is an ASM_OPERANDS), we can't do anything since we don't
230d793d
RS
6323 know how much masking to do. */
6324 if (len == 0)
6325 return x;
6326
6327 break;
6328
6329 case ZERO_EXTRACT:
6330 unsignedp = 1;
46d096a3
SB
6331
6332 /* ... fall through ... */
6333
230d793d
RS
6334 case SIGN_EXTRACT:
6335 /* If the operand is a CLOBBER, just return it. */
6336 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
6337 return XEXP (x, 0);
6338
481683e1
SZ
6339 if (!CONST_INT_P (XEXP (x, 1))
6340 || !CONST_INT_P (XEXP (x, 2))
230d793d
RS
6341 || GET_MODE (XEXP (x, 0)) == VOIDmode)
6342 return x;
6343
71012d97
GK
6344 /* Reject MODEs that aren't scalar integers because turning vector
6345 or complex modes into shifts causes problems. */
6346
6347 if (! SCALAR_INT_MODE_P (GET_MODE (XEXP (x, 0))))
6348 return x;
6349
230d793d
RS
6350 len = INTVAL (XEXP (x, 1));
6351 pos = INTVAL (XEXP (x, 2));
6352
81162f97
PB
6353 /* This should stay within the object being extracted, fail otherwise. */
6354 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
6355 return x;
230d793d 6356
f76b9db2
ILT
6357 if (BITS_BIG_ENDIAN)
6358 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
6359
230d793d
RS
6360 break;
6361
6362 default:
6363 return x;
6364 }
0f808b6f
JH
6365 /* Convert sign extension to zero extension, if we know that the high
6366 bit is not set, as this is easier to optimize. It will be converted
6367 back to cheaper alternative in make_extraction. */
6368 if (GET_CODE (x) == SIGN_EXTEND
6369 && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6370 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
663522cb 6371 & ~(((unsigned HOST_WIDE_INT)
0f808b6f
JH
6372 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
6373 >> 1))
6374 == 0)))
6375 {
6376 rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
3dcd7d45
EC
6377 rtx temp2 = expand_compound_operation (temp);
6378
6379 /* Make sure this is a profitable operation. */
f40751dd
JH
6380 if (rtx_cost (x, SET, optimize_this_for_speed_p)
6381 > rtx_cost (temp2, SET, optimize_this_for_speed_p))
3dcd7d45 6382 return temp2;
f40751dd
JH
6383 else if (rtx_cost (x, SET, optimize_this_for_speed_p)
6384 > rtx_cost (temp, SET, optimize_this_for_speed_p))
3dcd7d45
EC
6385 return temp;
6386 else
6387 return x;
0f808b6f 6388 }
230d793d 6389
0f13a422
ILT
6390 /* We can optimize some special cases of ZERO_EXTEND. */
6391 if (GET_CODE (x) == ZERO_EXTEND)
6392 {
6393 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
c22cacf3
MS
6394 know that the last value didn't have any inappropriate bits
6395 set. */
0f13a422
ILT
6396 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
6397 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
6398 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6399 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
663522cb 6400 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
6401 return XEXP (XEXP (x, 0), 0);
6402
6403 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
6404 if (GET_CODE (XEXP (x, 0)) == SUBREG
6405 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6406 && subreg_lowpart_p (XEXP (x, 0))
6407 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6408 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
663522cb 6409 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
6410 return SUBREG_REG (XEXP (x, 0));
6411
6412 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
c22cacf3
MS
6413 is a comparison and STORE_FLAG_VALUE permits. This is like
6414 the first case, but it works even when GET_MODE (x) is larger
6415 than HOST_WIDE_INT. */
0f13a422
ILT
6416 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
6417 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
ec8e098d 6418 && COMPARISON_P (XEXP (XEXP (x, 0), 0))
0f13a422
ILT
6419 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6420 <= HOST_BITS_PER_WIDE_INT)
23190837 6421 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 6422 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
6423 return XEXP (XEXP (x, 0), 0);
6424
6425 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
6426 if (GET_CODE (XEXP (x, 0)) == SUBREG
6427 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
6428 && subreg_lowpart_p (XEXP (x, 0))
ec8e098d 6429 && COMPARISON_P (SUBREG_REG (XEXP (x, 0)))
0f13a422
ILT
6430 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6431 <= HOST_BITS_PER_WIDE_INT)
6432 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
663522cb 6433 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
6434 return SUBREG_REG (XEXP (x, 0));
6435
0f13a422
ILT
6436 }
6437
230d793d
RS
6438 /* If we reach here, we want to return a pair of shifts. The inner
6439 shift is a left shift of BITSIZE - POS - LEN bits. The outer
6440 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
6441 logical depending on the value of UNSIGNEDP.
6442
6443 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
6444 converted into an AND of a shift.
6445
6446 We must check for the case where the left shift would have a negative
6447 count. This can happen in a case like (x >> 31) & 255 on machines
6448 that can't shift by a constant. On those machines, we would first
663522cb 6449 combine the shift with the AND to produce a variable-position
230d793d
RS
6450 extraction. Then the constant of 31 would be substituted in to produce
6451 such a position. */
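  /* For instance, extracting LEN = 8 bits at POS = 4 from a 32-bit value
     X yields (ashiftrt (ashift X 20) 24) when signed, or the LSHIFTRT
     variant when unsigned: the inner count is 32 - 4 - 8 = 20 and the
     outer count is 32 - 8 = 24.  */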
6452
6453 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
770ae6cc 6454 if (modewidth + len >= pos)
56106c2e
RS
6455 {
6456 enum machine_mode mode = GET_MODE (x);
6457 tem = gen_lowpart (mode, XEXP (x, 0));
6458 if (!tem || GET_CODE (tem) == CLOBBER)
6459 return x;
6460 tem = simplify_shift_const (NULL_RTX, ASHIFT, mode,
6461 tem, modewidth - pos - len);
6462 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
6463 mode, tem, modewidth - len);
6464 }
5f4f0e22
CH
6465 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
6466 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
6467 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
6468 GET_MODE (x),
6469 XEXP (x, 0), pos),
5f4f0e22 6470 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
6471 else
6472 /* Any other cases we can't handle. */
6473 return x;
230d793d
RS
6474
6475 /* If we couldn't do this for some reason, return the original
6476 expression. */
6477 if (GET_CODE (tem) == CLOBBER)
6478 return x;
6479
6480 return tem;
6481}
6482\f
6483/* X is a SET which contains an assignment of one object into
6484 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
6485 or certain SUBREGS). If possible, convert it into a series of
6486 logical operations.
6487
6488 We half-heartedly support variable positions, but do not at all
6489 support variable lengths. */
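/* For a constant bit-field store such as (set (zero_extract D 8 4) S),
   the result is along the lines of
     D = (D & ~(255 << 4)) | ((S & 255) << 4)
   i.e. clear the field in D, mask S to the field width, shift it into
   place, and IOR the two halves together.  */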
6490
7bc980e1
KG
6491static const_rtx
6492expand_field_assignment (const_rtx x)
230d793d
RS
6493{
6494 rtx inner;
0f41302f 6495 rtx pos; /* Always counts from low bit. */
230d793d 6496 int len;
bcb34aa3 6497 rtx mask, cleared, masked;
230d793d
RS
6498 enum machine_mode compute_mode;
6499
6500 /* Loop until we find something we can't simplify. */
6501 while (1)
6502 {
6503 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
6504 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
6505 {
6506 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
6507 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
47073a38 6508 pos = GEN_INT (subreg_lsb (XEXP (SET_DEST (x), 0)));
230d793d
RS
6509 }
6510 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
481683e1 6511 && CONST_INT_P (XEXP (SET_DEST (x), 1)))
230d793d
RS
6512 {
6513 inner = XEXP (SET_DEST (x), 0);
6514 len = INTVAL (XEXP (SET_DEST (x), 1));
6515 pos = XEXP (SET_DEST (x), 2);
6516
79c29032 6517 /* A constant position should stay within the width of INNER. */
481683e1 6518 if (CONST_INT_P (pos)
81162f97
PB
6519 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
6520 break;
230d793d 6521
f76b9db2
ILT
6522 if (BITS_BIG_ENDIAN)
6523 {
481683e1 6524 if (CONST_INT_P (pos))
f76b9db2
ILT
6525 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
6526 - INTVAL (pos));
6527 else if (GET_CODE (pos) == MINUS
481683e1 6528 && CONST_INT_P (XEXP (pos, 1))
f76b9db2
ILT
6529 && (INTVAL (XEXP (pos, 1))
6530 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
6531 /* If position is ADJUST - X, new position is X. */
6532 pos = XEXP (pos, 0);
6533 else
bcb34aa3
PB
6534 pos = simplify_gen_binary (MINUS, GET_MODE (pos),
6535 GEN_INT (GET_MODE_BITSIZE (
6536 GET_MODE (inner))
6537 - len),
6538 pos);
f76b9db2 6539 }
230d793d
RS
6540 }
6541
6542 /* A SUBREG between two modes that occupy the same numbers of words
6543 can be done by moving the SUBREG to the source. */
6544 else if (GET_CODE (SET_DEST (x)) == SUBREG
b1e9c8a9
AO
6545 /* We need SUBREGs to compute nonzero_bits properly. */
6546 && nonzero_sign_valid
230d793d
RS
6547 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
6548 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6549 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
6550 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
6551 {
38a448ca 6552 x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
4de249d9 6553 gen_lowpart
c5c76735
JL
6554 (GET_MODE (SUBREG_REG (SET_DEST (x))),
6555 SET_SRC (x)));
230d793d
RS
6556 continue;
6557 }
6558 else
6559 break;
6560
6561 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
6562 inner = SUBREG_REG (inner);
6563
6564 compute_mode = GET_MODE (inner);
6565
71012d97
GK
6566 /* Don't attempt bitwise arithmetic on non-scalar integer modes. */
6567 if (! SCALAR_INT_MODE_P (compute_mode))
861556b4
RH
6568 {
6569 enum machine_mode imode;
6570
71012d97 6571 /* Don't do anything for vector or complex integral types. */
861556b4
RH
6572 if (! FLOAT_MODE_P (compute_mode))
6573 break;
6574
6575 /* Try to find an integral mode to pun with. */
6576 imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
6577 if (imode == BLKmode)
6578 break;
6579
6580 compute_mode = imode;
4de249d9 6581 inner = gen_lowpart (imode, inner);
861556b4
RH
6582 }
6583
230d793d 6584 /* Compute a mask of LEN bits, if we can do this on the host machine. */
bcb34aa3 6585 if (len >= HOST_BITS_PER_WIDE_INT)
230d793d
RS
6586 break;
6587
6588 /* Now compute the equivalent expression. Make a copy of INNER
6589 for the SET_DEST in case it is a MEM into which we will substitute;
6590 we don't want shared RTL in that case. */
bcb34aa3
PB
6591 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
6592 cleared = simplify_gen_binary (AND, compute_mode,
6593 simplify_gen_unary (NOT, compute_mode,
6594 simplify_gen_binary (ASHIFT,
6595 compute_mode,
6596 mask, pos),
6597 compute_mode),
6598 inner);
6599 masked = simplify_gen_binary (ASHIFT, compute_mode,
6600 simplify_gen_binary (
6601 AND, compute_mode,
6602 gen_lowpart (compute_mode, SET_SRC (x)),
6603 mask),
6604 pos);
6605
6606 x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
6607 simplify_gen_binary (IOR, compute_mode,
6608 cleared, masked));
230d793d
RS
6609 }
6610
6611 return x;
6612}
6613\f
8999a12e
RK
6614/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
6615 it is an RTX that represents a variable starting position; otherwise,
6616 POS is the (constant) starting bit position (counted from the LSB).
230d793d 6617
da7d8304 6618 UNSIGNEDP is nonzero for an unsigned reference and zero for a
230d793d
RS
6619 signed reference.
6620
da7d8304
KH
6621 IN_DEST is nonzero if this is a reference in the destination of a
6622 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If nonzero,
230d793d
RS
6623 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
6624 be used.
6625
da7d8304 6626 IN_COMPARE is nonzero if we are in a COMPARE. This means that a
230d793d
RS
6627 ZERO_EXTRACT should be built even for bits starting at bit 0.
6628
76184def
DE
6629 MODE is the desired mode of the result (if IN_DEST == 0).
6630
6631 The result is an RTX for the extraction or NULL_RTX if the target
6632 can't handle it. */
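/* E.g. (with illustrative arguments) make_extraction (SImode, R, 4,
   NULL_RTX, 8, 1, 0, 0) asks for (zero_extract:SI R (const_int 8)
   (const_int 4)), though the result may come back as an AND, a SUBREG
   or a narrower MEM when one of those is cheaper.  */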
230d793d
RS
6633
6634static rtx
79a490a9
AJ
6635make_extraction (enum machine_mode mode, rtx inner, HOST_WIDE_INT pos,
6636 rtx pos_rtx, unsigned HOST_WIDE_INT len, int unsignedp,
6637 int in_dest, int in_compare)
230d793d 6638{
94b4b17a
RS
6639 /* This mode describes the size of the storage area
6640 to fetch the overall value from. Within that, we
6641 ignore the POS lowest bits, etc. */
230d793d
RS
6642 enum machine_mode is_mode = GET_MODE (inner);
6643 enum machine_mode inner_mode;
79c29032 6644 enum machine_mode wanted_inner_mode;
d7cd794f 6645 enum machine_mode wanted_inner_reg_mode = word_mode;
230d793d
RS
6646 enum machine_mode pos_mode = word_mode;
6647 enum machine_mode extraction_mode = word_mode;
6648 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
32e9fa48 6649 rtx new_rtx = 0;
8999a12e 6650 rtx orig_pos_rtx = pos_rtx;
770ae6cc 6651 HOST_WIDE_INT orig_pos;
230d793d 6652
79c29032 6653 if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
6654 {
6655 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
6656 consider just the QI as the memory to extract from.
6657 The subreg adds or removes high bits; its mode is
6658 irrelevant to the meaning of this extraction,
6659 since POS and LEN count from the lsb. */
3c0cb5de 6660 if (MEM_P (SUBREG_REG (inner)))
94b4b17a
RS
6661 is_mode = GET_MODE (SUBREG_REG (inner));
6662 inner = SUBREG_REG (inner);
6663 }
988ef418 6664 else if (GET_CODE (inner) == ASHIFT
481683e1 6665 && CONST_INT_P (XEXP (inner, 1))
988ef418 6666 && pos_rtx == 0 && pos == 0
3129af4c 6667 && len > (unsigned HOST_WIDE_INT) INTVAL (XEXP (inner, 1)))
988ef418
RS
6668 {
6669 /* We're extracting the least significant bits of an rtx
6670 (ashift X (const_int C)), where LEN > C. Extract the
6671 least significant (LEN - C) bits of X, giving an rtx
6672 whose mode is MODE, then shift it left C times. */
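	  /* E.g. the low 8 bits of (ashift X 3) are just the low 5 bits
	     of X shifted left 3 times.  */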
32e9fa48 6673 new_rtx = make_extraction (mode, XEXP (inner, 0),
988ef418
RS
6674 0, 0, len - INTVAL (XEXP (inner, 1)),
6675 unsignedp, in_dest, in_compare);
32e9fa48
KG
6676 if (new_rtx != 0)
6677 return gen_rtx_ASHIFT (mode, new_rtx, XEXP (inner, 1));
988ef418 6678 }
230d793d
RS
6679
6680 inner_mode = GET_MODE (inner);
6681
481683e1 6682 if (pos_rtx && CONST_INT_P (pos_rtx))
8999a12e 6683 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
6684
6685 /* See if this can be done without an extraction. We never can if the
6686 width of the field is not the same as that of some integer mode. For
6687 registers, we can only avoid the extraction if the position is at the
6688 low-order bit and this is either not in the destination or we have the
6689 appropriate STRICT_LOW_PART operation available.
6690
6691 For MEM, we can avoid an extract if the field starts on an appropriate
79c29032 6692 boundary and we can change the mode of the memory reference. */
230d793d
RS
6693
6694 if (tmode != BLKmode
4d9cfc7b 6695 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
3c0cb5de 6696 && !MEM_P (inner)
4df8acd3
AN
6697 && (inner_mode == tmode
6698 || !REG_P (inner)
6699 || TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
6700 GET_MODE_BITSIZE (inner_mode))
6701 || reg_truncated_to_mode (tmode, inner))
230d793d 6702 && (! in_dest
f8cfc6aa 6703 || (REG_P (inner)
ef89d648 6704 && have_insn_for (STRICT_LOW_PART, tmode))))
3c0cb5de 6705 || (MEM_P (inner) && pos_rtx == 0
dfbe1b2f
RK
6706 && (pos
6707 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
6708 : BITS_PER_UNIT)) == 0
230d793d
RS
6709 /* We can't do this if we are widening INNER_MODE (it
6710 may not be aligned, for one thing). */
6711 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
6712 && (inner_mode == tmode
6713 || (! mode_dependent_address_p (XEXP (inner, 0))
6714 && ! MEM_VOLATILE_P (inner))))))
6715 {
230d793d
RS
6716 /* If INNER is a MEM, make a new MEM that encompasses just the desired
6717 field. If the original and current mode are the same, we need not
663522cb 6718 adjust the offset. Otherwise, we do so if bytes are big-endian.
230d793d 6719
4d9cfc7b
RK
6720 If INNER is not a MEM, get a piece consisting of just the field
6721 of interest (in this case POS % BITS_PER_WORD must be 0). */
230d793d 6722
3c0cb5de 6723 if (MEM_P (inner))
230d793d 6724 {
f1ec5147
RK
6725 HOST_WIDE_INT offset;
6726
94b4b17a
RS
6727 /* POS counts from lsb, but make OFFSET count in memory order. */
6728 if (BYTES_BIG_ENDIAN)
6729 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
6730 else
6731 offset = pos / BITS_PER_UNIT;
230d793d 6732
32e9fa48 6733 new_rtx = adjust_address_nv (inner, tmode, offset);
230d793d 6734 }
f8cfc6aa 6735 else if (REG_P (inner))
c0d3ac4d 6736 {
c0d3ac4d 6737 if (tmode != inner_mode)
ddef6bc7 6738 {
4de249d9 6739 /* We can't call gen_lowpart in a DEST since we
91f8389c
EB
6740 always want a SUBREG (see below) and it would sometimes
6741 return a new hard register. */
6742 if (pos || in_dest)
54c2fc72 6743 {
54c2fc72
JW
6744 HOST_WIDE_INT final_word = pos / BITS_PER_WORD;
6745
6746 if (WORDS_BIG_ENDIAN
6747 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD)
6748 final_word = ((GET_MODE_SIZE (inner_mode)
6749 - GET_MODE_SIZE (tmode))
6750 / UNITS_PER_WORD) - final_word;
6751
6752 final_word *= UNITS_PER_WORD;
6753 if (BYTES_BIG_ENDIAN &&
6754 GET_MODE_SIZE (inner_mode) > GET_MODE_SIZE (tmode))
6755 final_word += (GET_MODE_SIZE (inner_mode)
6756 - GET_MODE_SIZE (tmode)) % UNITS_PER_WORD;
6757
6758 /* Avoid creating invalid subregs, for example when
6759 simplifying (x>>32)&255. */
b166bfd2 6760 if (!validate_subreg (tmode, inner_mode, inner, final_word))
54c2fc72
JW
6761 return NULL_RTX;
6762
32e9fa48 6763 new_rtx = gen_rtx_SUBREG (tmode, inner, final_word);
54c2fc72
JW
6764 }
6765 else
32e9fa48 6766 new_rtx = gen_lowpart (tmode, inner);
ddef6bc7 6767 }
23190837 6768 else
32e9fa48 6769 new_rtx = inner;
23190837 6770 }
230d793d 6771 else
32e9fa48 6772 new_rtx = force_to_mode (inner, tmode,
6139ff20 6773 len >= HOST_BITS_PER_WIDE_INT
0345195a 6774 ? ~(unsigned HOST_WIDE_INT) 0
729a2125 6775 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
b1257407 6776 0);
230d793d 6777
663522cb 6778 /* If this extraction is going into the destination of a SET,
230d793d
RS
6779 make a STRICT_LOW_PART unless we made a MEM. */
6780
6781 if (in_dest)
32e9fa48
KG
6782 return (MEM_P (new_rtx) ? new_rtx
6783 : (GET_CODE (new_rtx) != SUBREG
38a448ca 6784 ? gen_rtx_CLOBBER (tmode, const0_rtx)
32e9fa48 6785 : gen_rtx_STRICT_LOW_PART (VOIDmode, new_rtx)));
230d793d 6786
0f808b6f 6787 if (mode == tmode)
32e9fa48 6788 return new_rtx;
0f808b6f 6789
481683e1 6790 if (CONST_INT_P (new_rtx))
32e9fa48 6791 return gen_int_mode (INTVAL (new_rtx), mode);
0a7ec763 6792
0f808b6f
JH
6793 /* If we know that no extraneous bits are set, and that the high
6794 bit is not set, convert the extraction to the cheaper of
6795 sign and zero extension, that are equivalent in these cases. */
6796 if (flag_expensive_optimizations
6797 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
32e9fa48 6798 && ((nonzero_bits (new_rtx, tmode)
663522cb
KH
6799 & ~(((unsigned HOST_WIDE_INT)
6800 GET_MODE_MASK (tmode))
6801 >> 1))
0f808b6f
JH
6802 == 0)))
6803 {
32e9fa48
KG
6804 rtx temp = gen_rtx_ZERO_EXTEND (mode, new_rtx);
6805 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new_rtx);
0f808b6f
JH
6806
6807 /* Prefer ZERO_EXTENSION, since it gives more information to
6808 backends. */
f40751dd
JH
6809 if (rtx_cost (temp, SET, optimize_this_for_speed_p)
6810 <= rtx_cost (temp1, SET, optimize_this_for_speed_p))
0f808b6f
JH
6811 return temp;
6812 return temp1;
6813 }
6814
230d793d
RS
6815 /* Otherwise, sign- or zero-extend unless we already are in the
6816 proper mode. */
6817
f1c6ba8b 6818 return (gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
32e9fa48 6819 mode, new_rtx));
230d793d
RS
6820 }
6821
cc471082
RS
6822 /* Unless this is a COMPARE or we have a funny memory reference,
6823 don't do anything with zero-extending field extracts starting at
6824 the low-order bit since they are simple AND operations. */
8999a12e 6825 if (pos_rtx == 0 && pos == 0 && ! in_dest
79c29032 6826 && ! in_compare && unsignedp)
230d793d
RS
6827 return 0;
6828
79c29032
PB
6829 /* If INNER is a MEM, reject this if we would be spanning bytes or
6830 if the position is not a constant and the length is not 1. In all
6831 other cases, we would only be going outside our object in cases when
6832 an original shift would have been undefined. */
6833 if (MEM_P (inner)
e7373556
RK
6834 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
6835 || (pos_rtx != 0 && len != 1)))
6836 return 0;
6837
d7cd794f 6838 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d 6839 and the mode for the result. */
505ddab6 6840 if (in_dest && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
230d793d 6841 {
da920570
ZW
6842 wanted_inner_reg_mode = mode_for_extraction (EP_insv, 0);
6843 pos_mode = mode_for_extraction (EP_insv, 2);
6844 extraction_mode = mode_for_extraction (EP_insv, 3);
230d793d 6845 }
230d793d 6846
da920570
ZW
6847 if (! in_dest && unsignedp
6848 && mode_for_extraction (EP_extzv, -1) != MAX_MACHINE_MODE)
230d793d 6849 {
da920570
ZW
6850 wanted_inner_reg_mode = mode_for_extraction (EP_extzv, 1);
6851 pos_mode = mode_for_extraction (EP_extzv, 3);
6852 extraction_mode = mode_for_extraction (EP_extzv, 0);
230d793d 6853 }
230d793d 6854
da920570
ZW
6855 if (! in_dest && ! unsignedp
6856 && mode_for_extraction (EP_extv, -1) != MAX_MACHINE_MODE)
230d793d 6857 {
da920570
ZW
6858 wanted_inner_reg_mode = mode_for_extraction (EP_extv, 1);
6859 pos_mode = mode_for_extraction (EP_extv, 3);
6860 extraction_mode = mode_for_extraction (EP_extv, 0);
230d793d 6861 }
230d793d
RS
6862
6863 /* Never narrow an object, since that might not be safe. */
6864
6865 if (mode != VOIDmode
6866 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
6867 extraction_mode = mode;
6868
6869 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
6870 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
6871 pos_mode = GET_MODE (pos_rtx);
6872
79c29032
PB
6873 /* If this is not from memory, the desired mode is the preferred mode
6874 for an extraction pattern's first input operand, or word_mode if there
6875 is none. */
3c0cb5de 6876 if (!MEM_P (inner))
d7cd794f 6877 wanted_inner_mode = wanted_inner_reg_mode;
79c29032
PB
6878 else
6879 {
6880 /* Be careful not to go beyond the extracted object and maintain the
c22cacf3 6881 natural alignment of the memory. */
79c29032
PB
6882 wanted_inner_mode = smallest_mode_for_size (len, MODE_INT);
6883 while (pos % GET_MODE_BITSIZE (wanted_inner_mode) + len
6884 > GET_MODE_BITSIZE (wanted_inner_mode))
6885 {
6886 wanted_inner_mode = GET_MODE_WIDER_MODE (wanted_inner_mode);
6887 gcc_assert (wanted_inner_mode != VOIDmode);
6888 }
6889
6890 /* If we have to change the mode of memory and cannot, the desired mode
6891 is EXTRACTION_MODE. */
6892 if (inner_mode != wanted_inner_mode
6893 && (mode_dependent_address_p (XEXP (inner, 0))
6894 || MEM_VOLATILE_P (inner)
6895 || pos_rtx))
6896 wanted_inner_mode = extraction_mode;
6897 }
230d793d 6898
6139ff20
RK
6899 orig_pos = pos;
6900
f76b9db2
ILT
6901 if (BITS_BIG_ENDIAN)
6902 {
cf54c2cd
DE
6903 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6904 BITS_BIG_ENDIAN style. If position is constant, compute new
6905 position. Otherwise, build subtraction.
6906 Note that POS is relative to the mode of the original argument.
6907 If it's a MEM we need to recompute POS relative to that.
6908 However, if we're extracting from (or inserting into) a register,
6909 we want to recompute POS relative to wanted_inner_mode. */
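	 /* E.g. an 8-bit field at little-endian position 4 of a 32-bit
	    object sits at BITS_BIG_ENDIAN position 32 - 8 - 4 = 20.  */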
3c0cb5de 6910 int width = (MEM_P (inner)
cf54c2cd
DE
6911 ? GET_MODE_BITSIZE (is_mode)
6912 : GET_MODE_BITSIZE (wanted_inner_mode));
6913
f76b9db2 6914 if (pos_rtx == 0)
cf54c2cd 6915 pos = width - len - pos;
f76b9db2
ILT
6916 else
6917 pos_rtx
f1c6ba8b 6918 = gen_rtx_MINUS (GET_MODE (pos_rtx), GEN_INT (width - len), pos_rtx);
cf54c2cd 6919 /* POS may be less than 0 now, but we check for that below.
3c0cb5de 6920 Note that it can only be less than 0 if !MEM_P (inner). */
f76b9db2 6921 }
230d793d 6922
79c29032
PB
6923 /* If INNER has a wider mode, and this is a constant extraction, try to
6924 make it smaller and adjust the byte to point to the byte containing
230d793d 6925 the value. */
d7cd794f 6926 if (wanted_inner_mode != VOIDmode
79c29032
PB
6927 && inner_mode != wanted_inner_mode
6928 && ! pos_rtx
d7cd794f 6929 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
79c29032
PB
6930 && MEM_P (inner)
6931 && ! mode_dependent_address_p (XEXP (inner, 0))
6932 && ! MEM_VOLATILE_P (inner))
230d793d
RS
6933 {
6934 int offset = 0;
6935
6936 /* The computations below will be correct if the machine is big
6937 endian in both bits and bytes or little endian in bits and bytes.
6938 If it is mixed, we must adjust. */
663522cb 6939
230d793d 6940 /* If bytes are big endian and we had a paradoxical SUBREG, we must
0f41302f 6941 adjust OFFSET to compensate. */
f76b9db2 6942 if (BYTES_BIG_ENDIAN
230d793d
RS
6943 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
6944 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
230d793d 6945
79c29032
PB
6946 /* We can now move to the desired byte. */
6947 offset += (pos / GET_MODE_BITSIZE (wanted_inner_mode))
6948 * GET_MODE_SIZE (wanted_inner_mode);
6949 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
230d793d 6950
f76b9db2 6951 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
d7cd794f 6952 && is_mode != wanted_inner_mode)
c6b3f1f2 6953 offset = (GET_MODE_SIZE (is_mode)
d7cd794f 6954 - GET_MODE_SIZE (wanted_inner_mode) - offset);
c6b3f1f2 6955
79c29032 6956 inner = adjust_address_nv (inner, wanted_inner_mode, offset);
230d793d
RS
6957 }
6958
3f36bac2
AN
6959 /* If INNER is not memory, get it into the proper mode. If we are changing
6960 its mode, POS must be a constant and smaller than the size of the new
6961 mode. */
3c0cb5de 6962 else if (!MEM_P (inner))
9e74dc41 6963 {
3f36bac2
AN
6964 /* On the LHS, don't create paradoxical subregs implicitly truncating
6965 the register unless TRULY_NOOP_TRUNCATION. */
6966 if (in_dest
6967 && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (GET_MODE (inner)),
6968 GET_MODE_BITSIZE (wanted_inner_mode)))
6969 return NULL_RTX;
6970
9e74dc41
RK
6971 if (GET_MODE (inner) != wanted_inner_mode
6972 && (pos_rtx != 0
6973 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
3f36bac2 6974 return NULL_RTX;
9e74dc41 6975
01ea23f3 6976 if (orig_pos < 0)
3f36bac2 6977 return NULL_RTX;
01ea23f3 6978
9e74dc41
RK
6979 inner = force_to_mode (inner, wanted_inner_mode,
6980 pos_rtx
6981 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
0345195a 6982 ? ~(unsigned HOST_WIDE_INT) 0
729a2125
RK
6983 : ((((unsigned HOST_WIDE_INT) 1 << len) - 1)
6984 << orig_pos),
b1257407 6985 0);
9e74dc41 6986 }
230d793d
RS
6987
6988 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
6989 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 6990 if (pos_rtx != 0
230d793d 6991 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
0f808b6f 6992 {
f1c6ba8b 6993 rtx temp = gen_rtx_ZERO_EXTEND (pos_mode, pos_rtx);
0f808b6f
JH
6994
6995 /* If we know that no extraneous bits are set, and that the high
eaec9b3d 6996 bit is not set, convert extraction to cheaper one - either
0f808b6f
JH
6997 SIGN_EXTENSION or ZERO_EXTENSION, that are equivalent in these
6998 cases. */
6999 if (flag_expensive_optimizations
7000 && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
7001 && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
663522cb
KH
7002 & ~(((unsigned HOST_WIDE_INT)
7003 GET_MODE_MASK (GET_MODE (pos_rtx)))
7004 >> 1))
0f808b6f
JH
7005 == 0)))
7006 {
7007 rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);
7008
25ffb1f6 7009 /* Prefer ZERO_EXTENSION, since it gives more information to
0f808b6f 7010 backends. */
f40751dd
JH
7011 if (rtx_cost (temp1, SET, optimize_this_for_speed_p)
7012 < rtx_cost (temp, SET, optimize_this_for_speed_p))
0f808b6f
JH
7013 temp = temp1;
7014 }
7015 pos_rtx = temp;
7016 }
8999a12e 7017 else if (pos_rtx != 0
230d793d 7018 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4de249d9 7019 pos_rtx = gen_lowpart (pos_mode, pos_rtx);
230d793d 7020
8999a12e
RK
7021 /* Make POS_RTX unless we already have it and it is correct. If we don't
7022 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
0f41302f 7023 be a CONST_INT. */
8999a12e
RK
7024 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
7025 pos_rtx = orig_pos_rtx;
7026
7027 else if (pos_rtx == 0)
5f4f0e22 7028 pos_rtx = GEN_INT (pos);
230d793d
RS
7029
7030 /* Make the required operation. See if we can use existing rtx. */
32e9fa48 7031 new_rtx = gen_rtx_fmt_eee (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 7032 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d 7033 if (! in_dest)
32e9fa48 7034 new_rtx = gen_lowpart (mode, new_rtx);
230d793d 7035
32e9fa48 7036 return new_rtx;
230d793d
RS
7037}
7038\f
71923da7
RK
7039/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
7040 with any other operations in X. Return X without that shift if so. */
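/* For example, with COUNT = 2,
     (plus (ashift X 2) (const_int 12))
   commutes to (ashift (plus X 3) 2), so the result here is
   (plus X (const_int 3)); the constant must have its low COUNT bits
   clear for this to be exact.  */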
7041
7042static rtx
79a490a9 7043extract_left_shift (rtx x, int count)
71923da7
RK
7044{
7045 enum rtx_code code = GET_CODE (x);
7046 enum machine_mode mode = GET_MODE (x);
7047 rtx tem;
7048
7049 switch (code)
7050 {
7051 case ASHIFT:
7052 /* This is the shift itself. If it is wide enough, we will return
7053 either the value being shifted if the shift count is equal to
7054 COUNT or a shift for the difference. */
481683e1 7055 if (CONST_INT_P (XEXP (x, 1))
71923da7
RK
7056 && INTVAL (XEXP (x, 1)) >= count)
7057 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
7058 INTVAL (XEXP (x, 1)) - count);
7059 break;
7060
7061 case NEG: case NOT:
7062 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
f1c6ba8b 7063 return simplify_gen_unary (code, mode, tem, mode);
71923da7
RK
7064
7065 break;
7066
7067 case PLUS: case IOR: case XOR: case AND:
7068 /* If we can safely shift this constant and we find the inner shift,
7069 make a new operation. */
481683e1 7070 if (CONST_INT_P (XEXP (x, 1))
b729186a 7071 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
71923da7 7072 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
bcb34aa3
PB
7073 return simplify_gen_binary (code, mode, tem,
7074 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
71923da7
RK
7075
7076 break;
663522cb 7077
e9a25f70
JL
7078 default:
7079 break;
71923da7
RK
7080 }
7081
7082 return 0;
7083}
7084\f
230d793d
RS
7085/* Look at the expression rooted at X. Look for expressions
7086 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
7087 Form these expressions.
7088
7089 Return the new rtx, usually just X.
7090
8aeea6e6 7091 Also, for machines like the VAX that don't have logical shift insns,
230d793d
RS
7092 try to convert logical to arithmetic shift operations in cases where
7093 they are equivalent. This undoes the canonicalizations to logical
7094 shifts done elsewhere.
7095
7096 We try, as much as possible, to re-use rtl expressions to save memory.
7097
7098 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
7099 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
7100 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
7101 or a COMPARE against zero, it is COMPARE. */
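/* A typical example: the canonical form
     (and:SI (lshiftrt:SI X (const_int 24)) (const_int 255))
   is turned back into
     (zero_extract:SI X (const_int 8) (const_int 24)),
   which extraction patterns can match directly.  */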
7102
7103static rtx
79a490a9 7104make_compound_operation (rtx x, enum rtx_code in_code)
230d793d
RS
7105{
7106 enum rtx_code code = GET_CODE (x);
7107 enum machine_mode mode = GET_MODE (x);
7108 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 7109 rtx rhs, lhs;
230d793d 7110 enum rtx_code next_code;
6ffef2ad 7111 int i, j;
32e9fa48 7112 rtx new_rtx = 0;
280f58ba 7113 rtx tem;
6f7d635c 7114 const char *fmt;
230d793d
RS
7115
7116 /* Select the code to be used in recursive calls. Once we are inside an
7117 address, we stay there. If we have a comparison, set to COMPARE,
7118 but once inside, go back to our default of SET. */
7119
42495ca0 7120 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
ec8e098d 7121 : ((code == COMPARE || COMPARISON_P (x))
230d793d
RS
7122 && XEXP (x, 1) == const0_rtx) ? COMPARE
7123 : in_code == COMPARE ? SET : in_code);
7124
7125 /* Process depending on the code of this operation. If NEW_RTX is set
da7d8304 7126 nonzero, it will be returned. */
230d793d
RS
7127
7128 switch (code)
7129 {
7130 case ASHIFT:
230d793d
RS
7131 /* Convert shifts by constants into multiplications if inside
7132 an address. */
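	  /* E.g. (mem (plus R (ashift I 2))) becomes
	     (mem (plus R (mult I 4))), the canonical form for
	     scaled-index addresses.  */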
481683e1 7133 if (in_code == MEM && CONST_INT_P (XEXP (x, 1))
5f4f0e22 7134 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 7135 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba 7136 {
32e9fa48
KG
7137 new_rtx = make_compound_operation (XEXP (x, 0), next_code);
7138 new_rtx = gen_rtx_MULT (mode, new_rtx,
f1c6ba8b
RK
7139 GEN_INT ((HOST_WIDE_INT) 1
7140 << INTVAL (XEXP (x, 1))));
280f58ba 7141 }
230d793d
RS
7142 break;
7143
7144 case AND:
7145 /* If the second operand is not a constant, we can't do anything
7146 with it. */
481683e1 7147 if (!CONST_INT_P (XEXP (x, 1)))
230d793d
RS
7148 break;
7149
7150 /* If the constant is a power of two minus one and the first operand
7151 is a logical right shift, make an extraction. */
7152 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7153 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba 7154 {
32e9fa48
KG
7155 new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
7156 new_rtx = make_extraction (mode, new_rtx, 0, XEXP (XEXP (x, 0), 1), i, 1,
280f58ba
RK
7157 0, in_code == COMPARE);
7158 }
dfbe1b2f 7159
230d793d
RS
7160 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
7161 else if (GET_CODE (XEXP (x, 0)) == SUBREG
7162 && subreg_lowpart_p (XEXP (x, 0))
7163 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
7164 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba 7165 {
32e9fa48 7166 new_rtx = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
280f58ba 7167 next_code);
32e9fa48 7168 new_rtx = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new_rtx, 0,
280f58ba
RK
7169 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
7170 0, in_code == COMPARE);
7171 }
45620ed4 7172 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
7173 else if ((GET_CODE (XEXP (x, 0)) == XOR
7174 || GET_CODE (XEXP (x, 0)) == IOR)
7175 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
7176 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
7177 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
7178 {
7179 /* Apply the distributive law, and then try to make extractions. */
32e9fa48 7180 new_rtx = gen_rtx_fmt_ee (GET_CODE (XEXP (x, 0)), mode,
f1c6ba8b
RK
7181 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
7182 XEXP (x, 1)),
7183 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
7184 XEXP (x, 1)));
32e9fa48 7185 new_rtx = make_compound_operation (new_rtx, in_code);
c2f9f64e 7186 }
a7c99304
RK
7187
7188 /* If we are have (and (rotate X C) M) and C is larger than the number
7189 of bits in M, this is an extraction. */
7190
7191 else if (GET_CODE (XEXP (x, 0)) == ROTATE
481683e1 7192 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
a7c99304
RK
7193 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
7194 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba 7195 {
32e9fa48
KG
7196 new_rtx = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
7197 new_rtx = make_extraction (mode, new_rtx,
280f58ba
RK
7198 (GET_MODE_BITSIZE (mode)
7199 - INTVAL (XEXP (XEXP (x, 0), 1))),
7200 NULL_RTX, i, 1, 0, in_code == COMPARE);
7201 }
a7c99304
RK
7202
7203 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
7204 a logical shift and our mask turns off all the propagated sign
7205 bits, we can replace the logical shift with an arithmetic shift. */
ef89d648
ZW
7206 else if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
7207 && !have_insn_for (LSHIFTRT, mode)
7208 && have_insn_for (ASHIFTRT, mode)
481683e1 7209 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
230d793d 7210 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
7211 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
7212 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 7213 {
5f4f0e22 7214 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
7215
7216 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
7217 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
7218 SUBST (XEXP (x, 0),
f1c6ba8b
RK
7219 gen_rtx_ASHIFTRT (mode,
7220 make_compound_operation
7221 (XEXP (XEXP (x, 0), 0), next_code),
7222 XEXP (XEXP (x, 0), 1)));
230d793d
RS
7223 }
7224
7225 /* If the constant is one less than a power of two, this might be
7226 representable by an extraction even if no shift is present.
7227 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
7228 we are in a COMPARE. */
7229 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
32e9fa48 7230 new_rtx = make_extraction (mode,
280f58ba
RK
7231 make_compound_operation (XEXP (x, 0),
7232 next_code),
7233 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
7234
7235 /* If we are in a comparison and this is an AND with a power of two,
7236 convert this into the appropriate bit extract. */
7237 else if (in_code == COMPARE
7238 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
32e9fa48 7239 new_rtx = make_extraction (mode,
280f58ba
RK
7240 make_compound_operation (XEXP (x, 0),
7241 next_code),
7242 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
7243
7244 break;
7245
7246 case LSHIFTRT:
7247 /* If the sign bit is known to be zero, replace this with an
7248 arithmetic shift. */
ef89d648
ZW
7249 if (have_insn_for (ASHIFTRT, mode)
7250 && ! have_insn_for (LSHIFTRT, mode)
5f4f0e22 7251 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 7252 && (nonzero_bits (XEXP (x, 0), mode) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d 7253 {
32e9fa48 7254 new_rtx = gen_rtx_ASHIFTRT (mode,
f1c6ba8b
RK
7255 make_compound_operation (XEXP (x, 0),
7256 next_code),
7257 XEXP (x, 1));
230d793d
RS
7258 break;
7259 }
7260
0f41302f 7261 /* ... fall through ... */
230d793d
RS
7262
7263 case ASHIFTRT:
71923da7
RK
7264 lhs = XEXP (x, 0);
7265 rhs = XEXP (x, 1);
7266
230d793d
RS
7267 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
7268 this is a SIGN_EXTRACT. */
481683e1 7269 if (CONST_INT_P (rhs)
71923da7 7270 && GET_CODE (lhs) == ASHIFT
481683e1 7271 && CONST_INT_P (XEXP (lhs, 1))
261639a2
JJ
7272 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1))
7273 && INTVAL (rhs) < mode_width)
280f58ba 7274 {
32e9fa48
KG
7275 new_rtx = make_compound_operation (XEXP (lhs, 0), next_code);
7276 new_rtx = make_extraction (mode, new_rtx,
71923da7
RK
7277 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
7278 NULL_RTX, mode_width - INTVAL (rhs),
d0ab8cd3 7279 code == LSHIFTRT, 0, in_code == COMPARE);
8231ad94 7280 break;
d0ab8cd3
RK
7281 }
7282
71923da7
RK
7283 /* See if we have operations between an ASHIFTRT and an ASHIFT.
7284 If so, try to merge the shifts into a SIGN_EXTEND. We could
7285 also do this for some cases of SIGN_EXTRACT, but it doesn't
7286 seem worth the effort; the case checked for occurs on Alpha. */
663522cb 7287
ec8e098d 7288 if (!OBJECT_P (lhs)
71923da7 7289 && ! (GET_CODE (lhs) == SUBREG
ec8e098d 7290 && (OBJECT_P (SUBREG_REG (lhs))))
481683e1 7291 && CONST_INT_P (rhs)
71923da7 7292 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
261639a2 7293 && INTVAL (rhs) < mode_width
32e9fa48
KG
7294 && (new_rtx = extract_left_shift (lhs, INTVAL (rhs))) != 0)
7295 new_rtx = make_extraction (mode, make_compound_operation (new_rtx, next_code),
71923da7
RK
7296 0, NULL_RTX, mode_width - INTVAL (rhs),
7297 code == LSHIFTRT, 0, in_code == COMPARE);
663522cb 7298
230d793d 7299 break;
280f58ba
RK
7300
7301 case SUBREG:
7302 /* Call ourselves recursively on the inner expression. If we are
7303 narrowing the object and it has a different RTL code from
7304 what it originally did, do this SUBREG as a force_to_mode. */
7305
0a5cbff6 7306 tem = make_compound_operation (SUBREG_REG (x), in_code);
0a5cbff6 7307
966b148a
AK
7308 {
7309 rtx simplified;
7310 simplified = simplify_subreg (GET_MODE (x), tem, GET_MODE (tem),
7311 SUBREG_BYTE (x));
0a5cbff6 7312
966b148a
AK
7313 if (simplified)
7314 tem = simplified;
6f28d3e9 7315
966b148a
AK
7316 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
7317 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
7318 && subreg_lowpart_p (x))
7319 {
7320 rtx newer = force_to_mode (tem, mode, ~(HOST_WIDE_INT) 0,
b1257407 7321 0);
c22cacf3 7322
966b148a
AK
7323 /* If we have something other than a SUBREG, we might have
7324 done an expansion, so rerun ourselves. */
7325 if (GET_CODE (newer) != SUBREG)
7326 newer = make_compound_operation (newer, in_code);
c22cacf3 7327
827f4079
AN
7328 /* force_to_mode can expand compounds. If it just re-expanded the
7329 compound, use gen_lowpart instead to convert to the desired
7330 mode. */
7331 if (rtx_equal_p (newer, x))
7332 return gen_lowpart (GET_MODE (x), tem);
7333
966b148a
AK
7334 return newer;
7335 }
7336
7337 if (simplified)
6f28d3e9 7338 return tem;
966b148a 7339 }
e9a25f70 7340 break;
663522cb 7341
e9a25f70
JL
7342 default:
7343 break;
230d793d
RS
7344 }
7345
32e9fa48 7346 if (new_rtx)
230d793d 7347 {
32e9fa48 7348 x = gen_lowpart (mode, new_rtx);
230d793d
RS
7349 code = GET_CODE (x);
7350 }
7351
7352 /* Now recursively process each operand of this operation. */
7353 fmt = GET_RTX_FORMAT (code);
7354 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7355 if (fmt[i] == 'e')
7356 {
32e9fa48
KG
7357 new_rtx = make_compound_operation (XEXP (x, i), next_code);
7358 SUBST (XEXP (x, i), new_rtx);
230d793d 7359 }
6ffef2ad
RS
7360 else if (fmt[i] == 'E')
7361 for (j = 0; j < XVECLEN (x, i); j++)
7362 {
7363 new_rtx = make_compound_operation (XVECEXP (x, i, j), next_code);
7364 SUBST (XVECEXP (x, i, j), new_rtx);
7365 }
230d793d 7366
756191b7
JM
7367 /* If this is a commutative operation, the changes to the operands
7368 may have made it noncanonical. */
7369 if (COMMUTATIVE_ARITH_P (x)
7370 && swap_commutative_operands_p (XEXP (x, 0), XEXP (x, 1)))
7371 {
7372 tem = XEXP (x, 0);
7373 SUBST (XEXP (x, 0), XEXP (x, 1));
7374 SUBST (XEXP (x, 1), tem);
7375 }
7376
230d793d
RS
7377 return x;
7378}
7379\f
7380/* Given M, see if it is a value that would select a field of bits
663522cb
KH
7381 within an item, but not the entire word. Return -1 if not.
7382 Otherwise, return the starting position of the field, where 0 is the
7383 low-order bit.
230d793d
RS
7384
7385 *PLEN is set to the length of the field. */
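/* For example, M = 0x70 (binary 0111 0000) selects a 3-bit field
   starting at bit 4, so we return 4 and set *PLEN to 3; M = 0x50
   selects no contiguous field, so we return -1.  */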
7386
7387static int
79a490a9 7388get_pos_from_mask (unsigned HOST_WIDE_INT m, unsigned HOST_WIDE_INT *plen)
230d793d
RS
7389{
7390 /* Get the bit number of the first 1 bit from the right, -1 if none. */
663522cb 7391 int pos = exact_log2 (m & -m);
6de9cd9a 7392 int len = 0;
230d793d 7393
6de9cd9a
DN
7394 if (pos >= 0)
7395 /* Now shift off the low-order zero bits and see if we have a
7396 power of two minus 1. */
7397 len = exact_log2 ((m >> pos) + 1);
230d793d 7398
d3bc8938 7399 if (len <= 0)
6de9cd9a 7400 pos = -1;
230d793d 7401
d3bc8938 7402 *plen = len;
230d793d
RS
7403 return pos;
7404}
7405\f
b1257407
PB
7406/* If X refers to a register that equals REG in value, replace these
7407 references with REG. */
7408static rtx
7409canon_reg_for_combine (rtx x, rtx reg)
7410{
7411 rtx op0, op1, op2;
7412 const char *fmt;
7413 int i;
7414 bool copied;
7415
7416 enum rtx_code code = GET_CODE (x);
7417 switch (GET_RTX_CLASS (code))
7418 {
7419 case RTX_UNARY:
7420 op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7421 if (op0 != XEXP (x, 0))
7422 return simplify_gen_unary (GET_CODE (x), GET_MODE (x), op0,
7423 GET_MODE (reg));
7424 break;
7425
7426 case RTX_BIN_ARITH:
7427 case RTX_COMM_ARITH:
7428 op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7429 op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7430 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7431 return simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
7432 break;
7433
7434 case RTX_COMPARE:
7435 case RTX_COMM_COMPARE:
7436 op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7437 op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7438 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
7439 return simplify_gen_relational (GET_CODE (x), GET_MODE (x),
7440 GET_MODE (op0), op0, op1);
7441 break;
7442
7443 case RTX_TERNARY:
7444 case RTX_BITFIELD_OPS:
7445 op0 = canon_reg_for_combine (XEXP (x, 0), reg);
7446 op1 = canon_reg_for_combine (XEXP (x, 1), reg);
7447 op2 = canon_reg_for_combine (XEXP (x, 2), reg);
7448 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1) || op2 != XEXP (x, 2))
7449 return simplify_gen_ternary (GET_CODE (x), GET_MODE (x),
7450 GET_MODE (op0), op0, op1, op2);
7451 break;
7452 case RTX_OBJ:
7453 if (REG_P (x))
7454 {
7455 if (rtx_equal_p (get_last_value (reg), x)
7456 || rtx_equal_p (reg, get_last_value (x)))
7457 return reg;
7458 else
7459 break;
7460 }
7461
7462 /* fall through */
7463
7464 default:
7465 fmt = GET_RTX_FORMAT (code);
7466 copied = false;
7467 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
c22cacf3
MS
7468 if (fmt[i] == 'e')
7469 {
7470 rtx op = canon_reg_for_combine (XEXP (x, i), reg);
b1257407
PB
7471 if (op != XEXP (x, i))
7472 {
7473 if (!copied)
7474 {
7475 copied = true;
7476 x = copy_rtx (x);
7477 }
7478 XEXP (x, i) = op;
c22cacf3
MS
7479 }
7480 }
7481 else if (fmt[i] == 'E')
7482 {
7483 int j;
7484 for (j = 0; j < XVECLEN (x, i); j++)
b1257407 7485 {
c22cacf3
MS
7486 rtx op = canon_reg_for_combine (XVECEXP (x, i, j), reg);
7487 if (op != XVECEXP (x, i, j))
b1257407
PB
7488 {
7489 if (!copied)
7490 {
7491 copied = true;
7492 x = copy_rtx (x);
7493 }
7494 XVECEXP (x, i, j) = op;
c22cacf3 7495 }
b1257407
PB
7496 }
7497 }
7498
7499 break;
7500 }
7501
7502 return x;
7503}
7504
4df8acd3
AN
7505/* Return X converted to MODE. If the value is already truncated to
7506 MODE we can just return a subreg even though in the general case we
7507 would need an explicit truncation. */
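/* Editorial note: on targets where narrowing is not a no-op
   (TRULY_NOOP_TRUNCATION is false, as when narrowing DImode values to
   SImode on some 64-bit machines), a plain SUBREG does not truncate,
   so the code below wraps X in an explicit TRUNCATE unless X is a
   register already known to be truncated to MODE.  */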
7508
7509static rtx
7510gen_lowpart_or_truncate (enum machine_mode mode, rtx x)
7511{
d686d892
RS
7512 if (!CONST_INT_P (x)
7513 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
7514 && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
7515 GET_MODE_BITSIZE (GET_MODE (x)))
7516 && !(REG_P (x) && reg_truncated_to_mode (mode, x)))
7517 {
7518 /* Bit-cast X into an integer mode. */
7519 if (!SCALAR_INT_MODE_P (GET_MODE (x)))
7520 x = gen_lowpart (int_mode_for_mode (GET_MODE (x)), x);
7521 x = simplify_gen_unary (TRUNCATE, int_mode_for_mode (mode),
7522 x, GET_MODE (x));
7523 }
7524
7525 return gen_lowpart (mode, x);
4df8acd3
AN
7526}
7527
6139ff20
RK
7528/* See if X can be simplified knowing that we will only refer to it in
7529 MODE and will only refer to those bits that are nonzero in MASK.
7530 If other bits are being computed or if masking operations are done
7531 that select a superset of the bits in MASK, they can sometimes be
7532 ignored.
7533
7534 Return a possibly simplified expression, but always convert X to
7535 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f 7536
e3d616e3
RK
7537 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
7538 are all off in X. This is used when X will be complemented, by either
180b8e4b 7539 NOT, NEG, or XOR. */
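/* An illustrative example (editorial): forcing (and X (const_int 255))
   with MASK == 15 lets the AND case below shrink the constant to 15
   and then delete the AND altogether, since (and X 15) masks exactly
   the bits that will ever be looked at.  */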
dfbe1b2f
RK
7540
7541static rtx
79a490a9 7542force_to_mode (rtx x, enum machine_mode mode, unsigned HOST_WIDE_INT mask,
b1257407 7543 int just_select)
dfbe1b2f
RK
7544{
7545 enum rtx_code code = GET_CODE (x);
180b8e4b 7546 int next_select = just_select || code == XOR || code == NOT || code == NEG;
ef026f91
RS
7547 enum machine_mode op_mode;
7548 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
7549 rtx op0, op1, temp;
7550
132d2040
RK
7551 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
7552 code below will do the wrong thing since the mode of such an
663522cb 7553 expression is VOIDmode.
be3d27d6
CI
7554
7555 Also do nothing if X is a CLOBBER; this can happen if X was
4de249d9 7556 the return value from a call to gen_lowpart. */
be3d27d6 7557 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
246e00f2
RK
7558 return x;
7559
6139ff20
RK
7560 /* We want to perform the operation in its present mode unless we know
7561 that the operation is valid in MODE, in which case we do the operation
7562 in MODE. */
1c75dfa4 7563 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
ef89d648 7564 && have_insn_for (code, mode))
ef026f91 7565 ? mode : GET_MODE (x));
e3d616e3 7566
aa988991
RS
7567 /* It is not valid to do a right-shift in a narrower mode
7568 than the one it came in with. */
7569 if ((code == LSHIFTRT || code == ASHIFTRT)
7570 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
7571 op_mode = GET_MODE (x);
ef026f91
RS
7572
7573 /* Truncate MASK to fit OP_MODE. */
7574 if (op_mode)
7575 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
7576
7577 /* When we have an arithmetic operation, or a shift whose count we
50b29dbb 7578 do not know, we need to assume that all bits up to the highest-order
6139ff20 7579 bit in MASK will be needed. This is how we form such a mask. */
50b29dbb
ILT
7580 if (mask & ((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1)))
7581 fuller_mask = ~(unsigned HOST_WIDE_INT) 0;
ef026f91 7582 else
50b29dbb
ILT
7583 fuller_mask = (((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1))
7584 - 1);
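/* A worked example of the above (editorial): if MASK == 0x14 (bits 2
   and 4), floor_log2 (0x14) == 4 and FULLER_MASK becomes
   ((unsigned HOST_WIDE_INT) 1 << 5) - 1 == 0x1f, since a carry into
   bit 2 or bit 4 can originate in any of bits 0-4.  */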
ef026f91
RS
7585
7586 /* Determine what bits of X are guaranteed to be (non)zero. */
7587 nonzero = nonzero_bits (x, mode);
6139ff20
RK
7588
7589 /* If none of the bits in X are needed, return a zero. */
de097a3b 7590 if (!just_select && (nonzero & mask) == 0 && !side_effects_p (x))
ccf7aef4 7591 x = const0_rtx;
dfbe1b2f 7592
6139ff20
RK
7593 /* If X is a CONST_INT, return a new one. Do this here since the
7594 test below will fail. */
481683e1 7595 if (CONST_INT_P (x))
ccf7aef4
RH
7596 {
7597 if (SCALAR_INT_MODE_P (mode))
c22cacf3 7598 return gen_int_mode (INTVAL (x) & mask, mode);
ccf7aef4
RH
7599 else
7600 {
7601 x = GEN_INT (INTVAL (x) & mask);
7602 return gen_lowpart_common (mode, x);
7603 }
7604 }
dfbe1b2f 7605
180b8e4b
RK
7606 /* If X is narrower than MODE and we want all the bits in X's mode, just
7607 get X in the proper mode. */
7608 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
663522cb 7609 && (GET_MODE_MASK (GET_MODE (x)) & ~mask) == 0)
4de249d9 7610 return gen_lowpart (mode, x);
dfbe1b2f 7611
d686d892
RS
7612 /* We can ignore the effect of a SUBREG if it narrows the mode or
7613 if the constant masks to zero all the bits the mode doesn't have. */
7614 if (GET_CODE (x) == SUBREG
7615 && subreg_lowpart_p (x)
7616 && ((GET_MODE_SIZE (GET_MODE (x))
7617 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7618 || (0 == (mask
7619 & GET_MODE_MASK (GET_MODE (x))
7620 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
7621 return force_to_mode (SUBREG_REG (x), mode, mask, next_select);
7622
7623 /* The arithmetic simplifications here only work for scalar integer modes. */
7624 if (!SCALAR_INT_MODE_P (mode) || !SCALAR_INT_MODE_P (GET_MODE (x)))
7625 return gen_lowpart_or_truncate (mode, x);
ff596cd2 7626
dfbe1b2f
RK
7627 switch (code)
7628 {
6139ff20
RK
7629 case CLOBBER:
7630 /* If X is a (clobber (const_int)), return it since we know we are
0f41302f 7631 generating something that won't match. */
6139ff20
RK
7632 return x;
7633
dfbe1b2f
RK
7634 case SIGN_EXTEND:
7635 case ZERO_EXTEND:
7636 case ZERO_EXTRACT:
7637 case SIGN_EXTRACT:
7638 x = expand_compound_operation (x);
7639 if (GET_CODE (x) != code)
b1257407 7640 return force_to_mode (x, mode, mask, next_select);
dfbe1b2f
RK
7641 break;
7642
479b1013
AN
7643 case TRUNCATE:
7644 /* Similarly for a truncate. */
7645 return force_to_mode (XEXP (x, 0), mode, mask, next_select);
7646
dfbe1b2f 7647 case AND:
6139ff20
RK
7648 /* If this is an AND with a constant, convert it into an AND
7649 whose constant is the AND of that constant with MASK. If it
7650 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 7651
481683e1 7652 if (CONST_INT_P (XEXP (x, 1)))
dfbe1b2f 7653 {
6139ff20
RK
7654 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
7655 mask & INTVAL (XEXP (x, 1)));
dfbe1b2f
RK
7656
7657 /* If X is still an AND, see if it is an AND with a mask that
71923da7
RK
7658 is just some low-order bits. If so, and it is MASK, we don't
7659 need it. */
dfbe1b2f 7660
481683e1 7661 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
d0c9db30 7662 && ((INTVAL (XEXP (x, 1)) & GET_MODE_MASK (GET_MODE (x)))
3129af4c 7663 == mask))
dfbe1b2f 7664 x = XEXP (x, 0);
d0ab8cd3 7665
71923da7
RK
7666 /* If it remains an AND, try making another AND with the bits
7667 in the mode mask that aren't in MASK turned on. If the
7668 constant in the AND is wide enough, this might make a
7669 cheaper constant. */
7670
481683e1 7671 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1))
2ca9ae17
JW
7672 && GET_MODE_MASK (GET_MODE (x)) != mask
7673 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
71923da7
RK
7674 {
7675 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
663522cb 7676 | (GET_MODE_MASK (GET_MODE (x)) & ~mask));
71923da7
RK
7677 int width = GET_MODE_BITSIZE (GET_MODE (x));
7678 rtx y;
7679
71cc389b 7680 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
71923da7
RK
7681 number, sign extend it. */
7682 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
7683 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7684 cval |= (HOST_WIDE_INT) -1 << width;
7685
bcb34aa3
PB
7686 y = simplify_gen_binary (AND, GET_MODE (x),
7687 XEXP (x, 0), GEN_INT (cval));
f40751dd
JH
7688 if (rtx_cost (y, SET, optimize_this_for_speed_p)
7689 < rtx_cost (x, SET, optimize_this_for_speed_p))
71923da7
RK
7690 x = y;
7691 }
7692
d0ab8cd3 7693 break;
dfbe1b2f
RK
7694 }
7695
6139ff20 7696 goto binop;
dfbe1b2f
RK
7697
7698 case PLUS:
6139ff20
RK
7699 /* In (and (plus FOO C1) M), if M is a mask that just turns off
7700 low-order bits (as in an alignment operation) and FOO is already
7701 aligned to that boundary, mask C1 to that boundary as well.
7702 This may eliminate that PLUS and, later, the AND. */
9fa6d012
TG
7703
7704 {
770ae6cc 7705 unsigned int width = GET_MODE_BITSIZE (mode);
9fa6d012
TG
7706 unsigned HOST_WIDE_INT smask = mask;
7707
7708 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
7709 number, sign extend it. */
7710
7711 if (width < HOST_BITS_PER_WIDE_INT
7712 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
7713 smask |= (HOST_WIDE_INT) -1 << width;
7714
481683e1 7715 if (CONST_INT_P (XEXP (x, 1))
563c12b0
RH
7716 && exact_log2 (- smask) >= 0
7717 && (nonzero_bits (XEXP (x, 0), mode) & ~smask) == 0
7718 && (INTVAL (XEXP (x, 1)) & ~smask) != 0)
7719 return force_to_mode (plus_constant (XEXP (x, 0),
7720 (INTVAL (XEXP (x, 1)) & smask)),
b1257407 7721 mode, smask, next_select);
9fa6d012 7722 }
6139ff20 7723
0f41302f 7724 /* ... fall through ... */
6139ff20 7725
dfbe1b2f 7726 case MULT:
6139ff20
RK
7727 /* For PLUS, MINUS and MULT, we need any bits less significant than the
7728 most significant bit in MASK since carries from those bits will
7729 affect the bits we are interested in. */
7730 mask = fuller_mask;
7731 goto binop;
7732
d41638e4
RH
7733 case MINUS:
7734 /* If X is (minus C Y) where C's least set bit is larger than any bit
7735 in the mask, then we may replace with (neg Y). */
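/* An editorial example: with MASK == 3, (minus (const_int 8) Y) may
   become (neg Y), because 8 is 0 modulo 4, so 8 - Y and 0 - Y agree
   on the two low bits that the mask selects.  */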
481683e1 7736 if (CONST_INT_P (XEXP (x, 0))
0345195a
RK
7737 && (((unsigned HOST_WIDE_INT) (INTVAL (XEXP (x, 0))
7738 & -INTVAL (XEXP (x, 0))))
7739 > mask))
d41638e4 7740 {
f1c6ba8b
RK
7741 x = simplify_gen_unary (NEG, GET_MODE (x), XEXP (x, 1),
7742 GET_MODE (x));
b1257407 7743 return force_to_mode (x, mode, mask, next_select);
d41638e4
RH
7744 }
7745
bc02f8d3 7746 /* Similarly, if C contains every bit in the fuller_mask, then we may
d41638e4 7747 replace with (not Y). */
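/* An editorial example: with FULLER_MASK == 7,
   (minus (const_int 15) Y) may become (not Y), since 15 is congruent
   to -1 modulo 8 and -1 - Y is exactly ~Y in two's complement.  */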
481683e1 7748 if (CONST_INT_P (XEXP (x, 0))
bc02f8d3 7749 && ((INTVAL (XEXP (x, 0)) | (HOST_WIDE_INT) fuller_mask)
0345195a 7750 == INTVAL (XEXP (x, 0))))
d41638e4 7751 {
f1c6ba8b
RK
7752 x = simplify_gen_unary (NOT, GET_MODE (x),
7753 XEXP (x, 1), GET_MODE (x));
b1257407 7754 return force_to_mode (x, mode, mask, next_select);
d41638e4
RH
7755 }
7756
7757 mask = fuller_mask;
7758 goto binop;
7759
dfbe1b2f
RK
7760 case IOR:
7761 case XOR:
6139ff20
RK
7762 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
7763 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
7764 operation which may be a bitfield extraction. Ensure that the
7765 constant we form is not wider than the mode of X. */
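/* An editorial example: because a logical right shift distributes
   over IOR, (ior (lshiftrt FOO 8) 1) can be rebuilt as
   (lshiftrt (ior FOO 256) 8); with the IOR pushed below the shift,
   the surrounding AND and shift may now match as a bitfield
   extraction.  */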
7766
7767 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
481683e1 7768 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
6139ff20
RK
7769 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
7770 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
481683e1 7771 && CONST_INT_P (XEXP (x, 1))
6139ff20
RK
7772 && ((INTVAL (XEXP (XEXP (x, 0), 1))
7773 + floor_log2 (INTVAL (XEXP (x, 1))))
7774 < GET_MODE_BITSIZE (GET_MODE (x)))
7775 && (INTVAL (XEXP (x, 1))
663522cb 7776 & ~nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6139ff20
RK
7777 {
7778 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
663522cb 7779 << INTVAL (XEXP (XEXP (x, 0), 1)));
bcb34aa3
PB
7780 temp = simplify_gen_binary (GET_CODE (x), GET_MODE (x),
7781 XEXP (XEXP (x, 0), 0), temp);
7782 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), temp,
7783 XEXP (XEXP (x, 0), 1));
b1257407 7784 return force_to_mode (x, mode, mask, next_select);
6139ff20
RK
7785 }
7786
7787 binop:
dfbe1b2f 7788 /* For most binary operations, just propagate into the operation and
6d2f8887 7789 change the mode if we have an operation of that mode. */
6139ff20 7790
479b1013
AN
7791 op0 = force_to_mode (XEXP (x, 0), mode, mask, next_select);
7792 op1 = force_to_mode (XEXP (x, 1), mode, mask, next_select);
7793
7794 /* If we ended up truncating both operands, truncate the result of the
7795 operation instead. */
7796 if (GET_CODE (op0) == TRUNCATE
7797 && GET_CODE (op1) == TRUNCATE)
7798 {
7799 op0 = XEXP (op0, 0);
7800 op1 = XEXP (op1, 0);
7801 }
7802
7803 op0 = gen_lowpart_or_truncate (op_mode, op0);
7804 op1 = gen_lowpart_or_truncate (op_mode, op1);
6139ff20
RK
7805
7806 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
bcb34aa3 7807 x = simplify_gen_binary (code, op_mode, op0, op1);
d0ab8cd3 7808 break;
dfbe1b2f
RK
7809
7810 case ASHIFT:
dfbe1b2f 7811 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
7812 However, we cannot do anything with shifts where we cannot
7813 guarantee that the counts are smaller than the size of the mode
7814 because such a count will have a different meaning in a
6139ff20 7815 wider mode. */
f6785026 7816
481683e1 7817 if (! (CONST_INT_P (XEXP (x, 1))
6139ff20 7818 && INTVAL (XEXP (x, 1)) >= 0
f6785026
RK
7819 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
7820 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
7821 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 7822 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026 7823 break;
663522cb 7824
6139ff20
RK
7825 /* If the shift count is a constant and we can do arithmetic in
7826 the mode of the shift, refine which bits we need. Otherwise, use the
7827 conservative form of the mask. */
481683e1 7828 if (CONST_INT_P (XEXP (x, 1))
6139ff20
RK
7829 && INTVAL (XEXP (x, 1)) >= 0
7830 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
7831 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
7832 mask >>= INTVAL (XEXP (x, 1));
7833 else
7834 mask = fuller_mask;
7835
4df8acd3
AN
7836 op0 = gen_lowpart_or_truncate (op_mode,
7837 force_to_mode (XEXP (x, 0), op_mode,
7838 mask, next_select));
6139ff20
RK
7839
7840 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
bcb34aa3 7841 x = simplify_gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 7842 break;
dfbe1b2f
RK
7843
7844 case LSHIFTRT:
1347292b
JW
7845 /* Here we can only do something if the shift count is a constant,
7846 this shift constant is valid for the host, and we can do arithmetic
7847 in OP_MODE. */
dfbe1b2f 7848
481683e1 7849 if (CONST_INT_P (XEXP (x, 1))
1347292b 7850 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 7851 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7852 {
6139ff20 7853 rtx inner = XEXP (x, 0);
402b6c2a 7854 unsigned HOST_WIDE_INT inner_mask;
6139ff20
RK
7855
7856 /* Select the mask of the bits we need for the shift operand. */
402b6c2a 7857 inner_mask = mask << INTVAL (XEXP (x, 1));
d0ab8cd3 7858
6139ff20 7859 /* We can only change the mode of the shift if we can do arithmetic
402b6c2a 7860 in the mode of the shift and INNER_MASK is no wider than the
f3b2657c
JJ
7861 width of X's mode. */
7862 if ((inner_mask & ~GET_MODE_MASK (GET_MODE (x))) != 0)
d0ab8cd3
RK
7863 op_mode = GET_MODE (x);
7864
b1257407 7865 inner = force_to_mode (inner, op_mode, inner_mask, next_select);
6139ff20
RK
7866
7867 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
bcb34aa3 7868 x = simplify_gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 7869 }
6139ff20
RK
7870
7871 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
7872 shift and AND produces only copies of the sign bit (C2 is one less
7873 than a power of two), we can do this with just a shift. */
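/* An editorial example, assuming a 32-bit mode: if FOO has at least
   24 sign-bit copies (bits 8-31 all equal bit 31), then
   (and (lshiftrt FOO 28) 7) is either 0 or 7, exactly like
   (lshiftrt FOO 29), so the AND is dropped and the shift count
   becomes 32 - exact_log2 (7 + 1) == 29.  */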
7874
7875 if (GET_CODE (x) == LSHIFTRT
481683e1 7876 && CONST_INT_P (XEXP (x, 1))
cfff35c1
JW
7877 /* The shift puts one of the sign bit copies in the least significant
7878 bit. */
6139ff20
RK
7879 && ((INTVAL (XEXP (x, 1))
7880 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
7881 >= GET_MODE_BITSIZE (GET_MODE (x)))
7882 && exact_log2 (mask + 1) >= 0
cfff35c1
JW
7883 /* Number of bits left after the shift must be more than the mask
7884 needs. */
7885 && ((INTVAL (XEXP (x, 1)) + exact_log2 (mask + 1))
7886 <= GET_MODE_BITSIZE (GET_MODE (x)))
7887 /* Must be more sign bit copies than the mask needs. */
770ae6cc 7888 && ((int) num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6139ff20 7889 >= exact_log2 (mask + 1)))
bcb34aa3
PB
7890 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7891 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
7892 - exact_log2 (mask + 1)));
fae2db47
JW
7893
7894 goto shiftrt;
d0ab8cd3
RK
7895
7896 case ASHIFTRT:
6139ff20
RK
7897 /* If we are just looking for the sign bit, we don't need this shift at
7898 all, even if it has a variable count. */
9bf22b75 7899 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
e51712db 7900 && (mask == ((unsigned HOST_WIDE_INT) 1
9bf22b75 7901 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
b1257407 7902 return force_to_mode (XEXP (x, 0), mode, mask, next_select);
6139ff20
RK
7903
7904 /* If this is a shift by a constant, get a mask that contains those bits
7905 that are not copies of the sign bit. We then have two cases: If
7906 MASK only includes those bits, this can be a logical shift, which may
7907 allow simplifications. If MASK is a single-bit field not within
7908 those bits, we are requesting a copy of the sign bit and hence can
7909 shift the sign bit to the appropriate location. */
7910
481683e1 7911 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
7912 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7913 {
41e8659e 7914 int i;
6139ff20 7915
3e92902c 7916 /* If the considered data is wider than HOST_WIDE_INT, we can't
b69960ac
RK
7917 represent a mask for all its bits in a single scalar.
7918 But we only care about the lower bits, so calculate these. */
7919
6a11342f 7920 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
b69960ac 7921 {
663522cb 7922 nonzero = ~(HOST_WIDE_INT) 0;
b69960ac
RK
7923
7924 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7925 is the number of bits a full-width mask would have set.
7926 We need only shift if these are fewer than nonzero can
7927 hold. If not, we must keep all bits set in nonzero. */
7928
7929 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
7930 < HOST_BITS_PER_WIDE_INT)
7931 nonzero >>= INTVAL (XEXP (x, 1))
7932 + HOST_BITS_PER_WIDE_INT
7933 - GET_MODE_BITSIZE (GET_MODE (x));
7934 }
7935 else
7936 {
7937 nonzero = GET_MODE_MASK (GET_MODE (x));
7938 nonzero >>= INTVAL (XEXP (x, 1));
7939 }
6139ff20 7940
41e8659e
PB
7941 if ((mask & ~nonzero) == 0)
7942 {
13991abb 7943 x = simplify_shift_const (NULL_RTX, LSHIFTRT, GET_MODE (x),
41e8659e
PB
7944 XEXP (x, 0), INTVAL (XEXP (x, 1)));
7945 if (GET_CODE (x) != ASHIFTRT)
7946 return force_to_mode (x, mode, mask, next_select);
7947 }
7948
7949 else if ((i = exact_log2 (mask)) >= 0)
6139ff20
RK
7950 {
7951 x = simplify_shift_const
41e8659e
PB
7952 (NULL_RTX, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
7953 GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6139ff20
RK
7954
7955 if (GET_CODE (x) != ASHIFTRT)
b1257407 7956 return force_to_mode (x, mode, mask, next_select);
6139ff20
RK
7957 }
7958 }
7959
e0a2f705 7960 /* If MASK is 1, convert this to an LSHIFTRT. This can be done
6139ff20
RK
7961 even if the shift count isn't a constant. */
7962 if (mask == 1)
bcb34aa3
PB
7963 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
7964 XEXP (x, 0), XEXP (x, 1));
6139ff20 7965
fae2db47
JW
7966 shiftrt:
7967
7968 /* If this is a zero- or sign-extension operation that just affects bits
4c002f29
RK
7969 we don't care about, remove it. Be sure the call above returned
7970 something that is still a shift. */
d0ab8cd3 7971
4c002f29 7972 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
481683e1 7973 && CONST_INT_P (XEXP (x, 1))
d0ab8cd3 7974 && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
7975 && (INTVAL (XEXP (x, 1))
7976 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
d0ab8cd3 7977 && GET_CODE (XEXP (x, 0)) == ASHIFT
fa9ea255 7978 && XEXP (XEXP (x, 0), 1) == XEXP (x, 1))
e3d616e3 7979 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
b1257407 7980 next_select);
6139ff20 7981
dfbe1b2f
RK
7982 break;
7983
6139ff20
RK
7984 case ROTATE:
7985 case ROTATERT:
7986 /* If the shift count is constant and we can do computations
7987 in the mode of X, compute where the bits we care about are.
7988 Otherwise, we can't do anything. Don't change the mode of
7989 the shift or propagate MODE into the shift, though. */
481683e1 7990 if (CONST_INT_P (XEXP (x, 1))
6139ff20
RK
7991 && INTVAL (XEXP (x, 1)) >= 0)
7992 {
7993 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
7994 GET_MODE (x), GEN_INT (mask),
7995 XEXP (x, 1));
481683e1 7996 if (temp && CONST_INT_P (temp))
6139ff20
RK
7997 SUBST (XEXP (x, 0),
7998 force_to_mode (XEXP (x, 0), GET_MODE (x),
b1257407 7999 INTVAL (temp), next_select));
6139ff20
RK
8000 }
8001 break;
663522cb 8002
dfbe1b2f 8003 case NEG:
180b8e4b 8004 /* If we just want the low-order bit, the NEG isn't needed since it
3ef42a0c 8005 won't change the low-order bit. */
180b8e4b 8006 if (mask == 1)
b1257407 8007 return force_to_mode (XEXP (x, 0), mode, mask, just_select);
180b8e4b 8008
6139ff20
RK
8009 /* We need any bits less significant than the most significant bit in
8010 MASK since carries from those bits will affect the bits we are
8011 interested in. */
8012 mask = fuller_mask;
8013 goto unop;
8014
dfbe1b2f 8015 case NOT:
6139ff20
RK
8016 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
8017 same as the XOR case above. Ensure that the constant we form is not
8018 wider than the mode of X. */
8019
8020 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
481683e1 8021 && CONST_INT_P (XEXP (XEXP (x, 0), 1))
6139ff20
RK
8022 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
8023 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
8024 < GET_MODE_BITSIZE (GET_MODE (x)))
8025 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8026 {
6a04f4e0
AM
8027 temp = gen_int_mode (mask << INTVAL (XEXP (XEXP (x, 0), 1)),
8028 GET_MODE (x));
bcb34aa3
PB
8029 temp = simplify_gen_binary (XOR, GET_MODE (x),
8030 XEXP (XEXP (x, 0), 0), temp);
8031 x = simplify_gen_binary (LSHIFTRT, GET_MODE (x),
8032 temp, XEXP (XEXP (x, 0), 1));
6139ff20 8033
b1257407 8034 return force_to_mode (x, mode, mask, next_select);
6139ff20
RK
8035 }
8036
f82da7d2
JW
8037 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
8038 use the full mask inside the NOT. */
8039 mask = fuller_mask;
8040
6139ff20 8041 unop:
4df8acd3
AN
8042 op0 = gen_lowpart_or_truncate (op_mode,
8043 force_to_mode (XEXP (x, 0), mode, mask,
8044 next_select));
6139ff20 8045 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
f1c6ba8b 8046 x = simplify_gen_unary (code, op_mode, op0, op_mode);
6139ff20
RK
8047 break;
8048
8049 case NE:
8050 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
3aceff0d 8051 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
1a6ec070 8052 i.e. nonzero_bits (FOO) == STORE_FLAG_VALUE. */
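/* An editorial example: with STORE_FLAG_VALUE == 1 and
   nonzero_bits (FOO) == 1, FOO is already 0 or 1, so (ne FOO 0)
   computes FOO itself and (and (ne FOO 0) 1) collapses to FOO.  */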
663522cb 8053 if ((mask & ~STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
7d103eb5 8054 && GET_MODE (XEXP (x, 0)) == mode
3aceff0d 8055 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
43196589
AS
8056 && (nonzero_bits (XEXP (x, 0), mode)
8057 == (unsigned HOST_WIDE_INT) STORE_FLAG_VALUE))
b1257407 8058 return force_to_mode (XEXP (x, 0), mode, mask, next_select);
6139ff20 8059
d0ab8cd3
RK
8060 break;
8061
8062 case IF_THEN_ELSE:
8063 /* We have no way of knowing if the IF_THEN_ELSE can itself be
8064 written in a narrower mode. We play it safe and do not do so. */
8065
8066 SUBST (XEXP (x, 1),
4df8acd3
AN
8067 gen_lowpart_or_truncate (GET_MODE (x),
8068 force_to_mode (XEXP (x, 1), mode,
8069 mask, next_select)));
d0ab8cd3 8070 SUBST (XEXP (x, 2),
4df8acd3
AN
8071 gen_lowpart_or_truncate (GET_MODE (x),
8072 force_to_mode (XEXP (x, 2), mode,
8073 mask, next_select)));
d0ab8cd3 8074 break;
663522cb 8075
e9a25f70
JL
8076 default:
8077 break;
dfbe1b2f
RK
8078 }
8079
d0ab8cd3 8080 /* Ensure we return a value of the proper mode. */
4df8acd3 8081 return gen_lowpart_or_truncate (mode, x);
dfbe1b2f
RK
8082}
8083\f
abe6e52f
RK
8084/* Return nonzero if X is an expression that has one of two values depending on
8085 whether some other value is zero or nonzero. In that case, we return the
8086 value that is being tested, *PTRUE is set to the value of X when the
8087 rtx we return is nonzero, and *PFALSE is set to the other alternative.
8088
8089 If we return zero, we set *PTRUE and *PFALSE to X. */
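/* An illustrative call (editorial), assuming STORE_FLAG_VALUE == 1:
   applied to (mult (ne A 0) (const_int 5)), this function returns A
   and sets *PTRUE to 5 and *PFALSE to 0, since the expression is 5
   when A is nonzero and 0 when A is zero.  */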
8090
8091static rtx
79a490a9 8092if_then_else_cond (rtx x, rtx *ptrue, rtx *pfalse)
abe6e52f
RK
8093{
8094 enum machine_mode mode = GET_MODE (x);
8095 enum rtx_code code = GET_CODE (x);
abe6e52f
RK
8096 rtx cond0, cond1, true0, true1, false0, false1;
8097 unsigned HOST_WIDE_INT nz;
8098
14a774a9
RK
8099 /* If we are comparing a value against zero, we are done. */
8100 if ((code == NE || code == EQ)
87d9741e 8101 && XEXP (x, 1) == const0_rtx)
14a774a9 8102 {
e8758a3a
JL
8103 *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
8104 *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
14a774a9
RK
8105 return XEXP (x, 0);
8106 }
8107
abe6e52f
RK
8108 /* If this is a unary operation whose operand has one of two values, apply
8109 our opcode to compute those values. */
ec8e098d 8110 else if (UNARY_P (x)
14a774a9 8111 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
abe6e52f 8112 {
f1c6ba8b
RK
8113 *ptrue = simplify_gen_unary (code, mode, true0, GET_MODE (XEXP (x, 0)));
8114 *pfalse = simplify_gen_unary (code, mode, false0,
8115 GET_MODE (XEXP (x, 0)));
abe6e52f
RK
8116 return cond0;
8117 }
8118
3a19aabc 8119 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
ddd5a7c1 8120 make can't possibly match and would suppress other optimizations. */
3a19aabc
RK
8121 else if (code == COMPARE)
8122 ;
8123
abe6e52f
RK
8124 /* If this is a binary operation, see if either side has only one of two
8125 values. If either one does or if both do and they are conditional on
8126 the same value, compute the new true and false values. */
ec8e098d 8127 else if (BINARY_P (x))
abe6e52f
RK
8128 {
8129 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
8130 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
8131
8132 if ((cond0 != 0 || cond1 != 0)
8133 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
8134 {
987e845a
JW
8135 /* If if_then_else_cond returned zero, then true/false are the
8136 same rtl. We must copy one of them to prevent invalid rtl
8137 sharing. */
8138 if (cond0 == 0)
8139 true0 = copy_rtx (true0);
8140 else if (cond1 == 0)
8141 true1 = copy_rtx (true1);
8142
bcb34aa3
PB
8143 if (COMPARISON_P (x))
8144 {
8145 *ptrue = simplify_gen_relational (code, mode, VOIDmode,
8146 true0, true1);
8147 *pfalse = simplify_gen_relational (code, mode, VOIDmode,
c22cacf3 8148 false0, false1);
bcb34aa3
PB
8149 }
8150 else
8151 {
8152 *ptrue = simplify_gen_binary (code, mode, true0, true1);
8153 *pfalse = simplify_gen_binary (code, mode, false0, false1);
8154 }
8155
abe6e52f
RK
8156 return cond0 ? cond0 : cond1;
8157 }
9210df58 8158
9210df58 8159 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
da7d8304 8160 operands is zero when the other is nonzero, and vice-versa,
0802d516 8161 and STORE_FLAG_VALUE is 1 or -1. */
9210df58 8162
0802d516
RK
8163 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8164 && (code == PLUS || code == IOR || code == XOR || code == MINUS
663522cb 8165 || code == UMAX)
9210df58
RK
8166 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
8167 {
8168 rtx op0 = XEXP (XEXP (x, 0), 1);
8169 rtx op1 = XEXP (XEXP (x, 1), 1);
8170
8171 cond0 = XEXP (XEXP (x, 0), 0);
8172 cond1 = XEXP (XEXP (x, 1), 0);
8173
ec8e098d
PB
8174 if (COMPARISON_P (cond0)
8175 && COMPARISON_P (cond1)
14f02e73 8176 && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
9210df58
RK
8177 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
8178 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
8179 || ((swap_condition (GET_CODE (cond0))
14f02e73 8180 == reversed_comparison_code (cond1, NULL))
9210df58
RK
8181 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
8182 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
8183 && ! side_effects_p (x))
8184 {
bcb34aa3
PB
8185 *ptrue = simplify_gen_binary (MULT, mode, op0, const_true_rtx);
8186 *pfalse = simplify_gen_binary (MULT, mode,
8187 (code == MINUS
8188 ? simplify_gen_unary (NEG, mode,
8189 op1, mode)
8190 : op1),
8191 const_true_rtx);
9210df58
RK
8192 return cond0;
8193 }
8194 }
8195
eaec9b3d 8196 /* Similarly for MULT, AND and UMIN, except that for these the result
9210df58 8197 is always zero. */
0802d516
RK
8198 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8199 && (code == MULT || code == AND || code == UMIN)
9210df58
RK
8200 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
8201 {
8202 cond0 = XEXP (XEXP (x, 0), 0);
8203 cond1 = XEXP (XEXP (x, 1), 0);
8204
ec8e098d
PB
8205 if (COMPARISON_P (cond0)
8206 && COMPARISON_P (cond1)
14f02e73 8207 && ((GET_CODE (cond0) == reversed_comparison_code (cond1, NULL)
9210df58
RK
8208 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
8209 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
8210 || ((swap_condition (GET_CODE (cond0))
14f02e73 8211 == reversed_comparison_code (cond1, NULL))
9210df58
RK
8212 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
8213 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
8214 && ! side_effects_p (x))
8215 {
8216 *ptrue = *pfalse = const0_rtx;
8217 return cond0;
8218 }
8219 }
abe6e52f
RK
8220 }
8221
8222 else if (code == IF_THEN_ELSE)
8223 {
8224 /* If we have IF_THEN_ELSE already, extract the condition and
8225 canonicalize it if it is NE or EQ. */
8226 cond0 = XEXP (x, 0);
8227 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
8228 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
8229 return XEXP (cond0, 0);
8230 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
8231 {
8232 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
8233 return XEXP (cond0, 0);
8234 }
8235 else
8236 return cond0;
8237 }
8238
0631e0bf
JH
8239 /* If X is a SUBREG, we can narrow both the true and false values
8240 of the inner expression, if there is a condition. */
8241 else if (code == SUBREG
abe6e52f
RK
8242 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
8243 &true0, &false0)))
8244 {
bbe708a3
UW
8245 true0 = simplify_gen_subreg (mode, true0,
8246 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
8247 false0 = simplify_gen_subreg (mode, false0,
0631e0bf 8248 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
bbe708a3
UW
8249 if (true0 && false0)
8250 {
8251 *ptrue = true0;
8252 *pfalse = false0;
8253 return cond0;
8254 }
abe6e52f
RK
8255 }
8256
8257 /* If X is a constant, this isn't special and will cause confusion
8258 if we treat it as such. Likewise if it is equivalent to a constant. */
8259 else if (CONSTANT_P (x)
8260 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
8261 ;
8262
1f3f36d1
RH
8263 /* If we're in BImode, canonicalize on 0 and STORE_FLAG_VALUE, as that
8264 will be least confusing to the rest of the compiler. */
8265 else if (mode == BImode)
8266 {
8267 *ptrue = GEN_INT (STORE_FLAG_VALUE), *pfalse = const0_rtx;
8268 return x;
8269 }
8270
663522cb 8271 /* If X is known to be either 0 or -1, those are the true and
abe6e52f 8272 false values when testing X. */
49219895
JH
8273 else if (x == constm1_rtx || x == const0_rtx
8274 || (mode != VOIDmode
8275 && num_sign_bit_copies (x, mode) == GET_MODE_BITSIZE (mode)))
abe6e52f
RK
8276 {
8277 *ptrue = constm1_rtx, *pfalse = const0_rtx;
8278 return x;
8279 }
8280
8281 /* Likewise for 0 or a single bit. */
9eb54558 8282 else if (SCALAR_INT_MODE_P (mode)
49219895
JH
8283 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8284 && exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
abe6e52f 8285 {
578fc63d 8286 *ptrue = gen_int_mode (nz, mode), *pfalse = const0_rtx;
abe6e52f
RK
8287 return x;
8288 }
8289
8290 /* Otherwise fail; show no condition with true and false values the same. */
8291 *ptrue = *pfalse = x;
8292 return 0;
8293}
8294\f
1a26b032
RK
8295/* Return the value of expression X given the fact that condition COND
8296 is known to be true when applied to REG as its first operand and VAL
8297 as its second. X is known to not be shared and so can be modified in
8298 place.
8299
8300 We only handle the simplest cases, and specifically those cases that
8301 arise with IF_THEN_ELSE expressions. */
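/* Two editorial examples: given that (ge R 0) is known true,
   known_cond rewrites (abs R) to plain R; given that (lt R 0) is
   known true, it rewrites (abs R) to (neg R).  */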
8302
8303static rtx
79a490a9 8304known_cond (rtx x, enum rtx_code cond, rtx reg, rtx val)
1a26b032
RK
8305{
8306 enum rtx_code code = GET_CODE (x);
f24ad0e4 8307 rtx temp;
6f7d635c 8308 const char *fmt;
1a26b032
RK
8309 int i, j;
8310
8311 if (side_effects_p (x))
8312 return x;
8313
805f1694
JL
8314 /* If either operand of the condition is a floating point value,
8315 then we have to avoid collapsing an EQ comparison. */
8316 if (cond == EQ
8317 && rtx_equal_p (x, reg)
8318 && ! FLOAT_MODE_P (GET_MODE (x))
8319 && ! FLOAT_MODE_P (GET_MODE (val)))
69bc0a1f 8320 return val;
805f1694 8321
69bc0a1f 8322 if (cond == UNEQ && rtx_equal_p (x, reg))
1a26b032
RK
8323 return val;
8324
8325 /* If X is (abs REG) and we know something about REG's relationship
8326 with zero, we may be able to simplify this. */
8327
8328 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
8329 switch (cond)
8330 {
8331 case GE: case GT: case EQ:
8332 return XEXP (x, 0);
8333 case LT: case LE:
f1c6ba8b
RK
8334 return simplify_gen_unary (NEG, GET_MODE (XEXP (x, 0)),
8335 XEXP (x, 0),
8336 GET_MODE (XEXP (x, 0)));
e9a25f70
JL
8337 default:
8338 break;
1a26b032
RK
8339 }
8340
8341 /* The only other cases we handle are MIN, MAX, and comparisons if the
8342 operands are the same as REG and VAL. */
8343
ec8e098d 8344 else if (COMPARISON_P (x) || COMMUTATIVE_ARITH_P (x))
1a26b032
RK
8345 {
8346 if (rtx_equal_p (XEXP (x, 0), val))
8347 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
8348
8349 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
8350 {
ec8e098d 8351 if (COMPARISON_P (x))
1eb8759b
RH
8352 {
8353 if (comparison_dominates_p (cond, code))
8354 return const_true_rtx;
1a26b032 8355
14f02e73 8356 code = reversed_comparison_code (x, NULL);
1eb8759b
RH
8357 if (code != UNKNOWN
8358 && comparison_dominates_p (cond, code))
8359 return const0_rtx;
8360 else
8361 return x;
8362 }
1a26b032
RK
8363 else if (code == SMAX || code == SMIN
8364 || code == UMIN || code == UMAX)
8365 {
8366 int unsignedp = (code == UMIN || code == UMAX);
8367
ac4cdf40
JE
8368 /* Do not reverse the condition when it is NE or EQ.
8369 This is because we cannot conclude anything about
8370 the value of 'SMAX (x, y)' when x is not equal to y,
23190837 8371 but we can when x equals y. */
ac4cdf40
JE
8372 if ((code == SMAX || code == UMAX)
8373 && ! (cond == EQ || cond == NE))
1a26b032
RK
8374 cond = reverse_condition (cond);
8375
8376 switch (cond)
8377 {
8378 case GE: case GT:
8379 return unsignedp ? x : XEXP (x, 1);
8380 case LE: case LT:
8381 return unsignedp ? x : XEXP (x, 0);
8382 case GEU: case GTU:
8383 return unsignedp ? XEXP (x, 1) : x;
8384 case LEU: case LTU:
8385 return unsignedp ? XEXP (x, 0) : x;
e9a25f70
JL
8386 default:
8387 break;
1a26b032
RK
8388 }
8389 }
8390 }
8391 }
9a360704
AO
8392 else if (code == SUBREG)
8393 {
8394 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
32e9fa48 8395 rtx new_rtx, r = known_cond (SUBREG_REG (x), cond, reg, val);
9a360704
AO
8396
8397 if (SUBREG_REG (x) != r)
8398 {
8399 /* We must simplify subreg here, before we lose track of the
8400 original inner_mode. */
32e9fa48 8401 new_rtx = simplify_subreg (GET_MODE (x), r,
9a360704 8402 inner_mode, SUBREG_BYTE (x));
32e9fa48
KG
8403 if (new_rtx)
8404 return new_rtx;
9a360704
AO
8405 else
8406 SUBST (SUBREG_REG (x), r);
8407 }
8408
8409 return x;
8410 }
4161da12
AO
8411 /* We don't have to handle SIGN_EXTEND here, because even in the
8412 case of replacing something with a modeless CONST_INT, a
8413 CONST_INT is already (supposed to be) a valid sign extension for
8414 its narrower mode, which implies it's already properly
8415 sign-extended for the wider mode. Now, for ZERO_EXTEND, the
8416 story is different. */
8417 else if (code == ZERO_EXTEND)
8418 {
8419 enum machine_mode inner_mode = GET_MODE (XEXP (x, 0));
32e9fa48 8420 rtx new_rtx, r = known_cond (XEXP (x, 0), cond, reg, val);
4161da12
AO
8421
8422 if (XEXP (x, 0) != r)
8423 {
8424 /* We must simplify the zero_extend here, before we lose
c22cacf3 8425 track of the original inner_mode. */
32e9fa48 8426 new_rtx = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
4161da12 8427 r, inner_mode);
32e9fa48
KG
8428 if (new_rtx)
8429 return new_rtx;
4161da12
AO
8430 else
8431 SUBST (XEXP (x, 0), r);
8432 }
8433
8434 return x;
8435 }
1a26b032
RK
8436
8437 fmt = GET_RTX_FORMAT (code);
8438 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8439 {
8440 if (fmt[i] == 'e')
8441 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
8442 else if (fmt[i] == 'E')
8443 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8444 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
8445 cond, reg, val));
8446 }
8447
8448 return x;
8449}
8450\f
e11fa86f
RK
8451/* See if X and Y are equal for the purposes of seeing if we can rewrite an
8452 assignment as a field assignment. */
8453
8454static int
79a490a9 8455rtx_equal_for_field_assignment_p (rtx x, rtx y)
e11fa86f 8456{
e11fa86f
RK
8457 if (x == y || rtx_equal_p (x, y))
8458 return 1;
8459
8460 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
8461 return 0;
8462
8463 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
8464 Note that all SUBREGs of MEM are paradoxical; otherwise they
8465 would have been rewritten. */
3c0cb5de
JQ
8466 if (MEM_P (x) && GET_CODE (y) == SUBREG
8467 && MEM_P (SUBREG_REG (y))
e11fa86f 8468 && rtx_equal_p (SUBREG_REG (y),
4de249d9 8469 gen_lowpart (GET_MODE (SUBREG_REG (y)), x)))
e11fa86f
RK
8470 return 1;
8471
3c0cb5de
JQ
8472 if (MEM_P (y) && GET_CODE (x) == SUBREG
8473 && MEM_P (SUBREG_REG (x))
e11fa86f 8474 && rtx_equal_p (SUBREG_REG (x),
4de249d9 8475 gen_lowpart (GET_MODE (SUBREG_REG (x)), y)))
e11fa86f
RK
8476 return 1;
8477
9ec36da5
JL
8478 /* We used to see if get_last_value of X and Y were the same but that's
8479 not correct. In one direction, we'll cause the assignment to have
8480 the wrong destination; in the other, we'll import a register into this
8481 insn that might already have been dead. So fail if none of the
8482 above cases are true. */
8483 return 0;
e11fa86f
RK
8484}
8485\f
230d793d
RS
8486/* See if X, a SET operation, can be rewritten as a bit-field assignment.
8487 Return that assignment if so.
8488
8489 We only handle the most common cases. */
8490
8491static rtx
79a490a9 8492make_field_assignment (rtx x)
230d793d
RS
8493{
8494 rtx dest = SET_DEST (x);
8495 rtx src = SET_SRC (x);
dfbe1b2f 8496 rtx assign;
e11fa86f 8497 rtx rhs, lhs;
5f4f0e22 8498 HOST_WIDE_INT c1;
770ae6cc
RK
8499 HOST_WIDE_INT pos;
8500 unsigned HOST_WIDE_INT len;
dfbe1b2f
RK
8501 rtx other;
8502 enum machine_mode mode;
230d793d
RS
8503
8504 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
8505 a clear of a one-bit field. We will have changed it to
8506 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
8507 for a SUBREG. */
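/* An editorial example: clearing bit 3 of DEST arrives here as
   (set DEST (and (rotate (const_int -2) 3) DEST)), -2 being the
   all-ones pattern with only bit 0 clear, and is rewritten below as
   (set (zero_extract DEST 1 3) (const_int 0)).  */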
8508
8509 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
481683e1 8510 && CONST_INT_P (XEXP (XEXP (src, 0), 0))
230d793d 8511 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
e11fa86f 8512 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 8513 {
8999a12e 8514 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 8515 1, 1, 1, 0);
76184def 8516 if (assign != 0)
38a448ca 8517 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 8518 return x;
230d793d
RS
8519 }
8520
55e79aef
RH
8521 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
8522 && subreg_lowpart_p (XEXP (src, 0))
8523 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
8524 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
8525 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
481683e1 8526 && CONST_INT_P (XEXP (SUBREG_REG (XEXP (src, 0)), 0))
55e79aef
RH
8527 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
8528 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 8529 {
8999a12e 8530 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
8531 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
8532 1, 1, 1, 0);
76184def 8533 if (assign != 0)
38a448ca 8534 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 8535 return x;
230d793d
RS
8536 }
8537
9dd11dcb 8538 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
230d793d 8539 one-bit field. */
55e79aef
RH
8540 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
8541 && XEXP (XEXP (src, 0), 0) == const1_rtx
8542 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 8543 {
8999a12e 8544 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 8545 1, 1, 1, 0);
76184def 8546 if (assign != 0)
38a448ca 8547 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
76184def 8548 return x;
230d793d
RS
8549 }
8550
55e79aef
RH
8551 /* If DEST is already a field assignment, i.e. ZERO_EXTRACT, and the
8552 SRC is an AND with all bits of that field set, then we can discard
8553 the AND. */
8554 if (GET_CODE (dest) == ZERO_EXTRACT
481683e1 8555 && CONST_INT_P (XEXP (dest, 1))
55e79aef 8556 && GET_CODE (src) == AND
481683e1 8557 && CONST_INT_P (XEXP (src, 1)))
55e79aef
RH
8558 {
8559 HOST_WIDE_INT width = INTVAL (XEXP (dest, 1));
8560 unsigned HOST_WIDE_INT and_mask = INTVAL (XEXP (src, 1));
8561 unsigned HOST_WIDE_INT ze_mask;
8562
8563 if (width >= HOST_BITS_PER_WIDE_INT)
8564 ze_mask = -1;
8565 else
8566 ze_mask = ((unsigned HOST_WIDE_INT)1 << width) - 1;
8567
8568 /* Complete overlap. We can remove the source AND. */
8569 if ((and_mask & ze_mask) == ze_mask)
8570 return gen_rtx_SET (VOIDmode, dest, XEXP (src, 0));
8571
8572 /* Partial overlap. We can reduce the source AND. */
8573 if ((and_mask & ze_mask) != and_mask)
8574 {
8575 mode = GET_MODE (src);
8576 src = gen_rtx_AND (mode, XEXP (src, 0),
eb2ab511 8577 gen_int_mode (and_mask & ze_mask, mode));
55e79aef
RH
8578 return gen_rtx_SET (VOIDmode, dest, src);
8579 }
8580 }
8581
dfbe1b2f 8582 /* The other case we handle is assignments into a constant-position
9dd11dcb 8583 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
dfbe1b2f
RK
8584 a mask that has all one bits except for a group of zero bits and
8585 OTHER is known to have zeros where C1 has ones, this is such an
8586 assignment. Compute the position and length from C1. Shift OTHER
8587 to the appropriate position, force it to the required mode, and
8588 make the extraction. Check for the AND in both operands. */
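/* An editorial example, assuming a 32-bit mode and a VAL whose
   nonzero bits fit in four bits: storing VAL at bit 4 of DEST appears
   as (ior (and DEST (const_int -241)) (ashift VAL (const_int 4))),
   where -241 is 0xffffff0f.  ~C1 selects bits 4-7, so POS == 4 and
   LEN == 4, and the result is a SET of (zero_extract DEST 4 4).  */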
8589
9dd11dcb 8590 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
e11fa86f
RK
8591 return x;
8592
8593 rhs = expand_compound_operation (XEXP (src, 0));
8594 lhs = expand_compound_operation (XEXP (src, 1));
8595
8596 if (GET_CODE (rhs) == AND
481683e1 8597 && CONST_INT_P (XEXP (rhs, 1))
e11fa86f
RK
8598 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
8599 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
8600 else if (GET_CODE (lhs) == AND
481683e1 8601 && CONST_INT_P (XEXP (lhs, 1))
e11fa86f
RK
8602 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
8603 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
dfbe1b2f
RK
8604 else
8605 return x;
230d793d 8606
663522cb 8607 pos = get_pos_from_mask ((~c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 8608 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
e5e809f4
JL
8609 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
8610 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
dfbe1b2f 8611 return x;
230d793d 8612
5f4f0e22 8613 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
76184def
DE
8614 if (assign == 0)
8615 return x;
230d793d 8616
dfbe1b2f
RK
8617 /* The mode to use for the source is the mode of the assignment, or of
8618 what is inside a possible STRICT_LOW_PART. */
663522cb 8619 mode = (GET_CODE (assign) == STRICT_LOW_PART
dfbe1b2f 8620 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 8621
dfbe1b2f
RK
8622 /* Shift OTHER right POS places and make it the source, restricting it
8623 to the proper length and mode. */
230d793d 8624
b1257407
PB
8625 src = canon_reg_for_combine (simplify_shift_const (NULL_RTX, LSHIFTRT,
8626 GET_MODE (src),
8627 other, pos),
8628 dest);
8629 src = force_to_mode (src, mode,
6139ff20 8630 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
0345195a 8631 ? ~(unsigned HOST_WIDE_INT) 0
729a2125 8632 : ((unsigned HOST_WIDE_INT) 1 << len) - 1,
b1257407 8633 0);
230d793d 8634
6e814b8d
KH
8635 /* If SRC is masked by an AND that does not make a difference in
8636 the value being stored, strip it. */
8637 if (GET_CODE (assign) == ZERO_EXTRACT
481683e1 8638 && CONST_INT_P (XEXP (assign, 1))
6e814b8d
KH
8639 && INTVAL (XEXP (assign, 1)) < HOST_BITS_PER_WIDE_INT
8640 && GET_CODE (src) == AND
481683e1 8641 && CONST_INT_P (XEXP (src, 1))
c5c15353 8642 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (src, 1))
6e814b8d
KH
8643 == ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (assign, 1))) - 1))
8644 src = XEXP (src, 0);
8645
f1c6ba8b 8646 return gen_rtx_SET (VOIDmode, assign, src);
230d793d
RS
8647}
8648\f
8649/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
8650 if so. */
8651
8652static rtx
79a490a9 8653apply_distributive_law (rtx x)
230d793d
RS
8654{
8655 enum rtx_code code = GET_CODE (x);
2981fafe 8656 enum rtx_code inner_code;
230d793d
RS
8657 rtx lhs, rhs, other;
8658 rtx tem;
230d793d 8659
2981fafe
RS
8660 /* Distributivity is not true for floating point as it can change the
8661 value. So we don't do it unless -funsafe-math-optimizations. */
8662 if (FLOAT_MODE_P (GET_MODE (x))
8663 && ! flag_unsafe_math_optimizations)
d8a8a4da
RS
8664 return x;
8665
230d793d
RS
8666 /* The outer operation can only be one of the following: */
8667 if (code != IOR && code != AND && code != XOR
8668 && code != PLUS && code != MINUS)
8669 return x;
8670
2981fafe
RS
8671 lhs = XEXP (x, 0);
8672 rhs = XEXP (x, 1);
230d793d 8673
0f41302f
MS
8674 /* If either operand is a primitive we can't do anything, so get out
8675 fast. */
ec8e098d 8676 if (OBJECT_P (lhs) || OBJECT_P (rhs))
230d793d
RS
8677 return x;
8678
8679 lhs = expand_compound_operation (lhs);
8680 rhs = expand_compound_operation (rhs);
8681 inner_code = GET_CODE (lhs);
8682 if (inner_code != GET_CODE (rhs))
8683 return x;
8684
8685 /* See if the inner and outer operations distribute. */
8686 switch (inner_code)
8687 {
8688 case LSHIFTRT:
8689 case ASHIFTRT:
8690 case AND:
8691 case IOR:
8692 /* These all distribute except over PLUS. */
8693 if (code == PLUS || code == MINUS)
8694 return x;
8695 break;
8696
8697 case MULT:
8698 if (code != PLUS && code != MINUS)
8699 return x;
8700 break;
8701
8702 case ASHIFT:
45620ed4 8703 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
8704 break;
8705
8706 case SUBREG:
1f2a3c8f
ILT
8707 /* Non-paradoxical SUBREGs distribute over all operations,
8708 provided the inner modes and byte offsets are the same, this
8709 is an extraction of a low-order part, we don't convert an fp
8710 operation to int or vice versa, this is not a vector mode,
8711 and we would not be converting a single-word operation into a
8712 multi-word operation. The latter test is not required, but
8713 it prevents generating unneeded multi-word operations. Some
8714 of the previous tests are redundant given the latter test,
8715 but are retained because they are required for correctness.
dfbe1b2f
RK
8716
8717 We produce the result slightly differently in this case. */
8718
8719 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
ddef6bc7 8720 || SUBREG_BYTE (lhs) != SUBREG_BYTE (rhs)
dfbe1b2f 8721 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
8722 || (GET_MODE_CLASS (GET_MODE (lhs))
8723 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 8724 || (GET_MODE_SIZE (GET_MODE (lhs))
8af24e26 8725 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
1f2a3c8f 8726 || VECTOR_MODE_P (GET_MODE (lhs))
f13f406e
AN
8727 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD
8728 /* Result might need to be truncated. Don't change mode if
8729 explicit truncation is needed. */
8730 || !TRULY_NOOP_TRUNCATION
8731 (GET_MODE_BITSIZE (GET_MODE (x)),
8732 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (lhs)))))
230d793d
RS
8733 return x;
8734
bcb34aa3
PB
8735 tem = simplify_gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
8736 SUBREG_REG (lhs), SUBREG_REG (rhs));
4de249d9 8737 return gen_lowpart (GET_MODE (x), tem);
230d793d
RS
8738
8739 default:
8740 return x;
8741 }
8742
8743 /* Set LHS and RHS to the inner operands (A and B in the example
8744 above) and set OTHER to the common operand (C in the example).
ec8e098d 8745 There is only one way to do this unless the inner operation is
230d793d 8746 commutative. */
ec8e098d 8747 if (COMMUTATIVE_ARITH_P (lhs)
230d793d
RS
8748 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
8749 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
ec8e098d 8750 else if (COMMUTATIVE_ARITH_P (lhs)
230d793d
RS
8751 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
8752 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
ec8e098d 8753 else if (COMMUTATIVE_ARITH_P (lhs)
230d793d
RS
8754 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
8755 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
8756 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
8757 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
8758 else
8759 return x;
8760
8761 /* Form the new inner operation, seeing if it simplifies first. */
bcb34aa3 8762 tem = simplify_gen_binary (code, GET_MODE (x), lhs, rhs);
230d793d
RS
8763
8764 /* There is one exception to the general way of distributing:
a0209ac2 8765 (a | c) ^ (b | c) -> (a ^ b) & ~c */
230d793d
RS
8766 if (code == XOR && inner_code == IOR)
8767 {
8768 inner_code = AND;
f1c6ba8b 8769 other = simplify_gen_unary (NOT, GET_MODE (x), other, GET_MODE (x));
230d793d
RS
8770 }
8771
8772 /* We may be able to continue distributing the result, so call
8773 ourselves recursively on the inner operation before forming the
8774 outer operation, which we return. */
bcb34aa3
PB
8775 return simplify_gen_binary (inner_code, GET_MODE (x),
8776 apply_distributive_law (tem), other);
8777}
8778
8779/* See if X is of the form (* (+ A B) C), and if so convert to
8780 (+ (* A C) (* B C)) and try to simplify.
8781
8782 Most of the time, this results in no change. However, if some of
8783 the operands are the same or inverses of each other, simplifications
8784 will result.
8785
8786 For example, (and (ior A B) (not B)) can occur as the result of
8787 expanding a bit field assignment. When we apply the distributive
8788 law to this, we get (ior (and A (not B)) (and B (not B))),
8789 which then simplifies to (and A (not B)).
c22cacf3 8790
bcb34aa3
PB
8791 Note that no checks happen on the validity of applying the inverse
8792 distributive law. This is pointless since we can do it in the
8793 few places where this routine is called.
8794
8795 N is the index of the term that is decomposed (the arithmetic operation,
8796 i.e. (+ A B) in the first example above). !N is the index of the term that
8797 is distributed, i.e. of C in the first example above. */
8798static rtx
8799distribute_and_simplify_rtx (rtx x, int n)
8800{
8801 enum machine_mode mode;
8802 enum rtx_code outer_code, inner_code;
8803 rtx decomposed, distributed, inner_op0, inner_op1, new_op0, new_op1, tmp;
8804
56fe3eff
DK
8805 /* Distributivity is not true for floating point as it can change the
8806 value. So we don't do it unless -funsafe-math-optimizations. */
8807 if (FLOAT_MODE_P (GET_MODE (x))
8808 && ! flag_unsafe_math_optimizations)
8809 return NULL_RTX;
8810
bcb34aa3
PB
8811 decomposed = XEXP (x, n);
8812 if (!ARITHMETIC_P (decomposed))
8813 return NULL_RTX;
8814
8815 mode = GET_MODE (x);
8816 outer_code = GET_CODE (x);
8817 distributed = XEXP (x, !n);
8818
8819 inner_code = GET_CODE (decomposed);
8820 inner_op0 = XEXP (decomposed, 0);
8821 inner_op1 = XEXP (decomposed, 1);
8822
8823 /* Special case (and (xor B C) (not A)), which is equivalent to
8824 (xor (ior A B) (ior A C)) */
8825 if (outer_code == AND && inner_code == XOR && GET_CODE (distributed) == NOT)
8826 {
8827 distributed = XEXP (distributed, 0);
8828 outer_code = IOR;
8829 }
8830
8831 if (n == 0)
8832 {
8833 /* Distribute the second term. */
8834 new_op0 = simplify_gen_binary (outer_code, mode, inner_op0, distributed);
8835 new_op1 = simplify_gen_binary (outer_code, mode, inner_op1, distributed);
8836 }
8837 else
8838 {
8839 /* Distribute the first term. */
8840 new_op0 = simplify_gen_binary (outer_code, mode, distributed, inner_op0);
8841 new_op1 = simplify_gen_binary (outer_code, mode, distributed, inner_op1);
8842 }
8843
8844 tmp = apply_distributive_law (simplify_gen_binary (inner_code, mode,
8845 new_op0, new_op1));
8846 if (GET_CODE (tmp) != outer_code
f40751dd
JH
8847 && rtx_cost (tmp, SET, optimize_this_for_speed_p)
8848 < rtx_cost (x, SET, optimize_this_for_speed_p))
bcb34aa3
PB
8849 return tmp;
8850
8851 return NULL_RTX;
230d793d
RS
8852}
8853\f
41e8659e
PB
8854/* Simplify a logical `and' of VAROP with the constant CONSTOP, to be done
8855 in MODE. Return an equivalent form, if different from (and VAROP
8856 (const_int CONSTOP)). Otherwise, return NULL_RTX. */
230d793d
RS
8857
8858static rtx
41e8659e
PB
8859simplify_and_const_int_1 (enum machine_mode mode, rtx varop,
8860 unsigned HOST_WIDE_INT constop)
230d793d 8861{
951553af 8862 unsigned HOST_WIDE_INT nonzero;
41e8659e
PB
8863 unsigned HOST_WIDE_INT orig_constop;
8864 rtx orig_varop;
42301240 8865 int i;
230d793d 8866
41e8659e
PB
8867 orig_varop = varop;
8868 orig_constop = constop;
8869 if (GET_CODE (varop) == CLOBBER)
8870 return NULL_RTX;
8871
6139ff20 8872 /* Simplify VAROP knowing that we will only be looking at some of the
8bc52806
JL
8873 bits in it.
8874
8875 Note by passing in CONSTOP, we guarantee that the bits not set in
8876 CONSTOP are not significant and will never be examined. We must
8877 ensure that is the case by explicitly masking out those bits
8878 before returning. */
b1257407 8879 varop = force_to_mode (varop, mode, constop, 0);
230d793d 8880
8bc52806
JL
8881 /* If VAROP is a CLOBBER, we will fail so return it. */
8882 if (GET_CODE (varop) == CLOBBER)
6139ff20 8883 return varop;
230d793d 8884
8bc52806
JL
8885 /* If VAROP is a CONST_INT, then we need to apply the mask in CONSTOP
8886 to VAROP and return the new constant. */
481683e1 8887 if (CONST_INT_P (varop))
bb80db7b 8888 return gen_int_mode (INTVAL (varop) & constop, mode);
8bc52806 8889
fc06d7aa
RK
8890 /* See what bits may be nonzero in VAROP. Unlike the general case of
8891 a call to nonzero_bits, here we don't care about bits outside
8892 MODE. */
8893
8894 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
9fa6d012 8895
230d793d 8896 /* Turn off all bits in the constant that are known to already be zero.
951553af 8897 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
8898 which is tested below. */
8899
951553af 8900 constop &= nonzero;
230d793d
RS
8901
8902 /* If we don't have any bits left, return zero. */
8903 if (constop == 0)
8904 return const0_rtx;
8905
42301240 8906 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
e0a2f705 8907 a power of two, we can replace this with an ASHIFT. */
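/* An editorial example: with X known to be 0 or 1, (and (neg X) 8)
   is 0 when X is 0 and 8 when X is 1 (NEG of 1 is all ones), which
   is exactly (ashift X 3).  */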
42301240
RK
8908 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
8909 && (i = exact_log2 (constop)) >= 0)
8910 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
663522cb 8911
6139ff20
RK
8912 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
8913 or XOR, then try to apply the distributive law. This may eliminate
8914 operations if either branch can be simplified because of the AND.
8915 It may also make some cases more complex, but those cases probably
8916 won't match a pattern either with or without this. */
8917
8918 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
8919 return
4de249d9 8920 gen_lowpart
8921 (mode,
8922 apply_distributive_law
8923 (simplify_gen_binary (GET_CODE (varop), GET_MODE (varop),
8924 simplify_and_const_int (NULL_RTX,
8925 GET_MODE (varop),
8926 XEXP (varop, 0),
8927 constop),
8928 simplify_and_const_int (NULL_RTX,
8929 GET_MODE (varop),
8930 XEXP (varop, 1),
8931 constop))));
6139ff20 8932
b1257407 8933 /* If VAROP is PLUS, and the constant is a mask of low bits, distribute
8934 the AND and see if one of the operands simplifies to zero. If so, we
8935 may eliminate it. */
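  /* Since CONSTOP == 2**N - 1 here, no carries escape the masked field:
     (A + B) & CONSTOP == ((A & CONSTOP) + (B & CONSTOP)) & CONSTOP.
     So if either masked addend is zero, the other one can stand in for
     the whole PLUS.  */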
8936
8937 if (GET_CODE (varop) == PLUS
8938 && exact_log2 (constop + 1) >= 0)
8939 {
8940 rtx o0, o1;
8941
8942 o0 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 0), constop);
8943 o1 = simplify_and_const_int (NULL_RTX, mode, XEXP (varop, 1), constop);
8944 if (o0 == const0_rtx)
8945 return o1;
8946 if (o1 == const0_rtx)
8947 return o0;
8948 }
8949
8950 /* Make a SUBREG if necessary. If we can't make it, fail. */
8951 varop = gen_lowpart (mode, varop);
8952 if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
8953 return NULL_RTX;
8954
8955 /* If we are only masking insignificant bits, return VAROP. */
951553af 8956 if (constop == nonzero)
41e8659e 8957 return varop;
230d793d 8958
8959 if (varop == orig_varop && constop == orig_constop)
8960 return NULL_RTX;
d0c9db30 8961
41e8659e 8962 /* Otherwise, return an AND. */
7b282ff9 8963 return simplify_gen_binary (AND, mode, varop, gen_int_mode (constop, mode));
8964}
8965
8966
8967/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
8968 in MODE.
8969
8970 Return an equivalent form, if different from X. Otherwise, return X. If
8971 X is zero, we are to always construct the equivalent form. */
8972
8973static rtx
8974simplify_and_const_int (rtx x, enum machine_mode mode, rtx varop,
8975 unsigned HOST_WIDE_INT constop)
8976{
8977 rtx tem = simplify_and_const_int_1 (mode, varop, constop);
8978 if (tem)
8979 return tem;
230d793d 8980
41e8659e 8981 if (!x)
8982 x = simplify_gen_binary (AND, GET_MODE (varop), varop,
8983 gen_int_mode (constop, mode));
8984 if (GET_MODE (x) != mode)
8985 x = gen_lowpart (mode, x);
8986 return x;
8987}
8988\f
2f93eea8 8989/* Given a REG, X, compute which bits in X can be nonzero.
8990 We don't care about bits outside of those defined in MODE.
8991
8992 For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
8993 a shift, AND, or zero_extract, we can do better. */
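/* For example, if the most recent assignment to R in this basic block
   was (set R (and X (const_int 255))), the recorded
   last_set_nonzero_bits for R is 0xff, and a query on R below can
   intersect the caller's mask with 0xff instead of the full mode mask.  */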
8994
2f93eea8 8995static rtx
8996reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
8997 const_rtx known_x ATTRIBUTE_UNUSED,
8998 enum machine_mode known_mode ATTRIBUTE_UNUSED,
8999 unsigned HOST_WIDE_INT known_ret ATTRIBUTE_UNUSED,
9000 unsigned HOST_WIDE_INT *nonzero)
230d793d 9001{
230d793d 9002 rtx tem;
829f8ff7 9003 reg_stat_type *rsp;
230d793d 9004
9005 /* If X is a register whose nonzero bits value is current, use it.
9006 Otherwise, if X is a register whose value we can find, use that
9007 value. Otherwise, use the previously-computed global nonzero bits
9008 for this register. */
9009
9010 rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
9011 if (rsp->last_set_value != 0
9012 && (rsp->last_set_mode == mode
9013 || (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
2f93eea8 9014 && GET_MODE_CLASS (mode) == MODE_INT))
9015 && ((rsp->last_set_label >= label_tick_ebb_start
9016 && rsp->last_set_label < label_tick)
9017 || (rsp->last_set_label == label_tick
9018 && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
9019 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
9020 && REG_N_SETS (REGNO (x)) == 1
9021 && !REGNO_REG_SET_P
9022 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
230d793d 9023 {
829f8ff7 9024 *nonzero &= rsp->last_set_nonzero_bits;
2f93eea8 9025 return NULL;
9026 }
9027
2f93eea8 9028 tem = get_last_value (x);
230d793d 9029
2f93eea8 9030 if (tem)
0840fd91 9031 {
9afa3d54 9032#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
2f93eea8 9033 /* If X is narrower than MODE and TEM is a non-negative
9034 constant that would appear negative in the mode of X,
9035 sign-extend it for use in reg_nonzero_bits because some
9036 machines (maybe most) will actually do the sign-extension
9037 and this is the conservative approach.
2f93eea8 9038
9039 ??? For 2.5, try to tighten up the MD files in this regard
9040 instead of this kludge. */
9041
9042 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode)
481683e1 9043 && CONST_INT_P (tem)
9044 && INTVAL (tem) > 0
9045 && 0 != (INTVAL (tem)
9046 & ((HOST_WIDE_INT) 1
9047 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9048 tem = GEN_INT (INTVAL (tem)
9049 | ((HOST_WIDE_INT) (-1)
9050 << GET_MODE_BITSIZE (GET_MODE (x))));
230d793d 9051#endif
2f93eea8 9052 return tem;
230d793d 9053 }
829f8ff7 9054 else if (nonzero_sign_valid && rsp->nonzero_bits)
8fd73754 9055 {
829f8ff7 9056 unsigned HOST_WIDE_INT mask = rsp->nonzero_bits;
8fd73754 9057
2f93eea8 9058 if (GET_MODE_BITSIZE (GET_MODE (x)) < GET_MODE_BITSIZE (mode))
9059 /* We don't know anything about the upper bits. */
9060 mask |= GET_MODE_MASK (mode) ^ GET_MODE_MASK (GET_MODE (x));
2f93eea8 9061 *nonzero &= mask;
9062 }
9063
2f93eea8 9064 return NULL;
9065}
9066
d0ab8cd3 9067/* Return the number of bits at the high-order end of X that are known to
9068 be equal to the sign bit. X will be used in mode MODE; if MODE is
9069 VOIDmode, X will be used in its own mode. The returned value will always
9070 be between 1 and the number of bits in MODE. */
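/* For example, (const_int -1) has as many sign-bit copies as the mode
   has bits, while a value sign-extended from QImode to SImode is known
   to have at least 25: bits 7 through 31 all equal the sign bit.  */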
d0ab8cd3 9071
2f93eea8 9072static rtx
9073reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
9074 const_rtx known_x ATTRIBUTE_UNUSED,
9075 enum machine_mode known_mode
9076 ATTRIBUTE_UNUSED,
9077 unsigned int known_ret ATTRIBUTE_UNUSED,
9078 unsigned int *result)
d0ab8cd3 9079{
d0ab8cd3 9080 rtx tem;
9081 reg_stat_type *rsp;
9082
9083 rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
9084 if (rsp->last_set_value != 0
9085 && rsp->last_set_mode == mode
9086 && ((rsp->last_set_label >= label_tick_ebb_start
9087 && rsp->last_set_label < label_tick)
9088 || (rsp->last_set_label == label_tick
9089 && DF_INSN_LUID (rsp->last_set) < subst_low_luid)
c22cacf3 9090 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
2f93eea8 9091 && REG_N_SETS (REGNO (x)) == 1
9092 && !REGNO_REG_SET_P
9093 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x)))))
770ae6cc 9094 {
829f8ff7 9095 *result = rsp->last_set_sign_bit_copies;
2f93eea8 9096 return NULL;
9097 }
9098
9099 tem = get_last_value (x);
9100 if (tem != 0)
9101 return tem;
d0ab8cd3 9102
829f8ff7 9103 if (nonzero_sign_valid && rsp->sign_bit_copies != 0
2f93eea8 9104 && GET_MODE_BITSIZE (GET_MODE (x)) == GET_MODE_BITSIZE (mode))
829f8ff7 9105 *result = rsp->sign_bit_copies;
c22cacf3 9106
2f93eea8 9107 return NULL;
9108}
9109\f
9110/* Return the number of "extended" bits there are in X, when interpreted
9111 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
9112 unsigned quantities, this is the number of high-order zero bits.
9113 For signed quantities, this is the number of copies of the sign bit
9114 minus 1. In both cases, this function returns the number of "spare"
9115 bits. For example, if two quantities for which this function returns
9116 at least 1 are added, the addition is known not to overflow.
9117
9118 This function will always return 0 unless called during combine, which
9119 implies that it must be called from a define_split. */
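/* For example, an unsigned SImode quantity whose nonzero_bits mask is
   0xff has 32 - 1 - floor_log2 (0xff) == 24 "spare" bits; adding two
   such quantities cannot overflow 32 bits.  */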
9120
770ae6cc 9121unsigned int
4f588890 9122extended_count (const_rtx x, enum machine_mode mode, int unsignedp)
1a26b032 9123{
951553af 9124 if (nonzero_sign_valid == 0)
9125 return 0;
9126
9127 return (unsignedp
ac49a949 9128 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9129 ? (unsigned int) (GET_MODE_BITSIZE (mode) - 1
9130 - floor_log2 (nonzero_bits (x, mode)))
770ae6cc 9131 : 0)
9132 : num_sign_bit_copies (x, mode) - 1);
9133}
9134\f
9135/* This function is called from `simplify_shift_const' to merge two
9136 outer operations. Specifically, we have already found that we need
9137 to perform operation *POP0 with constant *PCONST0 at the outermost
9138 position. We would now like to also perform OP1 with constant CONST1
9139 (with *POP0 being done last).
9140
9141 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
663522cb 9142 the resulting operation. *PCOMP_P is set to 1 if we would need to
9143 complement the innermost operand, otherwise it is unchanged.
9144
9145 MODE is the mode in which the operation will be done. No bits outside
9146 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 9147 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d 9148
f822d252 9149 If *POP0 or OP1 is UNKNOWN, it means no operation is required. Only NEG, PLUS,
9150 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
9151 result is simply *PCONST0.
9152
9153 If the resulting operation cannot be expressed as one operation, we
9154 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
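/* For example, merging an inner XOR of B into an outer IOR of the same
   constant B uses the identity (a ^ b) | b == a | b, so *POP0 remains
   IOR; merging an inner XOR of B into an outer AND of B uses
   (a ^ b) & b == (~a) & b, which keeps AND and sets *PCOMP_P.  */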
9155
9156static int
79a490a9 9157merge_outer_ops (enum rtx_code *pop0, HOST_WIDE_INT *pconst0, enum rtx_code op1, HOST_WIDE_INT const1, enum machine_mode mode, int *pcomp_p)
9158{
9159 enum rtx_code op0 = *pop0;
5f4f0e22 9160 HOST_WIDE_INT const0 = *pconst0;
9161
9162 const0 &= GET_MODE_MASK (mode);
9163 const1 &= GET_MODE_MASK (mode);
9164
9165 /* If OP0 is an AND, clear unimportant bits in CONST1. */
9166 if (op0 == AND)
9167 const1 &= const0;
9168
f822d252 9169 /* If OP0 or OP1 is UNKNOWN, this is easy. Similarly if they are the same or
9170 if OP0 is SET. */
9171
f822d252 9172 if (op1 == UNKNOWN || op0 == SET)
9173 return 1;
9174
f822d252 9175 else if (op0 == UNKNOWN)
9176 op0 = op1, const0 = const1;
9177
9178 else if (op0 == op1)
9179 {
9180 switch (op0)
9181 {
9182 case AND:
9183 const0 &= const1;
9184 break;
9185 case IOR:
9186 const0 |= const1;
9187 break;
9188 case XOR:
9189 const0 ^= const1;
9190 break;
9191 case PLUS:
9192 const0 += const1;
9193 break;
9194 case NEG:
f822d252 9195 op0 = UNKNOWN;
230d793d 9196 break;
9197 default:
9198 break;
9199 }
9200 }
9201
9202 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
9203 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
9204 return 0;
9205
9206 /* If the two constants aren't the same, we can't do anything. The
9207 remaining six cases can all be done. */
9208 else if (const0 != const1)
9209 return 0;
9210
9211 else
9212 switch (op0)
9213 {
9214 case IOR:
9215 if (op1 == AND)
9216 /* (a & b) | b == b */
9217 op0 = SET;
9218 else /* op1 == XOR */
9219 /* (a ^ b) | b == a | b */
b729186a 9220 {;}
9221 break;
9222
9223 case XOR:
9224 if (op1 == AND)
9225 /* (a & b) ^ b == (~a) & b */
9226 op0 = AND, *pcomp_p = 1;
9227 else /* op1 == IOR */
9228 /* (a | b) ^ b == a & ~b */
7d4444ea 9229 op0 = AND, const0 = ~const0;
9230 break;
9231
9232 case AND:
9233 if (op1 == IOR)
9234 /* (a | b) & b == b */
9235 op0 = SET;
9236 else /* op1 == XOR */
9237 /* (a ^ b) & b == (~a) & b */
9238 *pcomp_p = 1;
9239 break;
9240 default:
9241 break;
9242 }
9243
9244 /* Check for NO-OP cases. */
9245 const0 &= GET_MODE_MASK (mode);
9246 if (const0 == 0
9247 && (op0 == IOR || op0 == XOR || op0 == PLUS))
f822d252 9248 op0 = UNKNOWN;
9249 else if (const0 == 0 && op0 == AND)
9250 op0 = SET;
9251 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
9252 && op0 == AND)
f822d252 9253 op0 = UNKNOWN;
230d793d 9254
9255 *pop0 = op0;
9256
9257 /* ??? Slightly redundant with the above mask, but not entirely.
9258 Moving this above means we'd have to sign-extend the mode mask
9259 for the final test. */
9260 if (op0 != UNKNOWN && op0 != NEG)
9261 *pconst0 = trunc_int_for_mode (const0, mode);
9262
9263 return 1;
9264}
9265\f
9266/* A helper to simplify_shift_const_1 to determine the mode we can perform
9267 the shift in. The original shift operation CODE is performed on OP in
9268 ORIG_MODE. Return the wider mode MODE if we can perform the operation
9269 in that mode. Return ORIG_MODE otherwise. We can also assume that the
9270 result of the shift is subject to operation OUTER_CODE with operand
9271 OUTER_CONST. */
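/* For instance, a QImode LSHIFTRT may be performed in SImode when the
   nonzero bits of OP already fit in QImode, since the bits shifted in
   from above are zero either way; an ASHIFTRT may be widened only when
   OP has enough sign-bit copies in the wider mode.  */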
9272
9273static enum machine_mode
9274try_widen_shift_mode (enum rtx_code code, rtx op, int count,
9275 enum machine_mode orig_mode, enum machine_mode mode,
9276 enum rtx_code outer_code, HOST_WIDE_INT outer_const)
9277{
9278 if (orig_mode == mode)
9279 return mode;
9280 gcc_assert (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (orig_mode));
9281
9282 /* In general we can't perform the shift in a wider mode for right shifts and rotates. */
9283 switch (code)
9284 {
9285 case ASHIFTRT:
9286 /* We can still widen if the bits brought in from the left are identical
9287 to the sign bit of ORIG_MODE. */
9288 if (num_sign_bit_copies (op, mode)
9289 > (unsigned) (GET_MODE_BITSIZE (mode)
9290 - GET_MODE_BITSIZE (orig_mode)))
9291 return mode;
9292 return orig_mode;
9293
b641d7fc 9294 case LSHIFTRT:
9295 /* Similarly here but with zero bits. */
9296 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9297 && (nonzero_bits (op, mode) & ~GET_MODE_MASK (orig_mode)) == 0)
9298 return mode;
9299
9300 /* We can also widen if the bits brought in will be masked off. This
9301 operation is performed in ORIG_MODE. */
842e098c 9302 if (outer_code == AND)
f3ce1088 9303 {
842e098c 9304 int care_bits = low_bitmask_len (orig_mode, outer_const);
9305
9306 if (care_bits >= 0
9307 && GET_MODE_BITSIZE (orig_mode) - care_bits >= count)
9308 return mode;
9309 }
9310 /* fall through */
9311
9312 case ROTATE:
9313 return orig_mode;
9314
9315 case ROTATERT:
9316 gcc_unreachable ();
9317
9318 default:
9319 return mode;
9320 }
9321}
9322
230d793d 9323/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
9324 The result of the shift is RESULT_MODE. Return NULL_RTX if we cannot
9325 simplify it. Otherwise, return a simplified value.
9326
9327 The shift is normally computed in the widest mode we find in VAROP, as
9328 long as it isn't a different number of words than RESULT_MODE. Exceptions
41e8659e 9329 are ASHIFTRT and ROTATE, which are always done in their original mode. */
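/* A typical case handled below: two nested shifts in the same direction
   have their counts added, so (ashift (ashift X (const_int 2))
   (const_int 3)) becomes (ashift X (const_int 5)), possibly under an
   outer AND mask computed from the nonzero bits of the inner operand.  */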
9330
9331static rtx
9332simplify_shift_const_1 (enum rtx_code code, enum machine_mode result_mode,
9333 rtx varop, int orig_count)
9334{
9335 enum rtx_code orig_code = code;
9336 rtx orig_varop = varop;
9337 int count;
9338 enum machine_mode mode = result_mode;
9339 enum machine_mode shift_mode, tmode;
770ae6cc 9340 unsigned int mode_words
9341 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
9342 /* We form (outer_op (code varop count) (outer_const)). */
f822d252 9343 enum rtx_code outer_op = UNKNOWN;
c4e861e8 9344 HOST_WIDE_INT outer_const = 0;
230d793d 9345 int complement_p = 0;
32e9fa48 9346 rtx new_rtx, x;
230d793d 9347
9348 /* Make sure to truncate the "natural" shift on the way in. We don't
9349 want to do this inside the loop as it makes it more difficult to
9350 combine shifts. */
9351 if (SHIFT_COUNT_TRUNCATED)
9352 orig_count &= GET_MODE_BITSIZE (mode) - 1;
0051b6ca 9353
9354 /* If we were given an invalid count, don't do anything except exactly
9355 what was requested. */
9356
0051b6ca 9357 if (orig_count < 0 || orig_count >= (int) GET_MODE_BITSIZE (mode))
41e8659e 9358 return NULL_RTX;
230d793d 9359
0051b6ca 9360 count = orig_count;
853d8828 9361
9362 /* Unless one of the branches of the `if' in this loop does a `continue',
9363 we will `break' the loop after the `if'. */
9364
9365 while (count != 0)
9366 {
41e8659e 9367 /* If we have an operand of (clobber (const_int 0)), fail. */
230d793d 9368 if (GET_CODE (varop) == CLOBBER)
41e8659e 9369 return NULL_RTX;
230d793d 9370
abc95ed3 9371 /* Convert ROTATERT to ROTATE. */
230d793d 9372 if (code == ROTATERT)
9373 {
9374 unsigned int bitsize = GET_MODE_BITSIZE (result_mode);
9375 code = ROTATE;
9376 if (VECTOR_MODE_P (result_mode))
9377 count = bitsize / GET_MODE_NUNITS (result_mode) - count;
9378 else
9379 count = bitsize - count;
9380 }
230d793d 9381
9382 shift_mode = try_widen_shift_mode (code, varop, count, result_mode,
9383 mode, outer_op, outer_const);
9384
9385 /* Handle cases where the count is greater than the size of the mode
9386 minus 1. For ASHIFT, use the size minus one as the count (this can
9387 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
9388 take the count modulo the size. For other shifts, the result is
9389 zero.
9390
9391 Since these shifts are being produced by the compiler by combining
9392 multiple operations, each of which is defined, we know what the
9393 result is supposed to be. */
663522cb 9394
41e8659e 9395 if (count > (GET_MODE_BITSIZE (shift_mode) - 1))
9396 {
9397 if (code == ASHIFTRT)
9398 count = GET_MODE_BITSIZE (shift_mode) - 1;
9399 else if (code == ROTATE || code == ROTATERT)
9400 count %= GET_MODE_BITSIZE (shift_mode);
9401 else
9402 {
9403 /* We can't simply return zero because there may be an
9404 outer op. */
9405 varop = const0_rtx;
9406 count = 0;
9407 break;
9408 }
9409 }
9410
9411 /* If we discovered we had to complement VAROP, leave. Making a NOT
9412 here would cause an infinite loop. */
9413 if (complement_p)
9414 break;
9415
9416 /* An arithmetic right shift of a quantity known to be -1 or 0
9417 is a no-op. */
9418 if (code == ASHIFTRT
9419 && (num_sign_bit_copies (varop, shift_mode)
9420 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 9421 {
9422 count = 0;
9423 break;
9424 }
d0ab8cd3 9425
9426 /* If we are doing an arithmetic right shift and discarding all but
9427 the sign bit copies, this is equivalent to doing a shift by the
9428 bitsize minus one. Convert it into that shift because it will often
9429 allow other simplifications. */
500c518b 9430
9431 if (code == ASHIFTRT
9432 && (count + num_sign_bit_copies (varop, shift_mode)
9433 >= GET_MODE_BITSIZE (shift_mode)))
9434 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 9435
9436 /* We simplify the tests below and elsewhere by converting
9437 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
9438 `make_compound_operation' will convert it to an ASHIFTRT for
9439 those machines (such as VAX) that don't have an LSHIFTRT. */
5f4f0e22 9440 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 9441 && code == ASHIFTRT
951553af 9442 && ((nonzero_bits (varop, shift_mode)
9443 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
9444 == 0))
9445 code = LSHIFTRT;
9446
9447 if (((code == LSHIFTRT
9448 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9449 && !(nonzero_bits (varop, shift_mode) >> count))
9450 || (code == ASHIFT
9451 && GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
9452 && !((nonzero_bits (varop, shift_mode) << count)
9453 & GET_MODE_MASK (shift_mode))))
9454 && !side_effects_p (varop))
2d21f7d6 9455 varop = const0_rtx;
b9422b69 9456
9457 switch (GET_CODE (varop))
9458 {
9459 case SIGN_EXTEND:
9460 case ZERO_EXTEND:
9461 case SIGN_EXTRACT:
9462 case ZERO_EXTRACT:
9463 new_rtx = expand_compound_operation (varop);
9464 if (new_rtx != varop)
230d793d 9465 {
32e9fa48 9466 varop = new_rtx;
9467 continue;
9468 }
9469 break;
9470
9471 case MEM:
9472 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
9473 minus the width of a smaller mode, we can do this with a
9474 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
9475 if ((code == ASHIFTRT || code == LSHIFTRT)
9476 && ! mode_dependent_address_p (XEXP (varop, 0))
9477 && ! MEM_VOLATILE_P (varop)
9478 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
9479 MODE_INT, 1)) != BLKmode)
9480 {
32e9fa48 9481 new_rtx = adjust_address_nv (varop, tmode,
9482 BYTES_BIG_ENDIAN ? 0
9483 : count / BITS_PER_UNIT);
bf49b139 9484
f1c6ba8b 9485 varop = gen_rtx_fmt_e (code == ASHIFTRT ? SIGN_EXTEND
32e9fa48 9486 : ZERO_EXTEND, mode, new_rtx);
9487 count = 0;
9488 continue;
9489 }
9490 break;
9491
9492 case SUBREG:
9493 /* If VAROP is a SUBREG, strip it as long as the inner operand has
9494 the same number of words as what we've seen so far. Then store
9495 the widest mode in MODE. */
9496 if (subreg_lowpart_p (varop)
9497 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9498 > GET_MODE_SIZE (GET_MODE (varop)))
9499 && (unsigned int) ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
9500 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
9501 == mode_words)
9502 {
9503 varop = SUBREG_REG (varop);
9504 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
9505 mode = GET_MODE (varop);
9506 continue;
9507 }
9508 break;
9509
9510 case MULT:
9511 /* Some machines use MULT instead of ASHIFT because MULT
9512 is cheaper. But it is still better on those machines to
9513 merge two shifts into one. */
481683e1 9514 if (CONST_INT_P (XEXP (varop, 1))
9515 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9516 {
770ae6cc 9517 varop
9518 = simplify_gen_binary (ASHIFT, GET_MODE (varop),
9519 XEXP (varop, 0),
9520 GEN_INT (exact_log2 (
9521 INTVAL (XEXP (varop, 1)))));
9522 continue;
9523 }
9524 break;
9525
9526 case UDIV:
9527 /* Similar, for when divides are cheaper. */
481683e1 9528 if (CONST_INT_P (XEXP (varop, 1))
9529 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
9530 {
770ae6cc 9531 varop
9532 = simplify_gen_binary (LSHIFTRT, GET_MODE (varop),
9533 XEXP (varop, 0),
9534 GEN_INT (exact_log2 (
9535 INTVAL (XEXP (varop, 1)))));
9536 continue;
9537 }
9538 break;
9539
9540 case ASHIFTRT:
9541 /* If we are extracting just the sign bit of an arithmetic
9542 right shift, that shift is not needed. However, the sign
9543 bit of a wider mode may be different from what would be
9544 interpreted as the sign bit in a narrower mode, so, if
9545 the result is narrower, don't discard the shift. */
26c34780 9546 if (code == LSHIFTRT
41e8659e 9547 && count == (GET_MODE_BITSIZE (result_mode) - 1)
9548 && (GET_MODE_BITSIZE (result_mode)
9549 >= GET_MODE_BITSIZE (GET_MODE (varop))))
9550 {
9551 varop = XEXP (varop, 0);
9552 continue;
9553 }
9554
0f41302f 9555 /* ... fall through ... */
9556
9557 case LSHIFTRT:
9558 case ASHIFT:
9559 case ROTATE:
9560 /* Here we have two nested shifts. The result is usually the
9561 AND of a new shift with a mask. We compute the result below. */
481683e1 9562 if (CONST_INT_P (XEXP (varop, 1))
9563 && INTVAL (XEXP (varop, 1)) >= 0
9564 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22 9565 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9566 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9567 && !VECTOR_MODE_P (result_mode))
9568 {
9569 enum rtx_code first_code = GET_CODE (varop);
770ae6cc 9570 unsigned int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 9571 unsigned HOST_WIDE_INT mask;
230d793d 9572 rtx mask_rtx;
230d793d 9573
9574 /* We have one common special case. We can't do any merging if
9575 the inner code is an ASHIFTRT of a smaller mode. However, if
9576 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
9577 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
9578 we can convert it to
9579 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
9580 This simplifies certain SIGN_EXTEND operations. */
9581 if (code == ASHIFT && first_code == ASHIFTRT
41e8659e 9582 && count == (GET_MODE_BITSIZE (result_mode)
26c34780 9583 - GET_MODE_BITSIZE (GET_MODE (varop))))
9584 {
9585 /* C3 has the low-order C1 bits zero. */
663522cb 9586
5f4f0e22 9587 mask = (GET_MODE_MASK (mode)
663522cb 9588 & ~(((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 9589
5f4f0e22 9590 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 9591 XEXP (varop, 0), mask);
5f4f0e22 9592 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
9593 varop, count);
9594 count = first_count;
9595 code = ASHIFTRT;
9596 continue;
9597 }
663522cb 9598
9599 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
9600 than C1 high-order bits equal to the sign bit, we can convert
e0a2f705 9601 this to either an ASHIFT or an ASHIFTRT depending on the
663522cb 9602 two counts.
9603
9604 We cannot do this if VAROP's mode is not SHIFT_MODE. */
9605
9606 if (code == ASHIFTRT && first_code == ASHIFT
9607 && GET_MODE (varop) == shift_mode
9608 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
9609 > first_count))
230d793d 9610 {
d0ab8cd3 9611 varop = XEXP (varop, 0);
9612 count -= first_count;
9613 if (count < 0)
9614 {
9615 count = -count;
9616 code = ASHIFT;
9617 }
770ae6cc 9618
d0ab8cd3 9619 continue;
9620 }
9621
9622 /* There are some cases we can't do. If CODE is ASHIFTRT,
9623 we can only do this if FIRST_CODE is also ASHIFTRT.
9624
9625 We can't do the case when CODE is ROTATE and FIRST_CODE is
9626 ASHIFTRT.
9627
9628 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 9629 we can't do this if either shift is a right shift or ROTATE.
9630
9631 Finally, we can't do any of these if the mode is too wide
9632 unless the codes are the same.
9633
9634 Handle the case where the shift codes are the same
9635 first. */
9636
9637 if (code == first_code)
9638 {
9639 if (GET_MODE (varop) != result_mode
9640 && (code == ASHIFTRT || code == LSHIFTRT
9641 || code == ROTATE))
9642 break;
9643
9644 count += first_count;
9645 varop = XEXP (varop, 0);
9646 continue;
9647 }
9648
9649 if (code == ASHIFTRT
9650 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 9651 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 9652 || (GET_MODE (varop) != result_mode
9653 && (first_code == ASHIFTRT || first_code == LSHIFTRT
9654 || first_code == ROTATE
9655 || code == ROTATE)))
9656 break;
9657
9658 /* To compute the mask to apply after the shift, shift the
663522cb 9659 nonzero bits of the inner shift the same way the
9660 outer shift will. */
9661
951553af 9662 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9663
9664 mask_rtx
9665 = simplify_const_binary_operation (code, result_mode, mask_rtx,
9666 GEN_INT (count));
663522cb 9667
9668 /* Give up if we can't compute an outer operation to use. */
9669 if (mask_rtx == 0
481683e1 9670 || !CONST_INT_P (mask_rtx)
9671 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9672 INTVAL (mask_rtx),
9673 result_mode, &complement_p))
9674 break;
9675
9676 /* If the shifts are in the same direction, we add the
9677 counts. Otherwise, we subtract them. */
9678 if ((code == ASHIFTRT || code == LSHIFTRT)
9679 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
41e8659e 9680 count += first_count;
230d793d 9681 else
41e8659e 9682 count -= first_count;
230d793d 9683
663522cb 9684 /* If COUNT is positive, the new shift is usually CODE,
230d793d
RS
9685 except for the two exceptions below, in which case it is
9686 FIRST_CODE. If the count is negative, FIRST_CODE should
9687 always be used */
41e8659e 9688 if (count > 0
9689 && ((first_code == ROTATE && code == ASHIFT)
9690 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9691 code = first_code;
9692 else if (count < 0)
9693 code = first_code, count = -count;
9694
9695 varop = XEXP (varop, 0);
9696 continue;
9697 }
9698
9699 /* If we have (A << B << C) for any shift, we can convert this to
9700 (A << C << B). This wins if A is a constant. Only try this if
9701 B is not a constant. */
9702
9703 else if (GET_CODE (varop) == code
9704 && CONST_INT_P (XEXP (varop, 0))
9705 && !CONST_INT_P (XEXP (varop, 1)))
230d793d 9706 {
32e9fa48 9707 rtx new_rtx = simplify_const_binary_operation (code, mode,
9708 XEXP (varop, 0),
9709 GEN_INT (count));
32e9fa48 9710 varop = gen_rtx_fmt_ee (code, mode, new_rtx, XEXP (varop, 1));
9711 count = 0;
9712 continue;
9713 }
9714 break;
9715
9716 case NOT:
9717 if (VECTOR_MODE_P (mode))
9718 break;
9719
230d793d 9720 /* Make this fit the case below. */
9721 varop = gen_rtx_XOR (mode, XEXP (varop, 0),
9722 GEN_INT (GET_MODE_MASK (mode)));
9723 continue;
9724
9725 case IOR:
9726 case AND:
9727 case XOR:
9728 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9729 with C the size of VAROP - 1 and the shift is logical if
9730 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9731 we have an (le X 0) operation. If we have an arithmetic shift
9732 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9733 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9734
9735 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9736 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9737 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9738 && (code == LSHIFTRT || code == ASHIFTRT)
41e8659e 9739 && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
9740 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9741 {
9742 count = 0;
9743 varop = gen_rtx_LE (GET_MODE (varop), XEXP (varop, 1),
9744 const0_rtx);
9745
9746 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
f1c6ba8b 9747 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9748
9749 continue;
9750 }
9751
9752 /* If we have (shift (logical)), move the logical to the outside
9753 to allow it to possibly combine with another logical and the
9754 shift to combine with another shift. This also canonicalizes to
9755 what a ZERO_EXTRACT looks like. Also, some machines have
9756 (and (shift)) insns. */
9757
481683e1 9758 if (CONST_INT_P (XEXP (varop, 1))
9759 /* We can't do this if we have (ashiftrt (xor)) and the
9760 constant has its sign bit set in shift_mode. */
9761 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
9762 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
9763 shift_mode))
32e9fa48 9764 && (new_rtx = simplify_const_binary_operation (code, result_mode,
9765 XEXP (varop, 1),
9766 GEN_INT (count))) != 0
481683e1 9767 && CONST_INT_P (new_rtx)
230d793d 9768 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
32e9fa48 9769 INTVAL (new_rtx), result_mode, &complement_p))
9770 {
9771 varop = XEXP (varop, 0);
9772 continue;
9773 }
9774
9775 /* If we can't do that, try to simplify the shift in each arm of the
9776 logical expression, make a new logical expression, and apply
9777 the inverse distributive law. This also can't be done
9778 for some (ashiftrt (xor)). */
481683e1 9779 if (CONST_INT_P (XEXP (varop, 1))
446f52f4 9780 && !(code == ASHIFTRT && GET_CODE (varop) == XOR
c22cacf3 9781 && 0 > trunc_int_for_mode (INTVAL (XEXP (varop, 1)),
446f52f4 9782 shift_mode)))
9783 {
9784 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9785 XEXP (varop, 0), count);
9786 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9787 XEXP (varop, 1), count);
230d793d 9788
9789 varop = simplify_gen_binary (GET_CODE (varop), shift_mode,
9790 lhs, rhs);
39a44a4e 9791 varop = apply_distributive_law (varop);
230d793d 9792
39a44a4e 9793 count = 0;
c22cacf3 9794 continue;
39a44a4e 9795 }
9796 break;
9797
9798 case EQ:
beb235f8 9799 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 9800 says that the sign bit can be tested, FOO has mode MODE, C is
9801 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9802 that may be nonzero. */
9803 if (code == LSHIFTRT
9804 && XEXP (varop, 1) == const0_rtx
9805 && GET_MODE (XEXP (varop, 0)) == result_mode
41e8659e 9806 && count == (GET_MODE_BITSIZE (result_mode) - 1)
5f4f0e22 9807 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
41e8659e 9808 && STORE_FLAG_VALUE == -1
951553af 9809 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9810 && merge_outer_ops (&outer_op, &outer_const, XOR,
9811 (HOST_WIDE_INT) 1, result_mode,
9812 &complement_p))
9813 {
9814 varop = XEXP (varop, 0);
9815 count = 0;
9816 continue;
9817 }
9818 break;
9819
9820 case NEG:
9821 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9822 than the number of bits in the mode is equivalent to A. */
26c34780 9823 if (code == LSHIFTRT
41e8659e 9824 && count == (GET_MODE_BITSIZE (result_mode) - 1)
951553af 9825 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 9826 {
d0ab8cd3 9827 varop = XEXP (varop, 0);
9828 count = 0;
9829 continue;
9830 }
9831
9832 /* NEG commutes with ASHIFT since it is multiplication. Move the
9833 NEG outside to allow shifts to combine. */
9834 if (code == ASHIFT
9835 && merge_outer_ops (&outer_op, &outer_const, NEG,
9836 (HOST_WIDE_INT) 0, result_mode,
9837 &complement_p))
9838 {
9839 varop = XEXP (varop, 0);
9840 continue;
9841 }
9842 break;
9843
9844 case PLUS:
9845 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9846 is one less than the number of bits in the mode is
9847 equivalent to (xor A 1). */
26c34780 9848 if (code == LSHIFTRT
41e8659e 9849 && count == (GET_MODE_BITSIZE (result_mode) - 1)
230d793d 9850 && XEXP (varop, 1) == constm1_rtx
951553af 9851 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9852 && merge_outer_ops (&outer_op, &outer_const, XOR,
9853 (HOST_WIDE_INT) 1, result_mode,
9854 &complement_p))
9855 {
9856 count = 0;
9857 varop = XEXP (varop, 0);
9858 continue;
9859 }
9860
3f508eca 9861 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 9862 that might be nonzero in BAR are those being shifted out and those
9863 bits are known zero in FOO, we can replace the PLUS with FOO.
9864 Similarly in the other operand order. This code occurs when
9865 we are computing the size of a variable-size array. */
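	  /* For example, if the low three bits of X are known zero,
	     (lshiftrt (plus X (const_int 7)) (const_int 3)) is
	     (lshiftrt X (const_int 3)): the addition cannot carry into
	     the bits that survive the shift.  */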
9866
9867 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9868 && count < HOST_BITS_PER_WIDE_INT
9869 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9870 && (nonzero_bits (XEXP (varop, 1), result_mode)
9871 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9872 {
9873 varop = XEXP (varop, 0);
9874 continue;
9875 }
9876 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 9877 && count < HOST_BITS_PER_WIDE_INT
ac49a949 9878 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 9879 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 9880 >> count)
9881 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9882 & nonzero_bits (XEXP (varop, 1),
9883 result_mode)))
9884 {
9885 varop = XEXP (varop, 1);
9886 continue;
9887 }
9888
9889 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9890 if (code == ASHIFT
481683e1 9891 && CONST_INT_P (XEXP (varop, 1))
32e9fa48 9892 && (new_rtx = simplify_const_binary_operation (ASHIFT, result_mode,
9893 XEXP (varop, 1),
9894 GEN_INT (count))) != 0
481683e1 9895 && CONST_INT_P (new_rtx)
230d793d 9896 && merge_outer_ops (&outer_op, &outer_const, PLUS,
32e9fa48 9897 INTVAL (new_rtx), result_mode, &complement_p))
9898 {
9899 varop = XEXP (varop, 0);
9900 continue;
9901 }
9902
9903 /* Check for 'PLUS signbit', which is the canonical form of 'XOR
9904 signbit', and attempt to change the PLUS to an XOR and move it to
9905 the outer operation as is done above in the AND/IOR/XOR case
9906 leg for shift(logical). See details in logical handling above
471854f8 9907 for the reasoning behind doing so. */
b757b9f8 9908 if (code == LSHIFTRT
481683e1 9909 && CONST_INT_P (XEXP (varop, 1))
b757b9f8 9910 && mode_signbit_p (result_mode, XEXP (varop, 1))
32e9fa48 9911 && (new_rtx = simplify_const_binary_operation (code, result_mode,
9912 XEXP (varop, 1),
9913 GEN_INT (count))) != 0
481683e1 9914 && CONST_INT_P (new_rtx)
b757b9f8 9915 && merge_outer_ops (&outer_op, &outer_const, XOR,
32e9fa48 9916 INTVAL (new_rtx), result_mode, &complement_p))
9917 {
9918 varop = XEXP (varop, 0);
9919 continue;
9920 }
9921
9922 break;
9923
9924 case MINUS:
9925 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
9926 with C the size of VAROP - 1 and the shift is logical if
9927 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9928 we have a (gt X 0) operation. If the shift is arithmetic with
9929 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9930 we have a (neg (gt X 0)) operation. */
9931
9932 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9933 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
41e8659e 9934 && count == (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)
230d793d 9935 && (code == LSHIFTRT || code == ASHIFTRT)
481683e1 9936 && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
41e8659e 9937 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9938 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9939 {
9940 count = 0;
9941 varop = gen_rtx_GT (GET_MODE (varop), XEXP (varop, 1),
9942 const0_rtx);
9943
9944 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
f1c6ba8b 9945 varop = gen_rtx_NEG (GET_MODE (varop), varop);
9946
9947 continue;
9948 }
9949 break;
9950
9951 case TRUNCATE:
9952 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9953 if the truncate does not affect the value. */
9954 if (code == LSHIFTRT
9955 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
481683e1 9956 && CONST_INT_P (XEXP (XEXP (varop, 0), 1))
6e0ef100 9957 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9958 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9959 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9960 {
9961 rtx varop_inner = XEXP (varop, 0);
9962
770ae6cc 9963 varop_inner
9964 = gen_rtx_LSHIFTRT (GET_MODE (varop_inner),
9965 XEXP (varop_inner, 0),
9966 GEN_INT
9967 (count + INTVAL (XEXP (varop_inner, 1))));
9968 varop = gen_rtx_TRUNCATE (GET_MODE (varop), varop_inner);
9969 count = 0;
9970 continue;
9971 }
9972 break;
663522cb 9973
9974 default:
9975 break;
9976 }
9977
9978 break;
9979 }
9980
9981 shift_mode = try_widen_shift_mode (code, varop, count, result_mode, mode,
9982 outer_op, outer_const);
9983
9984 /* We have now finished analyzing the shift. The result should be
9985 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
f822d252 9986 OUTER_OP is non-UNKNOWN, it is an operation that needs to be applied
230d793d 9987 to the result of the shift. OUTER_CONST is the relevant constant,
41e8659e 9988 but we must turn off all bits turned off in the shift. */
230d793d 9989
9990 if (outer_op == UNKNOWN
9991 && orig_code == code && orig_count == count
9992 && varop == orig_varop
9993 && shift_mode == GET_MODE (varop))
9994 return NULL_RTX;
230d793d 9995
9996 /* Make a SUBREG if necessary. If we can't make it, fail. */
9997 varop = gen_lowpart (shift_mode, varop);
9998 if (varop == NULL_RTX || GET_CODE (varop) == CLOBBER)
9999 return NULL_RTX;
230d793d 10000
10001 /* If we have an outer operation and we just made a shift, it is
10002 possible that we could have simplified the shift were it not
10003 for the outer operation. So try to do the simplification
10004 recursively. */
10005
10006 if (outer_op != UNKNOWN)
10007 x = simplify_shift_const_1 (code, shift_mode, varop, count);
10008 else
10009 x = NULL_RTX;
10010
10011 if (x == NULL_RTX)
10012 x = simplify_gen_binary (code, shift_mode, varop, GEN_INT (count));
224eeff2 10013
e0a2f705 10014 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
10015 turn off all the bits that the shift would have turned off. */
10016 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 10017 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d 10018 GET_MODE_MASK (result_mode) >> orig_count);
663522cb 10019
230d793d 10020 /* Do the remainder of the processing in RESULT_MODE. */
f243cee6 10021 x = gen_lowpart_or_truncate (result_mode, x);
10022
10023 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
10024 operation. */
10025 if (complement_p)
e869aa39 10026 x = simplify_gen_unary (NOT, result_mode, x, result_mode);
230d793d 10027
f822d252 10028 if (outer_op != UNKNOWN)
230d793d 10029 {
10030 if (GET_RTX_CLASS (outer_op) != RTX_UNARY
10031 && GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7e4ce834 10032 outer_const = trunc_int_for_mode (outer_const, result_mode);
10033
10034 if (outer_op == AND)
5f4f0e22 10035 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d 10036 else if (outer_op == SET)
10037 {
10038 /* This means that we have determined that the result is
10039 equivalent to a constant. This should be rare. */
10040 if (!side_effects_p (x))
10041 x = GEN_INT (outer_const);
10042 }
ec8e098d 10043 else if (GET_RTX_CLASS (outer_op) == RTX_UNARY)
f1c6ba8b 10044 x = simplify_gen_unary (outer_op, result_mode, x, result_mode);
230d793d 10045 else
10046 x = simplify_gen_binary (outer_op, result_mode, x,
10047 GEN_INT (outer_const));
10048 }
10049
10050 return x;
663522cb 10051}
10052
10053/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
10054 The result of the shift is RESULT_MODE. If we cannot simplify it,
10055 return X or, if it is NULL, synthesize the expression with
10056 simplify_gen_binary. Otherwise, return a simplified value.
10057
10058 The shift is normally computed in the widest mode we find in VAROP, as
10059 long as it isn't a different number of words than RESULT_MODE. Exceptions
10060 are ASHIFTRT and ROTATE, which are always done in their original mode. */
10061
10062static rtx
10063simplify_shift_const (rtx x, enum rtx_code code, enum machine_mode result_mode,
10064 rtx varop, int count)
10065{
10066 rtx tem = simplify_shift_const_1 (code, result_mode, varop, count);
10067 if (tem)
10068 return tem;
10069
10070 if (!x)
10071 x = simplify_gen_binary (code, GET_MODE (varop), varop, GEN_INT (count));
10072 if (GET_MODE (x) != result_mode)
10073 x = gen_lowpart (result_mode, x);
10074 return x;
10075}
10076
10077\f
10078/* Like recog, but we receive the address of a pointer to a new pattern.
10079 We try to match the rtx that the pointer points to.
10080 If that fails, we may try to modify or replace the pattern,
10081 storing the replacement into the same pointer object.
10082
10083 Modifications include deletion or addition of CLOBBERs.
10084
10085 PNOTES is a pointer to a location where any REG_UNUSED notes added for
10086 the CLOBBERs are placed.
10087
10088 The value is the final insn code from the pattern ultimately matched,
10089 or -1. */
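/* For example, on a machine whose add pattern also clobbers a flags
   register, recog may report that one CLOBBER must be added; the rebuilt
   PARALLEL is accepted below only if that register is dead at INSN, and
   a REG_UNUSED note is queued for it.  */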
10090
10091static int
79a490a9 10092recog_for_combine (rtx *pnewpat, rtx insn, rtx *pnotes)
230d793d 10093{
b3694847 10094 rtx pat = *pnewpat;
10095 int insn_code_number;
10096 int num_clobbers_to_add = 0;
10097 int i;
10098 rtx notes = 0;
e6d83128 10099 rtx old_notes, old_pat;
230d793d 10100
10101 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
10102 we use to indicate that something didn't match. If we find such a
10103 thing, force rejection. */
d96023cf 10104 if (GET_CODE (pat) == PARALLEL)
974f4146 10105 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
10106 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
10107 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
10108 return -1;
10109
10110 old_pat = PATTERN (insn);
10111 old_notes = REG_NOTES (insn);
10112 PATTERN (insn) = pat;
10113 REG_NOTES (insn) = 0;
c1194d74 10114
e6d83128 10115 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
10116 if (dump_file && (dump_flags & TDF_DETAILS))
10117 {
10118 if (insn_code_number < 0)
10119 fputs ("Failed to match this instruction:\n", dump_file);
10120 else
10121 fputs ("Successfully matched this instruction:\n", dump_file);
10122 print_rtl_single (dump_file, pat);
10123 }
10124
10125 /* If it isn't, there is the possibility that we previously had an insn
10126 that clobbered some register as a side effect, but the combined
10127 insn doesn't need to do that. So try once more without the clobbers
10128 unless this represents an ASM insn. */
10129
10130 if (insn_code_number < 0 && ! check_asm_operands (pat)
10131 && GET_CODE (pat) == PARALLEL)
10132 {
10133 int pos;
10134
10135 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
10136 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
10137 {
10138 if (i != pos)
10139 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
10140 pos++;
10141 }
10142
10143 SUBST_INT (XVECLEN (pat, 0), pos);
10144
10145 if (pos == 1)
10146 pat = XVECEXP (pat, 0, 0);
10147
10148 PATTERN (insn) = pat;
10149 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
10150 if (dump_file && (dump_flags & TDF_DETAILS))
10151 {
10152 if (insn_code_number < 0)
10153 fputs ("Failed to match this instruction:\n", dump_file);
10154 else
10155 fputs ("Successfully matched this instruction:\n", dump_file);
10156 print_rtl_single (dump_file, pat);
10157 }
230d793d 10158 }
10159 PATTERN (insn) = old_pat;
10160 REG_NOTES (insn) = old_notes;
230d793d 10161
10162 /* Recognize all noop sets; these will be killed by a followup pass. */
10163 if (insn_code_number < 0 && GET_CODE (pat) == SET && set_noop_p (pat))
10164 insn_code_number = NOOP_MOVE_INSN_CODE, num_clobbers_to_add = 0;
10165
10166 /* If we had any clobbers to add, make a new pattern that contains
10167 them. Then check to make sure that all of them are dead. */
10168 if (num_clobbers_to_add)
10169 {
38a448ca 10170 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
10171 rtvec_alloc (GET_CODE (pat) == PARALLEL
10172 ? (XVECLEN (pat, 0)
10173 + num_clobbers_to_add)
10174 : num_clobbers_to_add + 1));
10175
10176 if (GET_CODE (pat) == PARALLEL)
10177 for (i = 0; i < XVECLEN (pat, 0); i++)
10178 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
10179 else
10180 XVECEXP (newpat, 0, 0) = pat;
10181
10182 add_clobbers (newpat, insn_code_number);
10183
10184 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
10185 i < XVECLEN (newpat, 0); i++)
10186 {
f8cfc6aa 10187 if (REG_P (XEXP (XVECEXP (newpat, 0, i), 0))
10188 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
10189 return -1;
b8698a0f 10190 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) != SCRATCH)
10191 {
10192 gcc_assert (REG_P (XEXP (XVECEXP (newpat, 0, i), 0)));
10193 notes = alloc_reg_note (REG_UNUSED,
10194 XEXP (XVECEXP (newpat, 0, i), 0), notes);
6fb5fa3c 10195 }
10196 }
10197 pat = newpat;
10198 }
10199
10200 *pnewpat = pat;
10201 *pnotes = notes;
10202
10203 return insn_code_number;
10204}
10205\f
10206/* Like gen_lowpart_general but for use by combine. In combine it
10207 is not possible to create any new pseudoregs. However, it is
10208 safe to create invalid memory addresses, because combine will
10209 try to recognize them and all they will do is make the combine
10210 attempt fail.
10211
10212 If for some reason this cannot do its job, an rtx
10213 (clobber (const_int 0)) is returned.
10214 An insn containing that will not be recognized. */
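/* For example, with 4-byte words, taking the QImode lowpart of a
   (mem:SI ...) on a big-endian target yields the memory reference at
   byte offset 3, keeping the address just past the data unchanged.  */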
10215
230d793d 10216static rtx
7a32a925 10217gen_lowpart_for_combine (enum machine_mode omode, rtx x)
230d793d 10218{
7a32a925
RH
10219 enum machine_mode imode = GET_MODE (x);
10220 unsigned int osize = GET_MODE_SIZE (omode);
10221 unsigned int isize = GET_MODE_SIZE (imode);
10222 rtx result;
10223
7a32a925 10224 if (omode == imode)
10225 return x;
10226
10227 /* Return identity if this is a CONST or symbolic reference. */
10228 if (omode == Pmode
10229 && (GET_CODE (x) == CONST
10230 || GET_CODE (x) == SYMBOL_REF
10231 || GET_CODE (x) == LABEL_REF))
10232 return x;
10233
10234 /* We can only support MODE being wider than a word if X is a
10235 constant integer or has a mode the same size. */
10236 if (GET_MODE_SIZE (omode) > UNITS_PER_WORD
10237 && ! ((imode == VOIDmode
481683e1 10238 && (CONST_INT_P (x)
eae957a8 10239 || GET_CODE (x) == CONST_DOUBLE))
10240 || isize == osize))
10241 goto fail;
10242
10243 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
10244 won't know what to do. So we will strip off the SUBREG here and
10245 process normally. */
3c0cb5de 10246 if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
10247 {
10248 x = SUBREG_REG (x);
10249
10250 /* For use in case we fall down into the address adjustments
10251 further below, we need to adjust the known mode and size of
10252 x; imode and isize, since we just adjusted x. */
10253 imode = GET_MODE (x);
10254
10255 if (imode == omode)
230d793d 10256 return x;
10257
10258 isize = GET_MODE_SIZE (imode);
10259 }
10260
10261 result = gen_lowpart_common (omode, x);
10262
10263 if (result)
10264 return result;
10265
3c0cb5de 10266 if (MEM_P (x))
230d793d 10267 {
b3694847 10268 int offset = 0;
10269
10270 /* Refuse to work on a volatile memory ref or one with a mode-dependent
10271 address. */
10272 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7a32a925 10273 goto fail;
230d793d
RS
10274
10275 /* If we want to refer to something bigger than the original memref,
9a5a17f3 10276 generate a paradoxical subreg instead. That will force a reload
230d793d 10277 of the original memref X. */
10278 if (isize < osize)
10279 return gen_rtx_SUBREG (omode, x, 0);
230d793d 10280
f76b9db2 10281 if (WORDS_BIG_ENDIAN)
7a32a925 10282 offset = MAX (isize, UNITS_PER_WORD) - MAX (osize, UNITS_PER_WORD);
c5c76735 10283
10284 /* Adjust the address so that the address-after-the-data is
10285 unchanged. */
f76b9db2 10286 if (BYTES_BIG_ENDIAN)
7a32a925 10287 offset -= MIN (UNITS_PER_WORD, osize) - MIN (UNITS_PER_WORD, isize);
f1ec5147 10288
7a32a925 10289 return adjust_address_nv (x, omode, offset);
10290 }
10291
10292 /* If X is a comparison operator, rewrite it in a new mode. This
10293 probably won't match, but may allow further simplifications. */
ec8e098d 10294 else if (COMPARISON_P (x))
7a32a925 10295 return gen_rtx_fmt_ee (GET_CODE (x), omode, XEXP (x, 0), XEXP (x, 1));
230d793d
RS
10296
10297 /* If we couldn't simplify X any other way, just enclose it in a
10298 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 10299 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 10300 else
dfbe1b2f 10301 {
ddef6bc7 10302 int offset = 0;
e0e08ac2 10303 rtx res;
dfbe1b2f 10304
10305 offset = subreg_lowpart_offset (omode, imode);
10306 if (imode == VOIDmode)
80ba02b1 10307 {
10308 imode = int_mode_for_mode (omode);
10309 x = gen_lowpart_common (imode, x);
10310 if (x == NULL)
10311 goto fail;
80ba02b1 10312 }
7a32a925 10313 res = simplify_gen_subreg (omode, x, imode, offset);
10314 if (res)
10315 return res;
dfbe1b2f 10316 }
10317
10318 fail:
b267bd41 10319 return gen_rtx_CLOBBER (omode, const0_rtx);
10320}
10321\f
10322/* Simplify a comparison between *POP0 and *POP1 where CODE is the
10323 comparison code that will be tested.
10324
10325 The result is a possibly different comparison code to use. *POP0 and
10326 *POP1 may be updated.
10327
10328 It is possible that we might detect that a comparison is either always
10329 true or always false. However, we do not perform general constant
5089e22e 10330 folding in combine, so this knowledge isn't useful. Such tautologies
10331 should have been detected earlier. Hence we ignore all such cases. */
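/* One transformation tried below: when both operands are identical
   constant shifts whose discarded bits are known zero, the shifts can be
   dropped, e.g. (gtu (lshiftrt X (const_int 2)) (lshiftrt Y (const_int 2)))
   becomes (gtu X Y) when the low two bits of X and Y are known zero.  */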
10332
10333static enum rtx_code
79a490a9 10334simplify_comparison (enum rtx_code code, rtx *pop0, rtx *pop1)
10335{
10336 rtx op0 = *pop0;
10337 rtx op1 = *pop1;
10338 rtx tem, tem1;
10339 int i;
10340 enum machine_mode mode, tmode;
10341
10342 /* Try a few ways of applying the same transformation to both operands. */
10343 while (1)
10344 {
10345#ifndef WORD_REGISTER_OPERATIONS
10346 /* The test below this one won't handle SIGN_EXTENDs on these machines,
10347 so check specially. */
10348 if (code != GTU && code != GEU && code != LTU && code != LEU
10349 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
10350 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10351 && GET_CODE (XEXP (op1, 0)) == ASHIFT
10352 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
10353 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
10354 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 10355 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
481683e1 10356 && CONST_INT_P (XEXP (op0, 1))
10357 && XEXP (op0, 1) == XEXP (op1, 1)
10358 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10359 && XEXP (op0, 1) == XEXP (XEXP (op1, 0), 1)
10360 && (INTVAL (XEXP (op0, 1))
10361 == (GET_MODE_BITSIZE (GET_MODE (op0))
10362 - (GET_MODE_BITSIZE
10363 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
10364 {
10365 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
10366 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
10367 }
10368#endif
10369
10370 /* If both operands are the same constant shift, see if we can ignore the
10371 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 10372 this shift are known to be zero for both inputs and if the type of
230d793d 10373 comparison is compatible with the shift. */
10374 if (GET_CODE (op0) == GET_CODE (op1)
10375 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10376 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 10377 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
10378 && (code != GT && code != LT && code != GE && code != LE))
10379 || (GET_CODE (op0) == ASHIFTRT
10380 && (code != GTU && code != LTU
99dc5306 10381 && code != GEU && code != LEU)))
481683e1 10382 && CONST_INT_P (XEXP (op0, 1))
10383 && INTVAL (XEXP (op0, 1)) >= 0
10384 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10385 && XEXP (op0, 1) == XEXP (op1, 1))
10386 {
10387 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 10388 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
10389 int shift_count = INTVAL (XEXP (op0, 1));
10390
10391 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
10392 mask &= (mask >> shift_count) << shift_count;
45620ed4 10393 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
10394 mask = (mask & (mask << shift_count)) >> shift_count;
10395
10396 if ((nonzero_bits (XEXP (op0, 0), mode) & ~mask) == 0
10397 && (nonzero_bits (XEXP (op1, 0), mode) & ~mask) == 0)
10398 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
10399 else
10400 break;
10401 }
10402
10403 /* If both operands are AND's of a paradoxical SUBREG by constant, the
10404 SUBREGs are of the same mode, and, in both cases, the AND would
10405 be redundant if the comparison was done in the narrower mode,
10406 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
10407 and the operand's possibly nonzero bits are 0xffffff01; in that case
10408 if we only care about QImode, we don't need the AND). This case
10409 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
10410 STORE_FLAG_VALUE == 1 (e.g., the 386).
10411
10412 Similarly, check for a case where the AND's are ZERO_EXTEND
10413 operations from some narrower mode even though a SUBREG is not
10414 present. */
230d793d 10415
663522cb 10416 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
481683e1
SZ
10417 && CONST_INT_P (XEXP (op0, 1))
10418 && CONST_INT_P (XEXP (op1, 1)))
230d793d 10419 {
7e4dc511
RK
10420 rtx inner_op0 = XEXP (op0, 0);
10421 rtx inner_op1 = XEXP (op1, 0);
10422 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
10423 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
10424 int changed = 0;
663522cb 10425
7e4dc511
RK
10426 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
10427 && (GET_MODE_SIZE (GET_MODE (inner_op0))
10428 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
10429 && (GET_MODE (SUBREG_REG (inner_op0))
10430 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 10431 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 10432 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 10433 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 10434 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
10435 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
10436 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
10437 {
10438 op0 = SUBREG_REG (inner_op0);
10439 op1 = SUBREG_REG (inner_op1);
10440
10441 /* The resulting comparison is always unsigned since we masked
0f41302f 10442 off the original sign bit. */
7e4dc511
RK
10443 code = unsigned_condition (code);
10444
10445 changed = 1;
10446 }
230d793d 10447
7e4dc511
RK
10448 else if (c0 == c1)
10449 for (tmode = GET_CLASS_NARROWEST_MODE
10450 (GET_MODE_CLASS (GET_MODE (op0)));
10451 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
e51712db 10452 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
7e4dc511 10453 {
4de249d9
PB
10454 op0 = gen_lowpart (tmode, inner_op0);
10455 op1 = gen_lowpart (tmode, inner_op1);
66415c8b 10456 code = unsigned_condition (code);
7e4dc511
RK
10457 changed = 1;
10458 break;
10459 }
10460
10461 if (! changed)
10462 break;
230d793d 10463 }
3a19aabc 10464
ad25ba17
RK
10465 /* If both operands are NOT, we can strip off the outer operation
10466 and adjust the comparison code for swapped operands; similarly for
10467 NEG, except that this must be an equality comparison. */
10468 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
10469 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
10470 && (code == EQ || code == NE)))
10471 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 10472
230d793d
RS
10473 else
10474 break;
10475 }
663522cb 10476
230d793d 10477 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
10478 comparison code appropriately, but don't do this if the second operand
10479 is already a constant integer. */
8c9864f3 10480 if (swap_commutative_operands_p (op0, op1))
230d793d
RS
10481 {
10482 tem = op0, op0 = op1, op1 = tem;
10483 code = swap_condition (code);
10484 }
10485
10486 /* We now enter a loop during which we will try to simplify the comparison.
 10487     For the most part, we are only concerned with comparisons with zero,
 10488     but some things may really be comparisons with zero that do not
 10489     start out looking that way.  */
10490
481683e1 10491 while (CONST_INT_P (op1))
230d793d
RS
10492 {
10493 enum machine_mode mode = GET_MODE (op0);
770ae6cc 10494 unsigned int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 10495 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
10496 int equality_comparison_p;
10497 int sign_bit_comparison_p;
10498 int unsigned_comparison_p;
5f4f0e22 10499 HOST_WIDE_INT const_op;
230d793d
RS
10500
10501 /* We only want to handle integral modes. This catches VOIDmode,
10502 CCmode, and the floating-point modes. An exception is that we
10503 can handle VOIDmode if OP0 is a COMPARE or a comparison
10504 operation. */
10505
10506 if (GET_MODE_CLASS (mode) != MODE_INT
10507 && ! (mode == VOIDmode
ec8e098d 10508 && (GET_CODE (op0) == COMPARE || COMPARISON_P (op0))))
230d793d
RS
10509 break;
10510
10511 /* Get the constant we are comparing against and turn off all bits
10512 not on in our mode. */
71012d97
GK
10513 const_op = INTVAL (op1);
10514 if (mode != VOIDmode)
10515 const_op = trunc_int_for_mode (const_op, mode);
b4fbaca7 10516 op1 = GEN_INT (const_op);
230d793d
RS
10517
10518 /* If we are comparing against a constant power of two and the value
951553af 10519 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
10520 `and'ed with that bit), we can replace this with a comparison
10521 with zero. */
10522 if (const_op
10523 && (code == EQ || code == NE || code == GE || code == GEU
10524 || code == LT || code == LTU)
5f4f0e22 10525 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10526 && exact_log2 (const_op) >= 0
e51712db 10527 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
230d793d
RS
10528 {
10529 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
10530 op1 = const0_rtx, const_op = 0;
10531 }
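	  /* For example (a sketch with a hypothetical X): if op0 is
	     (and:SI X (const_int 8)), nonzero_bits (op0, mode) is 8, so
	     (eq op0 (const_int 8)) becomes (ne op0 (const_int 0)) -- the
	     AND can only yield 0 or 8, so equality with 8 is the same as
	     being nonzero.  */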
10532
d0ab8cd3
RK
10533 /* Similarly, if we are comparing a value known to be either -1 or
10534 0 with -1, change it to the opposite comparison against zero. */
10535
10536 if (const_op == -1
10537 && (code == EQ || code == NE || code == GT || code == LE
10538 || code == GEU || code == LTU)
10539 && num_sign_bit_copies (op0, mode) == mode_width)
10540 {
10541 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
10542 op1 = const0_rtx, const_op = 0;
10543 }
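	  /* For example: if num_sign_bit_copies shows op0 is either 0 or -1
	     (every bit is a copy of the sign bit), then
	     (eq op0 (const_int -1)) becomes (ne op0 (const_int 0)), an
	     easier comparison.  */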
10544
230d793d 10545 /* Do some canonicalizations based on the comparison code. We prefer
663522cb 10546 comparisons against zero and then prefer equality comparisons.
4803a34a 10547 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
10548
10549 switch (code)
10550 {
10551 case LT:
4803a34a
RK
 10552	  /* < C is equivalent to <= (C - 1).  */
10553 if (const_op > 0)
230d793d 10554 {
4803a34a 10555 const_op -= 1;
5f4f0e22 10556 op1 = GEN_INT (const_op);
230d793d
RS
10557 code = LE;
10558 /* ... fall through to LE case below. */
10559 }
10560 else
10561 break;
10562
10563 case LE:
4803a34a
RK
 10564	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
10565 if (const_op < 0)
10566 {
10567 const_op += 1;
5f4f0e22 10568 op1 = GEN_INT (const_op);
4803a34a
RK
10569 code = LT;
10570 }
230d793d
RS
10571
10572 /* If we are doing a <= 0 comparison on a value known to have
10573 a zero sign bit, we can replace this with == 0. */
10574 else if (const_op == 0
5f4f0e22 10575 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10576 && (nonzero_bits (op0, mode)
5f4f0e22 10577 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10578 code = EQ;
10579 break;
10580
10581 case GE:
0f41302f 10582 /* >= C is equivalent to > (C - 1). */
4803a34a 10583 if (const_op > 0)
230d793d 10584 {
4803a34a 10585 const_op -= 1;
5f4f0e22 10586 op1 = GEN_INT (const_op);
230d793d
RS
10587 code = GT;
10588 /* ... fall through to GT below. */
10589 }
10590 else
10591 break;
10592
10593 case GT:
663522cb 10594 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
4803a34a
RK
10595 if (const_op < 0)
10596 {
10597 const_op += 1;
5f4f0e22 10598 op1 = GEN_INT (const_op);
4803a34a
RK
10599 code = GE;
10600 }
230d793d
RS
10601
10602 /* If we are doing a > 0 comparison on a value known to have
10603 a zero sign bit, we can replace this with != 0. */
10604 else if (const_op == 0
5f4f0e22 10605 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10606 && (nonzero_bits (op0, mode)
5f4f0e22 10607 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
10608 code = NE;
10609 break;
10610
230d793d 10611 case LTU:
4803a34a
RK
10612 /* < C is equivalent to <= (C - 1). */
10613 if (const_op > 0)
10614 {
10615 const_op -= 1;
5f4f0e22 10616 op1 = GEN_INT (const_op);
4803a34a 10617 code = LEU;
0f41302f 10618 /* ... fall through ... */
4803a34a 10619 }
d0ab8cd3
RK
10620
10621 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
10622 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10623 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10624 {
10625 const_op = 0, op1 = const0_rtx;
10626 code = GE;
10627 break;
10628 }
4803a34a
RK
10629 else
10630 break;
230d793d
RS
10631
10632 case LEU:
10633 /* unsigned <= 0 is equivalent to == 0 */
10634 if (const_op == 0)
10635 code = EQ;
d0ab8cd3 10636
0f41302f 10637 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
10638 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10639 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10640 {
10641 const_op = 0, op1 = const0_rtx;
10642 code = GE;
10643 }
230d793d
RS
10644 break;
10645
4803a34a 10646 case GEU:
b8ff6ca0 10647 /* >= C is equivalent to > (C - 1). */
4803a34a
RK
10648 if (const_op > 1)
10649 {
10650 const_op -= 1;
5f4f0e22 10651 op1 = GEN_INT (const_op);
4803a34a 10652 code = GTU;
0f41302f 10653 /* ... fall through ... */
4803a34a 10654 }
d0ab8cd3
RK
10655
10656 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
10657 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10658 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
10659 {
10660 const_op = 0, op1 = const0_rtx;
10661 code = LT;
8b2e69e1 10662 break;
d0ab8cd3 10663 }
4803a34a
RK
10664 else
10665 break;
10666
230d793d
RS
10667 case GTU:
10668 /* unsigned > 0 is equivalent to != 0 */
10669 if (const_op == 0)
10670 code = NE;
d0ab8cd3
RK
10671
10672 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2 10673 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
e869aa39 10674 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
10675 {
10676 const_op = 0, op1 = const0_rtx;
10677 code = LT;
10678 }
230d793d 10679 break;
e9a25f70
JL
10680
10681 default:
10682 break;
230d793d
RS
10683 }
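      /* The effect of these canonicalizations, sketched on hypothetical
	 SImode operands: (lt X (const_int 5)) becomes (le X (const_int 4)),
	 (geu X (const_int 5)) becomes (gtu X (const_int 4)), and an
	 unsigned test such as (ltu X (const_int 0x80000000)) becomes the
	 sign-bit test (ge X (const_int 0)).  */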
10684
10685 /* Compute some predicates to simplify code below. */
10686
10687 equality_comparison_p = (code == EQ || code == NE);
10688 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10689 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
d5010e66 10690 || code == GEU);
230d793d 10691
6139ff20
RK
10692 /* If this is a sign bit comparison and we can do arithmetic in
10693 MODE, say that we will only be needing the sign bit of OP0. */
10694 if (sign_bit_comparison_p
10695 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10696 op0 = force_to_mode (op0, mode,
10697 ((HOST_WIDE_INT) 1
10698 << (GET_MODE_BITSIZE (mode) - 1)),
b1257407 10699 0);
6139ff20 10700
230d793d
RS
10701 /* Now try cases based on the opcode of OP0. If none of the cases
10702 does a "continue", we exit this loop immediately after the
10703 switch. */
10704
10705 switch (GET_CODE (op0))
10706 {
10707 case ZERO_EXTRACT:
10708 /* If we are extracting a single bit from a variable position in
10709 a constant that has only a single bit set and are comparing it
663522cb 10710 with zero, we can convert this into an equality comparison
d7cd794f 10711 between the position and the location of the single bit. */
a475bff7
RH
10712 /* Except we can't if SHIFT_COUNT_TRUNCATED is set, since we might
10713 have already reduced the shift count modulo the word size. */
10714 if (!SHIFT_COUNT_TRUNCATED
481683e1 10715 && CONST_INT_P (XEXP (op0, 0))
230d793d
RS
10716 && XEXP (op0, 1) == const1_rtx
10717 && equality_comparison_p && const_op == 0
d7cd794f 10718 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 10719 {
f76b9db2 10720 if (BITS_BIG_ENDIAN)
0d8e55d8 10721 {
da920570
ZW
10722 enum machine_mode new_mode
10723 = mode_for_extraction (EP_extzv, 1);
10724 if (new_mode == MAX_MACHINE_MODE)
10725 i = BITS_PER_WORD - 1 - i;
10726 else
10727 {
10728 mode = new_mode;
10729 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10730 }
0d8e55d8 10731 }
230d793d
RS
10732
10733 op0 = XEXP (op0, 2);
5f4f0e22 10734 op1 = GEN_INT (i);
230d793d
RS
10735 const_op = i;
10736
10737 /* Result is nonzero iff shift count is equal to I. */
10738 code = reverse_condition (code);
10739 continue;
10740 }
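	  /* A sketch of the result (BITS_BIG_ENDIAN == 0,
	     SHIFT_COUNT_TRUNCATED unset, hypothetical position rtx POS):
	     (eq (zero_extract (const_int 4) (const_int 1) POS)
	     (const_int 0)) extracts bit POS of the constant 4, which is
	     nonzero only for POS == 2, so the test becomes
	     (ne POS (const_int 2)).  */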
230d793d 10741
0f41302f 10742 /* ... fall through ... */
230d793d
RS
10743
10744 case SIGN_EXTRACT:
10745 tem = expand_compound_operation (op0);
10746 if (tem != op0)
10747 {
10748 op0 = tem;
10749 continue;
10750 }
10751 break;
10752
10753 case NOT:
10754 /* If testing for equality, we can take the NOT of the constant. */
10755 if (equality_comparison_p
10756 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10757 {
10758 op0 = XEXP (op0, 0);
10759 op1 = tem;
10760 continue;
10761 }
10762
10763 /* If just looking at the sign bit, reverse the sense of the
10764 comparison. */
10765 if (sign_bit_comparison_p)
10766 {
10767 op0 = XEXP (op0, 0);
10768 code = (code == GE ? LT : GE);
10769 continue;
10770 }
10771 break;
10772
10773 case NEG:
10774 /* If testing for equality, we can take the NEG of the constant. */
10775 if (equality_comparison_p
10776 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10777 {
10778 op0 = XEXP (op0, 0);
10779 op1 = tem;
10780 continue;
10781 }
10782
10783 /* The remaining cases only apply to comparisons with zero. */
10784 if (const_op != 0)
10785 break;
10786
10787 /* When X is ABS or is known positive,
10788 (neg X) is < 0 if and only if X != 0. */
10789
10790 if (sign_bit_comparison_p
10791 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 10792 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10793 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10794 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
10795 {
10796 op0 = XEXP (op0, 0);
10797 code = (code == LT ? NE : EQ);
10798 continue;
10799 }
10800
3bed8141 10801 /* If we have NEG of something whose two high-order bits are the
0f41302f 10802 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 10803 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
10804 {
10805 op0 = XEXP (op0, 0);
10806 code = swap_condition (code);
10807 continue;
10808 }
10809 break;
10810
10811 case ROTATE:
10812 /* If we are testing equality and our count is a constant, we
10813 can perform the inverse operation on our RHS. */
481683e1 10814 if (equality_comparison_p && CONST_INT_P (XEXP (op0, 1))
230d793d
RS
10815 && (tem = simplify_binary_operation (ROTATERT, mode,
10816 op1, XEXP (op0, 1))) != 0)
10817 {
10818 op0 = XEXP (op0, 0);
10819 op1 = tem;
10820 continue;
10821 }
10822
10823 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
 10824	     a particular bit.  Convert it to an AND with a constant that has
 10825	     only that bit set.  This will be converted into a ZERO_EXTRACT.  */
10826 if (const_op == 0 && sign_bit_comparison_p
481683e1 10827 && CONST_INT_P (XEXP (op0, 1))
5f4f0e22 10828 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10829 {
5f4f0e22
CH
10830 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10831 ((HOST_WIDE_INT) 1
10832 << (mode_width - 1
10833 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10834 code = (code == LT ? NE : EQ);
10835 continue;
10836 }
10837
663522cb 10838 /* Fall through. */
230d793d
RS
10839
10840 case ABS:
10841 /* ABS is ignorable inside an equality comparison with zero. */
10842 if (const_op == 0 && equality_comparison_p)
10843 {
10844 op0 = XEXP (op0, 0);
10845 continue;
10846 }
10847 break;
230d793d
RS
10848
10849 case SIGN_EXTEND:
aa2d0bc3
AO
10850 /* Can simplify (compare (zero/sign_extend FOO) CONST) to
10851 (compare FOO CONST) if CONST fits in FOO's mode and we
10852 are either testing inequality or have an unsigned
10853 comparison with ZERO_EXTEND or a signed comparison with
10854 SIGN_EXTEND. But don't do it if we don't have a compare
10855 insn of the given mode, since we'd have to revert it
10856 later on, and then we wouldn't know whether to sign- or
10857 zero-extend. */
10858 mode = GET_MODE (XEXP (op0, 0));
10859 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10860 && ! unsigned_comparison_p
10861 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5f4f0e22 10862 && ((unsigned HOST_WIDE_INT) const_op
c22cacf3 10863 < (((unsigned HOST_WIDE_INT) 1
aa2d0bc3 10864 << (GET_MODE_BITSIZE (mode) - 1))))
fedc1775 10865 && have_insn_for (COMPARE, mode))
230d793d
RS
10866 {
10867 op0 = XEXP (op0, 0);
10868 continue;
10869 }
10870 break;
10871
10872 case SUBREG:
f917ae96
EB
10873 /* Check for the case where we are comparing A - C1 with C2, that is
10874
10875 (subreg:MODE (plus (A) (-C1))) op (C2)
10876
10877 with C1 a constant, and try to lift the SUBREG, i.e. to do the
10878 comparison in the wider mode. One of the following two conditions
10879 must be true in order for this to be valid:
10880
10881 1. The mode extension results in the same bit pattern being added
10882 on both sides and the comparison is equality or unsigned. As
10883 C2 has been truncated to fit in MODE, the pattern can only be
10884 all 0s or all 1s.
10885
10886 2. The mode extension results in the sign bit being copied on
10887 each side.
10888
10889 The difficulty here is that we have predicates for A but not for
10890 (A - C1) so we need to check that C1 is within proper bounds so
 10891	     as to perturb A as little as possible.  */
a687e897
RK
10892
10893 if (mode_width <= HOST_BITS_PER_WIDE_INT
10894 && subreg_lowpart_p (op0)
f917ae96 10895 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) > mode_width
a687e897 10896 && GET_CODE (SUBREG_REG (op0)) == PLUS
481683e1 10897 && CONST_INT_P (XEXP (SUBREG_REG (op0), 1)))
a687e897 10898 {
f917ae96
EB
10899 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (op0));
10900 rtx a = XEXP (SUBREG_REG (op0), 0);
10901 HOST_WIDE_INT c1 = -INTVAL (XEXP (SUBREG_REG (op0), 1));
10902
10903 if ((c1 > 0
c22cacf3 10904 && (unsigned HOST_WIDE_INT) c1
f917ae96
EB
10905 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)
10906 && (equality_comparison_p || unsigned_comparison_p)
10907 /* (A - C1) zero-extends if it is positive and sign-extends
10908 if it is negative, C2 both zero- and sign-extends. */
10909 && ((0 == (nonzero_bits (a, inner_mode)
10910 & ~GET_MODE_MASK (mode))
10911 && const_op >= 0)
10912 /* (A - C1) sign-extends if it is positive and 1-extends
10913 if it is negative, C2 both sign- and 1-extends. */
10914 || (num_sign_bit_copies (a, inner_mode)
10915 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
10916 - mode_width)
10917 && const_op < 0)))
10918 || ((unsigned HOST_WIDE_INT) c1
10919 < (unsigned HOST_WIDE_INT) 1 << (mode_width - 2)
10920 /* (A - C1) always sign-extends, like C2. */
10921 && num_sign_bit_copies (a, inner_mode)
10922 > (unsigned int) (GET_MODE_BITSIZE (inner_mode)
2d88fcc2 10923 - (mode_width - 1))))
f917ae96
EB
10924 {
10925 op0 = SUBREG_REG (op0);
10926 continue;
c22cacf3 10927 }
a687e897
RK
10928 }
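	  /* Condition 1 above, sketched with hypothetical A: comparing
	     (subreg:QI (plus:SI A (const_int -1))) with (const_int 3) for
	     equality, where C1 == 1 is small and A is known to have no
	     nonzero bits outside QImode, lifts the SUBREG and compares
	     (plus:SI A (const_int -1)) with 3 in SImode.  */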
10929
fe0cf571
RK
10930 /* If the inner mode is narrower and we are extracting the low part,
10931 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10932 if (subreg_lowpart_p (op0)
89f1c7f2
RS
10933 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10934 /* Fall through */ ;
10935 else
230d793d
RS
10936 break;
10937
0f41302f 10938 /* ... fall through ... */
230d793d
RS
10939
10940 case ZERO_EXTEND:
aa2d0bc3
AO
10941 mode = GET_MODE (XEXP (op0, 0));
10942 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10943 && (unsigned_comparison_p || equality_comparison_p)
10944 && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10945 && ((unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode))
fedc1775 10946 && have_insn_for (COMPARE, mode))
230d793d
RS
10947 {
10948 op0 = XEXP (op0, 0);
10949 continue;
10950 }
10951 break;
10952
10953 case PLUS:
20fdd649 10954 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 10955 this for equality comparisons due to pathological cases involving
230d793d 10956 overflows. */
20fdd649
RK
10957 if (equality_comparison_p
10958 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10959 op1, XEXP (op0, 1))))
230d793d
RS
10960 {
10961 op0 = XEXP (op0, 0);
10962 op1 = tem;
10963 continue;
10964 }
10965
10966 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10967 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10968 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10969 {
10970 op0 = XEXP (XEXP (op0, 0), 0);
10971 code = (code == LT ? EQ : NE);
10972 continue;
10973 }
10974 break;
10975
10976 case MINUS:
65945ec1
HPN
10977 /* We used to optimize signed comparisons against zero, but that
10978 was incorrect. Unsigned comparisons against zero (GTU, LEU)
10979 arrive here as equality comparisons, or (GEU, LTU) are
10980 optimized away. No need to special-case them. */
0bd4b461 10981
20fdd649
RK
10982 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10983 (eq B (minus A C)), whichever simplifies. We can only do
10984 this for equality comparisons due to pathological cases involving
10985 overflows. */
10986 if (equality_comparison_p
10987 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10988 XEXP (op0, 1), op1)))
10989 {
10990 op0 = XEXP (op0, 0);
10991 op1 = tem;
10992 continue;
10993 }
10994
10995 if (equality_comparison_p
10996 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10997 XEXP (op0, 0), op1)))
10998 {
10999 op0 = XEXP (op0, 1);
11000 op1 = tem;
11001 continue;
11002 }
11003
230d793d
RS
11004 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
11005 of bits in X minus 1, is one iff X > 0. */
11006 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
481683e1 11007 && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
26c34780
RS
11008 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (op0, 0), 1))
11009 == mode_width - 1
230d793d
RS
11010 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11011 {
11012 op0 = XEXP (op0, 1);
11013 code = (code == GE ? LE : GT);
11014 continue;
11015 }
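	  /* A sketch of the sign-bit identity above, in SImode with a
	     hypothetical X: (ashiftrt X 31) is 0 when X >= 0 and -1 when
	     X < 0, so (minus (ashiftrt X 31) X) is -X for X >= 0 and
	     -(X + 1), a nonnegative value, for X < 0; its sign bit is
	     therefore set exactly when X > 0, and (lt op0 (const_int 0))
	     becomes (gt X (const_int 0)).  */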
11016 break;
11017
11018 case XOR:
11019 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
11020 if C is zero or B is a constant. */
11021 if (equality_comparison_p
11022 && 0 != (tem = simplify_binary_operation (XOR, mode,
11023 XEXP (op0, 1), op1)))
11024 {
11025 op0 = XEXP (op0, 0);
11026 op1 = tem;
11027 continue;
11028 }
11029 break;
11030
11031 case EQ: case NE:
69bc0a1f
JH
11032 case UNEQ: case LTGT:
11033 case LT: case LTU: case UNLT: case LE: case LEU: case UNLE:
11034 case GT: case GTU: case UNGT: case GE: case GEU: case UNGE:
c22cacf3 11035 case UNORDERED: case ORDERED:
230d793d
RS
11036 /* We can't do anything if OP0 is a condition code value, rather
11037 than an actual data value. */
11038 if (const_op != 0
8beccec8 11039 || CC0_P (XEXP (op0, 0))
230d793d
RS
11040 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
11041 break;
11042
11043 /* Get the two operands being compared. */
11044 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
11045 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
11046 else
11047 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
11048
11049 /* Check for the cases where we simply want the result of the
11050 earlier test or the opposite of that result. */
9a915772 11051 if (code == NE || code == EQ
5f4f0e22 11052 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 11053 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 11054 && (STORE_FLAG_VALUE
5f4f0e22
CH
11055 & (((HOST_WIDE_INT) 1
11056 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
aa6683f7 11057 && (code == LT || code == GE)))
230d793d 11058 {
aa6683f7
GK
11059 enum rtx_code new_code;
11060 if (code == LT || code == NE)
11061 new_code = GET_CODE (op0);
11062 else
14f02e73 11063 new_code = reversed_comparison_code (op0, NULL);
23190837 11064
aa6683f7 11065 if (new_code != UNKNOWN)
9a915772 11066 {
aa6683f7
GK
11067 code = new_code;
11068 op0 = tem;
11069 op1 = tem1;
9a915772
JH
11070 continue;
11071 }
230d793d
RS
11072 }
11073 break;
11074
11075 case IOR:
da7d8304 11076 /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
230d793d
RS
11077 iff X <= 0. */
11078 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
11079 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
11080 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
11081 {
11082 op0 = XEXP (op0, 1);
11083 code = (code == GE ? GT : LE);
11084 continue;
11085 }
11086 break;
11087
11088 case AND:
11089 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
11090 will be converted to a ZERO_EXTRACT later. */
11091 if (const_op == 0 && equality_comparison_p
45620ed4 11092 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
11093 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
11094 {
11095 op0 = simplify_and_const_int
41e8659e 11096 (NULL_RTX, mode, gen_rtx_LSHIFTRT (mode,
c22cacf3
MS
11097 XEXP (op0, 1),
11098 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 11099 (HOST_WIDE_INT) 1);
230d793d
RS
11100 continue;
11101 }
11102
11103 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
11104 zero and X is a comparison and C1 and C2 describe only bits set
11105 in STORE_FLAG_VALUE, we can compare with X. */
11106 if (const_op == 0 && equality_comparison_p
5f4f0e22 11107 && mode_width <= HOST_BITS_PER_WIDE_INT
481683e1 11108 && CONST_INT_P (XEXP (op0, 1))
230d793d 11109 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
481683e1 11110 && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
230d793d 11111 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 11112 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
11113 {
11114 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
11115 << INTVAL (XEXP (XEXP (op0, 0), 1)));
663522cb 11116 if ((~STORE_FLAG_VALUE & mask) == 0
ec8e098d 11117 && (COMPARISON_P (XEXP (XEXP (op0, 0), 0))
230d793d 11118 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
ec8e098d 11119 && COMPARISON_P (tem))))
230d793d
RS
11120 {
11121 op0 = XEXP (XEXP (op0, 0), 0);
11122 continue;
11123 }
11124 }
11125
11126 /* If we are doing an equality comparison of an AND of a bit equal
11127 to the sign bit, replace this with a LT or GE comparison of
11128 the underlying value. */
11129 if (equality_comparison_p
11130 && const_op == 0
481683e1 11131 && CONST_INT_P (XEXP (op0, 1))
5f4f0e22 11132 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 11133 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
e51712db 11134 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
11135 {
11136 op0 = XEXP (op0, 0);
11137 code = (code == EQ ? GE : LT);
11138 continue;
11139 }
11140
11141 /* If this AND operation is really a ZERO_EXTEND from a narrower
11142 mode, the constant fits within that mode, and this is either an
11143 equality or unsigned comparison, try to do this comparison in
1e84b34e
AN
11144 the narrower mode.
11145
11146 Note that in:
11147
11148 (ne:DI (and:DI (reg:DI 4) (const_int 0xffffffff)) (const_int 0))
11149 -> (ne:DI (reg:SI 4) (const_int 0))
11150
11151 unless TRULY_NOOP_TRUNCATION allows it or the register is
 11152	     known to hold a value of the required mode, the
 11153	     transformation is invalid.  */
230d793d 11154 if ((equality_comparison_p || unsigned_comparison_p)
481683e1 11155 && CONST_INT_P (XEXP (op0, 1))
230d793d
RS
11156 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
11157 & GET_MODE_MASK (mode))
11158 + 1)) >= 0
11159 && const_op >> i == 0
1e84b34e
AN
11160 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode
11161 && (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (tmode),
11162 GET_MODE_BITSIZE (GET_MODE (op0)))
11163 || (REG_P (XEXP (op0, 0))
11164 && reg_truncated_to_mode (tmode, XEXP (op0, 0)))))
230d793d 11165 {
4de249d9 11166 op0 = gen_lowpart (tmode, XEXP (op0, 0));
230d793d
RS
11167 continue;
11168 }
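	  /* For example (hypothetical X): (and:SI X (const_int 255)) acts
	     as (zero_extend:SI (subreg:QI X)), so an unsigned or equality
	     comparison with a nonnegative constant below 256 can be done
	     directly on the QImode lowpart of X, assuming the truncation
	     is a no-op or X is known to be truncated already.  */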
e5e809f4 11169
70e1b8fc
AM
11170 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1
11171 fits in both M1 and M2 and the SUBREG is either paradoxical
11172 or represents the low part, permute the SUBREG and the AND
11173 and try again. */
11174 if (GET_CODE (XEXP (op0, 0)) == SUBREG)
11175 {
11176 unsigned HOST_WIDE_INT c1;
11177 tmode = GET_MODE (SUBREG_REG (XEXP (op0, 0)));
678e68fc
JW
11178 /* Require an integral mode, to avoid creating something like
11179 (AND:SF ...). */
70e1b8fc
AM
11180 if (SCALAR_INT_MODE_P (tmode)
11181 /* It is unsafe to commute the AND into the SUBREG if the
11182 SUBREG is paradoxical and WORD_REGISTER_OPERATIONS is
11183 not defined. As originally written the upper bits
11184 have a defined value due to the AND operation.
11185 However, if we commute the AND inside the SUBREG then
11186 they no longer have defined values and the meaning of
11187 the code has been changed. */
11188 && (0
9ec36da5 11189#ifdef WORD_REGISTER_OPERATIONS
70e1b8fc
AM
11190 || (mode_width > GET_MODE_BITSIZE (tmode)
11191 && mode_width <= BITS_PER_WORD)
9ec36da5 11192#endif
70e1b8fc
AM
11193 || (mode_width <= GET_MODE_BITSIZE (tmode)
11194 && subreg_lowpart_p (XEXP (op0, 0))))
481683e1 11195 && CONST_INT_P (XEXP (op0, 1))
70e1b8fc
AM
11196 && mode_width <= HOST_BITS_PER_WIDE_INT
11197 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
11198 && ((c1 = INTVAL (XEXP (op0, 1))) & ~mask) == 0
11199 && (c1 & ~GET_MODE_MASK (tmode)) == 0
11200 && c1 != mask
11201 && c1 != GET_MODE_MASK (tmode))
11202 {
bcb34aa3
PB
11203 op0 = simplify_gen_binary (AND, tmode,
11204 SUBREG_REG (XEXP (op0, 0)),
11205 gen_int_mode (c1, tmode));
4de249d9 11206 op0 = gen_lowpart (mode, op0);
70e1b8fc
AM
11207 continue;
11208 }
e5e809f4
JL
11209 }
11210
34ed3bb0
KH
11211 /* Convert (ne (and (not X) 1) 0) to (eq (and X 1) 0). */
11212 if (const_op == 0 && equality_comparison_p
11213 && XEXP (op0, 1) == const1_rtx
11214 && GET_CODE (XEXP (op0, 0)) == NOT)
11215 {
11216 op0 = simplify_and_const_int
e5686da7 11217 (NULL_RTX, mode, XEXP (XEXP (op0, 0), 0), (HOST_WIDE_INT) 1);
34ed3bb0
KH
11218 code = (code == NE ? EQ : NE);
11219 continue;
11220 }
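	  /* For example: bit 0 of (not X) is set exactly when bit 0 of X
	     is clear, so (ne (and (not X) (const_int 1)) (const_int 0))
	     becomes (eq (and X (const_int 1)) (const_int 0)).  */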
11221
9f8e169e 11222 /* Convert (ne (and (lshiftrt (not X)) 1) 0) to
5565e874
KH
11223 (eq (and (lshiftrt X) 1) 0).
11224 Also handle the case where (not X) is expressed using xor. */
9f8e169e
RH
11225 if (const_op == 0 && equality_comparison_p
11226 && XEXP (op0, 1) == const1_rtx
5565e874 11227 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT)
9f8e169e 11228 {
5565e874
KH
11229 rtx shift_op = XEXP (XEXP (op0, 0), 0);
11230 rtx shift_count = XEXP (XEXP (op0, 0), 1);
11231
11232 if (GET_CODE (shift_op) == NOT
11233 || (GET_CODE (shift_op) == XOR
481683e1
SZ
11234 && CONST_INT_P (XEXP (shift_op, 1))
11235 && CONST_INT_P (shift_count)
5565e874
KH
11236 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
11237 && (INTVAL (XEXP (shift_op, 1))
11238 == (HOST_WIDE_INT) 1 << INTVAL (shift_count))))
11239 {
11240 op0 = simplify_and_const_int
11241 (NULL_RTX, mode,
11242 gen_rtx_LSHIFTRT (mode, XEXP (shift_op, 0), shift_count),
11243 (HOST_WIDE_INT) 1);
11244 code = (code == NE ? EQ : NE);
11245 continue;
11246 }
9f8e169e 11247 }
230d793d
RS
11248 break;
11249
11250 case ASHIFT:
45620ed4 11251 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 11252 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 11253 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
11254 shifted right N bits so long as the low-order N bits of C are
11255 zero. */
481683e1 11256 if (CONST_INT_P (XEXP (op0, 1))
230d793d
RS
11257 && INTVAL (XEXP (op0, 1)) >= 0
11258 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
11259 < HOST_BITS_PER_WIDE_INT)
11260 && ((const_op
34785d05 11261 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 11262 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 11263 && (nonzero_bits (XEXP (op0, 0), mode)
663522cb
KH
11264 & ~(mask >> (INTVAL (XEXP (op0, 1))
11265 + ! equality_comparison_p))) == 0)
230d793d 11266 {
7ce787fe
NC
11267 /* We must perform a logical shift, not an arithmetic one,
11268 as we want the top N bits of C to be zero. */
aaaec114 11269 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
663522cb 11270
7ce787fe 11271 temp >>= INTVAL (XEXP (op0, 1));
2496c7bd 11272 op1 = gen_int_mode (temp, mode);
230d793d
RS
11273 op0 = XEXP (op0, 0);
11274 continue;
11275 }
11276
dfbe1b2f 11277 /* If we are doing a sign bit comparison, it means we are testing
230d793d 11278 a particular bit. Convert it to the appropriate AND. */
481683e1 11279 if (sign_bit_comparison_p && CONST_INT_P (XEXP (op0, 1))
5f4f0e22 11280 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 11281 {
5f4f0e22
CH
11282 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11283 ((HOST_WIDE_INT) 1
11284 << (mode_width - 1
11285 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
11286 code = (code == LT ? NE : EQ);
11287 continue;
11288 }
dfbe1b2f
RK
11289
11290 /* If this an equality comparison with zero and we are shifting
11291 the low bit to the sign bit, we can convert this to an AND of the
11292 low-order bit. */
11293 if (const_op == 0 && equality_comparison_p
481683e1 11294 && CONST_INT_P (XEXP (op0, 1))
26c34780
RS
11295 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11296 == mode_width - 1)
dfbe1b2f 11297 {
5f4f0e22
CH
11298 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
11299 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
11300 continue;
11301 }
230d793d
RS
11302 break;
11303
11304 case ASHIFTRT:
d0ab8cd3
RK
11305 /* If this is an equality comparison with zero, we can do this
11306 as a logical shift, which might be much simpler. */
11307 if (equality_comparison_p && const_op == 0
481683e1 11308 && CONST_INT_P (XEXP (op0, 1)))
d0ab8cd3
RK
11309 {
11310 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
11311 XEXP (op0, 0),
11312 INTVAL (XEXP (op0, 1)));
11313 continue;
11314 }
11315
230d793d
RS
11316 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
11317 do the comparison in a narrower mode. */
11318 if (! unsigned_comparison_p
481683e1 11319 && CONST_INT_P (XEXP (op0, 1))
230d793d
RS
11320 && GET_CODE (XEXP (op0, 0)) == ASHIFT
11321 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
11322 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 11323 MODE_INT, 1)) != BLKmode
67e469d7
AM
11324 && (((unsigned HOST_WIDE_INT) const_op
11325 + (GET_MODE_MASK (tmode) >> 1) + 1)
11326 <= GET_MODE_MASK (tmode)))
230d793d 11327 {
4de249d9 11328 op0 = gen_lowpart (tmode, XEXP (XEXP (op0, 0), 0));
230d793d
RS
11329 continue;
11330 }
11331
14a774a9
RK
11332 /* Likewise if OP0 is a PLUS of a sign extension with a
11333 constant, which is usually represented with the PLUS
11334 between the shifts. */
11335 if (! unsigned_comparison_p
481683e1 11336 && CONST_INT_P (XEXP (op0, 1))
14a774a9 11337 && GET_CODE (XEXP (op0, 0)) == PLUS
481683e1 11338 && CONST_INT_P (XEXP (XEXP (op0, 0), 1))
14a774a9
RK
11339 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
11340 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
11341 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
11342 MODE_INT, 1)) != BLKmode
67e469d7
AM
11343 && (((unsigned HOST_WIDE_INT) const_op
11344 + (GET_MODE_MASK (tmode) >> 1) + 1)
11345 <= GET_MODE_MASK (tmode)))
14a774a9
RK
11346 {
11347 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
11348 rtx add_const = XEXP (XEXP (op0, 0), 1);
bcb34aa3
PB
11349 rtx new_const = simplify_gen_binary (ASHIFTRT, GET_MODE (op0),
11350 add_const, XEXP (op0, 1));
14a774a9 11351
bcb34aa3
PB
11352 op0 = simplify_gen_binary (PLUS, tmode,
11353 gen_lowpart (tmode, inner),
11354 new_const);
14a774a9
RK
11355 continue;
11356 }
11357
0f41302f 11358 /* ... fall through ... */
230d793d
RS
11359 case LSHIFTRT:
11360 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 11361 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
11362 by comparing FOO with C shifted left N bits so long as no
11363 overflow occurs. */
481683e1 11364 if (CONST_INT_P (XEXP (op0, 1))
230d793d 11365 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
11366 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
11367 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 11368 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 11369 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
67e469d7
AM
11370 && (((unsigned HOST_WIDE_INT) const_op
11371 + (GET_CODE (op0) != LSHIFTRT
11372 ? ((GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1)) >> 1)
11373 + 1)
11374 : 0))
11375 <= GET_MODE_MASK (mode) >> INTVAL (XEXP (op0, 1))))
230d793d 11376 {
145d3bf2
RE
11377 /* If the shift was logical, then we must make the condition
11378 unsigned. */
11379 if (GET_CODE (op0) == LSHIFTRT)
11380 code = unsigned_condition (code);
11381
230d793d 11382 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 11383 op1 = GEN_INT (const_op);
230d793d
RS
11384 op0 = XEXP (op0, 0);
11385 continue;
11386 }
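	  /* For instance (hypothetical X in SImode): if the low two bits
	     of X are known zero, (eq (lshiftrt X 2) (const_int 5)) can be
	     rewritten as (eq X (const_int 20)) -- the constant is shifted
	     left instead, and the bound checked above guarantees that 20
	     still fits in the mode.  */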
11387
11388 /* If we are using this shift to extract just the sign bit, we
11389 can replace this with an LT or GE comparison. */
11390 if (const_op == 0
11391 && (equality_comparison_p || sign_bit_comparison_p)
481683e1 11392 && CONST_INT_P (XEXP (op0, 1))
26c34780
RS
11393 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
11394 == mode_width - 1)
230d793d
RS
11395 {
11396 op0 = XEXP (op0, 0);
11397 code = (code == NE || code == GT ? LT : GE);
11398 continue;
11399 }
11400 break;
663522cb 11401
e9a25f70
JL
11402 default:
11403 break;
230d793d
RS
11404 }
11405
11406 break;
11407 }
11408
11409 /* Now make any compound operations involved in this comparison. Then,
76d31c63 11410    check for an outermost SUBREG on OP0 that is not doing anything or is
5add6d1a
JL
11411 paradoxical. The latter transformation must only be performed when
11412 it is known that the "extra" bits will be the same in op0 and op1 or
11413 that they don't matter. There are three cases to consider:
11414
11415 1. SUBREG_REG (op0) is a register. In this case the bits are don't
11416 care bits and we can assume they have any convenient value. So
11417 making the transformation is safe.
11418
11419 2. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is not defined.
11420 In this case the upper bits of op0 are undefined. We should not make
11421 the simplification in that case as we do not know the contents of
11422 those bits.
11423
11424 3. SUBREG_REG (op0) is a memory and LOAD_EXTEND_OP is defined and not
f822d252 11425 UNKNOWN. In that case we know those bits are zeros or ones. We must
5add6d1a
JL
11426 also be sure that they are the same as the upper bits of op1.
11427
11428 We can never remove a SUBREG for a non-equality comparison because
11429 the sign bit is in a different place in the underlying object. */
230d793d
RS
11430
11431 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
11432 op1 = make_compound_operation (op1, SET);
11433
11434 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
11435 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
fa4e13e0 11436 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op0))) == MODE_INT
5add6d1a 11437 && (code == NE || code == EQ))
230d793d 11438 {
5add6d1a
JL
11439 if (GET_MODE_SIZE (GET_MODE (op0))
11440 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))
11441 {
dc5c3188
UW
11442 /* For paradoxical subregs, allow case 1 as above. Case 3 isn't
11443 implemented. */
c22cacf3 11444 if (REG_P (SUBREG_REG (op0)))
dc5c3188
UW
11445 {
11446 op0 = SUBREG_REG (op0);
4de249d9 11447 op1 = gen_lowpart (GET_MODE (op0), op1);
dc5c3188 11448 }
5add6d1a
JL
11449 }
11450 else if ((GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
11451 <= HOST_BITS_PER_WIDE_INT)
11452 && (nonzero_bits (SUBREG_REG (op0),
11453 GET_MODE (SUBREG_REG (op0)))
11454 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11455 {
4de249d9 11456 tem = gen_lowpart (GET_MODE (SUBREG_REG (op0)), op1);
230d793d 11457
5add6d1a
JL
11458 if ((nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
11459 & ~GET_MODE_MASK (GET_MODE (op0))) == 0)
11460 op0 = SUBREG_REG (op0), op1 = tem;
11461 }
11462 }
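      /* Case 1 above, sketched: for (eq (subreg:DI (reg:SI R) 0) op1) on
	 a target with 64-bit words, the upper bits of the paradoxical
	 SUBREG are don't-cares, so the test becomes a comparison of
	 (reg:SI R) against the SImode lowpart of op1.  */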
230d793d
RS
11463
11464 /* We now do the opposite procedure: Some machines don't have compare
11465 insns in all modes. If OP0's mode is an integer mode smaller than a
11466 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
11467 mode for which we can do the compare. There are a number of cases in
11468 which we can use the wider mode. */
230d793d
RS
11469
11470 mode = GET_MODE (op0);
11471 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
11472 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
ef89d648 11473 && ! have_insn_for (COMPARE, mode))
230d793d 11474 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
11475 (tmode != VOIDmode
11476 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 11477 tmode = GET_MODE_WIDER_MODE (tmode))
ef89d648 11478 if (have_insn_for (COMPARE, tmode))
230d793d 11479 {
d4c5ac1f
AM
11480 int zero_extended;
11481
5d49d0ea
PB
11482 /* If this is a test for negative, we can make an explicit
11483 test of the sign bit. Test this first so we can use
11484 a paradoxical subreg to extend OP0. */
11485
11486 if (op1 == const0_rtx && (code == LT || code == GE)
11487 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11488 {
11489 op0 = simplify_gen_binary (AND, tmode,
11490 gen_lowpart (tmode, op0),
11491 GEN_INT ((HOST_WIDE_INT) 1
11492 << (GET_MODE_BITSIZE (mode)
11493 - 1)));
11494 code = (code == LT) ? NE : EQ;
11495 break;
11496 }
11497
951553af 11498 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
11499 narrower mode and this is an equality or unsigned comparison,
11500 we can use the wider mode. Similarly for sign-extended
7e4dc511 11501 values, in which case it is true for all comparisons. */
d4c5ac1f
AM
11502 zero_extended = ((code == EQ || code == NE
11503 || code == GEU || code == GTU
11504 || code == LEU || code == LTU)
11505 && (nonzero_bits (op0, tmode)
11506 & ~GET_MODE_MASK (mode)) == 0
481683e1 11507 && ((CONST_INT_P (op1)
d4c5ac1f
AM
11508 || (nonzero_bits (op1, tmode)
11509 & ~GET_MODE_MASK (mode)) == 0)));
11510
11511 if (zero_extended
7e4dc511 11512 || ((num_sign_bit_copies (op0, tmode)
26c34780
RS
11513 > (unsigned int) (GET_MODE_BITSIZE (tmode)
11514 - GET_MODE_BITSIZE (mode)))
a687e897 11515 && (num_sign_bit_copies (op1, tmode)
26c34780
RS
11516 > (unsigned int) (GET_MODE_BITSIZE (tmode)
11517 - GET_MODE_BITSIZE (mode)))))
a687e897 11518 {
14a774a9
RK
11519 /* If OP0 is an AND and we don't have an AND in MODE either,
11520 make a new AND in the proper mode. */
11521 if (GET_CODE (op0) == AND
ef89d648 11522 && !have_insn_for (AND, mode))
bcb34aa3
PB
11523 op0 = simplify_gen_binary (AND, tmode,
11524 gen_lowpart (tmode,
11525 XEXP (op0, 0)),
11526 gen_lowpart (tmode,
11527 XEXP (op0, 1)));
5d49d0ea
PB
11528 else
11529 {
11530 if (zero_extended)
11531 {
11532 op0 = simplify_gen_unary (ZERO_EXTEND, tmode, op0, mode);
11533 op1 = simplify_gen_unary (ZERO_EXTEND, tmode, op1, mode);
11534 }
11535 else
11536 {
11537 op0 = simplify_gen_unary (SIGN_EXTEND, tmode, op0, mode);
11538 op1 = simplify_gen_unary (SIGN_EXTEND, tmode, op1, mode);
11539 }
11540 break;
11541 }
230d793d 11542 }
230d793d
RS
11543 }
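  /* A sketch of the widening path (hypothetical target): for a QImode
     equality comparison on a machine that only has an SImode compare
     insn, both operands are wrapped in (zero_extend:SI ...) -- or in
     (sign_extend:SI ...) when only the sign-extended ranges are known --
     and the comparison proceeds in SImode.  */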
11544
b7a775b2
RK
11545#ifdef CANONICALIZE_COMPARISON
11546 /* If this machine only supports a subset of valid comparisons, see if we
11547 can convert an unsupported one into a supported one. */
11548 CANONICALIZE_COMPARISON (code, op0, op1);
11549#endif
11550
230d793d
RS
11551 *pop0 = op0;
11552 *pop1 = op1;
11553
11554 return code;
11555}
11556\f
49c3b9a8
JJ
 11557/* Utility function for record_value_for_reg.  Count the number of
 11558   rtxs in X.  */
11559static int
11560count_rtxs (rtx x)
11561{
11562 enum rtx_code code = GET_CODE (x);
11563 const char *fmt;
6ffef2ad 11564 int i, j, ret = 1;
49c3b9a8
JJ
11565
 11566  if (GET_RTX_CLASS (code) == RTX_BIN_ARITH
 11567      || GET_RTX_CLASS (code) == RTX_COMM_ARITH)
11568 {
11569 rtx x0 = XEXP (x, 0);
11570 rtx x1 = XEXP (x, 1);
11571
11572 if (x0 == x1)
11573 return 1 + 2 * count_rtxs (x0);
11574
 11575      if ((GET_RTX_CLASS (GET_CODE (x1)) == RTX_BIN_ARITH
 11576	   || GET_RTX_CLASS (GET_CODE (x1)) == RTX_COMM_ARITH)
11577 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11578 return 2 + 2 * count_rtxs (x0)
 11579	       + count_rtxs (x0 == XEXP (x1, 0)
11580 ? XEXP (x1, 1) : XEXP (x1, 0));
11581
 11582      if ((GET_RTX_CLASS (GET_CODE (x0)) == RTX_BIN_ARITH
 11583	   || GET_RTX_CLASS (GET_CODE (x0)) == RTX_COMM_ARITH)
11584 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11585 return 2 + 2 * count_rtxs (x1)
 11586	       + count_rtxs (x1 == XEXP (x0, 0)
11587 ? XEXP (x0, 1) : XEXP (x0, 0));
11588 }
11589
11590 fmt = GET_RTX_FORMAT (code);
11591 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11592 if (fmt[i] == 'e')
11593 ret += count_rtxs (XEXP (x, i));
6ffef2ad
RS
11594 else if (fmt[i] == 'E')
11595 for (j = 0; j < XVECLEN (x, i); j++)
11596 ret += count_rtxs (XVECEXP (x, i, j));
49c3b9a8
JJ
11597
11598 return ret;
11599}
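/* A worked example (a sketch, with hypothetical leaf rtxs A and B): for
   X = (plus A (plus A B)) the function returns
   2 + 2 * count_rtxs (A) + count_rtxs (B) = 5, counting the shared A
   twice to reflect the growth that duplicating it during substitution
   would cause.  */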
11600\f
230d793d 11601/* Utility function for the following routine.  Called when X is part of a value
5eaad481 11602 being stored into last_set_value. Sets last_set_table_tick
230d793d
RS
 11603   for each register mentioned.  Similar to mention_regs in cse.c.  */
11604
11605static void
79a490a9 11606update_table_tick (rtx x)
230d793d 11607{
b3694847
SS
11608 enum rtx_code code = GET_CODE (x);
11609 const char *fmt = GET_RTX_FORMAT (code);
6ffef2ad 11610 int i, j;
230d793d
RS
11611
11612 if (code == REG)
11613 {
770ae6cc 11614 unsigned int regno = REGNO (x);
09e18274 11615 unsigned int endregno = END_REGNO (x);
770ae6cc 11616 unsigned int r;
230d793d 11617
770ae6cc 11618 for (r = regno; r < endregno; r++)
829f8ff7
ILT
11619 {
11620 reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, r);
11621 rsp->last_set_table_tick = label_tick;
11622 }
230d793d
RS
11623
11624 return;
11625 }
663522cb 11626
230d793d 11627 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
230d793d 11628 if (fmt[i] == 'e')
8fd73754
AN
11629 {
11630 /* Check for identical subexpressions. If x contains
11631 identical subexpression we only have to traverse one of
11632 them. */
ec8e098d 11633 if (i == 0 && ARITHMETIC_P (x))
8fd73754
AN
11634 {
11635 /* Note that at this point x1 has already been
11636 processed. */
11637 rtx x0 = XEXP (x, 0);
11638 rtx x1 = XEXP (x, 1);
11639
11640 /* If x0 and x1 are identical then there is no need to
11641 process x0. */
11642 if (x0 == x1)
11643 break;
11644
11645 /* If x0 is identical to a subexpression of x1 then while
11646 processing x1, x0 has already been processed. Thus we
11647 are done with x. */
ec8e098d 11648 if (ARITHMETIC_P (x1)
8fd73754
AN
11649 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
11650 break;
11651
11652 /* If x1 is identical to a subexpression of x0 then we
11653 still have to process the rest of x0. */
ec8e098d 11654 if (ARITHMETIC_P (x0)
8fd73754
AN
11655 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
11656 {
11657 update_table_tick (XEXP (x0, x1 == XEXP (x0, 0) ? 1 : 0));
11658 break;
11659 }
11660 }
73a39fc4 11661
8fd73754
AN
11662 update_table_tick (XEXP (x, i));
11663 }
6ffef2ad
RS
11664 else if (fmt[i] == 'E')
11665 for (j = 0; j < XVECLEN (x, i); j++)
11666 update_table_tick (XVECEXP (x, i, j));
230d793d
RS
11667}
11668
11669/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
11670 are saying that the register is clobbered and we no longer know its
5eaad481
PB
11671 value. If INSN is zero, don't update reg_stat[].last_set; this is
11672 only permitted with VALUE also zero and is used to invalidate the
11673 register. */
230d793d
RS
11674
11675static void
79a490a9 11676record_value_for_reg (rtx reg, rtx insn, rtx value)
230d793d 11677{
770ae6cc 11678 unsigned int regno = REGNO (reg);
09e18274 11679 unsigned int endregno = END_REGNO (reg);
770ae6cc 11680 unsigned int i;
829f8ff7 11681 reg_stat_type *rsp;
230d793d
RS
11682
11683 /* If VALUE contains REG and we have a previous value for REG, substitute
11684 the previous value. */
11685 if (value && insn && reg_overlap_mentioned_p (reg, value))
11686 {
11687 rtx tem;
11688
11689 /* Set things up so get_last_value is allowed to see anything set up to
11690 our insn. */
6fb5fa3c 11691 subst_low_luid = DF_INSN_LUID (insn);
663522cb 11692 tem = get_last_value (reg);
230d793d 11693
14a774a9
RK
11694 /* If TEM is simply a binary operation with two CLOBBERs as operands,
11695 it isn't going to be useful and will take a lot of time to process,
11696 so just use the CLOBBER. */
11697
230d793d 11698 if (tem)
14a774a9 11699 {
ec8e098d 11700 if (ARITHMETIC_P (tem)
14a774a9
RK
11701 && GET_CODE (XEXP (tem, 0)) == CLOBBER
11702 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
11703 tem = XEXP (tem, 0);
49c3b9a8
JJ
11704 else if (count_occurrences (value, reg, 1) >= 2)
11705 {
11706 /* If there are two or more occurrences of REG in VALUE,
11707 prevent the value from growing too much. */
11708 if (count_rtxs (tem) > MAX_LAST_VALUE_RTL)
11709 tem = gen_rtx_CLOBBER (GET_MODE (tem), const0_rtx);
11710 }
14a774a9
RK
11711
11712 value = replace_rtx (copy_rtx (value), reg, tem);
11713 }
230d793d
RS
11714 }
11715
11716 /* For each register modified, show we don't know its value, that
ef026f91
RS
11717 we don't know about its bitwise content, that its value has been
11718 updated, and that we don't know the location of the death of the
11719 register. */
770ae6cc 11720 for (i = regno; i < endregno; i++)
230d793d 11721 {
829f8ff7
ILT
11722 rsp = VEC_index (reg_stat_type, reg_stat, i);
11723
230d793d 11724 if (insn)
829f8ff7 11725 rsp->last_set = insn;
770ae6cc 11726
829f8ff7 11727 rsp->last_set_value = 0;
32e8bb8e 11728 rsp->last_set_mode = VOIDmode;
829f8ff7
ILT
11729 rsp->last_set_nonzero_bits = 0;
11730 rsp->last_set_sign_bit_copies = 0;
11731 rsp->last_death = 0;
32e8bb8e 11732 rsp->truncated_to_mode = VOIDmode;
230d793d
RS
11733 }
11734
11735 /* Mark registers that are being referenced in this value. */
11736 if (value)
11737 update_table_tick (value);
11738
11739 /* Now update the status of each register being set.
11740 If someone is using this register in this block, set this register
11741 to invalid since we will get confused between the two lives in this
11742 basic block. This makes using this register always invalid. In cse, we
11743 scan the table to invalidate all entries using this register, but this
11744 is too much work for us. */
11745
11746 for (i = regno; i < endregno; i++)
11747 {
829f8ff7
ILT
11748 rsp = VEC_index (reg_stat_type, reg_stat, i);
11749 rsp->last_set_label = label_tick;
6fb5fa3c 11750 if (!insn
829f8ff7
ILT
11751 || (value && rsp->last_set_table_tick >= label_tick_ebb_start))
11752 rsp->last_set_invalid = 1;
230d793d 11753 else
829f8ff7 11754 rsp->last_set_invalid = 0;
230d793d
RS
11755 }
11756
11757 /* The value being assigned might refer to X (like in "x++;"). In that
11758 case, we must replace it with (clobber (const_int 0)) to prevent
11759 infinite loops. */
829f8ff7 11760 rsp = VEC_index (reg_stat_type, reg_stat, regno);
9baea66c 11761 if (value && !get_last_value_validate (&value, insn, label_tick, 0))
230d793d
RS
11762 {
11763 value = copy_rtx (value);
9baea66c 11764 if (!get_last_value_validate (&value, insn, label_tick, 1))
230d793d
RS
11765 value = 0;
11766 }
11767
55310dad
RK
11768 /* For the main register being modified, update the value, the mode, the
11769 nonzero bits, and the number of sign bit copies. */
11770
829f8ff7 11771 rsp->last_set_value = value;
230d793d 11772
55310dad
RK
11773 if (value)
11774 {
0a0440c9 11775 enum machine_mode mode = GET_MODE (reg);
6fb5fa3c 11776 subst_low_luid = DF_INSN_LUID (insn);
829f8ff7 11777 rsp->last_set_mode = mode;
0a0440c9
JJ
11778 if (GET_MODE_CLASS (mode) == MODE_INT
11779 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
11780 mode = nonzero_bits_mode;
829f8ff7
ILT
11781 rsp->last_set_nonzero_bits = nonzero_bits (value, mode);
11782 rsp->last_set_sign_bit_copies
55310dad
RK
11783 = num_sign_bit_copies (value, GET_MODE (reg));
11784 }
230d793d
RS
11785}
11786
230d793d 11787/* Called via note_stores from record_dead_and_set_regs to handle one
84832317
MM
11788 SET or CLOBBER in an insn. DATA is the instruction in which the
11789 set is occurring. */
230d793d
RS
11790
11791static void
7bc980e1 11792record_dead_and_set_regs_1 (rtx dest, const_rtx setter, void *data)
230d793d 11793{
84832317
MM
11794 rtx record_dead_insn = (rtx) data;
11795
ca89d290
RK
11796 if (GET_CODE (dest) == SUBREG)
11797 dest = SUBREG_REG (dest);
11798
0efa4029
PB
11799 if (!record_dead_insn)
11800 {
11801 if (REG_P (dest))
11802 record_value_for_reg (dest, NULL_RTX, NULL_RTX);
11803 return;
11804 }
11805
f8cfc6aa 11806 if (REG_P (dest))
230d793d
RS
11807 {
11808 /* If we are setting the whole register, we know its value. Otherwise
11809 show that we don't know the value. We can handle SUBREG in
11810 some cases. */
11811 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11812 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11813 else if (GET_CODE (setter) == SET
11814 && GET_CODE (SET_DEST (setter)) == SUBREG
11815 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 11816 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 11817 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3 11818 record_value_for_reg (dest, record_dead_insn,
4de249d9 11819 gen_lowpart (GET_MODE (dest),
d0ab8cd3 11820 SET_SRC (setter)));
230d793d 11821 else
5f4f0e22 11822 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d 11823 }
3c0cb5de 11824 else if (MEM_P (dest)
230d793d
RS
11825 /* Ignore pushes, they clobber nothing. */
11826 && ! push_operand (dest, GET_MODE (dest)))
6fb5fa3c 11827 mem_last_set = DF_INSN_LUID (record_dead_insn);
230d793d
RS
11828}
11829
11830/* Update the records of when each REG was most recently set or killed
11831 for the things done by INSN. This is the last thing done in processing
11832 INSN in the combiner loop.
11833
5eaad481
PB
11834 We update reg_stat[], in particular fields last_set, last_set_value,
11835 last_set_mode, last_set_nonzero_bits, last_set_sign_bit_copies,
11836 last_death, and also the similar information mem_last_set (which insn
6fb5fa3c 11837 most recently modified memory) and last_call_luid (which insn was the
5eaad481 11838 most recent subroutine call). */
230d793d
RS
11839
11840static void
79a490a9 11841record_dead_and_set_regs (rtx insn)
230d793d 11842{
b3694847 11843 rtx link;
770ae6cc 11844 unsigned int i;
55310dad 11845
230d793d
RS
11846 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11847 {
dbc131f3 11848 if (REG_NOTE_KIND (link) == REG_DEAD
f8cfc6aa 11849 && REG_P (XEXP (link, 0)))
dbc131f3 11850 {
770ae6cc 11851 unsigned int regno = REGNO (XEXP (link, 0));
09e18274 11852 unsigned int endregno = END_REGNO (XEXP (link, 0));
dbc131f3
RK
11853
11854 for (i = regno; i < endregno; i++)
829f8ff7
ILT
11855 {
11856 reg_stat_type *rsp;
11857
11858 rsp = VEC_index (reg_stat_type, reg_stat, i);
11859 rsp->last_death = insn;
11860 }
dbc131f3 11861 }
230d793d 11862 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 11863 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
11864 }
11865
4b4bf941 11866 if (CALL_P (insn))
55310dad
RK
11867 {
11868 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
29655d3d 11869 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
55310dad 11870 {
829f8ff7
ILT
11871 reg_stat_type *rsp;
11872
11873 rsp = VEC_index (reg_stat_type, reg_stat, i);
11874 rsp->last_set_invalid = 1;
11875 rsp->last_set = insn;
11876 rsp->last_set_value = 0;
32e8bb8e 11877 rsp->last_set_mode = VOIDmode;
829f8ff7
ILT
11878 rsp->last_set_nonzero_bits = 0;
11879 rsp->last_set_sign_bit_copies = 0;
11880 rsp->last_death = 0;
32e8bb8e 11881 rsp->truncated_to_mode = VOIDmode;
55310dad
RK
11882 }
11883
6fb5fa3c 11884 last_call_luid = mem_last_set = DF_INSN_LUID (insn);
29655d3d 11885
0efa4029 11886 /* We can't combine into a call pattern. Remember, though, that
6fb5fa3c 11887 the return value register is set at this LUID. We could
0efa4029
PB
11888 still replace a register with the return value from the
11889 wrong subroutine call! */
11890 note_stores (PATTERN (insn), record_dead_and_set_regs_1, NULL_RTX);
55310dad 11891 }
0efa4029
PB
11892 else
11893 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
230d793d 11894}
732f2ac9 11895
732f2ac9
JJ
11896/* If a SUBREG has the promoted bit set, it is in fact a property of the
11897 register present in the SUBREG, so for each such SUBREG go back and
11898 adjust nonzero and sign bit information of the registers that are
11899 known to have some zero/sign bits set.
11900
11901 This is needed because when combine blows the SUBREGs away, the
11902 information on zero/sign bits is lost and further combines can be
11903 missed because of that. */
11904
11905static void
79a490a9 11906record_promoted_value (rtx insn, rtx subreg)
732f2ac9 11907{
4a71b24f 11908 rtx links, set;
770ae6cc 11909 unsigned int regno = REGNO (SUBREG_REG (subreg));
732f2ac9
JJ
11910 enum machine_mode mode = GET_MODE (subreg);
11911
25af74a0 11912 if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
732f2ac9
JJ
11913 return;
11914
663522cb 11915 for (links = LOG_LINKS (insn); links;)
732f2ac9 11916 {
829f8ff7
ILT
11917 reg_stat_type *rsp;
11918
732f2ac9
JJ
11919 insn = XEXP (links, 0);
11920 set = single_set (insn);
11921
f8cfc6aa 11922 if (! set || !REG_P (SET_DEST (set))
732f2ac9
JJ
11923 || REGNO (SET_DEST (set)) != regno
11924 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11925 {
11926 links = XEXP (links, 1);
11927 continue;
11928 }
11929
829f8ff7
ILT
11930 rsp = VEC_index (reg_stat_type, reg_stat, regno);
11931 if (rsp->last_set == insn)
663522cb 11932 {
7879b81e 11933 if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
829f8ff7 11934 rsp->last_set_nonzero_bits &= GET_MODE_MASK (mode);
663522cb 11935 }
732f2ac9 11936
f8cfc6aa 11937 if (REG_P (SET_SRC (set)))
732f2ac9
JJ
11938 {
11939 regno = REGNO (SET_SRC (set));
11940 links = LOG_LINKS (insn);
11941 }
11942 else
11943 break;
11944 }
11945}
11946
4df8acd3
AN
11947/* Check if X, a register, is known to contain a value already
11948 truncated to MODE. In this case we can use a subreg to refer to
11949 the truncated value even though in the generic case we would need
11950 an explicit truncation. */
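/* Illustrative sketch (not from the original sources): on a target
   where TRULY_NOOP_TRUNCATION (32, 64) is false, DImode values
   normally need an explicit (truncate:SI ...); but once (reg:DI 100)
   is known to be truncated_to_mode SImode, (subreg:SI (reg:DI 100) 0)
   is a safe reference to the truncated value.  */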
11951
11952static bool
fa233e34 11953reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
4df8acd3 11954{
829f8ff7
ILT
11955 reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
11956 enum machine_mode truncated = rsp->truncated_to_mode;
4df8acd3 11957
6fb5fa3c 11958 if (truncated == 0
829f8ff7 11959 || rsp->truncation_label < label_tick_ebb_start)
4df8acd3
AN
11960 return false;
11961 if (GET_MODE_SIZE (truncated) <= GET_MODE_SIZE (mode))
11962 return true;
11963 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
11964 GET_MODE_BITSIZE (truncated)))
11965 return true;
11966 return false;
11967}
11968
36dafbd1
AN
11969/* Callback for for_each_rtx. If *P is a hard reg or a subreg record the mode
11970 that the register is accessed in. For non-TRULY_NOOP_TRUNCATION targets we
11971 might be able to turn a truncate into a subreg using this information.
11972 Return -1 if traversing *P is complete or 0 otherwise. */
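/* E.g. (illustration only): encountering (subreg:SI (reg:DI 100) 0)
   records that reg 100 is accessed in SImode, setting its
   truncated_to_mode and truncation_label, unless the truncation is
   already known to be a no-op.  */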
732f2ac9 11973
36dafbd1
AN
11974static int
11975record_truncated_value (rtx *p, void *data ATTRIBUTE_UNUSED)
732f2ac9 11976{
36dafbd1 11977 rtx x = *p;
4df8acd3 11978 enum machine_mode truncated_mode;
829f8ff7 11979 reg_stat_type *rsp;
c22cacf3 11980
4df8acd3
AN
11981 if (GET_CODE (x) == SUBREG && REG_P (SUBREG_REG (x)))
11982 {
11983 enum machine_mode original_mode = GET_MODE (SUBREG_REG (x));
11984 truncated_mode = GET_MODE (x);
11985
11986 if (GET_MODE_SIZE (original_mode) <= GET_MODE_SIZE (truncated_mode))
36dafbd1 11987 return -1;
4df8acd3
AN
11988
11989 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (truncated_mode),
11990 GET_MODE_BITSIZE (original_mode)))
36dafbd1 11991 return -1;
4df8acd3
AN
11992
11993 x = SUBREG_REG (x);
11994 }
c0220ea4 11995 /* ??? For hard-regs we now record everything. We might be able to
4df8acd3
AN
11996 optimize this using last_set_mode. */
11997 else if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
11998 truncated_mode = GET_MODE (x);
11999 else
36dafbd1 12000 return 0;
4df8acd3 12001
829f8ff7
ILT
12002 rsp = VEC_index (reg_stat_type, reg_stat, REGNO (x));
12003 if (rsp->truncated_to_mode == 0
12004 || rsp->truncation_label < label_tick_ebb_start
4df8acd3 12005 || (GET_MODE_SIZE (truncated_mode)
829f8ff7 12006 < GET_MODE_SIZE (rsp->truncated_to_mode)))
4df8acd3 12007 {
829f8ff7
ILT
12008 rsp->truncated_to_mode = truncated_mode;
12009 rsp->truncation_label = label_tick;
4df8acd3 12010 }
36dafbd1
AN
12011
12012 return -1;
4df8acd3
AN
12013}
12014
36dafbd1
AN
12015/* Callback for note_uses. Find hardregs and subregs of pseudos and
12016 the modes they are used in. This can help turn TRUNCATEs into
12017 SUBREGs. */
4df8acd3
AN
12018
12019static void
36dafbd1 12020record_truncated_values (rtx *x, void *data ATTRIBUTE_UNUSED)
4df8acd3 12021{
36dafbd1
AN
12022 for_each_rtx (x, record_truncated_value, NULL);
12023}
4df8acd3 12024
36dafbd1
AN
12025/* Scan X for promoted SUBREGs. For each one found,
12026 note what it implies to the registers used in it. */
12027
12028static void
12029check_promoted_subreg (rtx insn, rtx x)
12030{
12031 if (GET_CODE (x) == SUBREG
12032 && SUBREG_PROMOTED_VAR_P (x)
12033 && REG_P (SUBREG_REG (x)))
12034 record_promoted_value (insn, x);
732f2ac9
JJ
12035 else
12036 {
12037 const char *format = GET_RTX_FORMAT (GET_CODE (x));
12038 int i, j;
12039
12040 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
663522cb 12041 switch (format[i])
732f2ac9
JJ
12042 {
12043 case 'e':
36dafbd1 12044 check_promoted_subreg (insn, XEXP (x, i));
732f2ac9
JJ
12045 break;
12046 case 'V':
12047 case 'E':
12048 if (XVEC (x, i) != 0)
12049 for (j = 0; j < XVECLEN (x, i); j++)
36dafbd1 12050 check_promoted_subreg (insn, XVECEXP (x, i, j));
732f2ac9
JJ
12051 break;
12052 }
12053 }
12054}
230d793d 12055\f
9baea66c
EB
12056/* Verify that all the registers and memory references mentioned in *LOC are
12057 still valid. *LOC was part of a value set in INSN when label_tick was
12058 equal to TICK. Return 0 if some are not. If REPLACE is nonzero, replace
12059 the invalid references with (clobber (const_int 0)) and return 1. This
12060 replacement is useful because we often can get useful information about
12061 the form of a value (e.g., if it was produced by a shift that always
12062 produces -1 or 0) even though we don't know exactly what registers it
12063 was produced from. */
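/* Illustration (not from the original sources): if the recorded value
   is (plus:SI (reg:SI 101) (const_int 4)) and reg 101 has since been
   set again, the REPLACE pass rewrites the value as
   (plus:SI (clobber:SI (const_int 0)) (const_int 4)), preserving the
   form of the addition even though one operand is now unknown.  */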
230d793d
RS
12064
12065static int
79a490a9 12066get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
230d793d
RS
12067{
12068 rtx x = *loc;
6f7d635c 12069 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
230d793d 12070 int len = GET_RTX_LENGTH (GET_CODE (x));
6ffef2ad 12071 int i, j;
230d793d 12072
f8cfc6aa 12073 if (REG_P (x))
230d793d 12074 {
770ae6cc 12075 unsigned int regno = REGNO (x);
09e18274 12076 unsigned int endregno = END_REGNO (x);
770ae6cc 12077 unsigned int j;
230d793d
RS
12078
12079 for (j = regno; j < endregno; j++)
829f8ff7
ILT
12080 {
12081 reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, j);
12082 if (rsp->last_set_invalid
12083 /* If this is a pseudo-register that was only set once and not
12084 live at the beginning of the function, it is always valid. */
12085 || (! (regno >= FIRST_PSEUDO_REGISTER
12086 && REG_N_SETS (regno) == 1
12087 && (!REGNO_REG_SET_P
12088 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno)))
12089 && rsp->last_set_label > tick))
230d793d
RS
12090 {
12091 if (replace)
38a448ca 12092 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
12093 return replace;
12094 }
829f8ff7 12095 }
230d793d
RS
12096
12097 return 1;
12098 }
9baea66c
EB
12099 /* If this is a memory reference, make sure that there were no stores after
12100 it that might have clobbered the value. We don't have alias info, so we
12101 assume any store invalidates it. Moreover, we only have local UIDs, so
12102 we also assume that there were stores in the intervening basic blocks. */
389fdba0 12103 else if (MEM_P (x) && !MEM_READONLY_P (x)
9baea66c 12104 && (tick != label_tick || DF_INSN_LUID (insn) <= mem_last_set))
9a893315
JW
12105 {
12106 if (replace)
38a448ca 12107 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9a893315
JW
12108 return replace;
12109 }
230d793d
RS
12110
12111 for (i = 0; i < len; i++)
8fd73754
AN
12112 {
12113 if (fmt[i] == 'e')
12114 {
12115 /* Check for identical subexpressions. If x contains
12116 identical subexpressions we only have to traverse one of
12117 them. */
ec8e098d 12118 if (i == 1 && ARITHMETIC_P (x))
8fd73754
AN
12119 {
12120 /* Note that at this point x0 has already been checked
12121 and found valid. */
12122 rtx x0 = XEXP (x, 0);
12123 rtx x1 = XEXP (x, 1);
12124
12125 /* If x0 and x1 are identical then x is also valid. */
12126 if (x0 == x1)
12127 return 1;
12128
12129 /* If x1 is identical to a subexpression of x0 then
12130 while checking x0, x1 has already been checked. Thus
12131 it is valid, and so is x. */
ec8e098d 12132 if (ARITHMETIC_P (x0)
8fd73754
AN
12133 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
12134 return 1;
12135
12136 /* If x0 is identical to a subexpression of x1 then x is
12137 valid iff the rest of x1 is valid. */
ec8e098d 12138 if (ARITHMETIC_P (x1)
8fd73754
AN
12139 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
12140 return
12141 get_last_value_validate (&XEXP (x1,
12142 x0 == XEXP (x1, 0) ? 1 : 0),
12143 insn, tick, replace);
12144 }
12145
12146 if (get_last_value_validate (&XEXP (x, i), insn, tick,
12147 replace) == 0)
12148 return 0;
12149 }
8fd73754 12150 else if (fmt[i] == 'E')
6ffef2ad
RS
12151 for (j = 0; j < XVECLEN (x, i); j++)
12152 if (get_last_value_validate (&XVECEXP (x, i, j),
12153 insn, tick, replace) == 0)
12154 return 0;
8fd73754 12155 }
230d793d
RS
12156
12157 /* If we haven't found a reason for it to be invalid, it is valid. */
12158 return 1;
12159}
12160
12161/* Get the last value assigned to X, if known. Some registers
12162 in the value may be replaced with (clobber (const_int 0)) if their value
12163 is no longer known reliably. */
12164
12165static rtx
fa233e34 12166get_last_value (const_rtx x)
230d793d 12167{
770ae6cc 12168 unsigned int regno;
230d793d 12169 rtx value;
829f8ff7 12170 reg_stat_type *rsp;
230d793d
RS
12171
12172 /* If this is a non-paradoxical SUBREG, get the value of its operand and
12173 then convert it to the desired mode. If this is a paradoxical SUBREG,
0f41302f 12174 we cannot predict what values the "extra" bits might have. */
230d793d
RS
12175 if (GET_CODE (x) == SUBREG
12176 && subreg_lowpart_p (x)
12177 && (GET_MODE_SIZE (GET_MODE (x))
12178 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
12179 && (value = get_last_value (SUBREG_REG (x))) != 0)
4de249d9 12180 return gen_lowpart (GET_MODE (x), value);
230d793d 12181
f8cfc6aa 12182 if (!REG_P (x))
230d793d
RS
12183 return 0;
12184
12185 regno = REGNO (x);
829f8ff7
ILT
12186 rsp = VEC_index (reg_stat_type, reg_stat, regno);
12187 value = rsp->last_set_value;
230d793d 12188
57cf50a4
GRK
12189 /* If we don't have a value, or if it isn't for this basic block and
12190 it's either a hard register, set more than once, or it's live
663522cb 12191 at the beginning of the function, return 0.
57cf50a4 12192
eaec9b3d 12193 Because if it's not live at the beginning of the function then the reg
57cf50a4
GRK
12194 is always set before being used (is never used without being set).
12195 And, if it's set only once, and it's always set before use, then all
12196 uses must have the same last value, even if it's not from this basic
12197 block. */
230d793d
RS
12198
12199 if (value == 0
829f8ff7 12200 || (rsp->last_set_label < label_tick_ebb_start
57cf50a4
GRK
12201 && (regno < FIRST_PSEUDO_REGISTER
12202 || REG_N_SETS (regno) != 1
6fb5fa3c
DB
12203 || REGNO_REG_SET_P
12204 (DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), regno))))
230d793d
RS
12205 return 0;
12206
4255220d 12207 /* If the value was set in a later insn than the ones we are processing,
ca4cd906 12208 we can't use it even if the register was only set once. */
829f8ff7
ILT
12209 if (rsp->last_set_label == label_tick
12210 && DF_INSN_LUID (rsp->last_set) >= subst_low_luid)
ca4cd906 12211 return 0;
d0ab8cd3
RK
12212
12213 /* If the value has all its registers valid, return it. */
9baea66c 12214 if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 0))
230d793d
RS
12215 return value;
12216
12217 /* Otherwise, make a copy and replace any invalid register with
12218 (clobber (const_int 0)). If that fails for some reason, return 0. */
12219
12220 value = copy_rtx (value);
9baea66c 12221 if (get_last_value_validate (&value, rsp->last_set, rsp->last_set_label, 1))
230d793d
RS
12222 return value;
12223
12224 return 0;
12225}
12226\f
12227/* Return nonzero if expression X refers to a REG or to memory
6fb5fa3c 12228 that is set in an instruction more recent than FROM_LUID. */
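/* Sketch of the intent (illustration only): a use of (reg:SI 100)
   taken from an insn with luid FROM_LUID must not be substituted past
   a later insn (larger luid, same basic block) that sets reg 100,
   since the moved use would then see the new value; we return 1 in
   that case.  */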
230d793d
RS
12229
12230static int
7bc980e1 12231use_crosses_set_p (const_rtx x, int from_luid)
230d793d 12232{
b3694847
SS
12233 const char *fmt;
12234 int i;
12235 enum rtx_code code = GET_CODE (x);
230d793d
RS
12236
12237 if (code == REG)
12238 {
770ae6cc 12239 unsigned int regno = REGNO (x);
09e18274 12240 unsigned endreg = END_REGNO (x);
663522cb 12241
230d793d
RS
12242#ifdef PUSH_ROUNDING
12243 /* Don't allow uses of the stack pointer to be moved,
12244 because we don't know whether the move crosses a push insn. */
f73ad30e 12245 if (regno == STACK_POINTER_REGNUM && PUSH_ARGS)
230d793d
RS
12246 return 1;
12247#endif
770ae6cc 12248 for (; regno < endreg; regno++)
829f8ff7
ILT
12249 {
12250 reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
12251 if (rsp->last_set
12252 && rsp->last_set_label == label_tick
12253 && DF_INSN_LUID (rsp->last_set) > from_luid)
12254 return 1;
12255 }
e28f5732 12256 return 0;
230d793d
RS
12257 }
12258
6fb5fa3c 12259 if (code == MEM && mem_last_set > from_luid)
230d793d
RS
12260 return 1;
12261
12262 fmt = GET_RTX_FORMAT (code);
12263
12264 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12265 {
12266 if (fmt[i] == 'E')
12267 {
b3694847 12268 int j;
230d793d 12269 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6fb5fa3c 12270 if (use_crosses_set_p (XVECEXP (x, i, j), from_luid))
230d793d
RS
12271 return 1;
12272 }
12273 else if (fmt[i] == 'e'
6fb5fa3c 12274 && use_crosses_set_p (XEXP (x, i), from_luid))
230d793d
RS
12275 return 1;
12276 }
12277 return 0;
12278}
12279\f
12280/* Define three variables used for communication between the following
12281 routines. */
12282
770ae6cc 12283static unsigned int reg_dead_regno, reg_dead_endregno;
230d793d
RS
12284static int reg_dead_flag;
12285
12286/* Function called via note_stores from reg_dead_at_p.
12287
663522cb 12288 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
12289 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
12290
12291static void
7bc980e1 12292reg_dead_at_p_1 (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
230d793d 12293{
770ae6cc 12294 unsigned int regno, endregno;
230d793d 12295
f8cfc6aa 12296 if (!REG_P (dest))
230d793d
RS
12297 return;
12298
12299 regno = REGNO (dest);
09e18274 12300 endregno = END_REGNO (dest);
230d793d
RS
12301 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
12302 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
12303}
12304
da7d8304 12305/* Return nonzero if REG is known to be dead at INSN.
230d793d
RS
12306
12307 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
12308 referencing REG, it is dead. If we hit a SET referencing REG, it is
12309 live. Otherwise, see if it is live or dead at the start of the basic
6e25d159
RK
12310 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
12311 must be assumed to be always live. */
230d793d
RS
12312
12313static int
79a490a9 12314reg_dead_at_p (rtx reg, rtx insn)
230d793d 12315{
e0082a72 12316 basic_block block;
770ae6cc 12317 unsigned int i;
230d793d
RS
12318
12319 /* Set variables for reg_dead_at_p_1. */
12320 reg_dead_regno = REGNO (reg);
09e18274 12321 reg_dead_endregno = END_REGNO (reg);
230d793d
RS
12322
12323 reg_dead_flag = 0;
12324
45da19e3
UW
12325 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. For fixed registers
12326 we allow the machine description to decide whether use-and-clobber
12327 patterns are OK. */
6e25d159
RK
12328 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
12329 {
12330 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
45da19e3 12331 if (!fixed_regs[i] && TEST_HARD_REG_BIT (newpat_used_regs, i))
6e25d159
RK
12332 return 0;
12333 }
12334
d25aa7ab
PB
12335 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, or
12336 beginning of basic block. */
12337 block = BLOCK_FOR_INSN (insn);
12338 for (;;)
230d793d 12339 {
d25aa7ab
PB
12340 if (INSN_P (insn))
12341 {
12342 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
12343 if (reg_dead_flag)
12344 return reg_dead_flag == 1 ? 1 : 0;
230d793d 12345
d25aa7ab
PB
12346 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
12347 return 1;
12348 }
230d793d 12349
d25aa7ab
PB
12350 if (insn == BB_HEAD (block))
12351 break;
230d793d 12352
d25aa7ab 12353 insn = PREV_INSN (insn);
230d793d
RS
12354 }
12355
d25aa7ab 12356 /* Look at live-in sets for the basic block that we were in. */
230d793d 12357 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
89a95777 12358 if (REGNO_REG_SET_P (df_get_live_in (block), i))
230d793d
RS
12359 return 0;
12360
12361 return 1;
12362}
6e25d159 12363\f
6fb5fa3c 12364/* Note hard registers in X that are used. */
6e25d159
RK
12365
12366static void
79a490a9 12367mark_used_regs_combine (rtx x)
6e25d159 12368{
770ae6cc
RK
12369 RTX_CODE code = GET_CODE (x);
12370 unsigned int regno;
6e25d159
RK
12371 int i;
12372
12373 switch (code)
12374 {
12375 case LABEL_REF:
12376 case SYMBOL_REF:
12377 case CONST_INT:
12378 case CONST:
12379 case CONST_DOUBLE:
69ef87e2 12380 case CONST_VECTOR:
6e25d159
RK
12381 case PC:
12382 case ADDR_VEC:
12383 case ADDR_DIFF_VEC:
12384 case ASM_INPUT:
12385#ifdef HAVE_cc0
12386 /* CC0 must die in the insn after it is set, so we don't need to take
12387 special note of it here. */
12388 case CC0:
12389#endif
12390 return;
12391
12392 case CLOBBER:
12393 /* If we are clobbering a MEM, mark any hard registers inside the
12394 address as used. */
3c0cb5de 12395 if (MEM_P (XEXP (x, 0)))
6e25d159
RK
12396 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
12397 return;
12398
12399 case REG:
12400 regno = REGNO (x);
12401 /* A hard reg in a wide mode may really be multiple registers.
12402 If so, mark all of them just like the first. */
12403 if (regno < FIRST_PSEUDO_REGISTER)
12404 {
3eae4643 12405 /* None of this applies to the stack, frame or arg pointers. */
6e25d159
RK
12406 if (regno == STACK_POINTER_REGNUM
12407#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
12408 || regno == HARD_FRAME_POINTER_REGNUM
12409#endif
12410#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
12411 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
12412#endif
12413 || regno == FRAME_POINTER_REGNUM)
12414 return;
12415
09e18274 12416 add_to_hard_reg_set (&newpat_used_regs, GET_MODE (x), regno);
6e25d159
RK
12417 }
12418 return;
12419
12420 case SET:
12421 {
12422 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
12423 the address. */
b3694847 12424 rtx testreg = SET_DEST (x);
6e25d159 12425
e048778f
RK
12426 while (GET_CODE (testreg) == SUBREG
12427 || GET_CODE (testreg) == ZERO_EXTRACT
e048778f 12428 || GET_CODE (testreg) == STRICT_LOW_PART)
6e25d159
RK
12429 testreg = XEXP (testreg, 0);
12430
3c0cb5de 12431 if (MEM_P (testreg))
6e25d159
RK
12432 mark_used_regs_combine (XEXP (testreg, 0));
12433
12434 mark_used_regs_combine (SET_SRC (x));
6e25d159 12435 }
e9a25f70
JL
12436 return;
12437
12438 default:
12439 break;
6e25d159
RK
12440 }
12441
12442 /* Recursively scan the operands of this expression. */
12443
12444 {
b3694847 12445 const char *fmt = GET_RTX_FORMAT (code);
6e25d159
RK
12446
12447 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
12448 {
663522cb 12449 if (fmt[i] == 'e')
6e25d159 12450 mark_used_regs_combine (XEXP (x, i));
663522cb
KH
12451 else if (fmt[i] == 'E')
12452 {
b3694847 12453 int j;
6e25d159 12454
663522cb
KH
12455 for (j = 0; j < XVECLEN (x, i); j++)
12456 mark_used_regs_combine (XVECEXP (x, i, j));
12457 }
6e25d159
RK
12458 }
12459 }
12460}
230d793d
RS
12461\f
12462/* Remove register number REGNO from the dead registers list of INSN.
12463
12464 Return the note used to record the death, if there was one. */
12465
12466rtx
79a490a9 12467remove_death (unsigned int regno, rtx insn)
230d793d 12468{
b3694847 12469 rtx note = find_regno_note (insn, REG_DEAD, regno);
230d793d
RS
12470
12471 if (note)
6fb5fa3c 12472 remove_note (insn, note);
230d793d
RS
12473
12474 return note;
12475}
12476
12477/* For each register (hardware or pseudo) used within expression X, if its
6fb5fa3c 12478 death is in an instruction with luid between FROM_LUID (inclusive) and
230d793d 12479 TO_INSN (exclusive), put a REG_DEAD note for that register in the
663522cb 12480 list headed by PNOTES.
230d793d 12481
6eb12cef
RK
12482 That said, don't move registers killed by maybe_kill_insn.
12483
230d793d
RS
12484 This is done when X is being merged by combination into TO_INSN. These
12485 notes will then be distributed as needed. */
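/* For example (not from the original sources): if (reg:SI 100) occurs
   in X and its recorded last_death insn lies between FROM_LUID and
   TO_INSN, the REG_DEAD note is removed from that insn and chained
   onto PNOTES, to be re-placed afterwards by distribute_notes.  */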
12486
12487static void
6fb5fa3c 12488move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
79a490a9 12489 rtx *pnotes)
230d793d 12490{
b3694847
SS
12491 const char *fmt;
12492 int len, i;
12493 enum rtx_code code = GET_CODE (x);
230d793d
RS
12494
12495 if (code == REG)
12496 {
770ae6cc 12497 unsigned int regno = REGNO (x);
829f8ff7 12498 rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno)->last_death;
e340018d 12499
3eae4643 12500 /* Don't move the register if it gets killed in between from and to. */
6eb12cef 12501 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
770ae6cc 12502 && ! reg_referenced_p (x, maybe_kill_insn))
6eb12cef
RK
12503 return;
12504
6fb5fa3c 12505 if (where_dead
45f39d78 12506 && BLOCK_FOR_INSN (where_dead) == BLOCK_FOR_INSN (to_insn)
6fb5fa3c
DB
12507 && DF_INSN_LUID (where_dead) >= from_luid
12508 && DF_INSN_LUID (where_dead) < DF_INSN_LUID (to_insn))
230d793d 12509 {
dbc131f3 12510 rtx note = remove_death (regno, where_dead);
230d793d
RS
12511
12512 /* It is possible for the call above to return 0. This can occur
5eaad481 12513 when last_death points to I2 or I1 that we combined with.
dbc131f3
RK
12514 In that case make a new note.
12515
12516 We must also check for the case where X is a hard register
12517 and NOTE is a death note for a range of hard registers
12518 including X. In that case, we must put REG_DEAD notes for
12519 the remaining registers in place of NOTE. */
12520
12521 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
12522 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
24e46fc4 12523 > GET_MODE_SIZE (GET_MODE (x))))
dbc131f3 12524 {
770ae6cc 12525 unsigned int deadregno = REGNO (XEXP (note, 0));
09e18274
RS
12526 unsigned int deadend = END_HARD_REGNO (XEXP (note, 0));
12527 unsigned int ourend = END_HARD_REGNO (x);
770ae6cc 12528 unsigned int i;
dbc131f3
RK
12529
12530 for (i = deadregno; i < deadend; i++)
12531 if (i < regno || i >= ourend)
65c5f2a6 12532 add_reg_note (where_dead, REG_DEAD, regno_reg_rtx[i]);
dbc131f3 12533 }
770ae6cc 12534
24e46fc4
JW
12535 /* If we didn't find any note, or if we found a REG_DEAD note that
12536 covers only part of the given reg, and we have a multi-reg hard
fabd69e8
RK
12537 register, then to be safe we must check for REG_DEAD notes
12538 for each register other than the first. They could have
12539 their own REG_DEAD notes lying around. */
24e46fc4
JW
12540 else if ((note == 0
12541 || (note != 0
12542 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
12543 < GET_MODE_SIZE (GET_MODE (x)))))
12544 && regno < FIRST_PSEUDO_REGISTER
66fd46b6 12545 && hard_regno_nregs[regno][GET_MODE (x)] > 1)
fabd69e8 12546 {
09e18274 12547 unsigned int ourend = END_HARD_REGNO (x);
770ae6cc 12548 unsigned int i, offset;
fabd69e8
RK
12549 rtx oldnotes = 0;
12550
24e46fc4 12551 if (note)
66fd46b6 12552 offset = hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))];
24e46fc4
JW
12553 else
12554 offset = 1;
12555
12556 for (i = regno + offset; i < ourend; i++)
e50126e8 12557 move_deaths (regno_reg_rtx[i],
6fb5fa3c 12558 maybe_kill_insn, from_luid, to_insn, &oldnotes);
fabd69e8 12559 }
230d793d 12560
dbc131f3 12561 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
230d793d
RS
12562 {
12563 XEXP (note, 1) = *pnotes;
12564 *pnotes = note;
12565 }
12566 else
efc0b2bd 12567 *pnotes = alloc_reg_note (REG_DEAD, x, *pnotes);
230d793d
RS
12568 }
12569
12570 return;
12571 }
12572
12573 else if (GET_CODE (x) == SET)
12574 {
12575 rtx dest = SET_DEST (x);
12576
6fb5fa3c 12577 move_deaths (SET_SRC (x), maybe_kill_insn, from_luid, to_insn, pnotes);
230d793d 12578
a7c99304
RK
12579 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
12580 that accesses one word of a multi-word item, some
12581 piece of every register in the expression is used by
12582 this insn, so remove any old death. */
ddef6bc7 12583 /* ??? So why do we test for equality of the sizes? */
a7c99304
RK
12584
12585 if (GET_CODE (dest) == ZERO_EXTRACT
12586 || GET_CODE (dest) == STRICT_LOW_PART
12587 || (GET_CODE (dest) == SUBREG
12588 && (((GET_MODE_SIZE (GET_MODE (dest))
12589 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
12590 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
12591 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
230d793d 12592 {
6fb5fa3c 12593 move_deaths (dest, maybe_kill_insn, from_luid, to_insn, pnotes);
a7c99304 12594 return;
230d793d
RS
12595 }
12596
a7c99304
RK
12597 /* If this is some other SUBREG, we know it replaces the entire
12598 value, so use that as the destination. */
12599 if (GET_CODE (dest) == SUBREG)
12600 dest = SUBREG_REG (dest);
12601
12602 /* If this is a MEM, adjust deaths of anything used in the address.
12603 For a REG (the only other possibility), the entire value is
12604 being replaced so the old value is not used in this insn. */
230d793d 12605
3c0cb5de 12606 if (MEM_P (dest))
6fb5fa3c 12607 move_deaths (XEXP (dest, 0), maybe_kill_insn, from_luid,
6eb12cef 12608 to_insn, pnotes);
230d793d
RS
12609 return;
12610 }
12611
12612 else if (GET_CODE (x) == CLOBBER)
12613 return;
12614
12615 len = GET_RTX_LENGTH (code);
12616 fmt = GET_RTX_FORMAT (code);
12617
12618 for (i = 0; i < len; i++)
12619 {
12620 if (fmt[i] == 'E')
12621 {
b3694847 12622 int j;
230d793d 12623 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6fb5fa3c 12624 move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_luid,
6eb12cef 12625 to_insn, pnotes);
230d793d
RS
12626 }
12627 else if (fmt[i] == 'e')
6fb5fa3c 12628 move_deaths (XEXP (x, i), maybe_kill_insn, from_luid, to_insn, pnotes);
230d793d
RS
12629 }
12630}
12631\f
a7c99304
RK
12632/* Return 1 if X is the target of a bit-field assignment in BODY, the
12633 pattern of an insn. X must be a REG. */
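/* E.g. (illustration): with BODY
   (set (zero_extract:SI (reg:SI 100) (const_int 8) (const_int 0)) ...)
   this returns 1 when X is (reg:SI 100), since only a bit-field of
   the register is assigned.  */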
230d793d
RS
12634
12635static int
79a490a9 12636reg_bitfield_target_p (rtx x, rtx body)
230d793d
RS
12637{
12638 int i;
12639
12640 if (GET_CODE (body) == SET)
a7c99304
RK
12641 {
12642 rtx dest = SET_DEST (body);
12643 rtx target;
770ae6cc 12644 unsigned int regno, tregno, endregno, endtregno;
a7c99304
RK
12645
12646 if (GET_CODE (dest) == ZERO_EXTRACT)
12647 target = XEXP (dest, 0);
12648 else if (GET_CODE (dest) == STRICT_LOW_PART)
12649 target = SUBREG_REG (XEXP (dest, 0));
12650 else
12651 return 0;
12652
12653 if (GET_CODE (target) == SUBREG)
12654 target = SUBREG_REG (target);
12655
f8cfc6aa 12656 if (!REG_P (target))
a7c99304
RK
12657 return 0;
12658
12659 tregno = REGNO (target), regno = REGNO (x);
12660 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
12661 return target == x;
12662
09e18274
RS
12663 endtregno = end_hard_regno (GET_MODE (target), tregno);
12664 endregno = end_hard_regno (GET_MODE (x), regno);
a7c99304
RK
12665
12666 return endregno > tregno && regno < endtregno;
12667 }
230d793d
RS
12668
12669 else if (GET_CODE (body) == PARALLEL)
12670 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 12671 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
12672 return 1;
12673
12674 return 0;
663522cb 12675}
b5b8b0ac
AO
12676
12677/* Return the next insn after INSN that is neither a NOTE nor a
12678 DEBUG_INSN. This routine does not look inside SEQUENCEs. */
12679
12680static rtx
12681next_nonnote_nondebug_insn (rtx insn)
12682{
12683 while (insn)
12684 {
12685 insn = NEXT_INSN (insn);
12686 if (insn == 0)
12687 break;
12688 if (NOTE_P (insn))
12689 continue;
12690 if (DEBUG_INSN_P (insn))
12691 continue;
12692 break;
12693 }
12694
12695 return insn;
12696}
12697
12698
230d793d
RS
12699\f
12700/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
12701 as appropriate. I3 and I2 are the insns resulting from the combination
12702 insns including FROM (I2 may be zero).
12703
4bbae09f
ILT
12704 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
12705 not need REG_DEAD notes because they are being substituted for. This
12706 saves searching in the most common cases.
12707
230d793d
RS
12708 Each note in the list is either ignored or placed on some insns, depending
12709 on the type of note. */
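/* A typical case (illustration only): a REG_DEAD note that was on
   FROM_INSN is re-placed on I3 if I3 references the register, on I2
   if I2 references it and is adjacent to I3, and is simply dropped
   when the register matches ELIM_I2 or ELIM_I1.  */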
12710
12711static void
4bbae09f
ILT
12712distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
12713 rtx elim_i1)
230d793d
RS
12714{
12715 rtx note, next_note;
12716 rtx tem;
12717
12718 for (note = notes; note; note = next_note)
12719 {
12720 rtx place = 0, place2 = 0;
12721
230d793d
RS
12722 next_note = XEXP (note, 1);
12723 switch (REG_NOTE_KIND (note))
12724 {
c9903b44 12725 case REG_BR_PROB:
4db384c9 12726 case REG_BR_PRED:
c9903b44
DE
12727 /* Doesn't matter much where we put this, as long as it's somewhere.
12728 It is preferable to keep these notes on branches, which is most
12729 likely to be i3. */
4a8d0c9c
RH
12730 place = i3;
12731 break;
12732
6e885ee3
ZD
12733 case REG_VALUE_PROFILE:
12734 /* Just get rid of this note, as it is unused later anyway. */
12735 break;
12736
f7cfa78d 12737 case REG_NON_LOCAL_GOTO:
4b4bf941 12738 if (JUMP_P (i3))
f7cfa78d 12739 place = i3;
f7cfa78d 12740 else
341c100f
NS
12741 {
12742 gcc_assert (i2 && JUMP_P (i2));
12743 place = i2;
12744 }
f7cfa78d
GS
12745 break;
12746
4b7c585f 12747 case REG_EH_REGION:
662795a8 12748 /* These notes must remain with the call or trapping instruction. */
4b4bf941 12749 if (CALL_P (i3))
662795a8 12750 place = i3;
4b4bf941 12751 else if (i2 && CALL_P (i2))
662795a8 12752 place = i2;
341c100f 12753 else
662795a8 12754 {
341c100f 12755 gcc_assert (flag_non_call_exceptions);
662795a8
RH
12756 if (may_trap_p (i3))
12757 place = i3;
12758 else if (i2 && may_trap_p (i2))
12759 place = i2;
12760 /* ??? Otherwise assume we've combined things such that we
12761 can now prove that the instructions can't trap. Drop the
12762 note in this case. */
12763 }
662795a8
RH
12764 break;
12765
ca3920ad 12766 case REG_NORETURN:
ab61c93f 12767 case REG_SETJMP:
0e403ec3
AS
12768 /* These notes must remain with the call. It should not be
12769 possible for both I2 and I3 to be a call. */
4b4bf941 12770 if (CALL_P (i3))
4b7c585f 12771 place = i3;
4b7c585f 12772 else
341c100f
NS
12773 {
12774 gcc_assert (i2 && CALL_P (i2));
12775 place = i2;
12776 }
4b7c585f
JL
12777 break;
12778
230d793d 12779 case REG_UNUSED:
07d0cbdd 12780 /* Any clobbers for i3 may still exist, and so we must process
176c9e6b
JW
12781 REG_UNUSED notes from that insn.
12782
12783 Any clobbers from i2 or i1 can only exist if they were added by
12784 recog_for_combine. In that case, recog_for_combine created the
12785 necessary REG_UNUSED notes. Trying to keep any original
12786 REG_UNUSED notes from these insns can cause incorrect output
12787 if it is for the same register as the original i3 dest.
12788 In that case, we will notice that the register is set in i3,
12789 and then add a REG_UNUSED note for the destination of i3, which
07d0cbdd
JW
12790 is wrong. However, it is possible to have REG_UNUSED notes from
12791 i2 or i1 for registers which were both used and clobbered, so
12792 we keep notes from i2 or i1 if they will turn into REG_DEAD
12793 notes. */
176c9e6b 12794
230d793d
RS
12795 /* If this register is set or clobbered in I3, put the note there
12796 unless there is one already. */
07d0cbdd 12797 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
230d793d 12798 {
07d0cbdd
JW
12799 if (from_insn != i3)
12800 break;
12801
f8cfc6aa 12802 if (! (REG_P (XEXP (note, 0))
230d793d
RS
12803 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
12804 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
12805 place = i3;
12806 }
12807 /* Otherwise, if this register is used by I3, then this register
12808 now dies here, so we must put a REG_DEAD note here unless there
12809 is one already. */
12810 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
f8cfc6aa 12811 && ! (REG_P (XEXP (note, 0))
770ae6cc
RK
12812 ? find_regno_note (i3, REG_DEAD,
12813 REGNO (XEXP (note, 0)))
230d793d
RS
12814 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
12815 {
12816 PUT_REG_NOTE_KIND (note, REG_DEAD);
12817 place = i3;
12818 }
12819 break;
12820
12821 case REG_EQUAL:
12822 case REG_EQUIV:
9ae8ffe7 12823 case REG_NOALIAS:
230d793d
RS
12824 /* These notes say something about results of an insn. We can
12825 only support them if they used to be on I3 in which case they
a687e897
RK
12826 remain on I3. Otherwise they are ignored.
12827
12828 If the note refers to an expression that is not a constant, we
12829 must also ignore the note since we cannot tell whether the
12830 equivalence is still true. It might be possible to do
12831 slightly better than this (we only have a problem if I2DEST
12832 or I1DEST is present in the expression), but it doesn't
12833 seem worth the trouble. */
12834
12835 if (from_insn == i3
12836 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
12837 place = i3;
12838 break;
12839
12840 case REG_INC:
230d793d
RS
12841 /* These notes say something about how a register is used. They must
12842 be present on any use of the register in I2 or I3. */
12843 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
12844 place = i3;
12845
12846 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
12847 {
12848 if (place)
12849 place2 = i2;
12850 else
12851 place = i2;
12852 }
12853 break;
12854
cf7c4aa6
HPN
12855 case REG_LABEL_TARGET:
12856 case REG_LABEL_OPERAND:
e55b4486
RH
12857 /* This can show up in several ways -- either directly in the
12858 pattern, or hidden off in the constant pool with (or without?)
12859 a REG_EQUAL note. */
12860 /* ??? Ignore the without-reg_equal-note problem for now. */
12861 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
12862 || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
12863 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12864 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
12865 place = i3;
12866
12867 if (i2
12868 && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
663522cb 12869 || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
e55b4486
RH
12870 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
12871 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
12872 {
12873 if (place)
12874 place2 = i2;
12875 else
12876 place = i2;
12877 }
2a3b43b6 12878
cf7c4aa6
HPN
12879 /* For REG_LABEL_TARGET on a JUMP_P, we prefer to put the note
12880 as a JUMP_LABEL or decrement LABEL_NUSES if it's already
12881 there. */
12882 if (place && JUMP_P (place)
12883 && REG_NOTE_KIND (note) == REG_LABEL_TARGET
12884 && (JUMP_LABEL (place) == NULL
12885 || JUMP_LABEL (place) == XEXP (note, 0)))
2a3b43b6 12886 {
341c100f 12887 rtx label = JUMP_LABEL (place);
c22cacf3 12888
341c100f 12889 if (!label)
b54567e2 12890 JUMP_LABEL (place) = XEXP (note, 0);
cf7c4aa6
HPN
12891 else if (LABEL_P (label))
12892 LABEL_NUSES (label)--;
2a3b43b6 12893 }
cf7c4aa6
HPN
12894
12895 if (place2 && JUMP_P (place2)
12896 && REG_NOTE_KIND (note) == REG_LABEL_TARGET
12897 && (JUMP_LABEL (place2) == NULL
12898 || JUMP_LABEL (place2) == XEXP (note, 0)))
2a3b43b6 12899 {
341c100f 12900 rtx label = JUMP_LABEL (place2);
c22cacf3 12901
341c100f 12902 if (!label)
b54567e2 12903 JUMP_LABEL (place2) = XEXP (note, 0);
cf7c4aa6
HPN
12904 else if (LABEL_P (label))
12905 LABEL_NUSES (label)--;
2a3b43b6
JJ
12906 place2 = 0;
12907 }
e55b4486
RH
12908 break;
12909
c1194d74 12910 case REG_NONNEG:
6001794d 12911 /* This note says something about the value of a register prior
c1194d74
JW
12912 to the execution of an insn. It is too much trouble to see
12913 if the note is still correct in all situations. It is better
12914 to simply delete it. */
230d793d
RS
12915 break;
12916
230d793d 12917 case REG_DEAD:
1ff3ac02
RS
12918 /* If we replaced the right hand side of FROM_INSN with a
12919 REG_EQUAL note, the original use of the dying register
12920 will not have been combined into I3 and I2. In such cases,
12921 FROM_INSN is guaranteed to be the first of the combined
12922 instructions, so we simply need to search back before
12923 FROM_INSN for the previous use or set of this register,
12924 then alter the notes there appropriately.
12925
12926 If the register is used as an input in I3, it dies there.
da7d8304 12927 Similarly for I2, if it is nonzero and adjacent to I3.
230d793d
RS
12928
12929 If the register is not used as an input in either I3 or I2
12930 and it is not one of the registers we were supposed to eliminate,
12931 there are two possibilities. We might have a non-adjacent I2
12932 or we might have somehow eliminated an additional register
12933 from a computation. For example, we might have had A & B where
12934 we discover that B will always be zero. In this case we will
12935 eliminate the reference to A.
12936
12937 In both cases, we must search to see if we can find a previous
12938 use of A and put the death note there. */
12939
e5d7a520 12940 if (from_insn
64c91930
EB
12941 && from_insn == i2mod
12942 && !reg_overlap_mentioned_p (XEXP (note, 0), i2mod_new_rhs))
1ff3ac02
RS
12943 tem = from_insn;
12944 else
12945 {
12946 if (from_insn
12947 && CALL_P (from_insn)
12948 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
12949 place = from_insn;
12950 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
12951 place = i3;
b5b8b0ac 12952 else if (i2 != 0 && next_nonnote_nondebug_insn (i2) == i3
1ff3ac02
RS
12953 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
12954 place = i2;
64c91930
EB
12955 else if ((rtx_equal_p (XEXP (note, 0), elim_i2)
12956 && !(i2mod
12957 && reg_overlap_mentioned_p (XEXP (note, 0),
12958 i2mod_old_rhs)))
1ff3ac02
RS
12959 || rtx_equal_p (XEXP (note, 0), elim_i1))
12960 break;
12961 tem = i3;
12962 }
4bbae09f 12963
230d793d 12964 if (place == 0)
38d8473f 12965 {
f6366fc7 12966 basic_block bb = this_basic_block;
d3a923ee 12967
1ff3ac02 12968 for (tem = PREV_INSN (tem); place == 0; tem = PREV_INSN (tem))
38d8473f 12969 {
b5b8b0ac 12970 if (!NONDEBUG_INSN_P (tem))
d3a923ee 12971 {
a813c111 12972 if (tem == BB_HEAD (bb))
d3a923ee
RH
12973 break;
12974 continue;
12975 }
12976
64c91930
EB
12977 /* If the register is being set at TEM, see if that is all
12978 TEM is doing. If so, delete TEM. Otherwise, make this
12979 into a REG_UNUSED note instead. Don't delete sets to
12980 global register vars. */
12981 if ((REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER
12982 || !global_regs[REGNO (XEXP (note, 0))])
2cd54c2a 12983 && reg_set_p (XEXP (note, 0), PATTERN (tem)))
38d8473f
RK
12984 {
12985 rtx set = single_set (tem);
e5e809f4 12986 rtx inner_dest = 0;
e51712db 12987#ifdef HAVE_cc0
f5c97640 12988 rtx cc0_setter = NULL_RTX;
e51712db 12989#endif
e5e809f4
JL
12990
12991 if (set != 0)
12992 for (inner_dest = SET_DEST (set);
663522cb
KH
12993 (GET_CODE (inner_dest) == STRICT_LOW_PART
12994 || GET_CODE (inner_dest) == SUBREG
12995 || GET_CODE (inner_dest) == ZERO_EXTRACT);
e5e809f4
JL
12996 inner_dest = XEXP (inner_dest, 0))
12997 ;
38d8473f
RK
12998
12999 /* Verify that it was the set, and not a clobber that
663522cb 13000 modified the register.
f5c97640
RH
13001
13002 CC0 targets must be careful to maintain setter/user
13003 pairs. If we cannot delete the setter due to side
13004 effects, mark the user with an UNUSED note instead
13005 of deleting it. */
38d8473f
RK
13006
13007 if (set != 0 && ! side_effects_p (SET_SRC (set))
f5c97640
RH
13008 && rtx_equal_p (XEXP (note, 0), inner_dest)
13009#ifdef HAVE_cc0
13010 && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
13011 || ((cc0_setter = prev_cc0_setter (tem)) != NULL
13012 && sets_cc0_p (PATTERN (cc0_setter)) > 0))
13013#endif
13014 )
38d8473f
RK
13015 {
13016 /* Move the notes and links of TEM elsewhere.
663522cb 13017 This might delete other dead insns recursively.
38d8473f
RK
13018 First set the pattern to something that won't use
13019 any register. */
f9af39d0 13020 rtx old_notes = REG_NOTES (tem);
38d8473f
RK
13021
13022 PATTERN (tem) = pc_rtx;
f9af39d0 13023 REG_NOTES (tem) = NULL;
38d8473f 13024
4bbae09f
ILT
13025 distribute_notes (old_notes, tem, tem, NULL_RTX,
13026 NULL_RTX, NULL_RTX);
38d8473f
RK
13027 distribute_links (LOG_LINKS (tem));
13028
6773e15f 13029 SET_INSN_DELETED (tem);
0ce2b299
EB
13030 if (tem == i2)
13031 i2 = NULL_RTX;
f5c97640
RH
13032
13033#ifdef HAVE_cc0
13034 /* Delete the setter too. */
13035 if (cc0_setter)
13036 {
13037 PATTERN (cc0_setter) = pc_rtx;
f9af39d0
RE
13038 old_notes = REG_NOTES (cc0_setter);
13039 REG_NOTES (cc0_setter) = NULL;
f5c97640 13040
f9af39d0 13041 distribute_notes (old_notes, cc0_setter,
4bbae09f
ILT
13042 cc0_setter, NULL_RTX,
13043 NULL_RTX, NULL_RTX);
f5c97640
RH
13044 distribute_links (LOG_LINKS (cc0_setter));
13045
6773e15f 13046 SET_INSN_DELETED (cc0_setter);
0ce2b299
EB
13047 if (cc0_setter == i2)
13048 i2 = NULL_RTX;
f5c97640
RH
13049 }
13050#endif
38d8473f
RK
13051 }
13052 else
13053 {
13054 PUT_REG_NOTE_KIND (note, REG_UNUSED);
663522cb 13055
38d8473f 13056 /* If there isn't already a REG_UNUSED note, put one
b30e1617
DJ
13057 here. Do not place a REG_DEAD note, even if
13058 the register is also used here; that would not
13059 match the algorithm used in lifetime analysis
13060 and can cause the consistency check in the
13061 scheduler to fail. */
38d8473f
RK
13062 if (! find_regno_note (tem, REG_UNUSED,
13063 REGNO (XEXP (note, 0))))
13064 place = tem;
13065 break;
d3a923ee
RH
13066 }
13067 }
13068 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
4b4bf941 13069 || (CALL_P (tem)
d3a923ee
RH
13070 && find_reg_fusage (tem, USE, XEXP (note, 0))))
13071 {
13072 place = tem;
13073
13074 /* If we are doing a 3->2 combination, and we have a
13075 register which formerly died in i3 and was not used
13076 by i2, which now no longer dies in i3 and is used in
13077 i2 but does not die in i2, and place is between i2
13078 and i3, then we may need to move a link from place to
13079 i2. */
6fb5fa3c 13080 if (i2 && DF_INSN_LUID (place) > DF_INSN_LUID (i2)
663522cb 13081 && from_insn
6fb5fa3c 13082 && DF_INSN_LUID (from_insn) > DF_INSN_LUID (i2)
d3a923ee
RH
13083 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
13084 {
13085 rtx links = LOG_LINKS (place);
13086 LOG_LINKS (place) = 0;
13087 distribute_links (links);
13088 }
13089 break;
13090 }
13091
a813c111 13092 if (tem == BB_HEAD (bb))
230d793d 13093 break;
38d8473f 13094 }
663522cb 13095
38d8473f 13096 }
230d793d
RS
13097
13098 /* If the register is set or already dead at PLACE, we needn't do
e5e809f4 13099 anything with this note if it is still a REG_DEAD note.
e8679703 13100 We check here if it is set at all, not if it is totally replaced,
e5e809f4
JL
13101 which is what `dead_or_set_p' checks, so also check for it being
13102 set partially. */
13103
230d793d
RS
13104 if (place && REG_NOTE_KIND (note) == REG_DEAD)
13105 {
770ae6cc 13106 unsigned int regno = REGNO (XEXP (note, 0));
829f8ff7 13107 reg_stat_type *rsp = VEC_index (reg_stat_type, reg_stat, regno);
e7139885 13108
230d793d
RS
13109 if (dead_or_set_p (place, XEXP (note, 0))
13110 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
13111 {
13112 /* Unless the register previously died in PLACE, clear
5eaad481 13113 last_death. [I no longer understand why this is
230d793d 13114 being done.] */
829f8ff7
ILT
13115 if (rsp->last_death != place)
13116 rsp->last_death = 0;
230d793d
RS
13117 place = 0;
13118 }
13119 else
829f8ff7 13120 rsp->last_death = place;
230d793d
RS
13121
13122 /* If this is a death note for a hard reg that is occupying
13123 multiple registers, ensure that we are still using all
13124 parts of the object. If we find a piece of the object
03afaf36
R
13125 that is unused, we must arrange for an appropriate REG_DEAD
13126 note to be added for it. However, we can't just emit a USE
13127 and tag the note to it, since the register might actually
13128 be dead; so we recurse, and the recursive call then finds
13129 the previous insn that used this register. */
230d793d
RS
13130
13131 if (place && regno < FIRST_PSEUDO_REGISTER
66fd46b6 13132 && hard_regno_nregs[regno][GET_MODE (XEXP (note, 0))] > 1)
230d793d 13133 {
09e18274 13134 unsigned int endregno = END_HARD_REGNO (XEXP (note, 0));
230d793d 13135 int all_used = 1;
770ae6cc 13136 unsigned int i;
230d793d
RS
13137
13138 for (i = regno; i < endregno; i++)
03afaf36
R
13139 if ((! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
13140 && ! find_regno_fusage (place, USE, i))
13141 || dead_or_set_regno_p (place, i))
13142 all_used = 0;
a394b17b 13143
230d793d
RS
13144 if (! all_used)
13145 {
13146 /* Put only REG_DEAD notes for pieces that are
03afaf36 13147 not already dead or set. */
230d793d 13148
03afaf36 13149 for (i = regno; i < endregno;
66fd46b6 13150 i += hard_regno_nregs[i][reg_raw_mode[i]])
230d793d 13151 {
e50126e8 13152 rtx piece = regno_reg_rtx[i];
f6366fc7 13153 basic_block bb = this_basic_block;
230d793d 13154
03afaf36 13155 if (! dead_or_set_p (place, piece)
230d793d
RS
13156 && ! reg_bitfield_target_p (piece,
13157 PATTERN (place)))
03afaf36 13158 {
efc0b2bd
ILT
13159 rtx new_note = alloc_reg_note (REG_DEAD, piece,
13160 NULL_RTX);
03afaf36
R
13161
13162 distribute_notes (new_note, place, place,
4bbae09f 13163 NULL_RTX, NULL_RTX, NULL_RTX);
03afaf36 13164 }
c762163e
R
13165 else if (! refers_to_regno_p (i, i + 1,
13166 PATTERN (place), 0)
13167 && ! find_regno_fusage (place, USE, i))
13168 for (tem = PREV_INSN (place); ;
13169 tem = PREV_INSN (tem))
13170 {
b5b8b0ac 13171 if (!NONDEBUG_INSN_P (tem))
c762163e 13172 {
a813c111 13173 if (tem == BB_HEAD (bb))
6fb5fa3c 13174 break;
c762163e
R
13175 continue;
13176 }
13177 if (dead_or_set_p (tem, piece)
13178 || reg_bitfield_target_p (piece,
13179 PATTERN (tem)))
13180 {
65c5f2a6 13181 add_reg_note (tem, REG_UNUSED, piece);
c762163e
R
13182 break;
13183 }
13184 }
13185
230d793d
RS
13186 }
13187
13188 place = 0;
13189 }
13190 }
13191 }
13192 break;
13193
13194 default:
13195 /* Any other notes should not be present at this point in the
13196 compilation. */
341c100f 13197 gcc_unreachable ();
230d793d
RS
13198 }
13199
13200 if (place)
13201 {
13202 XEXP (note, 1) = REG_NOTES (place);
13203 REG_NOTES (place) = note;
13204 }
13205
13206 if (place2)
efc0b2bd 13207 add_reg_note (place2, REG_NOTE_KIND (note), XEXP (note, 0));
230d793d
RS
13208 }
13209}
13210\f
13211/* Similarly to above, distribute the LOG_LINKS that used to be present on
8c03ca00
EB
13212 I3, I2, and I1 to new locations. This is also called to add a link
13213 pointing at I3 when I3's destination is changed. */
230d793d
RS
13214
13215static void
79a490a9 13216distribute_links (rtx links)
230d793d
RS
13217{
13218 rtx link, next_link;
13219
13220 for (link = links; link; link = next_link)
13221 {
13222 rtx place = 0;
13223 rtx insn;
13224 rtx set, reg;
13225
13226 next_link = XEXP (link, 1);
13227
13228 /* If the insn that this link points to is a NOTE or isn't a single
13229 set, ignore it. In the latter case, it isn't clear what we
663522cb 13230 can do other than ignore the link, since we can't tell which
230d793d
RS
13231 register it was for. Such links wouldn't be used by combine
13232 anyway.
13233
13234 It is not possible for the destination of the target of the link to
13235 have been changed by combine. The only potential for this is if we
13236 replace I3, I2, and I1 by I3 and I2. But in that case the
13237 destination of I2 also remains unchanged. */
13238
4b4bf941 13239 if (NOTE_P (XEXP (link, 0))
230d793d
RS
13240 || (set = single_set (XEXP (link, 0))) == 0)
13241 continue;
13242
13243 reg = SET_DEST (set);
13244 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
230d793d
RS
13245 || GET_CODE (reg) == STRICT_LOW_PART)
13246 reg = XEXP (reg, 0);
13247
13248 /* A LOG_LINK is defined as being placed on the first insn that uses
13249 a register and points to the insn that sets the register. Start
13250 searching at the next insn after the target of the link and stop
13251 when we reach a set of the register or the end of the basic block.
13252
13253 Note that this correctly handles the link that used to point from
5089e22e 13254 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
13255 since most links don't point very far away. */
13256
13257 for (insn = NEXT_INSN (XEXP (link, 0));
f6366fc7 13258 (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR
a813c111 13259 || BB_HEAD (this_basic_block->next_bb) != insn));
230d793d 13260 insn = NEXT_INSN (insn))
b5b8b0ac
AO
13261 if (DEBUG_INSN_P (insn))
13262 continue;
13263 else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
230d793d
RS
13264 {
13265 if (reg_referenced_p (reg, PATTERN (insn)))
13266 place = insn;
13267 break;
13268 }
4b4bf941 13269 else if (CALL_P (insn)
663522cb 13270 && find_reg_fusage (insn, USE, reg))
6e2d1486
RK
13271 {
13272 place = insn;
13273 break;
13274 }
892c9f1f
RK
13275 else if (INSN_P (insn) && reg_set_p (reg, insn))
13276 break;
230d793d
RS
13277
13278 /* If we found a place to put the link, place it there unless there
13279 is already a link to the same insn as LINK at that point. */
13280
13281 if (place)
13282 {
13283 rtx link2;
13284
13285 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
13286 if (XEXP (link2, 0) == XEXP (link, 0))
13287 break;
13288
13289 if (link2 == 0)
13290 {
13291 XEXP (link, 1) = LOG_LINKS (place);
13292 LOG_LINKS (place) = link;
abe6e52f
RK
13293
13294 /* Set added_links_insn to the earliest insn we added a
13295 link to. */
663522cb 13296 if (added_links_insn == 0
6fb5fa3c 13297 || DF_INSN_LUID (added_links_insn) > DF_INSN_LUID (place))
abe6e52f 13298 added_links_insn = place;
230d793d
RS
13299 }
13300 }
13301 }
13302}
13303\f
67962db5
RS
13304/* Subroutine of unmentioned_reg_p and callback from for_each_rtx.
13305 Check whether the expression pointed to by LOC is a register or
13306 memory, and if so return 1 if it isn't mentioned in the rtx EXPR.
13307 Otherwise return zero. */
13308
13309static int
13310unmentioned_reg_p_1 (rtx *loc, void *expr)
13311{
13312 rtx x = *loc;
13313
13314 if (x != NULL_RTX
3c0cb5de 13315 && (REG_P (x) || MEM_P (x))
67962db5
RS
13316 && ! reg_mentioned_p (x, (rtx) expr))
13317 return 1;
13318 return 0;
13319}
13320
13321/* Check for any register or memory mentioned in EQUIV that is not
13322 mentioned in EXPR. This is used to restrict EQUIV to "specializations"
13323 of EXPR where some registers may have been replaced by constants. */
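/* E.g. (illustration only): with EXPR (plus (reg 100) (reg 101)) and
   EQUIV (plus (reg 100) (const_int 8)), this returns false; if EQUIV
   instead mentioned (reg 102), which does not occur in EXPR, it would
   return true.  */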
13324
13325static bool
13326unmentioned_reg_p (rtx equiv, rtx expr)
13327{
13328 return for_each_rtx (&equiv, unmentioned_reg_p_1, expr);
13329}
13330\f
230d793d 13331void
79a490a9 13332dump_combine_stats (FILE *file)
230d793d 13333{
ab532386 13334 fprintf
230d793d
RS
13335 (file,
13336 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
13337 combine_attempts, combine_merges, combine_extras, combine_successes);
13338}
13339
13340void
79a490a9 13341dump_combine_total_stats (FILE *file)
230d793d 13342{
ab532386 13343 fprintf
230d793d
RS
13344 (file,
13345 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
13346 total_attempts, total_merges, total_extras, total_successes);
13347}
ef330312 13348\f
ef330312
PB
13349static bool
13350gate_handle_combine (void)
13351{
13352 return (optimize > 0);
13353}
13354
13355/* Try combining insns through substitution. */
c2924966 13356static unsigned int
ef330312
PB
13357rest_of_handle_combine (void)
13358{
6fb5fa3c
DB
13359 int rebuild_jump_labels_after_combine;
13360
13361 df_set_flags (DF_LR_RUN_DCE + DF_DEFER_INSN_RESCAN);
13362 df_note_add_problem ();
13363 df_analyze ();
13364
13365 regstat_init_n_sets_and_refs ();
13366
13367 rebuild_jump_labels_after_combine
ef330312
PB
13368 = combine_instructions (get_insns (), max_reg_num ());
13369
13370 /* Combining insns may have turned an indirect jump into a
13371 direct jump. Rebuild the JUMP_LABEL fields of jumping
13372 instructions. */
13373 if (rebuild_jump_labels_after_combine)
13374 {
13375 timevar_push (TV_JUMP);
13376 rebuild_jump_labels (get_insns ());
6fb5fa3c 13377 cleanup_cfg (0);
ef330312 13378 timevar_pop (TV_JUMP);
ef330312 13379 }
6fb5fa3c
DB
13380
13381 regstat_free_n_sets_and_refs ();
c2924966 13382 return 0;
ef330312
PB
13383}
13384
8ddbbcae 13385struct rtl_opt_pass pass_combine =
ef330312 13386{
8ddbbcae
JH
13387 {
13388 RTL_PASS,
ef330312
PB
13389 "combine", /* name */
13390 gate_handle_combine, /* gate */
13391 rest_of_handle_combine, /* execute */
13392 NULL, /* sub */
13393 NULL, /* next */
13394 0, /* static_pass_number */
13395 TV_COMBINE, /* tv_id */
d25aa7ab 13396 PROP_cfglayout, /* properties_required */
ef330312
PB
13397 0, /* properties_provided */
13398 0, /* properties_destroyed */
13399 0, /* todo_flags_start */
13400 TODO_dump_func |
a36b8a1e 13401 TODO_df_finish | TODO_verify_rtl_sharing |
ef330312 13402 TODO_ggc_collect, /* todo_flags_finish */
8ddbbcae 13403 }
ef330312 13404};