]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/stmt.c
tree.def (RTL_EXPR): Remove.
[thirdparty/gcc.git] / gcc / stmt.c
CommitLineData
5e6908ea 1/* Expands front end tree to back end RTL for GCC
4559fd9e 2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
a6dd4094 3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
28d81abb 4
1322177d 5This file is part of GCC.
28d81abb 6
1322177d
LB
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
28d81abb 11
1322177d
LB
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
28d81abb
RK
16
17You should have received a copy of the GNU General Public License
1322177d
LB
18along with GCC; see the file COPYING. If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA. */
28d81abb 21
28d81abb
RK
22/* This file handles the generation of rtl code from tree structure
23 above the level of expressions, using subroutines in exp*.c and emit-rtl.c.
24 It also creates the rtl expressions for parameters and auto variables
25 and has full responsibility for allocating stack slots.
26
27 The functions whose names start with `expand_' are called by the
28 parser to generate RTL instructions for various kinds of constructs.
29
30 Some control and binding constructs require calling several such
31 functions at different times. For example, a simple if-then
32 is expanded by calling `expand_start_cond' (with the condition-expression
33 as argument) before parsing the then-clause and calling `expand_end_cond'
34 after parsing the then-clause. */
35
36#include "config.h"
670ee920 37#include "system.h"
4977bab6
ZW
38#include "coretypes.h"
39#include "tm.h"
ccd043a9 40
28d81abb
RK
41#include "rtl.h"
42#include "tree.h"
6baf1cc8 43#include "tm_p.h"
28d81abb 44#include "flags.h"
6adb4e3a 45#include "except.h"
28d81abb 46#include "function.h"
28d81abb 47#include "insn-config.h"
28d81abb 48#include "expr.h"
e78d8e51 49#include "libfuncs.h"
28d81abb 50#include "hard-reg-set.h"
28d81abb
RK
51#include "loop.h"
52#include "recog.h"
ca695ac9 53#include "machmode.h"
10f0ad3d 54#include "toplev.h"
d6f4ec51 55#include "output.h"
87ff9c8e 56#include "ggc.h"
43577e6b 57#include "langhooks.h"
969d70ca 58#include "predict.h"
9bb231fd 59#include "optabs.h"
61f71b34 60#include "target.h"
66fd46b6 61#include "regs.h"
28d81abb
RK
62\f
63/* Functions and data structures for expanding case statements. */
64
65/* Case label structure, used to hold info on labels within case
66 statements. We handle "range" labels; for a single-value label
67 as in C, the high and low limits are the same.
68
5720c7e7
RK
69 An AVL tree of case nodes is initially created, and later transformed
70 to a list linked via the RIGHT fields in the nodes. Nodes with
71 higher case values are later in the list.
28d81abb
RK
72
73 Switch statements can be output in one of two forms. A branch table
74 is used if there are more than a few labels and the labels are dense
75 within the range between the smallest and largest case value. If a
76 branch table is used, no further manipulations are done with the case
77 node chain.
78
79 The alternative to the use of a branch table is to generate a series
80 of compare and jump insns. When that is done, we use the LEFT, RIGHT,
81 and PARENT fields to hold a binary tree. Initially the tree is
de14fd73
RK
82 totally unbalanced, with everything on the right. We balance the tree
83 with nodes on the left having lower case values than the parent
28d81abb
RK
84 and nodes on the right having higher values. We then output the tree
85 in order. */
86
/* One case label (or case range) inside a switch statement.  Nodes are
   first kept in an AVL tree and later relinked into a list/binary tree
   as described in the comment above.  */

struct case_node GTY(())
{
  struct case_node	*left;	/* Left son in binary tree */
  struct case_node	*right;	/* Right son in binary tree; also node chain */
  struct case_node	*parent; /* Parent of node in binary tree */
  tree			low;	/* Lowest index value for this label */
  tree			high;	/* Highest index value for this label */
  tree			code_label; /* Label to jump to when node matches */
  int			balance; /* Balance factor, used while the nodes
				    still form an AVL tree.  */
};
97
typedef struct case_node case_node;
typedef struct case_node *case_node_ptr;

/* These are used by estimate_case_costs and balance_case_nodes.  */

/* Cost of comparing against each small constant; indexed through the
   COST_TABLE macro below.
   This must be a signed type, and non-ANSI compilers lack signed char.  */
static short cost_table_[129];
/* Nonzero when the cost table should be consulted
   (set by estimate_case_costs -- TODO confirm against its definition).  */
static int use_cost_table;
/* Nonzero once cost_table_ has been filled in, so it is computed
   at most once.  */
static int cost_table_initialized;

/* Special care is needed because we allow -1, but TREE_INT_CST_LOW
   is unsigned.  The +1 shifts index -1 to slot 0.  */
#define COST_TABLE(I)  cost_table_[(unsigned HOST_WIDE_INT) ((I) + 1)]
28d81abb
RK
111\f
112/* Stack of control and binding constructs we are currently inside.
113
114 These constructs begin when you call `expand_start_WHATEVER'
115 and end when you call `expand_end_WHATEVER'. This stack records
116 info about how the construct began that tells the end-function
117 what to do. It also may provide information about the construct
118 to alter the behavior of other constructs within the body.
119 For example, they may affect the behavior of C `break' and `continue'.
120
121 Each construct gets one `struct nesting' object.
122 All of these objects are chained through the `all' field.
123 `nesting_stack' points to the first object (innermost construct).
124 The position of an entry on `nesting_stack' is in its `depth' field.
125
126 Each type of construct has its own individual stack.
6af8eb57 127 For example, loops have `cond_stack'. Each object points to the
28d81abb
RK
128 next object of the same type through the `next' field.
129
130 Some constructs are visible to `break' exit-statements and others
131 are not. Which constructs are visible depends on the language.
132 Therefore, the data structure allows each construct to be visible
133 or not, according to the args given when the construct is started.
134 The construct is visible if the `exit_label' field is non-null.
135 In that case, the value should be a CODE_LABEL rtx. */
136
/* One entry on the stack of pending control/binding constructs.
   See the long comment above for the overall invariants.  */

struct nesting GTY(())
{
  /* Next object on the `all' chain; nesting_stack points at the
     innermost (most recently started) construct.  */
  struct nesting *all;
  /* Next object of the same kind (e.g. next enclosing cond for
     cond_stack entries).  */
  struct nesting *next;
  /* Position of this entry on nesting_stack.  */
  int depth;
  /* Non-null (a CODE_LABEL rtx) if this construct is visible to
     `break'-style exit statements.  */
  rtx exit_label;
  /* Discriminator for the `data' union below.  */
  enum nesting_desc {
    COND_NESTING,
    BLOCK_NESTING,
    CASE_NESTING
  } desc;
  union nesting_u
    {
      /* For conds (if-then and if-then-else statements).  */
      struct nesting_cond
	{
	  /* Label for the end of the if construct.
	     There is none if EXITFLAG was not set
	     and no `else' has been seen yet.  */
	  rtx endif_label;
	  /* Label for the end of this alternative.
	     This may be the end of the if or the next else/elseif.  */
	  rtx next_label;
	} GTY ((tag ("COND_NESTING"))) cond;
      /* For variable binding contours.  */
      struct nesting_block
	{
	  /* Sequence number of this binding contour within the function,
	     in order of entry.  */
	  int block_start_count;
	  /* Nonzero => value to restore stack to on exit.  */
	  rtx stack_level;
	  /* The NOTE that starts this contour.
	     Used by expand_goto to check whether the destination
	     is within each contour or not.  */
	  rtx first_insn;
	  /* Innermost containing binding contour that has a stack level.  */
	  struct nesting *innermost_stack_block;
	  /* List of cleanups to be run on exit from this contour.
	     This is a list of expressions to be evaluated.
	     The TREE_PURPOSE of each link is the ..._DECL node
	     which the cleanup pertains to.  */
	  tree cleanups;
	  /* List of cleanup-lists of blocks containing this block,
	     as they were at the locus where this block appears.
	     There is an element for each containing block,
	     ordered innermost containing block first.
	     The tail of this list can be 0,
	     if all remaining elements would be empty lists.
	     The element's TREE_VALUE is the cleanup-list of that block,
	     which may be null.  */
	  tree outer_cleanups;
	  /* Chain of labels defined inside this binding contour.
	     For contours that have stack levels or cleanups.  */
	  struct label_chain *label_chain;
	  /* Nonzero if this is associated with an EH region.  */
	  int exception_region;
	  /* The saved target_temp_slot_level from our outer block.
	     We may reset target_temp_slot_level to be the level of
	     this block, if that is done, target_temp_slot_level
	     reverts to the saved target_temp_slot_level at the very
	     end of the block.  */
	  int block_target_temp_slot_level;
	  /* True if we are currently emitting insns in an area of
	     output code that is controlled by a conditional
	     expression.  This is used by the cleanup handling code to
	     generate conditional cleanup actions.  */
	  int conditional_code;
	  /* A place to move the start of the exception region for any
	     of the conditional cleanups, must be at the end or after
	     the start of the last unconditional cleanup, and before any
	     conditional branch points.  */
	  rtx last_unconditional_cleanup;
	} GTY ((tag ("BLOCK_NESTING"))) block;
      /* For switch (C) or case (Pascal) statements.  */
      struct nesting_case
	{
	  /* The insn after which the case dispatch should finally
	     be emitted.  Zero for a dummy.  */
	  rtx start;
	  /* A list of case labels; it is first built as an AVL tree.
	     During expand_end_case, this is converted to a list, and may be
	     rearranged into a nearly balanced binary tree.  */
	  struct case_node *case_list;
	  /* Label to jump to if no case matches.  */
	  tree default_label;
	  /* The expression to be dispatched on.  */
	  tree index_expr;
	  /* Type that INDEX_EXPR should be converted to.  */
	  tree nominal_type;
	  /* Name of this kind of statement, for warnings.  */
	  const char *printname;
	  /* Used to save no_line_numbers till we see the first case label.
	     We set this to -1 when we see the first case label in this
	     case statement.  */
	  int line_number_status;
	} GTY ((tag ("CASE_NESTING"))) case_stmt;
    } GTY ((desc ("%1.desc"))) data;
};
236
28d81abb
RK
/* Allocate and return a new `struct nesting'.
   The object is garbage-collected (ggc) storage, so it is never
   explicitly freed; POPSTACK merely unlinks entries.  */

#define ALLOC_NESTING() ggc_alloc (sizeof (struct nesting))
6ed1d6c5
RS
/* Pop the nesting stack element by element until we pop off
   the element which is at the top of STACK.
   Update all the other stacks, popping off elements from them
   as we pop them from nesting_stack.
   Note: each per-kind stack (cond, block, stack_block, case) is only
   popped when its top is the entry currently being removed, which keeps
   all the stacks consistent with the single `all' chain.  */

#define POPSTACK(STACK)					\
do { struct nesting *target = STACK;			\
     struct nesting *this;				\
     do { this = nesting_stack;				\
	  if (cond_stack == this)			\
	    cond_stack = cond_stack->next;		\
	  if (block_stack == this)			\
	    block_stack = block_stack->next;		\
	  if (stack_block_stack == this)		\
	    stack_block_stack = stack_block_stack->next; \
	  if (case_stack == this)			\
	    case_stack = case_stack->next;		\
	  nesting_depth = nesting_stack->depth - 1;	\
	  nesting_stack = this->all; }			\
     while (this != target); } while (0)
28d81abb
RK
261\f
262/* In some cases it is impossible to generate code for a forward goto
263 until the label definition is seen. This happens when it may be necessary
264 for the goto to reset the stack pointer: we don't yet know how to do that.
265 So expand_goto puts an entry on this fixup list.
266 Each time a binding contour that resets the stack is exited,
267 we check each fixup.
268 If the target label has now been defined, we can insert the proper code. */
269
/* One pending forward-goto whose code could not be fully generated when
   the goto was seen (see the comment above).  Entries live on
   goto_fixup_chain and are resolved by fixup_gotos.  */

struct goto_fixup GTY(())
{
  /* Points to following fixup.  */
  struct goto_fixup *next;
  /* Points to the insn before the jump insn.
     If more code must be inserted, it goes after this insn.  */
  rtx before_jump;
  /* The LABEL_DECL that this jump is jumping to, or 0
     for break, continue or return.  */
  tree target;
  /* The BLOCK for the place where this goto was found.  */
  tree context;
  /* The CODE_LABEL rtx that this is jumping to.  */
  rtx target_rtl;
  /* Number of binding contours started in current function
     before the label reference.  */
  int block_start_count;
  /* The outermost stack level that should be restored for this jump.
     Each time a binding contour that resets the stack is exited,
     if the target label is *not* yet defined, this slot is updated.  */
  rtx stack_level;
  /* List of lists of cleanup expressions to be run by this goto.
     There is one element for each block that this goto is within.
     The tail of this list can be 0,
     if all remaining elements would be empty.
     The TREE_VALUE contains the cleanup list of that block as of the
     time this goto was seen.
     The TREE_ADDRESSABLE flag is 1 for a block that has been exited.  */
  tree cleanup_list_list;
};
300
28d81abb
RK
/* Within any binding contour that must restore a stack level,
   all labels are recorded with a chain of these structures.  */

struct label_chain GTY(())
{
  /* Points to following label in the chain.  */
  struct label_chain *next;
  /* The LABEL_DECL recorded for this contour.  */
  tree label;
};
e9a25f70 310
/* Per-function statement-expansion state; reached through cfun->stmt
   via the accessor macros below.  Allocated (zeroed) by
   init_stmt_for_function.  */

struct stmt_status GTY(())
{
  /* Chain of all pending binding contours.  */
  struct nesting * x_block_stack;

  /* If any new stacks are added here, add them to POPSTACKS too.  */

  /* Chain of all pending binding contours that restore stack levels
     or have cleanups.  */
  struct nesting * x_stack_block_stack;

  /* Chain of all pending conditional statements.  */
  struct nesting * x_cond_stack;

  /* Chain of all pending case or switch statements.  */
  struct nesting * x_case_stack;

  /* Separate chain including all of the above,
     chained through the `all' field.  */
  struct nesting * x_nesting_stack;

  /* Number of entries on nesting_stack now.  */
  int x_nesting_depth;

  /* Number of binding contours started so far in this function.  */
  int x_block_start_count;

  /* Location of last line-number note, whether we actually
     emitted it or not.  */
  location_t x_emit_locus;

  /* Chain of pending fixups for forward gotos (see struct goto_fixup).  */
  struct goto_fixup *x_goto_fixup_chain;
};
344
01d939e8
BS
/* Accessor macros: all per-function statement state lives in
   cfun->stmt (a `struct stmt_status', defined above).  */

#define block_stack (cfun->stmt->x_block_stack)
#define stack_block_stack (cfun->stmt->x_stack_block_stack)
#define cond_stack (cfun->stmt->x_cond_stack)
#define case_stack (cfun->stmt->x_case_stack)
#define nesting_stack (cfun->stmt->x_nesting_stack)
#define nesting_depth (cfun->stmt->x_nesting_depth)
#define current_block_start_count (cfun->stmt->x_block_start_count)
#define emit_locus (cfun->stmt->x_emit_locus)
#define goto_fixup_chain (cfun->stmt->x_goto_fixup_chain)

/* Nonzero if we are using EH to handle cleanups.  */
int using_eh_for_cleanups_p = 0;

/* Forward declarations of subroutines local to this file.  */

static int n_occurrences (int, const char *);
static bool decl_conflicts_with_clobbers_p (tree, const HARD_REG_SET);
static void expand_goto_internal (tree, rtx, rtx);
static int expand_fixup (tree, rtx, rtx);
static void expand_nl_goto_receiver (void);
static void fixup_gotos (struct nesting *, rtx, tree, rtx, int);
static bool check_operand_nalternatives (tree, tree);
static bool check_unique_operand_names (tree, tree);
static char *resolve_operand_name_1 (char *, tree, tree);
static void expand_null_return_1 (rtx);
static enum br_predictor return_prediction (rtx);
static rtx shift_return_value (rtx);
static void expand_value_return (rtx);
static void expand_cleanups (tree, int, int);
static void do_jump_if_equal (rtx, rtx, rtx, int);
static int estimate_case_costs (case_node_ptr);
static bool same_case_target_p (rtx, rtx);
static void strip_default_case_nodes (case_node_ptr *, rtx);
static bool lshift_cheap_p (void);
static int case_bit_test_cmp (const void *, const void *);
static void emit_case_bit_tests (tree, tree, tree, tree, case_node_ptr, rtx);
static void group_case_nodes (case_node_ptr);
static void balance_case_nodes (case_node_ptr *, case_node_ptr);
static int node_has_low_bound (case_node_ptr, tree);
static int node_has_high_bound (case_node_ptr, tree);
static int node_is_bounded (case_node_ptr, tree);
static void emit_jump_if_reachable (rtx);
static void emit_case_nodes (rtx, case_node_ptr, rtx, tree);
static struct case_node *case_tree2list (case_node *, case_node *);
28d81abb 387\f
/* Record that cleanups will be handled through the EH machinery
   (sets the using_eh_for_cleanups_p flag declared above).  */

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}
393
/* Allocate a fresh, zero-initialized stmt_status for the current
   function (cfun).  Called once per function before statements are
   expanded.  */

void
init_stmt_for_function (void)
{
  cfun->stmt = ggc_alloc_cleared (sizeof (struct stmt_status));
}
3f1d071b 399\f
/* Record the current file and line.  Called from emit_line_note.  */

void
set_file_and_line_for_stmt (location_t location)
{
  /* If we're outputting an inline function, and we add a line note,
     there may be no CFUN->STMT information.  So, there's no need to
     update it.  */
  if (cfun->stmt)
    emit_locus = location;
}
3f1d071b 411
28d81abb
RK
/* Emit a no-op instruction.
   Only done when not optimizing, and only when the last emitted insn is
   a label, or a note with no real insn before it -- presumably so some
   real insn follows the label/note; TODO confirm against callers.  */

void
emit_nop (void)
{
  rtx last_insn;

  last_insn = get_last_insn ();
  if (!optimize
      && (GET_CODE (last_insn) == CODE_LABEL
	  || (GET_CODE (last_insn) == NOTE
	      && prev_real_insn (last_insn) == 0)))
    emit_insn (gen_nop ());
}
426\f
427/* Return the rtx-label that corresponds to a LABEL_DECL,
428 creating it if necessary. */
429
430rtx
46c5ad27 431label_rtx (tree label)
28d81abb
RK
432{
433 if (TREE_CODE (label) != LABEL_DECL)
434 abort ();
435
19e7881c 436 if (!DECL_RTL_SET_P (label))
6de9cd9a
DN
437 {
438 rtx r = gen_label_rtx ();
439 SET_DECL_RTL (label, r);
440 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
441 LABEL_PRESERVE_P (r) = 1;
442 }
28d81abb 443
19e7881c 444 return DECL_RTL (label);
28d81abb
RK
445}
446
046e4e36
ZW
447/* As above, but also put it on the forced-reference list of the
448 function that contains it. */
449rtx
46c5ad27 450force_label_rtx (tree label)
046e4e36
ZW
451{
452 rtx ref = label_rtx (label);
453 tree function = decl_function_context (label);
454 struct function *p;
455
456 if (!function)
457 abort ();
458
6de9cd9a 459 if (function != current_function_decl)
046e4e36
ZW
460 p = find_function_data (function);
461 else
462 p = cfun;
463
464 p->expr->x_forced_labels = gen_rtx_EXPR_LIST (VOIDmode, ref,
465 p->expr->x_forced_labels);
466 return ref;
467}
19e7881c 468
28d81abb
RK
/* Add an unconditional jump to LABEL as the next sequential instruction.
   Flushes any pending stack adjustment first, then emits the jump
   followed by a barrier (control cannot fall through).  */

void
emit_jump (rtx label)
{
  do_pending_stack_adjust ();
  emit_jump_insn (gen_jump (label));
  emit_barrier ();
}
478
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

void
expand_computed_goto (tree exp)
{
  rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);

  /* The expression may have been expanded in a different address
     mode; bring it to Pmode for the indirect jump.  */
  x = convert_memory_address (Pmode, x);

  emit_queue ();
  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
494/* Handle goto statements and the labels that they can go to. */
495
496/* Specify the location in the RTL code of a label LABEL,
497 which is a LABEL_DECL tree node.
498
499 This is used for the kind of label that the user can jump to with a
500 goto statement, and for alternatives of a switch or case statement.
501 RTL labels generated for loops and conditionals don't go through here;
502 they are generated directly at the RTL level, by other functions below.
503
504 Note that this has nothing to do with defining label *names*.
505 Languages vary in how they do that and what that even means. */
506
507void
46c5ad27 508expand_label (tree label)
28d81abb
RK
509{
510 struct label_chain *p;
6de9cd9a 511 rtx label_r = label_rtx (label);
28d81abb
RK
512
513 do_pending_stack_adjust ();
6de9cd9a 514 emit_label (label_r);
28d81abb
RK
515 if (DECL_NAME (label))
516 LABEL_NAME (DECL_RTL (label)) = IDENTIFIER_POINTER (DECL_NAME (label));
517
6de9cd9a
DN
518 if (DECL_NONLOCAL (label))
519 {
520 expand_nl_goto_receiver ();
521 nonlocal_goto_handler_labels
522 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
523 nonlocal_goto_handler_labels);
524 }
525
526 if (FORCED_LABEL (label))
527 forced_labels = gen_rtx_EXPR_LIST (VOIDmode, label_r, forced_labels);
528
529 if (DECL_NONLOCAL (label) || FORCED_LABEL (label))
530 maybe_set_first_label_num (label_r);
531
28d81abb
RK
532 if (stack_block_stack != 0)
533 {
703ad42b 534 p = ggc_alloc (sizeof (struct label_chain));
28d81abb
RK
535 p->next = stack_block_stack->data.block.label_chain;
536 stack_block_stack->data.block.label_chain = p;
537 p->label = label;
538 }
539}
540
28d81abb
RK
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

void
expand_goto (tree label)
{
#ifdef ENABLE_CHECKING
  /* Check for a nonlocal goto to a containing function.  Should have
     gotten translated to __builtin_nonlocal_goto.  */
  tree context = decl_function_context (label);
  if (context != 0 && context != current_function_decl)
    abort ();
#endif

  expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
558
/* Generate RTL code for a `goto' statement with target label BODY.
   LABEL should be a LABEL_REF.
   LAST_INSN, if non-0, is the rtx we should consider as the last
   insn emitted (for the purposes of cleaning up a return).  */

static void
expand_goto_internal (tree body, rtx label, rtx last_insn)
{
  struct nesting *block;
  rtx stack_level = 0;

  if (GET_CODE (label) != CODE_LABEL)
    abort ();

  /* If label has already been defined, we can tell now
     whether and how we must alter the stack level.  */

  if (PREV_INSN (label) != 0)
    {
      /* Find the innermost pending block that contains the label.
	 (Check containment by comparing insn-uids.)
	 Then restore the outermost stack level within that block,
	 and do cleanups of all blocks contained in it.  */
      for (block = block_stack; block; block = block->next)
	{
	  if (INSN_UID (block->data.block.first_insn) < INSN_UID (label))
	    break;
	  if (block->data.block.stack_level != 0)
	    stack_level = block->data.block.stack_level;
	  /* Execute the cleanups for blocks we are exiting.  */
	  if (block->data.block.cleanups != 0)
	    {
	      expand_cleanups (block->data.block.cleanups, 1, 1);
	      do_pending_stack_adjust ();
	    }
	}

      if (stack_level)
	{
	  /* Ensure stack adjust isn't done by emit_jump, as this
	     would clobber the stack pointer.  This one should be
	     deleted as dead by flow.  */
	  clear_pending_stack_adjust ();
	  do_pending_stack_adjust ();

	  /* Don't do this adjust if it's to the end label and this function
	     is to return with a depressed stack pointer.  */
	  if (label == return_label
	      && (((TREE_CODE (TREE_TYPE (current_function_decl))
		    == FUNCTION_TYPE)
		   && (TYPE_RETURNS_STACK_DEPRESSED
		       (TREE_TYPE (current_function_decl))))))
	    ;
	  else
	    emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
	}

      /* DECL_TOO_LATE marks a label we may no longer legally jump to
	 from here (set elsewhere -- TODO confirm where).  */
      if (body != 0 && DECL_TOO_LATE (body))
	error ("jump to `%s' invalidly jumps into binding contour",
	       IDENTIFIER_POINTER (DECL_NAME (body)));
    }
  /* Label not yet defined: may need to put this goto
     on the fixup list.  */
  else if (! expand_fixup (body, label, last_insn))
    {
      /* No fixup needed.  Record that the label is the target
	 of at least one goto that has no fixup.  */
      if (body != 0)
	TREE_ADDRESSABLE (body) = 1;
    }

  emit_jump (label);
}
632\f
/* Generate if necessary a fixup for a goto
   whose target label in tree structure (if any) is TREE_LABEL
   and whose target in rtl is RTL_LABEL.

   If LAST_INSN is nonzero, we pretend that the jump appears
   after insn LAST_INSN instead of at the current point in the insn stream.

   The fixup will be used later to insert insns just before the goto.
   Those insns will restore the stack level as appropriate for the
   target label, and will (in the case of C++) also invoke any object
   destructors which have to be invoked when we exit the scopes which
   are exited by the goto.

   Value is nonzero if a fixup is made.  */

static int
expand_fixup (tree tree_label, rtx rtl_label, rtx last_insn)
{
  struct nesting *block, *end_block;

  /* See if we can recognize which block the label will be output in.
     This is possible in some very common cases.
     If we succeed, set END_BLOCK to that block.
     Otherwise, set it to 0.  */

  if (cond_stack
      && (rtl_label == cond_stack->data.cond.endif_label
	  || rtl_label == cond_stack->data.cond.next_label))
    end_block = cond_stack;
  else
    end_block = 0;

  /* Now set END_BLOCK to the binding level to which we will return.  */

  if (end_block)
    {
      struct nesting *next_block = end_block->all;
      block = block_stack;

      /* First see if the END_BLOCK is inside the innermost binding level.
	 If so, then no cleanups or stack levels are relevant.  */
      while (next_block && next_block != block)
	next_block = next_block->all;

      if (next_block)
	return 0;

      /* Otherwise, set END_BLOCK to the innermost binding level
	 which is outside the relevant control-structure nesting.  */
      next_block = block_stack->next;
      for (block = block_stack; block != end_block; block = block->all)
	if (block == next_block)
	  next_block = next_block->next;
      end_block = next_block;
    }

  /* Does any containing block have a stack level or cleanups?
     If not, no fixup is needed, and that is the normal case
     (the only case, for standard C).  */
  for (block = block_stack; block != end_block; block = block->next)
    if (block->data.block.stack_level != 0
	|| block->data.block.cleanups != 0)
      break;

  if (block != end_block)
    {
      /* Ok, a fixup is needed.  Add a fixup to the list of such.  */
      struct goto_fixup *fixup = ggc_alloc (sizeof (struct goto_fixup));
      /* In case an old stack level is restored, make sure that comes
	 after any pending stack adjust.  */
      /* ?? If the fixup isn't to come at the present position,
	 doing the stack adjust here isn't useful.  Doing it with our
	 settings at that location isn't useful either.  Let's hope
	 someone does it!  */
      if (last_insn == 0)
	do_pending_stack_adjust ();
      fixup->target = tree_label;
      fixup->target_rtl = rtl_label;

      /* Create a BLOCK node and a corresponding matched set of
	 NOTE_INSN_BLOCK_BEG and NOTE_INSN_BLOCK_END notes at
	 this point.  The notes will encapsulate any and all fixup
	 code which we might later insert at this point in the insn
	 stream.  Also, the BLOCK node will be the parent (i.e. the
	 `SUPERBLOCK') of any other BLOCK nodes which we might create
	 later on when we are expanding the fixup code.

	 Note that optimization passes might move the *_BLOCK notes away,
	 so we use a NOTE_INSN_DELETED as a placeholder.  */

      {
	rtx original_before_jump
	  = last_insn ? last_insn : get_last_insn ();
	rtx start;
	rtx end;
	tree block;

	block = make_node (BLOCK);
	TREE_USED (block) = 1;

	BLOCK_CHAIN (block)
	  = BLOCK_CHAIN (DECL_INITIAL (current_function_decl));
	BLOCK_CHAIN (DECL_INITIAL (current_function_decl))
	  = block;

	start_sequence ();
	start = emit_note (NOTE_INSN_BLOCK_BEG);
	NOTE_BLOCK (start) = block;
	fixup->before_jump = emit_note (NOTE_INSN_DELETED);
	end = emit_note (NOTE_INSN_BLOCK_END);
	NOTE_BLOCK (end) = block;
	fixup->context = block;
	end_sequence ();
	emit_insn_after (start, original_before_jump);
      }

      fixup->block_start_count = current_block_start_count;
      fixup->stack_level = 0;
      fixup->cleanup_list_list
	= ((block->data.block.outer_cleanups
	    || block->data.block.cleanups)
	   ? tree_cons (NULL_TREE, block->data.block.cleanups,
			block->data.block.outer_cleanups)
	   : 0);
      fixup->next = goto_fixup_chain;
      goto_fixup_chain = fixup;
    }

  return block != 0;
}
cfc3d13f
RK
763\f
/* Expand any needed fixups in the outputmost binding level of the
   function.  FIRST_INSN is the first insn in the function.
   Thin wrapper around fixup_gotos with no enclosing block, no stack
   level, and no cleanup list.  */

void
expand_fixups (rtx first_insn)
{
  fixup_gotos (NULL, NULL_RTX, NULL_TREE, first_insn, 0);
}
ca695ac9 772
28d81abb
RK
773/* When exiting a binding contour, process all pending gotos requiring fixups.
774 THISBLOCK is the structure that describes the block being exited.
775 STACK_LEVEL is the rtx for the stack level to restore exiting this contour.
776 CLEANUP_LIST is a list of expressions to evaluate on exiting this contour.
777 FIRST_INSN is the insn that began this contour.
778
779 Gotos that jump out of this contour must restore the
780 stack level and do the cleanups before actually jumping.
781
cda26058
RK
782 DONT_JUMP_IN positive means report error if there is a jump into this
783 contour from before the beginning of the contour. This is also done if
784 STACK_LEVEL is nonzero unless DONT_JUMP_IN is negative. */
28d81abb 785
704f4dca 786static void
46c5ad27
AJ
787fixup_gotos (struct nesting *thisblock, rtx stack_level,
788 tree cleanup_list, rtx first_insn, int dont_jump_in)
28d81abb 789{
b3694847 790 struct goto_fixup *f, *prev;
28d81abb
RK
791
792 /* F is the fixup we are considering; PREV is the previous one. */
793 /* We run this loop in two passes so that cleanups of exited blocks
794 are run first, and blocks that are exited are marked so
795 afterwards. */
796
797 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
798 {
799 /* Test for a fixup that is inactive because it is already handled. */
800 if (f->before_jump == 0)
801 {
802 /* Delete inactive fixup from the chain, if that is easy to do. */
803 if (prev != 0)
804 prev->next = f->next;
805 }
806 /* Has this fixup's target label been defined?
807 If so, we can finalize it. */
808 else if (PREV_INSN (f->target_rtl) != 0)
809 {
b3694847 810 rtx cleanup_insns;
7629c936 811
28d81abb 812 /* If this fixup jumped into this contour from before the beginning
14a774a9
RK
813 of this contour, report an error. This code used to use
814 the first non-label insn after f->target_rtl, but that's
815 wrong since such can be added, by things like put_var_into_stack
816 and have INSN_UIDs that are out of the range of the block. */
28d81abb
RK
817 /* ??? Bug: this does not detect jumping in through intermediate
818 blocks that have stack levels or cleanups.
819 It detects only a problem with the innermost block
820 around the label. */
821 if (f->target != 0
cda26058
RK
822 && (dont_jump_in > 0 || (dont_jump_in == 0 && stack_level)
823 || cleanup_list)
14a774a9 824 && INSN_UID (first_insn) < INSN_UID (f->target_rtl)
28d81abb 825 && INSN_UID (first_insn) > INSN_UID (f->before_jump)
33bc3ff5 826 && ! DECL_ERROR_ISSUED (f->target))
28d81abb 827 {
ddd2d57e
RH
828 error ("%Jlabel '%D' used before containing binding contour",
829 f->target, f->target);
28d81abb 830 /* Prevent multiple errors for one label. */
33bc3ff5 831 DECL_ERROR_ISSUED (f->target) = 1;
28d81abb
RK
832 }
833
7629c936
RS
834 /* We will expand the cleanups into a sequence of their own and
835 then later on we will attach this new sequence to the insn
836 stream just ahead of the actual jump insn. */
837
838 start_sequence ();
839
023b57e6
RS
840 /* Temporarily restore the lexical context where we will
841 logically be inserting the fixup code. We do this for the
842 sake of getting the debugging information right. */
843
ae2bcd98
RS
844 lang_hooks.decls.pushlevel (0);
845 lang_hooks.decls.set_block (f->context);
7629c936
RS
846
847 /* Expand the cleanups for blocks this jump exits. */
28d81abb
RK
848 if (f->cleanup_list_list)
849 {
850 tree lists;
851 for (lists = f->cleanup_list_list; lists; lists = TREE_CHAIN (lists))
852 /* Marked elements correspond to blocks that have been closed.
853 Do their cleanups. */
854 if (TREE_ADDRESSABLE (lists)
855 && TREE_VALUE (lists) != 0)
7629c936 856 {
b39b8084 857 expand_cleanups (TREE_VALUE (lists), 1, 1);
7629c936
RS
858 /* Pop any pushes done in the cleanups,
859 in case function is about to return. */
860 do_pending_stack_adjust ();
861 }
28d81abb
RK
862 }
863
864 /* Restore stack level for the biggest contour that this
865 jump jumps out of. */
7393c642
RK
866 if (f->stack_level
867 && ! (f->target_rtl == return_label
c4a6c0f3
RK
868 && ((TREE_CODE (TREE_TYPE (current_function_decl))
869 == FUNCTION_TYPE)
4381f7c2 870 && (TYPE_RETURNS_STACK_DEPRESSED
c4a6c0f3 871 (TREE_TYPE (current_function_decl))))))
59257ff7 872 emit_stack_restore (SAVE_BLOCK, f->stack_level, f->before_jump);
7629c936
RS
873
874 /* Finish up the sequence containing the insns which implement the
875 necessary cleanups, and then attach that whole sequence to the
876 insn stream just ahead of the actual jump insn. Attaching it
877 at that point insures that any cleanups which are in fact
878 implicit C++ object destructions (which must be executed upon
879 leaving the block) appear (to the debugger) to be taking place
880 in an area of the generated code where the object(s) being
881 destructed are still "in scope". */
882
883 cleanup_insns = get_insns ();
ae2bcd98 884 lang_hooks.decls.poplevel (1, 0, 0);
7629c936
RS
885
886 end_sequence ();
2f937369 887 emit_insn_after (cleanup_insns, f->before_jump);
7629c936 888
28d81abb
RK
889 f->before_jump = 0;
890 }
891 }
892
6bc2f582
RK
893 /* For any still-undefined labels, do the cleanups for this block now.
894 We must do this now since items in the cleanup list may go out
0f41302f 895 of scope when the block ends. */
28d81abb
RK
896 for (prev = 0, f = goto_fixup_chain; f; prev = f, f = f->next)
897 if (f->before_jump != 0
898 && PREV_INSN (f->target_rtl) == 0
899 /* Label has still not appeared. If we are exiting a block with
900 a stack level to restore, that started before the fixup,
901 mark this stack level as needing restoration
6d2f8887 902 when the fixup is later finalized. */
28d81abb 903 && thisblock != 0
6bc2f582
RK
904 /* Note: if THISBLOCK == 0 and we have a label that hasn't appeared, it
905 means the label is undefined. That's erroneous, but possible. */
28d81abb
RK
906 && (thisblock->data.block.block_start_count
907 <= f->block_start_count))
908 {
909 tree lists = f->cleanup_list_list;
6bc2f582
RK
910 rtx cleanup_insns;
911
28d81abb
RK
912 for (; lists; lists = TREE_CHAIN (lists))
913 /* If the following elt. corresponds to our containing block
914 then the elt. must be for this block. */
915 if (TREE_CHAIN (lists) == thisblock->data.block.outer_cleanups)
6bc2f582
RK
916 {
917 start_sequence ();
ae2bcd98
RS
918 lang_hooks.decls.pushlevel (0);
919 lang_hooks.decls.set_block (f->context);
b39b8084 920 expand_cleanups (TREE_VALUE (lists), 1, 1);
f0959e58 921 do_pending_stack_adjust ();
6bc2f582 922 cleanup_insns = get_insns ();
ae2bcd98 923 lang_hooks.decls.poplevel (1, 0, 0);
6bc2f582 924 end_sequence ();
412c00dc
RK
925 if (cleanup_insns != 0)
926 f->before_jump
2f937369 927 = emit_insn_after (cleanup_insns, f->before_jump);
6bc2f582 928
e07ed33f 929 f->cleanup_list_list = TREE_CHAIN (lists);
6bc2f582 930 }
28d81abb
RK
931
932 if (stack_level)
933 f->stack_level = stack_level;
934 }
935}

/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int count = 0;

  for (; *s != '\0'; s++)
    if (*s == c)
      count++;

  return count;
}
28d81abb
RK
946\f
947/* Generate RTL for an asm statement (explicit assembler code).
4c46ea23
EB
948 STRING is a STRING_CST node containing the assembler code text,
949 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
950 insn is volatile; don't optimize it. */
28d81abb
RK
951
952void
46c5ad27 953expand_asm (tree string, int vol)
28d81abb 954{
4c46ea23
EB
955 rtx body;
956
957 if (TREE_CODE (string) == ADDR_EXPR)
958 string = TREE_OPERAND (string, 0);
959
839ee4bc 960 body = gen_rtx_ASM_INPUT (VOIDmode, TREE_STRING_POINTER (string));
4c46ea23
EB
961
962 MEM_VOLATILE_P (body) = vol;
28d81abb 963
4c46ea23 964 emit_insn (body);
28d81abb
RK
965}
966
40b18c0a
MM
967/* Parse the output constraint pointed to by *CONSTRAINT_P. It is the
968 OPERAND_NUMth output operand, indexed from zero. There are NINPUTS
969 inputs and NOUTPUTS outputs to this extended-asm. Upon return,
970 *ALLOWS_MEM will be TRUE iff the constraint allows the use of a
971 memory operand. Similarly, *ALLOWS_REG will be TRUE iff the
972 constraint allows the use of a register operand. And, *IS_INOUT
973 will be true if the operand is read-write, i.e., if it is used as
974 an input as well as an output. If *CONSTRAINT_P is not in
975 canonical form, it will be made canonical. (Note that `+' will be
14b493d6 976 replaced with `=' as part of this process.)
40b18c0a
MM
977
978 Returns TRUE if all went well; FALSE if an error occurred. */
979
980bool
46c5ad27
AJ
981parse_output_constraint (const char **constraint_p, int operand_num,
982 int ninputs, int noutputs, bool *allows_mem,
983 bool *allows_reg, bool *is_inout)
40b18c0a
MM
984{
985 const char *constraint = *constraint_p;
986 const char *p;
987
988 /* Assume the constraint doesn't allow the use of either a register
989 or memory. */
990 *allows_mem = false;
991 *allows_reg = false;
992
993 /* Allow the `=' or `+' to not be at the beginning of the string,
994 since it wasn't explicitly documented that way, and there is a
995 large body of code that puts it last. Swap the character to
996 the front, so as not to uglify any place else. */
997 p = strchr (constraint, '=');
998 if (!p)
999 p = strchr (constraint, '+');
1000
1001 /* If the string doesn't contain an `=', issue an error
1002 message. */
1003 if (!p)
1004 {
1005 error ("output operand constraint lacks `='");
1006 return false;
1007 }
1008
1009 /* If the constraint begins with `+', then the operand is both read
1010 from and written to. */
1011 *is_inout = (*p == '+');
1012
40b18c0a
MM
1013 /* Canonicalize the output constraint so that it begins with `='. */
1014 if (p != constraint || is_inout)
1015 {
1016 char *buf;
1017 size_t c_len = strlen (constraint);
1018
1019 if (p != constraint)
1020 warning ("output constraint `%c' for operand %d is not at the beginning",
1021 *p, operand_num);
1022
1023 /* Make a copy of the constraint. */
1024 buf = alloca (c_len + 1);
1025 strcpy (buf, constraint);
1026 /* Swap the first character and the `=' or `+'. */
1027 buf[p - constraint] = buf[0];
1028 /* Make sure the first character is an `='. (Until we do this,
1029 it might be a `+'.) */
1030 buf[0] = '=';
1031 /* Replace the constraint with the canonicalized string. */
1032 *constraint_p = ggc_alloc_string (buf, c_len);
1033 constraint = *constraint_p;
1034 }
1035
1036 /* Loop through the constraint string. */
97488870 1037 for (p = constraint + 1; *p; p += CONSTRAINT_LEN (*p, p))
40b18c0a
MM
1038 switch (*p)
1039 {
1040 case '+':
1041 case '=':
357351e5 1042 error ("operand constraint contains incorrectly positioned '+' or '='");
40b18c0a 1043 return false;
786de7eb 1044
40b18c0a
MM
1045 case '%':
1046 if (operand_num + 1 == ninputs + noutputs)
1047 {
1048 error ("`%%' constraint used with last operand");
1049 return false;
1050 }
1051 break;
1052
1053 case 'V': case 'm': case 'o':
1054 *allows_mem = true;
1055 break;
1056
1057 case '?': case '!': case '*': case '&': case '#':
1058 case 'E': case 'F': case 'G': case 'H':
1059 case 's': case 'i': case 'n':
1060 case 'I': case 'J': case 'K': case 'L': case 'M':
1061 case 'N': case 'O': case 'P': case ',':
1062 break;
1063
1064 case '0': case '1': case '2': case '3': case '4':
1065 case '5': case '6': case '7': case '8': case '9':
84b72302 1066 case '[':
40b18c0a
MM
1067 error ("matching constraint not valid in output operand");
1068 return false;
1069
1070 case '<': case '>':
1071 /* ??? Before flow, auto inc/dec insns are not supposed to exist,
1072 excepting those that expand_call created. So match memory
1073 and hope. */
1074 *allows_mem = true;
1075 break;
1076
1077 case 'g': case 'X':
1078 *allows_reg = true;
1079 *allows_mem = true;
1080 break;
786de7eb 1081
40b18c0a
MM
1082 case 'p': case 'r':
1083 *allows_reg = true;
1084 break;
1085
1086 default:
1087 if (!ISALPHA (*p))
1088 break;
97488870 1089 if (REG_CLASS_FROM_CONSTRAINT (*p, p) != NO_REGS)
40b18c0a 1090 *allows_reg = true;
97488870
R
1091#ifdef EXTRA_CONSTRAINT_STR
1092 else if (EXTRA_ADDRESS_CONSTRAINT (*p, p))
ccfc6cc8 1093 *allows_reg = true;
97488870 1094 else if (EXTRA_MEMORY_CONSTRAINT (*p, p))
ccfc6cc8 1095 *allows_mem = true;
40b18c0a
MM
1096 else
1097 {
1098 /* Otherwise we can't assume anything about the nature of
1099 the constraint except that it isn't purely registers.
1100 Treat it like "g" and hope for the best. */
1101 *allows_reg = true;
1102 *allows_mem = true;
1103 }
1104#endif
1105 break;
1106 }
1107
1108 return true;
1109}
1110
6be2e1f8
RH
1111/* Similar, but for input constraints. */
1112
1456deaf 1113bool
46c5ad27
AJ
1114parse_input_constraint (const char **constraint_p, int input_num,
1115 int ninputs, int noutputs, int ninout,
1116 const char * const * constraints,
1117 bool *allows_mem, bool *allows_reg)
6be2e1f8
RH
1118{
1119 const char *constraint = *constraint_p;
1120 const char *orig_constraint = constraint;
1121 size_t c_len = strlen (constraint);
1122 size_t j;
f3da0ead 1123 bool saw_match = false;
6be2e1f8
RH
1124
1125 /* Assume the constraint doesn't allow the use of either
1126 a register or memory. */
1127 *allows_mem = false;
1128 *allows_reg = false;
1129
1130 /* Make sure constraint has neither `=', `+', nor '&'. */
1131
97488870 1132 for (j = 0; j < c_len; j += CONSTRAINT_LEN (constraint[j], constraint+j))
6be2e1f8
RH
1133 switch (constraint[j])
1134 {
1135 case '+': case '=': case '&':
1136 if (constraint == orig_constraint)
1137 {
1138 error ("input operand constraint contains `%c'", constraint[j]);
1139 return false;
1140 }
1141 break;
1142
1143 case '%':
1144 if (constraint == orig_constraint
1145 && input_num + 1 == ninputs - ninout)
1146 {
1147 error ("`%%' constraint used with last operand");
1148 return false;
1149 }
1150 break;
1151
1152 case 'V': case 'm': case 'o':
1153 *allows_mem = true;
1154 break;
1155
1156 case '<': case '>':
1157 case '?': case '!': case '*': case '#':
1158 case 'E': case 'F': case 'G': case 'H':
1159 case 's': case 'i': case 'n':
1160 case 'I': case 'J': case 'K': case 'L': case 'M':
1161 case 'N': case 'O': case 'P': case ',':
1162 break;
1163
1164 /* Whether or not a numeric constraint allows a register is
1165 decided by the matching constraint, and so there is no need
1166 to do anything special with them. We must handle them in
1167 the default case, so that we don't unnecessarily force
1168 operands to memory. */
1169 case '0': case '1': case '2': case '3': case '4':
1170 case '5': case '6': case '7': case '8': case '9':
1171 {
1172 char *end;
1173 unsigned long match;
1174
f3da0ead
JM
1175 saw_match = true;
1176
6be2e1f8
RH
1177 match = strtoul (constraint + j, &end, 10);
1178 if (match >= (unsigned long) noutputs)
1179 {
1180 error ("matching constraint references invalid operand number");
1181 return false;
1182 }
1183
1184 /* Try and find the real constraint for this dup. Only do this
1185 if the matching constraint is the only alternative. */
1186 if (*end == '\0'
1187 && (j == 0 || (j == 1 && constraint[0] == '%')))
1188 {
1189 constraint = constraints[match];
1190 *constraint_p = constraint;
1191 c_len = strlen (constraint);
1192 j = 0;
97488870
R
1193 /* ??? At the end of the loop, we will skip the first part of
1194 the matched constraint. This assumes not only that the
1195 other constraint is an output constraint, but also that
1196 the '=' or '+' come first. */
6be2e1f8
RH
1197 break;
1198 }
1199 else
1200 j = end - constraint;
97488870
R
1201 /* Anticipate increment at end of loop. */
1202 j--;
6be2e1f8
RH
1203 }
1204 /* Fall through. */
1205
1206 case 'p': case 'r':
1207 *allows_reg = true;
1208 break;
1209
1210 case 'g': case 'X':
1211 *allows_reg = true;
1212 *allows_mem = true;
1213 break;
1214
1215 default:
1216 if (! ISALPHA (constraint[j]))
1217 {
1218 error ("invalid punctuation `%c' in constraint", constraint[j]);
1219 return false;
1220 }
97488870
R
1221 if (REG_CLASS_FROM_CONSTRAINT (constraint[j], constraint + j)
1222 != NO_REGS)
6be2e1f8 1223 *allows_reg = true;
97488870
R
1224#ifdef EXTRA_CONSTRAINT_STR
1225 else if (EXTRA_ADDRESS_CONSTRAINT (constraint[j], constraint + j))
ccfc6cc8 1226 *allows_reg = true;
97488870 1227 else if (EXTRA_MEMORY_CONSTRAINT (constraint[j], constraint + j))
ccfc6cc8 1228 *allows_mem = true;
6be2e1f8
RH
1229 else
1230 {
1231 /* Otherwise we can't assume anything about the nature of
1232 the constraint except that it isn't purely registers.
1233 Treat it like "g" and hope for the best. */
1234 *allows_reg = true;
1235 *allows_mem = true;
1236 }
1237#endif
1238 break;
1239 }
1240
f3da0ead
JM
1241 if (saw_match && !*allows_reg)
1242 warning ("matching constraint does not allow a register");
1243
6be2e1f8
RH
1244 return true;
1245}
1246
6de9cd9a
DN
1247/* INPUT is one of the input operands from EXPR, an ASM_EXPR. Returns true
1248 if it is an operand which must be passed in memory (i.e. an "m"
1249 constraint), false otherwise. */
1250
1251bool
1252asm_op_is_mem_input (tree input, tree expr)
1253{
1254 const char *constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (input)));
1255 tree outputs = ASM_OUTPUTS (expr);
1256 int noutputs = list_length (outputs);
1257 const char **constraints
1258 = (const char **) alloca ((noutputs) * sizeof (const char *));
1259 int i = 0;
1260 bool allows_mem, allows_reg;
1261 tree t;
1262
1263 /* Collect output constraints. */
1264 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1265 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1266
1267 /* We pass 0 for input_num, ninputs and ninout; they are only used for
1268 error checking which will be done at expand time. */
1269 parse_input_constraint (&constraint, 0, 0, noutputs, 0, constraints,
1270 &allows_mem, &allows_reg);
1271 return (!allows_reg && allows_mem);
1272}
1273
acb5d088
HPN
1274/* Check for overlap between registers marked in CLOBBERED_REGS and
1275 anything inappropriate in DECL. Emit error and return TRUE for error,
1276 FALSE for ok. */
1277
1278static bool
46c5ad27 1279decl_conflicts_with_clobbers_p (tree decl, const HARD_REG_SET clobbered_regs)
acb5d088
HPN
1280{
1281 /* Conflicts between asm-declared register variables and the clobber
1282 list are not allowed. */
1283 if ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
1284 && DECL_REGISTER (decl)
34146b94 1285 && REG_P (DECL_RTL (decl))
acb5d088
HPN
1286 && REGNO (DECL_RTL (decl)) < FIRST_PSEUDO_REGISTER)
1287 {
1288 rtx reg = DECL_RTL (decl);
1289 unsigned int regno;
1290
1291 for (regno = REGNO (reg);
1292 regno < (REGNO (reg)
66fd46b6 1293 + hard_regno_nregs[REGNO (reg)][GET_MODE (reg)]);
acb5d088
HPN
1294 regno++)
1295 if (TEST_HARD_REG_BIT (clobbered_regs, regno))
1296 {
1297 error ("asm-specifier for variable `%s' conflicts with asm clobber list",
1298 IDENTIFIER_POINTER (DECL_NAME (decl)));
1299
1300 /* Reset registerness to stop multiple errors emitted for a
1301 single variable. */
1302 DECL_REGISTER (decl) = 0;
1303 return true;
1304 }
1305 }
1306 return false;
1307}
1308
28d81abb
RK
1309/* Generate RTL for an asm statement with arguments.
1310 STRING is the instruction template.
1311 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
1312 Each output or input has an expression in the TREE_VALUE and
2ec37136 1313 and a tree list in TREE_PURPOSE which in turn contains a constraint
786de7eb 1314 name in TREE_VALUE (or NULL_TREE) and a constraint string
2ec37136 1315 in TREE_PURPOSE.
28d81abb
RK
1316 CLOBBERS is a list of STRING_CST nodes each naming a hard register
1317 that is clobbered by this insn.
1318
1319 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
1320 Some elements of OUTPUTS may be replaced with trees representing temporary
1321 values. The caller should copy those temporary values to the originally
1322 specified lvalues.
1323
1324 VOL nonzero means the insn is volatile; don't optimize it. */
1325
1326void
46c5ad27 1327expand_asm_operands (tree string, tree outputs, tree inputs,
177560b2 1328 tree clobbers, int vol, location_t locus)
28d81abb 1329{
84b72302 1330 rtvec argvec, constraintvec;
28d81abb
RK
1331 rtx body;
1332 int ninputs = list_length (inputs);
1333 int noutputs = list_length (outputs);
6be2e1f8 1334 int ninout;
b4ccaa16 1335 int nclobbers;
acb5d088
HPN
1336 HARD_REG_SET clobbered_regs;
1337 int clobber_conflict_found = 0;
28d81abb 1338 tree tail;
7dc8b126 1339 tree t;
b3694847 1340 int i;
28d81abb 1341 /* Vector of RTX's of evaluated output operands. */
703ad42b
KG
1342 rtx *output_rtx = alloca (noutputs * sizeof (rtx));
1343 int *inout_opnum = alloca (noutputs * sizeof (int));
1344 rtx *real_output_rtx = alloca (noutputs * sizeof (rtx));
235c5021 1345 enum machine_mode *inout_mode
703ad42b 1346 = alloca (noutputs * sizeof (enum machine_mode));
84b72302 1347 const char **constraints
703ad42b 1348 = alloca ((noutputs + ninputs) * sizeof (const char *));
1b3d8f8a 1349 int old_generating_concat_p = generating_concat_p;
28d81abb 1350
e5e809f4 1351 /* An ASM with no outputs needs to be treated as volatile, for now. */
296f8acc
JL
1352 if (noutputs == 0)
1353 vol = 1;
1354
84b72302
RH
1355 if (! check_operand_nalternatives (outputs, inputs))
1356 return;
1357
7dc8b126
JM
1358 string = resolve_asm_operand_names (string, outputs, inputs);
1359
1360 /* Collect constraints. */
1361 i = 0;
1362 for (t = outputs; t ; t = TREE_CHAIN (t), i++)
1363 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1364 for (t = inputs; t ; t = TREE_CHAIN (t), i++)
1365 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
84b72302 1366
57bcb97a
RH
1367 /* Sometimes we wish to automatically clobber registers across an asm.
1368 Case in point is when the i386 backend moved from cc0 to a hard reg --
f63d1bf7 1369 maintaining source-level compatibility means automatically clobbering
57bcb97a 1370 the flags register. */
67dfe110 1371 clobbers = targetm.md_asm_clobbers (clobbers);
57bcb97a 1372
b4ccaa16
RS
1373 /* Count the number of meaningful clobbered registers, ignoring what
1374 we would ignore later. */
1375 nclobbers = 0;
acb5d088 1376 CLEAR_HARD_REG_SET (clobbered_regs);
b4ccaa16
RS
1377 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
1378 {
47ee9bcb 1379 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
14a774a9 1380
c09e6498
RS
1381 i = decode_reg_name (regname);
1382 if (i >= 0 || i == -4)
b4ccaa16 1383 ++nclobbers;
7859e3ac
DE
1384 else if (i == -2)
1385 error ("unknown register name `%s' in `asm'", regname);
acb5d088
HPN
1386
1387 /* Mark clobbered registers. */
1388 if (i >= 0)
e54b4cae
EB
1389 {
1390 /* Clobbering the PIC register is an error */
fc555370 1391 if (i == (int) PIC_OFFSET_TABLE_REGNUM)
e54b4cae
EB
1392 {
1393 error ("PIC register `%s' clobbered in `asm'", regname);
1394 return;
1395 }
1396
1397 SET_HARD_REG_BIT (clobbered_regs, i);
1398 }
b4ccaa16
RS
1399 }
1400
6be2e1f8
RH
1401 /* First pass over inputs and outputs checks validity and sets
1402 mark_addressable if needed. */
1403
1404 ninout = 0;
28d81abb
RK
1405 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1406 {
1407 tree val = TREE_VALUE (tail);
b50a024d 1408 tree type = TREE_TYPE (val);
6be2e1f8 1409 const char *constraint;
40b18c0a
MM
1410 bool is_inout;
1411 bool allows_reg;
1412 bool allows_mem;
28d81abb
RK
1413
1414 /* If there's an erroneous arg, emit no insn. */
40b18c0a 1415 if (type == error_mark_node)
28d81abb
RK
1416 return;
1417
40b18c0a
MM
1418 /* Try to parse the output constraint. If that fails, there's
1419 no point in going further. */
6be2e1f8
RH
1420 constraint = constraints[i];
1421 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
1422 &allows_mem, &allows_reg, &is_inout))
1423 return;
1424
1425 if (! allows_reg
1426 && (allows_mem
1427 || is_inout
1428 || (DECL_P (val)
f8cfc6aa 1429 && REG_P (DECL_RTL (val))
6be2e1f8 1430 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
ae2bcd98 1431 lang_hooks.mark_addressable (val);
6be2e1f8
RH
1432
1433 if (is_inout)
1434 ninout++;
1435 }
1436
1437 ninputs += ninout;
1438 if (ninputs + noutputs > MAX_RECOG_OPERANDS)
1439 {
1440 error ("more than %d operands in `asm'", MAX_RECOG_OPERANDS);
1441 return;
1442 }
1443
1444 for (i = 0, tail = inputs; tail; i++, tail = TREE_CHAIN (tail))
1445 {
1446 bool allows_reg, allows_mem;
1447 const char *constraint;
1448
1449 /* If there's an erroneous arg, emit no insn, because the ASM_INPUT
1450 would get VOIDmode and that could cause a crash in reload. */
1451 if (TREE_TYPE (TREE_VALUE (tail)) == error_mark_node)
1452 return;
1453
1454 constraint = constraints[i + noutputs];
1455 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1456 constraints, &allows_mem, &allows_reg))
40b18c0a 1457 return;
d09a75ae 1458
6be2e1f8 1459 if (! allows_reg && allows_mem)
ae2bcd98 1460 lang_hooks.mark_addressable (TREE_VALUE (tail));
6be2e1f8
RH
1461 }
1462
1463 /* Second pass evaluates arguments. */
1464
1465 ninout = 0;
1466 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1467 {
1468 tree val = TREE_VALUE (tail);
1469 tree type = TREE_TYPE (val);
1470 bool is_inout;
1471 bool allows_reg;
1472 bool allows_mem;
5b50aa9d 1473 rtx op;
6be2e1f8
RH
1474
1475 if (!parse_output_constraint (&constraints[i], i, ninputs,
1476 noutputs, &allows_mem, &allows_reg,
1477 &is_inout))
1478 abort ();
1479
d09a75ae
RK
1480 /* If an output operand is not a decl or indirect ref and our constraint
1481 allows a register, make a temporary to act as an intermediate.
1482 Make the asm insn write into that, then our caller will copy it to
1483 the real output operand. Likewise for promoted variables. */
28d81abb 1484
1b3d8f8a
GK
1485 generating_concat_p = 0;
1486
947255ed 1487 real_output_rtx[i] = NULL_RTX;
1afbe1c4
RH
1488 if ((TREE_CODE (val) == INDIRECT_REF
1489 && allows_mem)
2f939d94 1490 || (DECL_P (val)
f8cfc6aa
JQ
1491 && (allows_mem || REG_P (DECL_RTL (val)))
1492 && ! (REG_P (DECL_RTL (val))
d09a75ae 1493 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
11579f33 1494 || ! allows_reg
2a230e9d 1495 || is_inout)
d09a75ae 1496 {
5b50aa9d 1497 op = expand_expr (val, NULL_RTX, VOIDmode, EXPAND_WRITE);
3c0cb5de 1498 if (MEM_P (op))
5b50aa9d 1499 op = validize_mem (op);
d09a75ae 1500
3c0cb5de 1501 if (! allows_reg && !MEM_P (op))
d09a75ae 1502 error ("output number %d not directly addressable", i);
3c0cb5de 1503 if ((! allows_mem && MEM_P (op))
5b50aa9d 1504 || GET_CODE (op) == CONCAT)
947255ed 1505 {
5b50aa9d
RH
1506 real_output_rtx[i] = protect_from_queue (op, 1);
1507 op = gen_reg_rtx (GET_MODE (op));
11579f33 1508 if (is_inout)
5b50aa9d 1509 emit_move_insn (op, real_output_rtx[i]);
947255ed 1510 }
d09a75ae 1511 }
b50a024d 1512 else
e619bb8d 1513 {
5b50aa9d
RH
1514 op = assign_temp (type, 0, 0, 1);
1515 op = validize_mem (op);
1516 TREE_VALUE (tail) = make_tree (type, op);
b50a024d 1517 }
5b50aa9d 1518 output_rtx[i] = op;
235c5021 1519
1b3d8f8a
GK
1520 generating_concat_p = old_generating_concat_p;
1521
2a230e9d 1522 if (is_inout)
235c5021 1523 {
6be2e1f8 1524 inout_mode[ninout] = TYPE_MODE (type);
235c5021
RK
1525 inout_opnum[ninout++] = i;
1526 }
acb5d088
HPN
1527
1528 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1529 clobber_conflict_found = 1;
28d81abb
RK
1530 }
1531
84b72302
RH
1532 /* Make vectors for the expression-rtx, constraint strings,
1533 and named operands. */
28d81abb
RK
1534
1535 argvec = rtvec_alloc (ninputs);
84b72302 1536 constraintvec = rtvec_alloc (ninputs);
28d81abb 1537
6462bb43
AO
1538 body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
1539 : GET_MODE (output_rtx[0])),
839ee4bc 1540 TREE_STRING_POINTER (string),
84b72302 1541 empty_string, 0, argvec, constraintvec,
6773e15f 1542 locus);
c85f7c16 1543
78418280 1544 MEM_VOLATILE_P (body) = vol;
28d81abb
RK
1545
1546 /* Eval the inputs and put them into ARGVEC.
1547 Put their constraints into ASM_INPUTs and store in CONSTRAINTS. */
1548
84b72302 1549 for (i = 0, tail = inputs; tail; tail = TREE_CHAIN (tail), ++i)
28d81abb 1550 {
6be2e1f8
RH
1551 bool allows_reg, allows_mem;
1552 const char *constraint;
1553 tree val, type;
1f06ee8d 1554 rtx op;
28d81abb 1555
6be2e1f8
RH
1556 constraint = constraints[i + noutputs];
1557 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, ninout,
1558 constraints, &allows_mem, &allows_reg))
1559 abort ();
2a230e9d 1560
6be2e1f8 1561 generating_concat_p = 0;
65fed0cb 1562
6be2e1f8
RH
1563 val = TREE_VALUE (tail);
1564 type = TREE_TYPE (val);
017e1b43
RH
1565 op = expand_expr (val, NULL_RTX, VOIDmode,
1566 (allows_mem && !allows_reg
1567 ? EXPAND_MEMORY : EXPAND_NORMAL));
65fed0cb 1568
1b3d8f8a 1569 /* Never pass a CONCAT to an ASM. */
1b3d8f8a
GK
1570 if (GET_CODE (op) == CONCAT)
1571 op = force_reg (GET_MODE (op), op);
3c0cb5de 1572 else if (MEM_P (op))
5b50aa9d 1573 op = validize_mem (op);
1b3d8f8a 1574
1afbe1c4 1575 if (asm_operand_ok (op, constraint) <= 0)
65fed0cb 1576 {
11579f33 1577 if (allows_reg)
6be2e1f8 1578 op = force_reg (TYPE_MODE (type), op);
11579f33 1579 else if (!allows_mem)
84b72302
RH
1580 warning ("asm operand %d probably doesn't match constraints",
1581 i + noutputs);
3c0cb5de 1582 else if (MEM_P (op))
6be2e1f8 1583 {
d50ad6af
RH
1584 /* We won't recognize either volatile memory or memory
1585 with a queued address as available a memory_operand
1586 at this point. Ignore it: clearly this *is* a memory. */
6be2e1f8 1587 }
1f06ee8d 1588 else
017e1b43 1589 {
71ed1fdb
RH
1590 warning ("use of memory input without lvalue in "
1591 "asm operand %d is deprecated", i + noutputs);
017e1b43
RH
1592
1593 if (CONSTANT_P (op))
1594 {
9c858681
RS
1595 rtx mem = force_const_mem (TYPE_MODE (type), op);
1596 if (mem)
1597 op = validize_mem (mem);
1598 else
1599 op = force_reg (TYPE_MODE (type), op);
017e1b43 1600 }
f8cfc6aa 1601 if (REG_P (op)
9c858681
RS
1602 || GET_CODE (op) == SUBREG
1603 || GET_CODE (op) == ADDRESSOF
1604 || GET_CODE (op) == CONCAT)
017e1b43
RH
1605 {
1606 tree qual_type = build_qualified_type (type,
1607 (TYPE_QUALS (type)
1608 | TYPE_QUAL_CONST));
1609 rtx memloc = assign_temp (qual_type, 1, 1, 1);
1610 memloc = validize_mem (memloc);
1611 emit_move_insn (memloc, op);
1612 op = memloc;
1613 }
1614 }
65fed0cb 1615 }
6be2e1f8 1616
1b3d8f8a 1617 generating_concat_p = old_generating_concat_p;
6462bb43 1618 ASM_OPERANDS_INPUT (body, i) = op;
2a230e9d 1619
6462bb43 1620 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
839ee4bc 1621 = gen_rtx_ASM_INPUT (TYPE_MODE (type), constraints[i + noutputs]);
acb5d088
HPN
1622
1623 if (decl_conflicts_with_clobbers_p (val, clobbered_regs))
1624 clobber_conflict_found = 1;
28d81abb
RK
1625 }
1626
14a774a9
RK
1627 /* Protect all the operands from the queue now that they have all been
1628 evaluated. */
28d81abb 1629
1b3d8f8a
GK
1630 generating_concat_p = 0;
1631
235c5021 1632 for (i = 0; i < ninputs - ninout; i++)
6462bb43
AO
1633 ASM_OPERANDS_INPUT (body, i)
1634 = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
28d81abb
RK
1635
1636 for (i = 0; i < noutputs; i++)
1637 output_rtx[i] = protect_from_queue (output_rtx[i], 1);
1638
4381f7c2 1639 /* For in-out operands, copy output rtx to input rtx. */
235c5021
RK
1640 for (i = 0; i < ninout; i++)
1641 {
235c5021 1642 int j = inout_opnum[i];
84b72302 1643 char buffer[16];
235c5021 1644
6462bb43 1645 ASM_OPERANDS_INPUT (body, ninputs - ninout + i)
235c5021 1646 = output_rtx[j];
84b72302
RH
1647
1648 sprintf (buffer, "%d", j);
6462bb43 1649 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, ninputs - ninout + i)
485bad26 1650 = gen_rtx_ASM_INPUT (inout_mode[i], ggc_strdup (buffer));
235c5021
RK
1651 }
1652
1b3d8f8a
GK
1653 generating_concat_p = old_generating_concat_p;
1654
28d81abb 1655 /* Now, for each output, construct an rtx
84b72302
RH
1656 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
1657 ARGVEC CONSTRAINTS OPNAMES))
28d81abb
RK
1658 If there is more than one, put them inside a PARALLEL. */
1659
1660 if (noutputs == 1 && nclobbers == 0)
1661 {
839ee4bc 1662 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
4977bab6 1663 emit_insn (gen_rtx_SET (VOIDmode, output_rtx[0], body));
28d81abb 1664 }
14a774a9 1665
28d81abb
RK
1666 else if (noutputs == 0 && nclobbers == 0)
1667 {
1668 /* No output operands: put in a raw ASM_OPERANDS rtx. */
4977bab6 1669 emit_insn (body);
28d81abb 1670 }
14a774a9 1671
28d81abb
RK
1672 else
1673 {
1674 rtx obody = body;
1675 int num = noutputs;
14a774a9
RK
1676
1677 if (num == 0)
1678 num = 1;
1679
38a448ca 1680 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
28d81abb
RK
1681
1682 /* For each output operand, store a SET. */
28d81abb
RK
1683 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1684 {
1685 XVECEXP (body, 0, i)
38a448ca
RH
1686 = gen_rtx_SET (VOIDmode,
1687 output_rtx[i],
c5c76735 1688 gen_rtx_ASM_OPERANDS
6462bb43 1689 (GET_MODE (output_rtx[i]),
839ee4bc
RO
1690 TREE_STRING_POINTER (string),
1691 constraints[i], i, argvec, constraintvec,
6773e15f 1692 locus));
c5c76735 1693
28d81abb
RK
1694 MEM_VOLATILE_P (SET_SRC (XVECEXP (body, 0, i))) = vol;
1695 }
1696
1697 /* If there are no outputs (but there are some clobbers)
1698 store the bare ASM_OPERANDS into the PARALLEL. */
1699
1700 if (i == 0)
1701 XVECEXP (body, 0, i++) = obody;
1702
1703 /* Store (clobber REG) for each clobbered register specified. */
1704
b4ccaa16 1705 for (tail = clobbers; tail; tail = TREE_CHAIN (tail))
28d81abb 1706 {
47ee9bcb 1707 const char *regname = TREE_STRING_POINTER (TREE_VALUE (tail));
b4ac57ab 1708 int j = decode_reg_name (regname);
acb5d088 1709 rtx clobbered_reg;
28d81abb 1710
b4ac57ab 1711 if (j < 0)
28d81abb 1712 {
c09e6498 1713 if (j == -3) /* `cc', which is not a register */
dcfedcd0
RK
1714 continue;
1715
c09e6498
RS
1716 if (j == -4) /* `memory', don't cache memory across asm */
1717 {
bffc6177 1718 XVECEXP (body, 0, i++)
38a448ca 1719 = gen_rtx_CLOBBER (VOIDmode,
c5c76735
JL
1720 gen_rtx_MEM
1721 (BLKmode,
1722 gen_rtx_SCRATCH (VOIDmode)));
c09e6498
RS
1723 continue;
1724 }
1725
956d6950 1726 /* Ignore unknown register, error already signaled. */
cc1f5387 1727 continue;
28d81abb
RK
1728 }
1729
1730 /* Use QImode since that's guaranteed to clobber just one reg. */
acb5d088
HPN
1731 clobbered_reg = gen_rtx_REG (QImode, j);
1732
1733 /* Do sanity check for overlap between clobbers and respectively
1734 input and outputs that hasn't been handled. Such overlap
1735 should have been detected and reported above. */
1736 if (!clobber_conflict_found)
1737 {
1738 int opno;
1739
1740 /* We test the old body (obody) contents to avoid tripping
1741 over the under-construction body. */
1742 for (opno = 0; opno < noutputs; opno++)
1743 if (reg_overlap_mentioned_p (clobbered_reg, output_rtx[opno]))
1744 internal_error ("asm clobber conflict with output operand");
1745
1746 for (opno = 0; opno < ninputs - ninout; opno++)
1747 if (reg_overlap_mentioned_p (clobbered_reg,
1748 ASM_OPERANDS_INPUT (obody, opno)))
1749 internal_error ("asm clobber conflict with input operand");
1750 }
1751
b4ccaa16 1752 XVECEXP (body, 0, i++)
acb5d088 1753 = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
28d81abb
RK
1754 }
1755
4977bab6 1756 emit_insn (body);
28d81abb
RK
1757 }
1758
947255ed
RH
1759 /* For any outputs that needed reloading into registers, spill them
1760 back to where they belong. */
1761 for (i = 0; i < noutputs; ++i)
1762 if (real_output_rtx[i])
1763 emit_move_insn (real_output_rtx[i], output_rtx[i]);
1764
28d81abb
RK
1765 free_temp_slots ();
1766}
84b72302 1767
6de9cd9a
DN
1768void
1769expand_asm_expr (tree exp)
1770{
1771 int noutputs, i;
1772 tree outputs, tail;
1773 tree *o;
1774
1775 if (ASM_INPUT_P (exp))
1776 {
1777 expand_asm (ASM_STRING (exp), ASM_VOLATILE_P (exp));
1778 return;
1779 }
1780
1781 outputs = ASM_OUTPUTS (exp);
1782 noutputs = list_length (outputs);
1783 /* o[I] is the place that output number I should be written. */
1784 o = (tree *) alloca (noutputs * sizeof (tree));
1785
1786 /* Record the contents of OUTPUTS before it is modified. */
1787 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1788 o[i] = TREE_VALUE (tail);
1789
1790 /* Generate the ASM_OPERANDS insn; store into the TREE_VALUEs of
1791 OUTPUTS some trees for where the values were actually stored. */
1792 expand_asm_operands (ASM_STRING (exp), outputs, ASM_INPUTS (exp),
1793 ASM_CLOBBERS (exp), ASM_VOLATILE_P (exp),
1794 input_location);
1795
1796 /* Copy all the intermediate outputs into the specified outputs. */
1797 for (i = 0, tail = outputs; tail; tail = TREE_CHAIN (tail), i++)
1798 {
1799 if (o[i] != TREE_VALUE (tail))
1800 {
1801 expand_assignment (o[i], TREE_VALUE (tail), 0);
1802 free_temp_slots ();
1803
1804 /* Restore the original value so that it's correct the next
1805 time we expand this function. */
1806 TREE_VALUE (tail) = o[i];
1807 }
1808 }
1809
1810 /* Those MODIFY_EXPRs could do autoincrements. */
1811 emit_queue ();
1812}
1813
84b72302
RH
1814/* A subroutine of expand_asm_operands. Check that all operands have
1815 the same number of alternatives. Return true if so. */
1816
1817static bool
46c5ad27 1818check_operand_nalternatives (tree outputs, tree inputs)
84b72302
RH
1819{
1820 if (outputs || inputs)
1821 {
1822 tree tmp = TREE_PURPOSE (outputs ? outputs : inputs);
1823 int nalternatives
1824 = n_occurrences (',', TREE_STRING_POINTER (TREE_VALUE (tmp)));
1825 tree next = inputs;
1826
1827 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
1828 {
1829 error ("too many alternatives in `asm'");
1830 return false;
1831 }
1832
1833 tmp = outputs;
1834 while (tmp)
1835 {
1836 const char *constraint
1837 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (tmp)));
1838
1839 if (n_occurrences (',', constraint) != nalternatives)
1840 {
1841 error ("operand constraints for `asm' differ in number of alternatives");
1842 return false;
1843 }
1844
1845 if (TREE_CHAIN (tmp))
1846 tmp = TREE_CHAIN (tmp);
1847 else
1848 tmp = next, next = 0;
1849 }
1850 }
1851
1852 return true;
1853}
1854
1855/* A subroutine of expand_asm_operands. Check that all operand names
1856 are unique. Return true if so. We rely on the fact that these names
1857 are identifiers, and so have been canonicalized by get_identifier,
1858 so all we need are pointer comparisons. */
1859
1860static bool
46c5ad27 1861check_unique_operand_names (tree outputs, tree inputs)
84b72302
RH
1862{
1863 tree i, j;
1864
1865 for (i = outputs; i ; i = TREE_CHAIN (i))
1866 {
1867 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1868 if (! i_name)
1869 continue;
1870
1871 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
fc552851 1872 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1873 goto failure;
1874 }
1875
1876 for (i = inputs; i ; i = TREE_CHAIN (i))
1877 {
1878 tree i_name = TREE_PURPOSE (TREE_PURPOSE (i));
1879 if (! i_name)
1880 continue;
1881
1882 for (j = TREE_CHAIN (i); j ; j = TREE_CHAIN (j))
fc552851 1883 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1884 goto failure;
1885 for (j = outputs; j ; j = TREE_CHAIN (j))
fc552851 1886 if (simple_cst_equal (i_name, TREE_PURPOSE (TREE_PURPOSE (j))))
84b72302
RH
1887 goto failure;
1888 }
1889
1890 return true;
1891
1892 failure:
1893 error ("duplicate asm operand name '%s'",
fc552851 1894 TREE_STRING_POINTER (TREE_PURPOSE (TREE_PURPOSE (i))));
84b72302
RH
1895 return false;
1896}
1897
/* A subroutine of expand_asm_operands.  Resolve the names of the operands
   in *POUTPUTS and *PINPUTS to numbers, and replace the name expansions in
   STRING and in the constraints to those numbers.

   STRING is the asm template (a STRING_CST); OUTPUTS and INPUTS are the
   operand TREE_LISTs.  Returns STRING itself when no substitution was
   needed, or a freshly built STRING_CST otherwise.  Input constraint
   strings containing "[name]" are rewritten in place in the tree.  */

tree
resolve_asm_operand_names (tree string, tree outputs, tree inputs)
{
  char *buffer;
  char *p;
  const char *c;
  tree t;

  /* Diagnose duplicate names up front; resolution below picks the
     first match regardless.  */
  check_unique_operand_names (outputs, inputs);

  /* Substitute [<name>] in input constraint strings.  There should be no
     named operands in output constraints.  */
  for (t = inputs; t ; t = TREE_CHAIN (t))
    {
      c = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
      if (strchr (c, '[') != NULL)
	{
	  /* Work on a writable copy; resolve_operand_name_1 edits the
	     buffer in place and returns the next scan position.  */
	  p = buffer = xstrdup (c);
	  while ((p = strchr (p, '[')) != NULL)
	    p = resolve_operand_name_1 (p, outputs, inputs);
	  TREE_VALUE (TREE_PURPOSE (t))
	    = build_string (strlen (buffer), buffer);
	  free (buffer);
	}
    }

  /* Now check for any needed substitutions in the template.  Only
     "%[name]" and "%<letter>[name]" forms count; a bare '%' just
     advances the scan.  */
  c = TREE_STRING_POINTER (string);
  while ((c = strchr (c, '%')) != NULL)
    {
      if (c[1] == '[')
	break;
      else if (ISALPHA (c[1]) && c[2] == '[')
	break;
      else
	{
	  c += 1;
	  continue;
	}
    }

  if (c)
    {
      /* OK, we need to make a copy so we can perform the substitutions.
	 Assume that we will not need extra space--we get to remove '['
	 and ']', which means we cannot have a problem until we have more
	 than 999 operands.  */
      buffer = xstrdup (TREE_STRING_POINTER (string));
      /* Resume scanning in the copy at the spot found above.  */
      p = buffer + (c - TREE_STRING_POINTER (string));

      while ((p = strchr (p, '%')) != NULL)
	{
	  if (p[1] == '[')
	    p += 1;
	  else if (ISALPHA (p[1]) && p[2] == '[')
	    p += 2;
	  else
	    {
	      p += 1;
	      continue;
	    }

	  /* P now points at the '['; replace "[name]" with a number.  */
	  p = resolve_operand_name_1 (p, outputs, inputs);
	}

      string = build_string (strlen (buffer), buffer);
      free (buffer);
    }

  return string;
}
1973
/* A subroutine of resolve_operand_names.  P points to the '[' for a
   potential named operand of the form [<name>].  In place, replace
   the name and brackets with a number.  Return a pointer to the
   balance of the string after substitution.

   Operands are numbered with outputs first, then inputs, matching
   the numbering the asm template uses.  On an unknown name an error
   is emitted and operand number 0 is substituted so that expansion
   can continue.  */

static char *
resolve_operand_name_1 (char *p, tree outputs, tree inputs)
{
  char *q;
  int op;
  tree t;
  size_t len;

  /* Collect the operand name.  */
  q = strchr (p, ']');
  if (!q)
    {
      error ("missing close brace for named operand");
      return strchr (p, '\0');
    }
  /* Length of the name between the brackets.  */
  len = q - p - 1;

  /* Resolve the name to a number.  OP counts across outputs then
     inputs, giving the overall operand index.  */
  for (op = 0, t = outputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }
  for (t = inputs; t ; t = TREE_CHAIN (t), op++)
    {
      tree name = TREE_PURPOSE (TREE_PURPOSE (t));
      if (name)
	{
	  const char *c = TREE_STRING_POINTER (name);
	  if (strncmp (c, p + 1, len) == 0 && c[len] == '\0')
	    goto found;
	}
    }

  /* NUL-terminate the name so it prints cleanly in the message.  */
  *q = '\0';
  error ("undefined named operand '%s'", p + 1);
  op = 0;
 found:

  /* Replace the name with the number.  Unfortunately, not all libraries
     get the return value of sprintf correct, so search for the end of the
     generated string by hand.  */
  sprintf (p, "%d", op);
  p = strchr (p, '\0');

  /* Verify the no extra buffer space assumption.  */
  if (p > q)
    abort ();

  /* Shift the rest of the buffer down to fill the gap.  */
  memmove (p, q + 1, strlen (q + 1) + 1);

  return p;
}
28d81abb 2038\f
/* Generate RTL to evaluate the expression EXP as a statement: the
   value is computed for its side effects and then discarded.  */

void
expand_expr_stmt (tree exp)
{
  rtx value;
  tree type;

  /* Expand with const0_rtx as the target, signalling that the value
     itself is not wanted.  */
  value = expand_expr (exp, const0_rtx, VOIDmode, 0);
  type = TREE_TYPE (exp);

  /* If all we do is reference a volatile value in memory,
     copy it to a register to be sure it is actually touched.  */
  if (value && MEM_P (value) && TREE_THIS_VOLATILE (exp))
    {
      if (TYPE_MODE (type) == VOIDmode)
	;
      else if (TYPE_MODE (type) != BLKmode)
	value = copy_to_reg (value);
      else
	{
	  /* BLKmode values cannot be copied to a register; instead
	     emit a self-comparison, which forces a read.  */
	  rtx lab = gen_label_rtx ();

	  /* Compare the value with itself to reference it.  */
	  emit_cmp_and_jump_insns (value, value, EQ,
				   expand_expr (TYPE_SIZE (type),
						NULL_RTX, VOIDmode, 0),
				   BLKmode, 0, lab);
	  emit_label (lab);
	}
    }

  /* Free any temporaries used to evaluate this expression.  */
  free_temp_slots ();

  emit_queue ();
}
2076
/* Warn if EXP contains any computations whose results are not used.
   Return 1 if a warning is printed; 0 otherwise.  LOCUS is the
   (potential) location of the expression, used when EXP itself
   carries no location.

   Tail recursion through sub-expressions (BIND_EXPR bodies, the
   second arm of && / || / comma, etc.) is done with the `restart'
   label rather than actual recursion.  */

int
warn_if_unused_value (tree exp, location_t locus)
{
 restart:
  if (TREE_USED (exp))
    return 0;

  /* Don't warn about void constructs.  This includes casting to void,
     void function calls, and statement expressions with a final cast
     to void.  */
  if (VOID_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* Prefer the expression's own location when it has one.  */
  if (EXPR_LOCUS (exp))
    locus = *EXPR_LOCUS (exp);

  switch (TREE_CODE (exp))
    {
    /* These codes exist for their side effects, so their value being
       unused is never suspicious.  */
    case PREINCREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case MODIFY_EXPR:
    case INIT_EXPR:
    case TARGET_EXPR:
    case CALL_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case EXIT_EXPR:
      return 0;

    case BIND_EXPR:
      /* For a binding, warn if no side effect within it.  */
      exp = BIND_EXPR_BODY (exp);
      goto restart;

    case SAVE_EXPR:
      exp = TREE_OPERAND (exp, 0);
      goto restart;

    case TRUTH_ORIF_EXPR:
    case TRUTH_ANDIF_EXPR:
      /* In && or ||, warn if 2nd operand has no side effect.  */
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case COMPOUND_EXPR:
      if (TREE_NO_WARNING (exp))
	return 0;
      if (warn_if_unused_value (TREE_OPERAND (exp, 0), locus))
	return 1;
      /* Let people do `(foo (), 0)' without a warning.  */
      if (TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	return 0;
      exp = TREE_OPERAND (exp, 1);
      goto restart;

    case NOP_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Don't warn about conversions not explicit in the user's program.  */
      if (TREE_NO_WARNING (exp))
	return 0;
      /* Assignment to a cast usually results in a cast of a modify.
	 Don't complain about that.  There can be an arbitrary number of
	 casts before the modify, so we must loop until we find the first
	 non-cast expression and then test to see if that is a modify.  */
      {
	tree tem = TREE_OPERAND (exp, 0);

	while (TREE_CODE (tem) == CONVERT_EXPR || TREE_CODE (tem) == NOP_EXPR)
	  tem = TREE_OPERAND (tem, 0);

	if (TREE_CODE (tem) == MODIFY_EXPR || TREE_CODE (tem) == INIT_EXPR
	    || TREE_CODE (tem) == CALL_EXPR)
	  return 0;
      }
      goto maybe_warn;

    case INDIRECT_REF:
      /* Don't warn about automatic dereferencing of references, since
	 the user cannot control it.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == REFERENCE_TYPE)
	{
	  exp = TREE_OPERAND (exp, 0);
	  goto restart;
	}
      /* Fall through.  */

    default:
      /* Referencing a volatile value is a side effect, so don't warn.  */
      if ((DECL_P (exp)
	   || TREE_CODE_CLASS (TREE_CODE (exp)) == 'r')
	  && TREE_THIS_VOLATILE (exp))
	return 0;

      /* If this is an expression which has no operands, there is no value
	 to be unused.  There are no such language-independent codes,
	 but front ends may define such.  */
      if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'e'
	  && TREE_CODE_LENGTH (TREE_CODE (exp)) == 0)
	return 0;

    maybe_warn:
      /* If this is an expression with side effects, don't warn.  */
      if (TREE_SIDE_EFFECTS (exp))
	return 0;

      warning ("%Hvalue computed is not used", &locus);
      return 1;
    }
}
28d81abb 2193\f
28d81abb
RK
2194/* Generate RTL for the start of an if-then. COND is the expression
2195 whose truth should be tested.
2196
2197 If EXITFLAG is nonzero, this conditional is visible to
2198 `exit_something'. */
2199
2200void
46c5ad27 2201expand_start_cond (tree cond, int exitflag)
28d81abb
RK
2202{
2203 struct nesting *thiscond = ALLOC_NESTING ();
2204
2205 /* Make an entry on cond_stack for the cond we are entering. */
2206
e2500fed 2207 thiscond->desc = COND_NESTING;
28d81abb
RK
2208 thiscond->next = cond_stack;
2209 thiscond->all = nesting_stack;
2210 thiscond->depth = ++nesting_depth;
2211 thiscond->data.cond.next_label = gen_label_rtx ();
2212 /* Before we encounter an `else', we don't need a separate exit label
2213 unless there are supposed to be exit statements
2214 to exit this conditional. */
2215 thiscond->exit_label = exitflag ? gen_label_rtx () : 0;
2216 thiscond->data.cond.endif_label = thiscond->exit_label;
2217 cond_stack = thiscond;
2218 nesting_stack = thiscond;
2219
b93a436e 2220 do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
28d81abb
RK
2221}
2222
2223/* Generate RTL between then-clause and the elseif-clause
2224 of an if-then-elseif-.... */
2225
2226void
46c5ad27 2227expand_start_elseif (tree cond)
28d81abb
RK
2228{
2229 if (cond_stack->data.cond.endif_label == 0)
2230 cond_stack->data.cond.endif_label = gen_label_rtx ();
2231 emit_jump (cond_stack->data.cond.endif_label);
2232 emit_label (cond_stack->data.cond.next_label);
2233 cond_stack->data.cond.next_label = gen_label_rtx ();
37366632 2234 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
28d81abb
RK
2235}
2236
2237/* Generate RTL between the then-clause and the else-clause
2238 of an if-then-else. */
2239
2240void
46c5ad27 2241expand_start_else (void)
28d81abb
RK
2242{
2243 if (cond_stack->data.cond.endif_label == 0)
2244 cond_stack->data.cond.endif_label = gen_label_rtx ();
ca695ac9 2245
28d81abb
RK
2246 emit_jump (cond_stack->data.cond.endif_label);
2247 emit_label (cond_stack->data.cond.next_label);
0f41302f 2248 cond_stack->data.cond.next_label = 0; /* No more _else or _elseif calls. */
28d81abb
RK
2249}
2250
d947ba59
RK
2251/* After calling expand_start_else, turn this "else" into an "else if"
2252 by providing another condition. */
2253
2254void
46c5ad27 2255expand_elseif (tree cond)
d947ba59
RK
2256{
2257 cond_stack->data.cond.next_label = gen_label_rtx ();
2258 do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
2259}
2260
28d81abb
RK
2261/* Generate RTL for the end of an if-then.
2262 Pop the record for it off of cond_stack. */
2263
2264void
46c5ad27 2265expand_end_cond (void)
28d81abb
RK
2266{
2267 struct nesting *thiscond = cond_stack;
2268
b93a436e
JL
2269 do_pending_stack_adjust ();
2270 if (thiscond->data.cond.next_label)
2271 emit_label (thiscond->data.cond.next_label);
2272 if (thiscond->data.cond.endif_label)
2273 emit_label (thiscond->data.cond.endif_label);
28d81abb
RK
2274
2275 POPSTACK (cond_stack);
28d81abb
RK
2276}
2277\f
0e9e1e0a 2278/* Return nonzero if we should preserve sub-expressions as separate
28d81abb 2279 pseudos. We never do so if we aren't optimizing. We always do so
6af8eb57 2280 if -fexpensive-optimizations. */
28d81abb
RK
2281
2282int
46c5ad27 2283preserve_subexpressions_p (void)
28d81abb 2284{
28d81abb
RK
2285 if (flag_expensive_optimizations)
2286 return 1;
2287
6af8eb57 2288 if (optimize == 0 || cfun == 0 || cfun->stmt == 0)
28d81abb
RK
2289 return 0;
2290
6af8eb57 2291 return 1;
28d81abb
RK
2292}
2293
28d81abb
RK
2294\f
2295/* Generate RTL to return from the current function, with no value.
2296 (That is, we do not do anything about returning any value.) */
2297
2298void
46c5ad27 2299expand_null_return (void)
28d81abb 2300{
969d70ca
JH
2301 rtx last_insn;
2302
2303 last_insn = get_last_insn ();
bd695e1e 2304
4381f7c2 2305 /* If this function was declared to return a value, but we
bd695e1e 2306 didn't, clobber the return registers so that they are not
a1f300c0 2307 propagated live to the rest of the function. */
c13fde05 2308 clobber_return_register ();
28d81abb 2309
396ad517 2310 expand_null_return_1 (last_insn);
28d81abb
RK
2311}
2312
6e3077c6
EB
2313/* Generate RTL to return directly from the current function.
2314 (That is, we bypass any return value.) */
2315
2316void
2317expand_naked_return (void)
2318{
2319 rtx last_insn, end_label;
2320
2321 last_insn = get_last_insn ();
2322 end_label = naked_return_label;
2323
2324 clear_pending_stack_adjust ();
2325 do_pending_stack_adjust ();
6e3077c6
EB
2326
2327 if (end_label == 0)
2328 end_label = naked_return_label = gen_label_rtx ();
2329 expand_goto_internal (NULL_TREE, end_label, last_insn);
2330}
2331
969d70ca
JH
2332/* Try to guess whether the value of return means error code. */
2333static enum br_predictor
46c5ad27 2334return_prediction (rtx val)
969d70ca
JH
2335{
2336 /* Different heuristics for pointers and scalars. */
2337 if (POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
2338 {
2339 /* NULL is usually not returned. */
2340 if (val == const0_rtx)
2341 return PRED_NULL_RETURN;
2342 }
2343 else
2344 {
2345 /* Negative return values are often used to indicate
2346 errors. */
2347 if (GET_CODE (val) == CONST_INT
2348 && INTVAL (val) < 0)
2349 return PRED_NEGATIVE_RETURN;
2350 /* Constant return values are also usually erors,
2351 zero/one often mean booleans so exclude them from the
2352 heuristics. */
2353 if (CONSTANT_P (val)
2354 && (val != const0_rtx && val != const1_rtx))
2355 return PRED_CONST_RETURN;
2356 }
2357 return PRED_NO_PREDICTION;
2358}
2359
/* If the current function returns values in the most significant part
   of a register, shift return value VAL appropriately.  The mode of
   the function's return type is known not to be BLKmode.

   Returns VAL unchanged when the target returns values in the least
   significant part (the common case) or when no shift is needed.  */

static rtx
shift_return_value (rtx val)
{
  tree type;

  type = TREE_TYPE (DECL_RESULT (current_function_decl));
  if (targetm.calls.return_in_msb (type))
    {
      rtx target;
      HOST_WIDE_INT shift;

      target = DECL_RTL (DECL_RESULT (current_function_decl));
      /* Number of unused (padding) bits below the value when it sits
	 in the most significant end of the return register.  */
      shift = (GET_MODE_BITSIZE (GET_MODE (target))
	       - BITS_PER_UNIT * int_size_in_bytes (type));
      if (shift > 0)
	val = expand_binop (GET_MODE (target), ashl_optab,
			    gen_lowpart (GET_MODE (target), val),
			    GEN_INT (shift), target, 1, OPTAB_WIDEN);
    }
  return val;
}
2386
2387
28d81abb
RK
/* Generate RTL to return from the current function, with value VAL.
   VAL is copied (with any target-required mode promotion) into the
   function's return location before the common return path is taken.  */

static void
expand_value_return (rtx val)
{
  rtx last_insn;
  rtx return_reg;
  enum br_predictor pred;

  if (flag_guess_branch_prob
      && (pred = return_prediction (val)) != PRED_NO_PREDICTION)
    {
      /* Emit information for branch prediction.  */
      rtx note;

      note = emit_note (NOTE_INSN_PREDICTION);

      NOTE_PREDICTION (note) = NOTE_PREDICT (pred, NOT_TAKEN);

    }

  last_insn = get_last_insn ();
  return_reg = DECL_RTL (DECL_RESULT (current_function_decl));

  /* Copy the value to the return location
     unless it's already there.  */

  if (return_reg != val)
    {
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      /* Honor the target's promotion rules for function return values
	 before storing into the (possibly wider) return register.  */
      if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
	{
	  int unsignedp = TYPE_UNSIGNED (type);
	  enum machine_mode old_mode
	    = DECL_MODE (DECL_RESULT (current_function_decl));
	  enum machine_mode mode
	    = promote_mode (type, old_mode, &unsignedp, 1);

	  if (mode != old_mode)
	    val = convert_modes (mode, old_mode, val, unsignedp);
	}
      /* A PARALLEL return location means the value is split over
	 several registers; use a group load in that case.  */
      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 (last_insn);
}
2437
2438/* Output a return with no value. If LAST_INSN is nonzero,
396ad517 2439 pretend that the return takes place after LAST_INSN. */
28d81abb
RK
2440
2441static void
46c5ad27 2442expand_null_return_1 (rtx last_insn)
28d81abb 2443{
b79c5284 2444 rtx end_label = return_label;
28d81abb
RK
2445
2446 clear_pending_stack_adjust ();
2447 do_pending_stack_adjust ();
28d81abb 2448
396ad517
JDA
2449 if (end_label == 0)
2450 end_label = return_label = gen_label_rtx ();
37366632 2451 expand_goto_internal (NULL_TREE, end_label, last_insn);
28d81abb
RK
2452}
2453\f
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.

   Three cases are handled: a BLKmode aggregate returned in registers
   (copied bit-field by bit-field into pseudos), a value computed into
   a temporary and then moved to the return register(s), and the
   simple case of computing directly into the hard return register.  */

void
expand_return (tree retval)
{
  /* If there are any cleanups to be performed, then they will
     be inserted following LAST_INSN.  It is desirable
     that the last_insn, for such purposes, should be the
     last insn before computing the return value.  Otherwise, cleanups
     which call functions can clobber the return value.  */
  /* ??? rms: I think that is erroneous, because in C++ it would
     run destructors on variables that might be used in the subsequent
     computation of the return value.  */
  rtx last_insn = 0;
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_expr (retval, NULL_RTX, VOIDmode, 0);
      emit_queue ();
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if (TREE_CODE (retval) == RESULT_DECL)
    retval_rhs = retval;
  else if ((TREE_CODE (retval) == MODIFY_EXPR || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    /* `return <expr>' is expanded as an assignment to the RESULT_DECL;
       peel that off to get at the value being returned.  */
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  last_insn = get_last_insn ();

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  The compiler currently can't handle
     copying a BLKmode value into registers.  We could put this code in a
     more general area (for use by everyone instead of just function
     call/return), but until this feature is generally usable it is kept here
     (and in expand_call).  The value must go into a pseudo in case there
     are cleanups that will clobber the real return register.  */

  if (retval_rhs != 0
      && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
      && REG_P (result_rtl))
    {
      int i;
      unsigned HOST_WIDE_INT bitpos, xbitpos;
      unsigned HOST_WIDE_INT padding_correction = 0;
      unsigned HOST_WIDE_INT bytes
	= int_size_in_bytes (TREE_TYPE (retval_rhs));
      int n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
      unsigned int bitsize
	= MIN (TYPE_ALIGN (TREE_TYPE (retval_rhs)), BITS_PER_WORD);
      rtx *result_pseudos = alloca (sizeof (rtx) * n_regs);
      rtx result_reg, src = NULL_RTX, dst = NULL_RTX;
      rtx result_val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
      enum machine_mode tmpmode, result_reg_mode;

      /* An empty aggregate degenerates to a plain return.  */
      if (bytes == 0)
	{
	  expand_null_return ();
	  return;
	}

      /* If the structure doesn't take up a whole number of words, see
	 whether the register value should be padded on the left or on
	 the right.  Set PADDING_CORRECTION to the number of padding
	 bits needed on the left side.

	 In most ABIs, the structure will be returned at the least end of
	 the register, which translates to right padding on little-endian
	 targets and left padding on big-endian targets.  The opposite
	 holds if the structure is returned at the most significant
	 end of the register.  */
      if (bytes % UNITS_PER_WORD != 0
	  && (targetm.calls.return_in_msb (TREE_TYPE (retval_rhs))
	      ? !BYTES_BIG_ENDIAN
	      : BYTES_BIG_ENDIAN))
	padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					       * BITS_PER_UNIT));

      /* Copy the structure BITSIZE bits at a time.  */
      for (bitpos = 0, xbitpos = padding_correction;
	   bitpos < bytes * BITS_PER_UNIT;
	   bitpos += bitsize, xbitpos += bitsize)
	{
	  /* We need a new destination pseudo each time xbitpos is
	     on a word boundary and when xbitpos == padding_correction
	     (the first time through).  */
	  if (xbitpos % BITS_PER_WORD == 0
	      || xbitpos == padding_correction)
	    {
	      /* Generate an appropriate register.  */
	      dst = gen_reg_rtx (word_mode);
	      result_pseudos[xbitpos / BITS_PER_WORD] = dst;

	      /* Clear the destination before we move anything into it.  */
	      emit_move_insn (dst, CONST0_RTX (GET_MODE (dst)));
	    }

	  /* We need a new source operand each time bitpos is on a word
	     boundary.  */
	  if (bitpos % BITS_PER_WORD == 0)
	    src = operand_subword_force (result_val,
					 bitpos / BITS_PER_WORD,
					 BLKmode);

	  /* Use bitpos for the source extraction (left justified) and
	     xbitpos for the destination store (right justified).  */
	  store_bit_field (dst, bitsize, xbitpos % BITS_PER_WORD, word_mode,
			   extract_bit_field (src, bitsize,
					      bitpos % BITS_PER_WORD, 1,
					      NULL_RTX, word_mode, word_mode,
					      BITS_PER_WORD),
			   BITS_PER_WORD);
	}

      tmpmode = GET_MODE (result_rtl);
      if (tmpmode == BLKmode)
	{
	  /* Find the smallest integer mode large enough to hold the
	     entire structure and use that mode instead of BLKmode
	     on the USE insn for the return register.  */
	  for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmpmode != VOIDmode;
	       tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	    /* Have we found a large enough mode?  */
	    if (GET_MODE_SIZE (tmpmode) >= bytes)
	      break;

	  /* No suitable mode found.  */
	  if (tmpmode == VOIDmode)
	    abort ();

	  PUT_MODE (result_rtl, tmpmode);
	}

      /* Assemble the word-sized pseudos into one register; it must be
	 at least word_mode so operand_subword works on it.  */
      if (GET_MODE_SIZE (tmpmode) < GET_MODE_SIZE (word_mode))
	result_reg_mode = word_mode;
      else
	result_reg_mode = tmpmode;
      result_reg = gen_reg_rtx (result_reg_mode);

      emit_queue ();
      for (i = 0; i < n_regs; i++)
	emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
			result_pseudos[i]);

      if (tmpmode != result_reg_mode)
	result_reg = gen_lowpart (tmpmode, result_reg);

      expand_value_return (result_reg);
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Calculate the return value into a temporary (usually a pseudo
	 reg).  */
      tree ot = TREE_TYPE (DECL_RESULT (current_function_decl));
      tree nt = build_qualified_type (ot, TYPE_QUALS (ot) | TYPE_QUAL_CONST);

      val = assign_temp (nt, 0, 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
      val = force_not_mem (val);
      emit_queue ();
      /* Return the calculated value, doing cleanups first.  */
      expand_value_return (shift_return_value (val));
    }
  else
    {
      /* No cleanups or no hard reg used;
	 calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, 0);
      emit_queue ();
      expand_value_return (result_rtl);
    }
}
28d81abb 2647\f
28d81abb
RK
2648/* Generate the RTL code for entering a binding contour.
2649 The variables are declared one by one, by calls to `expand_decl'.
2650
8e91754e
MM
2651 FLAGS is a bitwise or of the following flags:
2652
2653 1 - Nonzero if this construct should be visible to
2654 `exit_something'.
2655
2656 2 - Nonzero if this contour does not require a
2657 NOTE_INSN_BLOCK_BEG note. Virtually all calls from
2658 language-independent code should set this flag because they
2659 will not create corresponding BLOCK nodes. (There should be
2660 a one-to-one correspondence between NOTE_INSN_BLOCK_BEG notes
2661 and BLOCKs.) If this flag is set, MARK_ENDS should be zero
4381f7c2 2662 when expand_end_bindings is called.
a97901e6
MM
2663
2664 If we are creating a NOTE_INSN_BLOCK_BEG note, a BLOCK may
2665 optionally be supplied. If so, it becomes the NOTE_BLOCK for the
2666 note. */
28d81abb
RK
2667
/* Enter a new binding contour, pushing a BLOCK_NESTING entry onto
   block_stack/nesting_stack.  FLAGS bit 1 makes the contour visible to
   `exit_something'; FLAGS bit 2 suppresses the BLOCK_BEG note (see the
   long comment above).  BLOCK, if non-null, would become the NOTE_BLOCK
   and therefore may only be supplied when a note is requested.  */

void
expand_start_bindings_and_block (int flags, tree block)
{
  struct nesting *thisblock = ALLOC_NESTING ();
  rtx note;
  int exit_flag = ((flags & 1) != 0);
  int block_flag = ((flags & 2) == 0);

  /* If a BLOCK is supplied, then the caller should be requesting a
     NOTE_INSN_BLOCK_BEG note.  */
  if (!block_flag && block)
    abort ();

  /* Create a note to mark the beginning of the block.  */
  note = emit_note (NOTE_INSN_DELETED);

  /* Make an entry on block_stack for the block we are entering.  */

  thisblock->desc = BLOCK_NESTING;
  thisblock->next = block_stack;
  thisblock->all = nesting_stack;
  thisblock->depth = ++nesting_depth;
  thisblock->data.block.stack_level = 0;
  thisblock->data.block.cleanups = 0;
  thisblock->data.block.exception_region = 0;
  /* Remember the temp-slot level so expand_end_bindings can restore it.  */
  thisblock->data.block.block_target_temp_slot_level = target_temp_slot_level;

  thisblock->data.block.conditional_code = 0;
  thisblock->data.block.last_unconditional_cleanup = note;
  /* When we insert instructions after the last unconditional cleanup,
     we don't adjust last_insn.  That means that a later add_insn will
     clobber the instructions we've just added.  The easiest way to
     fix this is to just insert another instruction here, so that the
     instructions inserted after the last unconditional cleanup are
     never the last instruction.  */
  emit_note (NOTE_INSN_DELETED);

  /* Chain the enclosing block's cleanups onto ours so gotos out of
     nested scopes can run every pending cleanup.  */
  if (block_stack
      && !(block_stack->data.block.cleanups == NULL_TREE
	   && block_stack->data.block.outer_cleanups == NULL_TREE))
    thisblock->data.block.outer_cleanups
      = tree_cons (NULL_TREE, block_stack->data.block.cleanups,
		   block_stack->data.block.outer_cleanups);
  else
    thisblock->data.block.outer_cleanups = 0;
  thisblock->data.block.label_chain = 0;
  thisblock->data.block.innermost_stack_block = stack_block_stack;
  thisblock->data.block.first_insn = note;
  thisblock->data.block.block_start_count = ++current_block_start_count;
  thisblock->exit_label = exit_flag ? gen_label_rtx () : 0;
  block_stack = thisblock;
  nesting_stack = thisblock;

  /* Make a new level for allocating stack slots.  */
  push_temp_slots ();
}
2724
e976b8b2
MS
2725/* Specify the scope of temporaries created by TARGET_EXPRs. Similar
2726 to CLEANUP_POINT_EXPR, but handles cases when a series of calls to
2727 expand_expr are made. After we end the region, we know that all
2728 space for all temporaries that were created by TARGET_EXPRs will be
2729 destroyed and their space freed for reuse. */
2730
2731void
46c5ad27 2732expand_start_target_temps (void)
e976b8b2
MS
2733{
2734 /* This is so that even if the result is preserved, the space
2735 allocated will be freed, as we know that it is no longer in use. */
2736 push_temp_slots ();
2737
2738 /* Start a new binding layer that will keep track of all cleanup
2739 actions to be performed. */
8e91754e 2740 expand_start_bindings (2);
e976b8b2
MS
2741
2742 target_temp_slot_level = temp_slot_level;
2743}
2744
2745void
46c5ad27 2746expand_end_target_temps (void)
e976b8b2
MS
2747{
2748 expand_end_bindings (NULL_TREE, 0, 0);
4381f7c2 2749
e976b8b2
MS
2750 /* This is so that even if the result is preserved, the space
2751 allocated will be freed, as we know that it is no longer in use. */
2752 pop_temp_slots ();
2753}
2754
0e9e1e0a 2755/* Given a pointer to a BLOCK node return nonzero if (and only if) the node
deb5e280
JM
2756 in question represents the outermost pair of curly braces (i.e. the "body
2757 block") of a function or method.
2758
2759 For any BLOCK node representing a "body block" of a function or method, the
2760 BLOCK_SUPERCONTEXT of the node will point to another BLOCK node which
2761 represents the outermost (function) scope for the function or method (i.e.
2762 the one which includes the formal parameters). The BLOCK_SUPERCONTEXT of
4381f7c2 2763 *that* node in turn will point to the relevant FUNCTION_DECL node. */
deb5e280
JM
2764
2765int
46c5ad27 2766is_body_block (tree stmt)
deb5e280 2767{
2896d056
ZW
2768 if (lang_hooks.no_body_blocks)
2769 return 0;
2770
deb5e280
JM
2771 if (TREE_CODE (stmt) == BLOCK)
2772 {
2773 tree parent = BLOCK_SUPERCONTEXT (stmt);
2774
2775 if (parent && TREE_CODE (parent) == BLOCK)
2776 {
2777 tree grandparent = BLOCK_SUPERCONTEXT (parent);
2778
2779 if (grandparent && TREE_CODE (grandparent) == FUNCTION_DECL)
2780 return 1;
2781 }
2782 }
2783
2784 return 0;
2785}
2786
e976b8b2
MS
2787/* True if we are currently emitting insns in an area of output code
2788 that is controlled by a conditional expression. This is used by
2789 the cleanup handling code to generate conditional cleanup actions. */
2790
2791int
46c5ad27 2792conditional_context (void)
e976b8b2
MS
2793{
2794 return block_stack && block_stack->data.block.conditional_code;
2795}
2796
91088ddb
JM
2797/* Return an opaque pointer to the current nesting level, so frontend code
2798 can check its own sanity. */
2799
2800struct nesting *
46c5ad27 2801current_nesting_level (void)
91088ddb
JM
2802{
2803 return cfun ? block_stack : 0;
2804}
2805
ba716ac9
BS
2806/* Emit code to restore vital registers at the beginning of a nonlocal goto
2807 handler. */
/* Emit code to restore vital registers at the beginning of a nonlocal
   goto handler.  The emitted insns must stay in exactly this order;
   a trailing ASM_INPUT acts as a scheduling blockage.  */
static void
expand_nl_goto_receiver (void)
{
  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    /* First adjust our frame pointer to its actual value.  It was
       previously set to the start of the virtual area corresponding to
       the stacked variables when we branched here and now needs to be
       adjusted to the actual hardware fp value.

       Assignments are to virtual registers are converted by
       instantiate_virtual_regs into the corresponding assignment
       to the underlying register (fp in this case) that makes
       the original assignment true.
       So the following insn will actually be
       decrementing fp by STARTING_FRAME_OFFSET.  */
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
      size_t i;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (get_arg_pointer_save_area (cfun)));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif

  /* @@@ This is a kludge.  Not all machine descriptions define a blockage
     insn, but we must not allow the code we just generated to be reordered
     by scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  So emit an ASM_INPUT to act as blockage
     insn.  */
  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
}
2875
ba716677
MM
2876/* Warn about any unused VARS (which may contain nodes other than
2877 VAR_DECLs, but such nodes are ignored). The nodes are connected
2878 via the TREE_CHAIN field. */
2879
2880void
46c5ad27 2881warn_about_unused_variables (tree vars)
ba716677
MM
2882{
2883 tree decl;
2884
078721e1 2885 if (warn_unused_variable)
ba716677 2886 for (decl = vars; decl; decl = TREE_CHAIN (decl))
4381f7c2 2887 if (TREE_CODE (decl) == VAR_DECL
ba716677
MM
2888 && ! TREE_USED (decl)
2889 && ! DECL_IN_SYSTEM_HEADER (decl)
4381f7c2 2890 && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
ddd2d57e 2891 warning ("%Junused variable '%D'", decl, decl);
ba716677
MM
2892}
2893
28d81abb 2894/* Generate RTL code to terminate a binding contour.
e97b5c12
MM
2895
2896 VARS is the chain of VAR_DECL nodes for the variables bound in this
2897 contour. There may actually be other nodes in this chain, but any
2898 nodes other than VAR_DECLS are ignored.
2899
28d81abb
RK
2900 MARK_ENDS is nonzero if we should put a note at the beginning
2901 and end of this binding contour.
2902
cda26058
RK
2903 DONT_JUMP_IN is positive if it is not valid to jump into this contour,
2904 zero if we can jump into this contour only if it does not have a saved
2905 stack level, and negative if we are not to check for invalid use of
2906 labels (because the front end does that). */
28d81abb
RK
2907
/* Generate RTL code to terminate a binding contour.  See the block
   comment above for VARS / MARK_ENDS / DONT_JUMP_IN semantics.
   Ordering here is delicate: cleanups run before the stack level is
   restored, and both happen before the nesting stack is popped.  */

void
expand_end_bindings (tree vars, int mark_ends ATTRIBUTE_UNUSED,
		     int dont_jump_in)
{
  struct nesting *thisblock = block_stack;

  /* If any of the variables in this scope were not used, warn the
     user.  */
  warn_about_unused_variables (vars);

  if (thisblock->exit_label)
    {
      do_pending_stack_adjust ();
      emit_label (thisblock->exit_label);
    }

  /* Don't allow jumping into a block that has a stack level.
     Cleanups are allowed, though.  */
  if (dont_jump_in > 0
      || (dont_jump_in == 0 && thisblock->data.block.stack_level != 0))
    {
      struct label_chain *chain;

      /* Any labels in this block are no longer valid to go to.
	 Mark them to cause an error message.  */
      for (chain = thisblock->data.block.label_chain; chain; chain = chain->next)
	{
	  DECL_TOO_LATE (chain->label) = 1;
	  /* If any goto without a fixup came to this label,
	     that must be an error, because gotos without fixups
	     come from outside all saved stack-levels.  */
	  if (TREE_ADDRESSABLE (chain->label))
	    error ("%Jlabel '%D' used before containing binding contour",
		   chain->label, chain->label);
	}
    }

  /* Restore stack level in effect before the block
     (only if variable-size objects allocated).  */
  /* Perform any cleanups associated with the block.  */

  if (thisblock->data.block.stack_level != 0
      || thisblock->data.block.cleanups != 0)
    {
      int reachable;
      rtx insn;

      /* Only clean up here if this point can actually be reached.
	 A BARRIER just before us means the previous insn never falls
	 through.  */
      insn = get_last_insn ();
      if (GET_CODE (insn) == NOTE)
	insn = prev_nonnote_insn (insn);
      reachable = (! insn || GET_CODE (insn) != BARRIER);

      /* Do the cleanups.  */
      expand_cleanups (thisblock->data.block.cleanups, 0, reachable);
      if (reachable)
	do_pending_stack_adjust ();

      /* Restore the stack level.  */

      if (reachable && thisblock->data.block.stack_level != 0)
	{
	  emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
			      thisblock->data.block.stack_level, NULL_RTX);
	  if (cfun->nonlocal_goto_save_area)
	    update_nonlocal_goto_save_area ();
	}

      /* Any gotos out of this block must also do these things.
	 Also report any gotos with fixups that came to labels in this
	 level.  */
      fixup_gotos (thisblock,
		   thisblock->data.block.stack_level,
		   thisblock->data.block.cleanups,
		   thisblock->data.block.first_insn,
		   dont_jump_in);
    }

  /* Mark the beginning and end of the scope if requested.
     We do this now, after running cleanups on the variables
     just going out of scope, so they are in scope for their cleanups.  */

  /* Get rid of the beginning-mark if we don't make an end-mark.  */
  NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;

  /* Restore the temporary level of TARGET_EXPRs.  */
  target_temp_slot_level = thisblock->data.block.block_target_temp_slot_level;

  /* Restore block_stack level for containing block.  */

  stack_block_stack = thisblock->data.block.innermost_stack_block;
  POPSTACK (block_stack);

  /* Pop the stack slot nesting and free any slots at this level.  */
  pop_temp_slots ();
}
3004\f
7393c642
RK
3005/* Generate code to save the stack pointer at the start of the current block
3006 and set up to restore it on exit. */
3007
3008void
46c5ad27 3009save_stack_pointer (void)
7393c642
RK
3010{
3011 struct nesting *thisblock = block_stack;
3012
3013 if (thisblock->data.block.stack_level == 0)
3014 {
3015 emit_stack_save (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
3016 &thisblock->data.block.stack_level,
3017 thisblock->data.block.first_insn);
3018 stack_block_stack = thisblock;
3019 }
3020}
3021\f
28d81abb 3022/* Generate RTL for the automatic variable declaration DECL.
ec5cd386 3023 (Other kinds of declarations are simply ignored if seen here.) */
28d81abb
RK
3024
/* Generate the RTL representation for the automatic variable DECL.
   (Other kinds of declarations are simply ignored if seen here.)
   Four cases: error type, incomplete type, register candidate,
   fixed-size stack slot, or dynamically-sized stack allocation.  */

void
expand_decl (tree decl)
{
  tree type;

  type = TREE_TYPE (decl);

  /* For a CONST_DECL, set mode, alignment, and sizes from those of the
     type in case this node is used in a reference.  */
  if (TREE_CODE (decl) == CONST_DECL)
    {
      DECL_MODE (decl) = TYPE_MODE (type);
      DECL_ALIGN (decl) = TYPE_ALIGN (type);
      DECL_SIZE (decl) = TYPE_SIZE (type);
      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
      return;
    }

  /* Otherwise, only automatic variables need any expansion done.  Static and
     external variables, and external functions, will be handled by
     `assemble_variable' (called from finish_decl).  TYPE_DECL requires
     nothing.  PARM_DECLs are handled in `assign_parms'.  */
  if (TREE_CODE (decl) != VAR_DECL)
    return;

  if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
    return;

  /* Create the RTL representation for the variable.  */

  if (type == error_mark_node)
    SET_DECL_RTL (decl, gen_rtx_MEM (BLKmode, const0_rtx));

  else if (DECL_SIZE (decl) == 0)
    /* Variable with incomplete type.  */
    {
      rtx x;
      if (DECL_INITIAL (decl) == 0)
	/* Error message was already done; now avoid a crash.  */
	x = gen_rtx_MEM (BLKmode, const0_rtx);
      else
	/* An initializer is going to decide the size of this array.
	   Until we know the size, represent its address with a reg.  */
	x = gen_rtx_MEM (BLKmode, gen_reg_rtx (Pmode));

      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);
    }
  else if (DECL_MODE (decl) != BLKmode
	   /* If -ffloat-store, don't put explicit float vars
	      into regs.  */
	   && !(flag_float_store
		&& TREE_CODE (type) == REAL_TYPE)
	   && ! TREE_THIS_VOLATILE (decl)
	   && ! DECL_NONLOCAL (decl)
	   && (DECL_REGISTER (decl) || DECL_ARTIFICIAL (decl) || optimize))
    {
      /* Automatic variable that can go in a register.  */
      int unsignedp = TYPE_UNSIGNED (type);
      enum machine_mode reg_mode
	= promote_mode (type, DECL_MODE (decl), &unsignedp, 0);

      SET_DECL_RTL (decl, gen_reg_rtx (reg_mode));

      /* Note if the object is a user variable.  */
      if (!DECL_ARTIFICIAL (decl))
	{
	  mark_user_reg (DECL_RTL (decl));

	  /* Trust user variables which have a pointer type to really
	     be pointers.  Do not trust compiler generated temporaries
	     as our type system is totally busted as it relates to
	     pointer arithmetic which translates into lots of compiler
	     generated objects with pointer types, but which are not really
	     pointers.  */
	  if (POINTER_TYPE_P (type))
	    mark_reg_pointer (DECL_RTL (decl),
			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (decl))));
	}

      maybe_set_unchanging (DECL_RTL (decl), decl);

      /* If something wants our address, try to use ADDRESSOF.  */
      if (TREE_ADDRESSABLE (decl))
	put_var_into_stack (decl, /*rescan=*/false);
    }

  else if (TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
	   && ! (flag_stack_check && ! STACK_CHECK_BUILTIN
		 && 0 < compare_tree_int (DECL_SIZE_UNIT (decl),
					  STACK_CHECK_MAX_VAR_SIZE)))
    {
      /* Variable of fixed size that goes on the stack.  */
      rtx oldaddr = 0;
      rtx addr;
      rtx x;

      /* If we previously made RTL for this decl, it must be an array
	 whose size was determined by the initializer.
	 The old address was a register; set that register now
	 to the proper address.  */
      if (DECL_RTL_SET_P (decl))
	{
	  if (!MEM_P (DECL_RTL (decl))
	      || !REG_P (XEXP (DECL_RTL (decl), 0)))
	    abort ();
	  oldaddr = XEXP (DECL_RTL (decl), 0);
	}

      /* Set alignment we actually gave this decl.  */
      DECL_ALIGN (decl) = (DECL_MODE (decl) == BLKmode ? BIGGEST_ALIGNMENT
			   : GET_MODE_BITSIZE (DECL_MODE (decl)));
      DECL_USER_ALIGN (decl) = 0;

      x = assign_temp (decl, 1, 1, 1);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);

      if (oldaddr)
	{
	  addr = force_operand (XEXP (DECL_RTL (decl), 0), oldaddr);
	  if (addr != oldaddr)
	    emit_move_insn (oldaddr, addr);
	}
    }
  else
    /* Dynamic-size object: must push space on the stack.  */
    {
      rtx address, size, x;

      /* Record the stack pointer on entry to block, if have
	 not already done so.  */
      do_pending_stack_adjust ();
      save_stack_pointer ();

      /* Compute the variable's size, in bytes.  This will expand any
	 needed SAVE_EXPRs for the first time.  */
      size = expand_expr (DECL_SIZE_UNIT (decl), NULL_RTX, VOIDmode, 0);
      free_temp_slots ();

      /* Allocate space on the stack for the variable.  Note that
	 DECL_ALIGN says how the variable is to be aligned and we
	 cannot use it to conclude anything about the alignment of
	 the size.  */
      address = allocate_dynamic_stack_space (size, NULL_RTX,
					      TYPE_ALIGN (TREE_TYPE (decl)));

      /* Reference the variable indirect through that rtx.  */
      x = gen_rtx_MEM (DECL_MODE (decl), address);
      set_mem_attributes (x, decl, 1);
      SET_DECL_RTL (decl, x);


      /* Indicate the alignment we actually gave this variable.  */
#ifdef STACK_BOUNDARY
      DECL_ALIGN (decl) = STACK_BOUNDARY;
#else
      DECL_ALIGN (decl) = BIGGEST_ALIGNMENT;
#endif
      DECL_USER_ALIGN (decl) = 0;
    }
}
3187\f
6de9cd9a
DN
3188/* Emit code to allocate T_SIZE bytes of dynamic stack space for ALLOC. */
3189void
3190expand_stack_alloc (tree alloc, tree t_size)
3191{
3192 rtx address, dest, size;
3193 tree var, type;
3194
3195 if (TREE_CODE (alloc) != ADDR_EXPR)
3196 abort ();
3197 var = TREE_OPERAND (alloc, 0);
3198 if (TREE_CODE (var) != VAR_DECL)
3199 abort ();
3200
3201 type = TREE_TYPE (var);
3202
6de9cd9a
DN
3203 /* Compute the variable's size, in bytes. */
3204 size = expand_expr (t_size, NULL_RTX, VOIDmode, 0);
3205 free_temp_slots ();
3206
3207 /* Allocate space on the stack for the variable. */
3208 address = XEXP (DECL_RTL (var), 0);
3209 dest = allocate_dynamic_stack_space (size, address, TYPE_ALIGN (type));
3210 if (dest != address)
3211 emit_move_insn (address, dest);
3212
3213 /* Indicate the alignment we actually gave this variable. */
3214#ifdef STACK_BOUNDARY
3215 DECL_ALIGN (var) = STACK_BOUNDARY;
3216#else
3217 DECL_ALIGN (var) = BIGGEST_ALIGNMENT;
3218#endif
3219 DECL_USER_ALIGN (var) = 0;
3220}
3221
3222/* Emit code to save the current value of stack. */
3223rtx
3224expand_stack_save (void)
3225{
3226 rtx ret = NULL_RTX;
3227
3228 do_pending_stack_adjust ();
3229 emit_stack_save (SAVE_BLOCK, &ret, NULL_RTX);
3230 return ret;
3231}
3232
3233/* Emit code to restore the current value of stack. */
3234void
3235expand_stack_restore (tree var)
3236{
3237 rtx sa = DECL_RTL (var);
3238
3239 emit_stack_restore (SAVE_BLOCK, sa, NULL_RTX);
3240}
3241\f
28d81abb
RK
3242/* Emit code to perform the initialization of a declaration DECL. */
3243
/* Emit code to perform the initialization of a declaration DECL.
   Temp slots are pushed/popped around the expansion so temporaries
   made for the initializer are freed afterwards.  */

void
expand_decl_init (tree decl)
{
  int was_used = TREE_USED (decl);

  /* If this is a CONST_DECL, we don't have to generate any code.  Likewise
     for static decls.  */
  if (TREE_CODE (decl) == CONST_DECL
      || TREE_STATIC (decl))
    return;

  /* Compute and store the initial value now.  */

  push_temp_slots ();

  if (DECL_INITIAL (decl) == error_mark_node)
    {
      enum tree_code code = TREE_CODE (TREE_TYPE (decl));

      /* After an erroneous initializer, store zero for scalar types to
	 avoid downstream crashes on uninitialized RTL.  */
      if (code == INTEGER_TYPE || code == REAL_TYPE || code == ENUMERAL_TYPE
	  || code == POINTER_TYPE || code == REFERENCE_TYPE)
	expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
			   0);
      emit_queue ();
    }
  else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
    {
      emit_line_note (DECL_SOURCE_LOCATION (decl));
      expand_assignment (decl, DECL_INITIAL (decl), 0);
      emit_queue ();
    }

  /* Don't let the initialization count as "using" the variable.  */
  TREE_USED (decl) = was_used;

  /* Free any temporaries we made while initializing the decl.  */
  preserve_temp_slots (NULL_RTX);
  free_temp_slots ();
  pop_temp_slots ();
}
3284
3285/* CLEANUP is an expression to be executed at exit from this binding contour;
3286 for example, in C++, it might call the destructor for this variable.
3287
4847c938
MS
3288 We wrap CLEANUP in an UNSAVE_EXPR node, so that we can expand the
3289 CLEANUP multiple times, and have the correct semantics. This
e976b8b2
MS
3290 happens in exception handling, for gotos, returns, breaks that
3291 leave the current scope.
28d81abb
RK
3292
3293 If CLEANUP is nonzero and DECL is zero, we record a cleanup
6d2f8887 3294 that is not associated with any particular variable. */
28d81abb
RK
3295
/* Record CLEANUP, an expression to be executed at exit from the current
   binding contour (e.g. a C++ destructor call for DECL); see the block
   comment above.  Returns 0 if there is no current block, else 1.
   When emitted inside conditional code, the cleanup is guarded by a
   runtime flag so it only runs if the conditional path was taken.  */

int
expand_decl_cleanup (tree decl, tree cleanup)
{
  struct nesting *thisblock;

  /* Error if we are not in any block.  */
  if (cfun == 0 || block_stack == 0)
    return 0;

  thisblock = block_stack;

  /* Record the cleanup if there is one.  */

  if (cleanup != 0)
    {
      tree t;
      rtx seq;
      tree *cleanups = &thisblock->data.block.cleanups;
      int cond_context = conditional_context ();

      if (cond_context)
	{
	  rtx flag = gen_reg_rtx (word_mode);
	  rtx set_flag_0;
	  tree cond;

	  /* Emit "flag = 0" back at the last unconditional point, and
	     "flag = 1" here, so at run time the flag records whether
	     this (conditional) point was actually reached.  */
	  start_sequence ();
	  emit_move_insn (flag, const0_rtx);
	  set_flag_0 = get_insns ();
	  end_sequence ();

	  thisblock->data.block.last_unconditional_cleanup
	    = emit_insn_after (set_flag_0,
			       thisblock->data.block.last_unconditional_cleanup);

	  emit_move_insn (flag, const1_rtx);

	  cond = build_decl (VAR_DECL, NULL_TREE,
			     lang_hooks.types.type_for_mode (word_mode, 1));
	  SET_DECL_RTL (cond, flag);

	  /* Conditionalize the cleanup.  */
	  cleanup = build (COND_EXPR, void_type_node,
			   lang_hooks.truthvalue_conversion (cond),
			   cleanup, integer_zero_node);
	  cleanup = fold (cleanup);

	  cleanups = &thisblock->data.block.cleanups;
	}

      /* Wrap in UNSAVE_EXPR so the cleanup can be expanded more than
	 once (for gotos, returns, breaks leaving the scope).  */
      cleanup = unsave_expr (cleanup);

      t = *cleanups = tree_cons (decl, cleanup, *cleanups);

      if (! cond_context)
	/* If this block has a cleanup, it belongs in stack_block_stack.  */
	stack_block_stack = thisblock;

      if (cond_context)
	{
	  start_sequence ();
	}

      if (! using_eh_for_cleanups_p)
	TREE_ADDRESSABLE (t) = 1;
      else
	expand_eh_region_start ();

      if (cond_context)
	{
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    thisblock->data.block.last_unconditional_cleanup
	      = emit_insn_after (seq,
				 thisblock->data.block.last_unconditional_cleanup);
	}
      else
	{
	  thisblock->data.block.last_unconditional_cleanup
	    = get_last_insn ();
	  /* When we insert instructions after the last unconditional cleanup,
	     we don't adjust last_insn.  That means that a later add_insn will
	     clobber the instructions we've just added.  The easiest way to
	     fix this is to just insert another instruction here, so that the
	     instructions inserted after the last unconditional cleanup are
	     never the last instruction.  */
	  emit_note (NOTE_INSN_DELETED);
	}
    }
  return 1;
}
659e5a7a
JM
3388
3389/* Like expand_decl_cleanup, but maybe only run the cleanup if an exception
3390 is thrown. */
3391
3392int
46c5ad27 3393expand_decl_cleanup_eh (tree decl, tree cleanup, int eh_only)
659e5a7a
JM
3394{
3395 int ret = expand_decl_cleanup (decl, cleanup);
3396 if (cleanup && ret)
3397 {
3398 tree node = block_stack->data.block.cleanups;
3399 CLEANUP_EH_ONLY (node) = eh_only;
3400 }
3401 return ret;
3402}
28d81abb
RK
3403\f
3404/* DECL is an anonymous union. CLEANUP is a cleanup for DECL.
3405 DECL_ELTS is the list of elements that belong to DECL's type.
3406 In each, the TREE_VALUE is a VAR_DECL, and the TREE_PURPOSE a cleanup. */
3407
3408void
46c5ad27 3409expand_anon_union_decl (tree decl, tree cleanup, tree decl_elts)
28d81abb 3410{
01d939e8 3411 struct nesting *thisblock = cfun == 0 ? 0 : block_stack;
28d81abb 3412 rtx x;
8a693bd0 3413 tree t;
28d81abb 3414
8a693bd0
MM
3415 /* If any of the elements are addressable, so is the entire union. */
3416 for (t = decl_elts; t; t = TREE_CHAIN (t))
3417 if (TREE_ADDRESSABLE (TREE_VALUE (t)))
3418 {
3419 TREE_ADDRESSABLE (decl) = 1;
3420 break;
3421 }
4381f7c2 3422
ec5cd386
RK
3423 expand_decl (decl);
3424 expand_decl_cleanup (decl, cleanup);
28d81abb
RK
3425 x = DECL_RTL (decl);
3426
8a693bd0
MM
3427 /* Go through the elements, assigning RTL to each. */
3428 for (t = decl_elts; t; t = TREE_CHAIN (t))
28d81abb 3429 {
8a693bd0
MM
3430 tree decl_elt = TREE_VALUE (t);
3431 tree cleanup_elt = TREE_PURPOSE (t);
28d81abb
RK
3432 enum machine_mode mode = TYPE_MODE (TREE_TYPE (decl_elt));
3433
3256b817
JJ
3434 /* If any of the elements are addressable, so is the entire
3435 union. */
3436 if (TREE_USED (decl_elt))
3437 TREE_USED (decl) = 1;
3438
7b9032dd
JM
3439 /* Propagate the union's alignment to the elements. */
3440 DECL_ALIGN (decl_elt) = DECL_ALIGN (decl);
11cf4d18 3441 DECL_USER_ALIGN (decl_elt) = DECL_USER_ALIGN (decl);
7b9032dd
JM
3442
3443 /* If the element has BLKmode and the union doesn't, the union is
3444 aligned such that the element doesn't need to have BLKmode, so
3445 change the element's mode to the appropriate one for its size. */
3446 if (mode == BLKmode && DECL_MODE (decl) != BLKmode)
3447 DECL_MODE (decl_elt) = mode
05bccae2 3448 = mode_for_size_tree (DECL_SIZE (decl_elt), MODE_INT, 1);
7b9032dd 3449
28d81abb
RK
3450 /* (SUBREG (MEM ...)) at RTL generation time is invalid, so we
3451 instead create a new MEM rtx with the proper mode. */
3c0cb5de 3452 if (MEM_P (x))
28d81abb
RK
3453 {
3454 if (mode == GET_MODE (x))
19e7881c 3455 SET_DECL_RTL (decl_elt, x);
28d81abb 3456 else
f1ec5147 3457 SET_DECL_RTL (decl_elt, adjust_address_nv (x, mode, 0));
28d81abb 3458 }
f8cfc6aa 3459 else if (REG_P (x))
28d81abb
RK
3460 {
3461 if (mode == GET_MODE (x))
19e7881c 3462 SET_DECL_RTL (decl_elt, x);
28d81abb 3463 else
ddef6bc7 3464 SET_DECL_RTL (decl_elt, gen_lowpart_SUBREG (mode, x));
28d81abb
RK
3465 }
3466 else
3467 abort ();
3468
3469 /* Record the cleanup if there is one. */
3470
3471 if (cleanup != 0)
3472 thisblock->data.block.cleanups
1f8f4a0b
MM
3473 = tree_cons (decl_elt, cleanup_elt,
3474 thisblock->data.block.cleanups);
28d81abb
RK
3475 }
3476}
3477\f
3478/* Expand a list of cleanups LIST.
3479 Elements may be expressions or may be nested lists.
3480
0e9e1e0a 3481 If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
50d1b7a1
MS
3482 goto and handle protection regions specially in that case.
3483
3484 If REACHABLE, we emit code, otherwise just inform the exception handling
3485 code about this finalization. */
28d81abb
RK
3486
/* Expand a list of cleanups LIST.
   Elements may be expressions or may be nested lists.

   If IN_FIXUP is nonzero, we are generating this cleanup for a fixup
   goto and handle protection regions specially in that case.

   If REACHABLE, we emit code, otherwise just inform the exception handling
   code about this finalization.  */

static void
expand_cleanups (tree list, int in_fixup, int reachable)
{
  tree tail;
  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      /* Nested list: recurse with the same flags.  */
      expand_cleanups (TREE_VALUE (tail), in_fixup, reachable);
    else
      {
	if (! in_fixup && using_eh_for_cleanups_p)
	  expand_eh_region_end_cleanup (TREE_VALUE (tail));

	if (reachable && !CLEANUP_EH_ONLY (tail))
	  {
	    /* Cleanups may be run multiple times.  For example,
	       when exiting a binding contour, we expand the
	       cleanups associated with that contour.  When a goto
	       within that binding contour has a target outside that
	       contour, it will expand all cleanups from its scope to
	       the target.  Though the cleanups are expanded multiple
	       times, the control paths are non-overlapping so the
	       cleanups will not be executed twice.  */

	    /* We may need to protect from outer cleanups.  */
	    if (in_fixup && using_eh_for_cleanups_p)
	      {
		expand_eh_region_start ();

		expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);

		expand_eh_region_end_fixup (TREE_VALUE (tail));
	      }
	    else
	      expand_expr (TREE_VALUE (tail), const0_rtx, VOIDmode, 0);

	    free_temp_slots ();
	  }
      }
}
3526
e976b8b2
MS
3527/* Mark when the context we are emitting RTL for as a conditional
3528 context, so that any cleanup actions we register with
3529 expand_decl_init will be properly conditionalized when those
3530 cleanup actions are later performed. Must be called before any
956d6950 3531 expression (tree) is expanded that is within a conditional context. */
e976b8b2
MS
3532
3533void
46c5ad27 3534start_cleanup_deferral (void)
e976b8b2 3535{
e3eef942
JW
3536 /* block_stack can be NULL if we are inside the parameter list. It is
3537 OK to do nothing, because cleanups aren't possible here. */
3538 if (block_stack)
3539 ++block_stack->data.block.conditional_code;
e976b8b2
MS
3540}
3541
3542/* Mark the end of a conditional region of code. Because cleanup
956d6950 3543 deferrals may be nested, we may still be in a conditional region
e976b8b2
MS
3544 after we end the currently deferred cleanups, only after we end all
3545 deferred cleanups, are we back in unconditional code. */
3546
3547void
46c5ad27 3548end_cleanup_deferral (void)
e976b8b2 3549{
e3eef942
JW
3550 /* block_stack can be NULL if we are inside the parameter list. It is
3551 OK to do nothing, because cleanups aren't possible here. */
3552 if (block_stack)
3553 --block_stack->data.block.conditional_code;
e976b8b2
MS
3554}
3555
28d81abb 3556tree
46c5ad27 3557last_cleanup_this_contour (void)
28d81abb
RK
3558{
3559 if (block_stack == 0)
3560 return 0;
3561
3562 return block_stack->data.block.cleanups;
3563}
3564
6de9cd9a
DN
3565
3566/* Return nonzero if any containing block has a stack level or
3567 cleanups. */
3568
3569int
3570containing_blocks_have_cleanups_or_stack_level (void)
3571{
3572 struct nesting *block;
3573
3574 for (block = block_stack; block; block = block->next)
3575 if (block->data.block.stack_level != 0
3576 || block->data.block.cleanups != 0)
3577 return 1;
3578
3579 return 0;
3580}
3581
28d81abb 3582/* Return 1 if there are any pending cleanups at this point.
de1f5659
JL
3583 Check the current contour as well as contours that enclose
3584 the current contour. */
28d81abb
RK
3585
3586int
46c5ad27 3587any_pending_cleanups (void)
28d81abb
RK
3588{
3589 struct nesting *block;
3590
01d939e8 3591 if (cfun == NULL || cfun->stmt == NULL || block_stack == 0)
28d81abb
RK
3592 return 0;
3593
de1f5659 3594 if (block_stack->data.block.cleanups != NULL)
28d81abb 3595 return 1;
ce47ca44
JL
3596
3597 if (block_stack->data.block.outer_cleanups == 0)
28d81abb
RK
3598 return 0;
3599
3600 for (block = block_stack->next; block; block = block->next)
3601 if (block->data.block.cleanups != 0)
3602 return 1;
3603
3604 return 0;
3605}
3606\f
3607/* Enter a case (Pascal) or switch (C) statement.
3608 Push a block onto case_stack and nesting_stack
3609 to accumulate the case-labels that are seen
3610 and to record the labels generated for the statement.
3611
3612 EXIT_FLAG is nonzero if `exit_something' should exit this case stmt.
3613 Otherwise, this construct is transparent for `exit_something'.
3614
3615 EXPR is the index-expression to be dispatched on.
3616 TYPE is its nominal type. We could simply convert EXPR to this type,
3617 but instead we take short cuts. */
3618
3619void
46c5ad27
AJ
3620expand_start_case (int exit_flag, tree expr, tree type,
3621 const char *printname)
28d81abb 3622{
b3694847 3623 struct nesting *thiscase = ALLOC_NESTING ();
28d81abb
RK
3624
3625 /* Make an entry on case_stack for the case we are entering. */
3626
e2500fed 3627 thiscase->desc = CASE_NESTING;
28d81abb
RK
3628 thiscase->next = case_stack;
3629 thiscase->all = nesting_stack;
3630 thiscase->depth = ++nesting_depth;
3631 thiscase->exit_label = exit_flag ? gen_label_rtx () : 0;
3632 thiscase->data.case_stmt.case_list = 0;
3633 thiscase->data.case_stmt.index_expr = expr;
3634 thiscase->data.case_stmt.nominal_type = type;
3635 thiscase->data.case_stmt.default_label = 0;
28d81abb 3636 thiscase->data.case_stmt.printname = printname;
a11759a3 3637 thiscase->data.case_stmt.line_number_status = force_line_numbers ();
28d81abb
RK
3638 case_stack = thiscase;
3639 nesting_stack = thiscase;
3640
3641 do_pending_stack_adjust ();
f43f4314 3642 emit_queue ();
28d81abb
RK
3643
3644 /* Make sure case_stmt.start points to something that won't
3645 need any transformation before expand_end_case. */
3646 if (GET_CODE (get_last_insn ()) != NOTE)
2e040219 3647 emit_note (NOTE_INSN_DELETED);
28d81abb
RK
3648
3649 thiscase->data.case_stmt.start = get_last_insn ();
4c581243 3650
956d6950 3651 start_cleanup_deferral ();
28d81abb 3652}
a11759a3 3653
28d81abb
RK
3654/* Accumulate one case or default label inside a case or switch statement.
3655 VALUE is the value of the case (a null pointer, for a default label).
f52fba84
PE
3656 The function CONVERTER, when applied to arguments T and V,
3657 converts the value V to the type T.
28d81abb
RK
3658
3659 If not currently inside a case or switch statement, return 1 and do
3660 nothing. The caller will print a language-specific error message.
3661 If VALUE is a duplicate or overlaps, return 2 and do nothing
3662 except store the (first) duplicate node in *DUPLICATE.
3663 If VALUE is out of range, return 3 and do nothing.
e976b8b2 3664 If we are jumping into the scope of a cleanup or var-sized array, return 5.
28d81abb
RK
3665 Return 0 on success.
3666
3667 Extended to handle range statements. */
3668
3669int
46c5ad27
AJ
3670pushcase (tree value, tree (*converter) (tree, tree), tree label,
3671 tree *duplicate)
28d81abb 3672{
28d81abb
RK
3673 tree index_type;
3674 tree nominal_type;
3675
3676 /* Fail if not inside a real case statement. */
3677 if (! (case_stack && case_stack->data.case_stmt.start))
3678 return 1;
3679
3680 if (stack_block_stack
3681 && stack_block_stack->depth > case_stack->depth)
3682 return 5;
3683
3684 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3685 nominal_type = case_stack->data.case_stmt.nominal_type;
3686
3687 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3688 if (index_type == error_mark_node)
3689 return 0;
3690
2f985ca6
JW
3691 /* Convert VALUE to the type in which the comparisons are nominally done. */
3692 if (value != 0)
3693 value = (*converter) (nominal_type, value);
3694
28d81abb
RK
3695 /* Fail if this value is out of range for the actual type of the index
3696 (which may be narrower than NOMINAL_TYPE). */
14a774a9
RK
3697 if (value != 0
3698 && (TREE_CONSTANT_OVERFLOW (value)
3699 || ! int_fits_type_p (value, index_type)))
28d81abb
RK
3700 return 3;
3701
6de9cd9a 3702 return add_case_node (value, value, label, duplicate, false);
28d81abb
RK
3703}
3704
956d6950
JL
3705/* Like pushcase but this case applies to all values between VALUE1 and
3706 VALUE2 (inclusive). If VALUE1 is NULL, the range starts at the lowest
3707 value of the index type and ends at VALUE2. If VALUE2 is NULL, the range
3708 starts at VALUE1 and ends at the highest value of the index type.
3709 If both are NULL, this case applies to all values.
3710
3711 The return value is the same as that of pushcase but there is one
3712 additional error code: 4 means the specified range was empty. */
28d81abb
RK
3713
3714int
46c5ad27
AJ
3715pushcase_range (tree value1, tree value2, tree (*converter) (tree, tree),
3716 tree label, tree *duplicate)
28d81abb 3717{
28d81abb
RK
3718 tree index_type;
3719 tree nominal_type;
3720
3721 /* Fail if not inside a real case statement. */
3722 if (! (case_stack && case_stack->data.case_stmt.start))
3723 return 1;
3724
3725 if (stack_block_stack
3726 && stack_block_stack->depth > case_stack->depth)
3727 return 5;
3728
3729 index_type = TREE_TYPE (case_stack->data.case_stmt.index_expr);
3730 nominal_type = case_stack->data.case_stmt.nominal_type;
3731
3732 /* If the index is erroneous, avoid more problems: pretend to succeed. */
3733 if (index_type == error_mark_node)
3734 return 0;
3735
956d6950
JL
3736 /* Convert VALUEs to type in which the comparisons are nominally done
3737 and replace any unspecified value with the corresponding bound. */
3738 if (value1 == 0)
1974bfb1 3739 value1 = TYPE_MIN_VALUE (index_type);
956d6950 3740 if (value2 == 0)
1974bfb1 3741 value2 = TYPE_MAX_VALUE (index_type);
956d6950
JL
3742
3743 /* Fail if the range is empty. Do this before any conversion since
3744 we want to allow out-of-range empty ranges. */
14a774a9 3745 if (value2 != 0 && tree_int_cst_lt (value2, value1))
956d6950
JL
3746 return 4;
3747
4381f7c2 3748 /* If the max was unbounded, use the max of the nominal_type we are
e1ee5cdc
RH
3749 converting to. Do this after the < check above to suppress false
3750 positives. */
14a774a9 3751 if (value2 == 0)
e1ee5cdc 3752 value2 = TYPE_MAX_VALUE (nominal_type);
28d81abb 3753
2f985ca6
JW
3754 value1 = (*converter) (nominal_type, value1);
3755 value2 = (*converter) (nominal_type, value2);
3756
28d81abb 3757 /* Fail if these values are out of range. */
956d6950
JL
3758 if (TREE_CONSTANT_OVERFLOW (value1)
3759 || ! int_fits_type_p (value1, index_type))
28d81abb
RK
3760 return 3;
3761
956d6950
JL
3762 if (TREE_CONSTANT_OVERFLOW (value2)
3763 || ! int_fits_type_p (value2, index_type))
28d81abb
RK
3764 return 3;
3765
6de9cd9a 3766 return add_case_node (value1, value2, label, duplicate, false);
57641239
RK
3767}
3768
/* Do the actual insertion of a case label for pushcase and pushcase_range
   into case_stack->data.case_stmt.case_list.  Use an AVL tree to avoid
   slowdown for large switch statements.

   LOW and HIGH are the (inclusive) bounds of the case range; HIGH may be
   null for a single-value case, and both null for a default label.
   LABEL is the LABEL_DECL to branch to; *DUPLICATE receives the clashing
   node's label on overlap.  If DONT_EXPAND_LABEL, the caller will emit
   the label itself.  Returns 0 on success, 2 on duplicate/overlap.  */

int
add_case_node (tree low, tree high, tree label, tree *duplicate,
	       bool dont_expand_label)
{
  struct case_node *p, **q, *r;

  /* If there's no HIGH value, then this is not a case range; it's
     just a simple case label.  But that's just a degenerate case
     range.  */
  if (!high)
    high = low;

  /* Handle default labels specially.  (Reached only when LOW is also
     null, since otherwise HIGH was just set to LOW above.)  */
  if (!high && !low)
    {
      if (case_stack->data.case_stmt.default_label != 0)
	{
	  *duplicate = case_stack->data.case_stmt.default_label;
	  return 2;
	}
      case_stack->data.case_stmt.default_label = label;
      if (!dont_expand_label)
        expand_label (label);
      return 0;
    }

  /* Descend the AVL tree looking for the insertion point; Q tracks the
     link to patch, P the prospective parent.  */
  q = &case_stack->data.case_stmt.case_list;
  p = *q;

  while ((r = *q))
    {
      p = r;

      /* Keep going past elements distinctly greater than HIGH.  */
      if (tree_int_cst_lt (high, p->low))
	q = &p->left;

      /* or distinctly less than LOW.  */
      else if (tree_int_cst_lt (p->high, low))
	q = &p->right;

      else
	{
	  /* We have an overlap; this is an error.  */
	  *duplicate = p->code_label;
	  return 2;
	}
    }

  /* Add this label to the chain, and succeed.  */

  r = ggc_alloc (sizeof (struct case_node));
  r->low = low;

  /* If the bounds are equal, turn this into the one-value case.  */
  if (tree_int_cst_equal (low, high))
    r->high = r->low;
  else
    r->high = high;

  r->code_label = label;
  if (!dont_expand_label)
    expand_label (label);

  *q = r;
  r->parent = p;
  r->left = 0;
  r->right = 0;
  r->balance = 0;

  /* Rebalance the AVL tree, walking up from the new leaf R toward the
     root.  Standard single (R/L) and double (LR/RL) rotations; the loop
     stops as soon as a subtree's height is unchanged.  */
  while (p)
    {
      struct case_node *s;

      if (r == p->left)
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from left side.  */
	    p->balance = -1;
	  else if (b < 0)
	    {
	      if (r->balance < 0)
		{
		  /* R-Rotation */
		  if ((p->left = s = r->right))
		    s->parent = p;

		  r->right = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;

		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }
		  else
		    case_stack->data.case_stmt.case_list = r;
		}
	      else
		/* r->balance == +1 */
		{
		  /* LR-Rotation */

		  int b2;
		  struct case_node *t = r->right;

		  if ((p->left = s = t->right))
		    s->parent = p;

		  t->right = p;
		  if ((r->right = s = t->left))
		    s->parent = r;

		  t->left = r;
		  b = t->balance;
		  b2 = b < 0;
		  p->balance = b2;
		  b2 = -b2 - b;
		  r->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }
		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }

	  else
	    {
	      /* p->balance == +1; growth of left side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}
      else
	/* r == p->right */
	{
	  int b;

	  if (! (b = p->balance))
	    /* Growth propagation from right side.  */
	    p->balance++;
	  else if (b > 0)
	    {
	      if (r->balance > 0)
		{
		  /* L-Rotation */

		  if ((p->right = s = r->left))
		    s->parent = p;

		  r->left = p;
		  p->balance = 0;
		  r->balance = 0;
		  s = p->parent;
		  p->parent = r;
		  if ((r->parent = s))
		    {
		      if (s->left == p)
			s->left = r;
		      else
			s->right = r;
		    }

		  else
		    case_stack->data.case_stmt.case_list = r;
		}

	      else
		/* r->balance == -1 */
		{
		  /* RL-Rotation */
		  int b2;
		  struct case_node *t = r->left;

		  if ((p->right = s = t->left))
		    s->parent = p;

		  t->left = p;

		  if ((r->left = s = t->right))
		    s->parent = r;

		  t->right = r;
		  b = t->balance;
		  b2 = b < 0;
		  r->balance = b2;
		  b2 = -b2 - b;
		  p->balance = b2;
		  t->balance = 0;
		  s = p->parent;
		  p->parent = t;
		  r->parent = t;

		  if ((t->parent = s))
		    {
		      if (s->left == p)
			s->left = t;
		      else
			s->right = t;
		    }

		  else
		    case_stack->data.case_stmt.case_list = t;
		}
	      break;
	    }
	  else
	    {
	      /* p->balance == -1; growth of right side balances the node.  */
	      p->balance = 0;
	      break;
	    }
	}

      r = p;
      p = p->parent;
    }

  return 0;
}
28d81abb 4011\f
/* Maximum number of case bit tests.  */
#define MAX_CASE_BIT_TESTS  3

/* By default, enable case bit tests on targets with ashlsi3.  */
#ifndef CASE_USE_BIT_TESTS
#define CASE_USE_BIT_TESTS  (ashl_optab->handlers[word_mode].insn_code \
			     != CODE_FOR_nothing)
#endif


/* A case_bit_test represents a set of case nodes that may be
   selected from using a bit-wise comparison.  HI and LO hold
   the integer to be tested against, LABEL contains the label
   to jump to upon success and BITS counts the number of case
   nodes handled by this test, typically the number of bits
   set in HI:LO.  */

struct case_bit_test
{
  HOST_WIDE_INT hi;	/* High word of the case-value bitmask.  */
  HOST_WIDE_INT lo;	/* Low word of the case-value bitmask.  */
  rtx label;		/* Jump target shared by these case values.  */
  int bits;		/* Number of case nodes mapped to LABEL.  */
};
4036
4037/* Determine whether "1 << x" is relatively cheap in word_mode. */
4038
7e51717c
AJ
4039static
4040bool lshift_cheap_p (void)
9bb231fd
RS
4041{
4042 static bool init = false;
4043 static bool cheap = true;
4044
4045 if (!init)
4046 {
4047 rtx reg = gen_rtx_REG (word_mode, 10000);
4048 int cost = rtx_cost (gen_rtx_ASHIFT (word_mode, const1_rtx, reg), SET);
4049 cheap = cost < COSTS_N_INSNS (3);
4050 init = true;
4051 }
4052
4053 return cheap;
4054}
4055
4056/* Comparison function for qsort to order bit tests by decreasing
4057 number of case nodes, i.e. the node with the most cases gets
4058 tested first. */
4059
f667741c
SB
4060static int
4061case_bit_test_cmp (const void *p1, const void *p2)
9bb231fd
RS
4062{
4063 const struct case_bit_test *d1 = p1;
4064 const struct case_bit_test *d2 = p2;
4065
4066 return d2->bits - d1->bits;
4067}
4068
4069/* Expand a switch statement by a short sequence of bit-wise
4070 comparisons. "switch(x)" is effectively converted into
4071 "if ((1 << (x-MINVAL)) & CST)" where CST and MINVAL are
4072 integer constants.
4073
4074 INDEX_EXPR is the value being switched on, which is of
4075 type INDEX_TYPE. MINVAL is the lowest case value of in
4076 the case nodes, of INDEX_TYPE type, and RANGE is highest
4077 value minus MINVAL, also of type INDEX_TYPE. NODES is
4078 the set of case nodes, and DEFAULT_LABEL is the label to
4079 branch to should none of the cases match.
4080
4081 There *MUST* be MAX_CASE_BIT_TESTS or less unique case
4082 node targets. */
4083
4084static void
46c5ad27
AJ
4085emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
4086 tree range, case_node_ptr nodes, rtx default_label)
9bb231fd
RS
4087{
4088 struct case_bit_test test[MAX_CASE_BIT_TESTS];
4089 enum machine_mode mode;
4090 rtx expr, index, label;
4091 unsigned int i,j,lo,hi;
4092 struct case_node *n;
4093 unsigned int count;
4094
4095 count = 0;
4096 for (n = nodes; n; n = n->right)
4097 {
4098 label = label_rtx (n->code_label);
4099 for (i = 0; i < count; i++)
4100 if (same_case_target_p (label, test[i].label))
4101 break;
4102
4103 if (i == count)
4104 {
4105 if (count >= MAX_CASE_BIT_TESTS)
4106 abort ();
4107 test[i].hi = 0;
4108 test[i].lo = 0;
4109 test[i].label = label;
4110 test[i].bits = 1;
4111 count++;
4112 }
4113 else
4114 test[i].bits++;
4115
4116 lo = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4117 n->low, minval)), 1);
4118 hi = tree_low_cst (fold (build (MINUS_EXPR, index_type,
4119 n->high, minval)), 1);
4120 for (j = lo; j <= hi; j++)
4121 if (j >= HOST_BITS_PER_WIDE_INT)
4122 test[i].hi |= (HOST_WIDE_INT) 1 << (j - HOST_BITS_PER_INT);
4123 else
4124 test[i].lo |= (HOST_WIDE_INT) 1 << j;
4125 }
4126
4127 qsort (test, count, sizeof(*test), case_bit_test_cmp);
4128
4129 index_expr = fold (build (MINUS_EXPR, index_type,
4130 convert (index_type, index_expr),
4131 convert (index_type, minval)));
4132 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
4133 emit_queue ();
4134 index = protect_from_queue (index, 0);
4135 do_pending_stack_adjust ();
4136
4137 mode = TYPE_MODE (index_type);
4138 expr = expand_expr (range, NULL_RTX, VOIDmode, 0);
4139 emit_cmp_and_jump_insns (index, expr, GTU, NULL_RTX, mode, 1,
4140 default_label);
4141
4142 index = convert_to_mode (word_mode, index, 0);
4143 index = expand_binop (word_mode, ashl_optab, const1_rtx,
4144 index, NULL_RTX, 1, OPTAB_WIDEN);
4145
4146 for (i = 0; i < count; i++)
4147 {
4148 expr = immed_double_const (test[i].lo, test[i].hi, word_mode);
4149 expr = expand_binop (word_mode, and_optab, index, expr,
4150 NULL_RTX, 1, OPTAB_WIDEN);
4151 emit_cmp_and_jump_insns (expr, const0_rtx, NE, NULL_RTX,
4152 word_mode, 1, test[i].label);
4153 }
4154
4155 emit_jump (default_label);
4156}
ad82abb8 4157
/* Fallback definitions: targets that provide no casesi or tablejump
   insn pattern get a zero flag, so the dispatch heuristics below can
   test HAVE_casesi / HAVE_tablejump unconditionally.  */
#ifndef HAVE_casesi
#define HAVE_casesi 0
#endif

#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#endif
4165
/* Terminate a case (Pascal) or switch (C) statement
   in which ORIG_INDEX is the expression to be tested.
   If ORIG_TYPE is not NULL, it is the original ORIG_INDEX
   type as given in the source before any compiler conversions.
   Generate the code to test it and jump to the right place.

   Picks one of three dispatch strategies: a short sequence of bit
   tests, a balanced binary tree of compare-and-branch, or a jump
   table (casesi / tablejump).  */

void
expand_end_case_type (tree orig_index, tree orig_type)
{
  tree minval = NULL_TREE, maxval = NULL_TREE, range = NULL_TREE;
  rtx default_label = 0;
  struct case_node *n, *m;
  unsigned int count, uniq;
  rtx index;
  rtx table_label;
  int ncases;
  rtx *labelvec;
  int i;
  rtx before_case, end, lab;
  struct nesting *thiscase = case_stack;
  tree index_expr, index_type;
  bool exit_done = false;
  int unsignedp;

  /* Don't crash due to previous errors.  */
  if (thiscase == NULL)
    return;

  index_expr = thiscase->data.case_stmt.index_expr;
  index_type = TREE_TYPE (index_expr);
  unsignedp = TYPE_UNSIGNED (index_type);
  if (orig_type == NULL)
    orig_type = TREE_TYPE (orig_index);

  do_pending_stack_adjust ();

  /* An ERROR_MARK occurs for various reasons including invalid data type.  */
  if (index_type != error_mark_node)
    {
      /* If we don't have a default-label, create one here,
	 after the body of the switch.  */
      if (thiscase->data.case_stmt.default_label == 0)
	{
	  thiscase->data.case_stmt.default_label
	    = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	  /* Share the exit label if possible.  */
	  if (thiscase->exit_label)
	    {
	      SET_DECL_RTL (thiscase->data.case_stmt.default_label,
			    thiscase->exit_label);
	      exit_done = true;
	    }
	  expand_label (thiscase->data.case_stmt.default_label);
	}
      default_label = label_rtx (thiscase->data.case_stmt.default_label);

      before_case = get_last_insn ();

      /* The case list may still be an AVL tree; flatten it to a
	 right-linked list.  */
      if (thiscase->data.case_stmt.case_list
	  && thiscase->data.case_stmt.case_list->left)
	thiscase->data.case_stmt.case_list
	  = case_tree2list (thiscase->data.case_stmt.case_list, 0);

      /* Simplify the case-list before we count it.  */
      group_case_nodes (thiscase->data.case_stmt.case_list);
      strip_default_case_nodes (&thiscase->data.case_stmt.case_list,
				default_label);

      /* Get upper and lower bounds of case values.
	 Also convert all the case values to the index expr's data type.  */

      uniq = 0;
      count = 0;
      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	{
	  /* Check low and high label values are integers.  */
	  if (TREE_CODE (n->low) != INTEGER_CST)
	    abort ();
	  if (TREE_CODE (n->high) != INTEGER_CST)
	    abort ();

	  n->low = convert (index_type, n->low);
	  n->high = convert (index_type, n->high);

	  /* Count the elements and track the largest and smallest
	     of them (treating them as signed even if they are not).  */
	  if (count++ == 0)
	    {
	      minval = n->low;
	      maxval = n->high;
	    }
	  else
	    {
	      if (INT_CST_LT (n->low, minval))
		minval = n->low;
	      if (INT_CST_LT (maxval, n->high))
		maxval = n->high;
	    }
	  /* A range counts double, since it requires two compares.  */
	  if (! tree_int_cst_equal (n->low, n->high))
	    count++;

	  /* Count the number of unique case node targets.  */
	  uniq++;
	  lab = label_rtx (n->code_label);
	  for (m = thiscase->data.case_stmt.case_list; m != n; m = m->right)
	    if (same_case_target_p (label_rtx (m->code_label), lab))
	      {
		uniq--;
		break;
	      }
	}

      /* Compute span of values.  */
      if (count != 0)
	range = fold (build (MINUS_EXPR, index_type, maxval, minval));

      end_cleanup_deferral ();

      if (count == 0)
	{
	  /* No cases at all: evaluate the index for side effects and
	     jump straight to the default.  */
	  expand_expr (index_expr, const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  emit_jump (default_label);
	}

      /* Try implementing this switch statement by a short sequence of
	 bit-wise comparisons.  However, we let the binary-tree case
	 below handle constant index expressions.  */
      else if (CASE_USE_BIT_TESTS
	       && ! TREE_CONSTANT (index_expr)
	       && compare_tree_int (range, GET_MODE_BITSIZE (word_mode)) < 0
	       && compare_tree_int (range, 0) > 0
	       && lshift_cheap_p ()
	       && ((uniq == 1 && count >= 3)
		   || (uniq == 2 && count >= 5)
		   || (uniq == 3 && count >= 6)))
	{
	  /* Optimize the case where all the case values fit in a
	     word without having to subtract MINVAL.  In this case,
	     we can optimize away the subtraction.  */
	  if (compare_tree_int (minval, 0) > 0
	      && compare_tree_int (maxval, GET_MODE_BITSIZE (word_mode)) < 0)
	    {
	      minval = integer_zero_node;
	      range = maxval;
	    }
	  emit_case_bit_tests (index_type, index_expr, minval, range,
			       thiscase->data.case_stmt.case_list,
			       default_label);
	}

      /* If range of values is much bigger than number of values,
	 make a sequence of conditional branches instead of a dispatch.
	 If the switch-index is a constant, do it this way
	 because we can optimize it.  */

      else if (count < case_values_threshold ()
	       || compare_tree_int (range,
				    (optimize_size ? 3 : 10) * count) > 0
	       /* RANGE may be signed, and really large ranges will show up
		  as negative numbers.  */
	       || compare_tree_int (range, 0) < 0
#ifndef ASM_OUTPUT_ADDR_DIFF_ELT
	       || flag_pic
#endif
	       || TREE_CONSTANT (index_expr)
	       /* If neither casesi or tablejump is available, we can
		  only go this way.  */
	       || (!HAVE_casesi && !HAVE_tablejump))
	{
	  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);

	  /* If the index is a short or char that we do not have
	     an insn to handle comparisons directly, convert it to
	     a full integer now, rather than letting each comparison
	     generate the conversion.  */

	  if (GET_MODE_CLASS (GET_MODE (index)) == MODE_INT
	      && ! have_insn_for (COMPARE, GET_MODE (index)))
	    {
	      enum machine_mode wider_mode;
	      for (wider_mode = GET_MODE (index); wider_mode != VOIDmode;
		   wider_mode = GET_MODE_WIDER_MODE (wider_mode))
		if (have_insn_for (COMPARE, wider_mode))
		  {
		    index = convert_to_mode (wider_mode, index, unsignedp);
		    break;
		  }
	    }

	  emit_queue ();
	  do_pending_stack_adjust ();

	  index = protect_from_queue (index, 0);
	  if (MEM_P (index))
	    index = copy_to_reg (index);
	  if (GET_CODE (index) == CONST_INT
	      || TREE_CODE (index_expr) == INTEGER_CST)
	    {
	      /* Make a tree node with the proper constant value
		 if we don't already have one.  */
	      if (TREE_CODE (index_expr) != INTEGER_CST)
		{
		  index_expr
		    = build_int_2 (INTVAL (index),
				   unsignedp || INTVAL (index) >= 0 ? 0 : -1);
		  index_expr = convert (index_type, index_expr);
		}

	      /* For constant index expressions we need only
		 issue an unconditional branch to the appropriate
		 target code.  The job of removing any unreachable
		 code is left to the optimization phase if the
		 "-O" option is specified.  */
	      for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
		if (! tree_int_cst_lt (index_expr, n->low)
		    && ! tree_int_cst_lt (n->high, index_expr))
		  break;

	      if (n)
		emit_jump (label_rtx (n->code_label));
	      else
		emit_jump (default_label);
	    }
	  else
	    {
	      /* If the index expression is not constant we generate
		 a binary decision tree to select the appropriate
		 target code.  This is done as follows:

		 The list of cases is rearranged into a binary tree,
		 nearly optimal assuming equal probability for each case.

		 The tree is transformed into RTL, eliminating
		 redundant test conditions at the same time.

		 If program flow could reach the end of the
		 decision tree an unconditional jump to the
		 default code is emitted.  */

	      use_cost_table
		= (TREE_CODE (orig_type) != ENUMERAL_TYPE
		   && estimate_case_costs (thiscase->data.case_stmt.case_list));
	      balance_case_nodes (&thiscase->data.case_stmt.case_list, NULL);
	      emit_case_nodes (index, thiscase->data.case_stmt.case_list,
			       default_label, index_type);
	      emit_jump_if_reachable (default_label);
	    }
	}
      else
	{
	  /* Jump-table dispatch: try casesi first, then tablejump.  */
	  table_label = gen_label_rtx ();
	  if (! try_casesi (index_type, index_expr, minval, range,
			    table_label, default_label))
	    {
	      index_type = thiscase->data.case_stmt.nominal_type;

	      /* Index jumptables from zero for suitable values of
                 minval to avoid a subtraction.  */
	      if (! optimize_size
		  && compare_tree_int (minval, 0) > 0
		  && compare_tree_int (minval, 3) < 0)
		{
		  minval = integer_zero_node;
		  range = maxval;
		}

	      if (! try_tablejump (index_type, index_expr, minval, range,
				   table_label, default_label))
		abort ();
	    }

	  /* Get table of labels to jump to, in order of case index.  */

	  ncases = tree_low_cst (range, 0) + 1;
	  labelvec = alloca (ncases * sizeof (rtx));
	  memset (labelvec, 0, ncases * sizeof (rtx));

	  for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
	    {
	      /* Compute the low and high bounds relative to the minimum
		 value since that should fit in a HOST_WIDE_INT while the
		 actual values may not.  */
	      HOST_WIDE_INT i_low
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->low, minval)), 1);
	      HOST_WIDE_INT i_high
		= tree_low_cst (fold (build (MINUS_EXPR, index_type,
					     n->high, minval)), 1);
	      HOST_WIDE_INT i;

	      for (i = i_low; i <= i_high; i ++)
		labelvec[i]
		  = gen_rtx_LABEL_REF (Pmode, label_rtx (n->code_label));
	    }

	  /* Fill in the gaps with the default.  */
	  for (i = 0; i < ncases; i++)
	    if (labelvec[i] == 0)
	      labelvec[i] = gen_rtx_LABEL_REF (Pmode, default_label);

	  /* Output the table.  */
	  emit_label (table_label);

	  if (CASE_VECTOR_PC_RELATIVE || flag_pic)
	    emit_jump_insn (gen_rtx_ADDR_DIFF_VEC (CASE_VECTOR_MODE,
						   gen_rtx_LABEL_REF (Pmode, table_label),
						   gen_rtvec_v (ncases, labelvec),
						   const0_rtx, const0_rtx));
	  else
	    emit_jump_insn (gen_rtx_ADDR_VEC (CASE_VECTOR_MODE,
					      gen_rtvec_v (ncases, labelvec)));

	  /* If the case insn drops through the table,
	     after the table we must jump to the default-label.
	     Otherwise record no drop-through after the table.  */
#ifdef CASE_DROPS_THROUGH
	  emit_jump (default_label);
#else
	  emit_barrier ();
#endif
	}

      /* Move the dispatch code emitted above so it precedes the switch
	 body recorded at case_stmt.start.  */
      before_case = NEXT_INSN (before_case);
      end = get_last_insn ();
      if (squeeze_notes (&before_case, &end))
	abort ();
      reorder_insns (before_case, end,
		     thiscase->data.case_stmt.start);
    }
  else
    end_cleanup_deferral ();

  if (thiscase->exit_label && !exit_done)
    emit_label (thiscase->exit_label);

  POPSTACK (case_stack);

  free_temp_slots ();
}
4507
57641239
RK
4508/* Convert the tree NODE into a list linked by the right field, with the left
4509 field zeroed. RIGHT is used for recursion; it is a list to be placed
4510 rightmost in the resulting list. */
4511
4512static struct case_node *
46c5ad27 4513case_tree2list (struct case_node *node, struct case_node *right)
57641239
RK
4514{
4515 struct case_node *left;
4516
4517 if (node->right)
4518 right = case_tree2list (node->right, right);
4519
4520 node->right = right;
51723711 4521 if ((left = node->left))
57641239
RK
4522 {
4523 node->left = 0;
4524 return case_tree2list (left, node);
4525 }
4526
4527 return node;
4528}
ca695ac9 4529
28d81abb
RK
4530/* Generate code to jump to LABEL if OP1 and OP2 are equal. */
4531
4532static void
46c5ad27 4533do_jump_if_equal (rtx op1, rtx op2, rtx label, int unsignedp)
28d81abb 4534{
d43e0b7d 4535 if (GET_CODE (op1) == CONST_INT && GET_CODE (op2) == CONST_INT)
28d81abb 4536 {
d51d146f 4537 if (op1 == op2)
28d81abb
RK
4538 emit_jump (label);
4539 }
4540 else
d43e0b7d
RK
4541 emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX,
4542 (GET_MODE (op1) == VOIDmode
4543 ? GET_MODE (op2) : GET_MODE (op1)),
4544 unsignedp, label);
28d81abb
RK
4545}
4546\f
4547/* Not all case values are encountered equally. This function
4548 uses a heuristic to weight case labels, in cases where that
4549 looks like a reasonable thing to do.
4550
4551 Right now, all we try to guess is text, and we establish the
4552 following weights:
4553
4554 chars above space: 16
4555 digits: 16
4556 default: 12
4557 space, punct: 8
4558 tab: 4
4559 newline: 2
4560 other "\" chars: 1
4561 remaining chars: 0
4562
4563 If we find any cases in the switch that are not either -1 or in the range
4564 of valid ASCII characters, or are control characters other than those
4565 commonly used with "\", don't treat this switch scanning text.
4566
4567 Return 1 if these nodes are suitable for cost estimation, otherwise
4568 return 0. */
4569
4570static int
46c5ad27 4571estimate_case_costs (case_node_ptr node)
28d81abb 4572{
f2d1f0ba 4573 tree min_ascii = integer_minus_one_node;
28d81abb
RK
4574 tree max_ascii = convert (TREE_TYPE (node->high), build_int_2 (127, 0));
4575 case_node_ptr n;
4576 int i;
4577
4578 /* If we haven't already made the cost table, make it now. Note that the
4579 lower bound of the table is -1, not zero. */
4580
2a2137c4 4581 if (! cost_table_initialized)
28d81abb 4582 {
2a2137c4 4583 cost_table_initialized = 1;
28d81abb
RK
4584
4585 for (i = 0; i < 128; i++)
4586 {
e9a780ec 4587 if (ISALNUM (i))
2a2137c4 4588 COST_TABLE (i) = 16;
e9a780ec 4589 else if (ISPUNCT (i))
2a2137c4 4590 COST_TABLE (i) = 8;
e9a780ec 4591 else if (ISCNTRL (i))
2a2137c4 4592 COST_TABLE (i) = -1;
28d81abb
RK
4593 }
4594
2a2137c4
RH
4595 COST_TABLE (' ') = 8;
4596 COST_TABLE ('\t') = 4;
4597 COST_TABLE ('\0') = 4;
4598 COST_TABLE ('\n') = 2;
4599 COST_TABLE ('\f') = 1;
4600 COST_TABLE ('\v') = 1;
4601 COST_TABLE ('\b') = 1;
28d81abb
RK
4602 }
4603
4604 /* See if all the case expressions look like text. It is text if the
4605 constant is >= -1 and the highest constant is <= 127. Do all comparisons
4606 as signed arithmetic since we don't want to ever access cost_table with a
4607 value less than -1. Also check that none of the constants in a range
4608 are strange control characters. */
4609
4610 for (n = node; n; n = n->right)
4611 {
4612 if ((INT_CST_LT (n->low, min_ascii)) || INT_CST_LT (max_ascii, n->high))
4613 return 0;
4614
05bccae2
RK
4615 for (i = (HOST_WIDE_INT) TREE_INT_CST_LOW (n->low);
4616 i <= (HOST_WIDE_INT) TREE_INT_CST_LOW (n->high); i++)
2a2137c4 4617 if (COST_TABLE (i) < 0)
28d81abb
RK
4618 return 0;
4619 }
4620
4621 /* All interesting values are within the range of interesting
4622 ASCII characters. */
4623 return 1;
4624}
4625
100e3acb
RS
4626/* Determine whether two case labels branch to the same target. */
4627
4628static bool
46c5ad27 4629same_case_target_p (rtx l1, rtx l2)
100e3acb 4630{
6de9cd9a 4631#if 0
100e3acb
RS
4632 rtx i1, i2;
4633
4634 if (l1 == l2)
4635 return true;
4636
4637 i1 = next_real_insn (l1);
4638 i2 = next_real_insn (l2);
4639 if (i1 == i2)
4640 return true;
4641
4642 if (i1 && simplejump_p (i1))
4643 {
4644 l1 = XEXP (SET_SRC (PATTERN (i1)), 0);
4645 }
4646
4647 if (i2 && simplejump_p (i2))
4648 {
4649 l2 = XEXP (SET_SRC (PATTERN (i2)), 0);
4650 }
6de9cd9a
DN
4651#endif
4652 /* When coming from gimple, we usually won't have emitted either
4653 the labels or the body of the switch statement. The job being
4654 done here should be done via jump threading at the tree level.
4655 Cases that go the same place should have the same label. */
100e3acb
RS
4656 return l1 == l2;
4657}
4658
4659/* Delete nodes that branch to the default label from a list of
4660 case nodes. Eg. case 5: default: becomes just default: */
4661
4662static void
46c5ad27 4663strip_default_case_nodes (case_node_ptr *prev, rtx deflab)
100e3acb
RS
4664{
4665 case_node_ptr ptr;
4666
4667 while (*prev)
4668 {
4669 ptr = *prev;
4670 if (same_case_target_p (label_rtx (ptr->code_label), deflab))
4671 *prev = ptr->right;
4672 else
4673 prev = &ptr->right;
4674 }
4675}
4676
28d81abb
RK
4677/* Scan an ordered list of case nodes
4678 combining those with consecutive values or ranges.
4679
4680 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
4681
4682static void
46c5ad27 4683group_case_nodes (case_node_ptr head)
28d81abb
RK
4684{
4685 case_node_ptr node = head;
4686
4687 while (node)
4688 {
6de9cd9a 4689 rtx lab;
28d81abb
RK
4690 case_node_ptr np = node;
4691
6de9cd9a
DN
4692 lab = label_rtx (node->code_label);
4693
28d81abb
RK
4694 /* Try to group the successors of NODE with NODE. */
4695 while (((np = np->right) != 0)
4696 /* Do they jump to the same place? */
100e3acb 4697 && same_case_target_p (label_rtx (np->code_label), lab)
28d81abb
RK
4698 /* Are their ranges consecutive? */
4699 && tree_int_cst_equal (np->low,
4700 fold (build (PLUS_EXPR,
4701 TREE_TYPE (node->high),
4702 node->high,
4703 integer_one_node)))
4704 /* An overflow is not consecutive. */
4705 && tree_int_cst_lt (node->high,
4706 fold (build (PLUS_EXPR,
4707 TREE_TYPE (node->high),
4708 node->high,
4709 integer_one_node))))
4710 {
4711 node->high = np->high;
4712 }
4713 /* NP is the first node after NODE which can't be grouped with it.
4714 Delete the nodes in between, and move on to that node. */
4715 node->right = np;
4716 node = np;
4717 }
4718}
4719
4720/* Take an ordered list of case nodes
4721 and transform them into a near optimal binary tree,
6dc42e49 4722 on the assumption that any target code selection value is as
28d81abb
RK
4723 likely as any other.
4724
4725 The transformation is performed by splitting the ordered
4726 list into two equal sections plus a pivot. The parts are
4727 then attached to the pivot as left and right branches. Each
38e01259 4728 branch is then transformed recursively. */
28d81abb
RK
4729
static void
balance_case_nodes (case_node_ptr *head, case_node_ptr parent)
{
  case_node_ptr np;

  np = *head;
  if (np)
    {
      int cost = 0;
      int i = 0;
      int ranges = 0;
      case_node_ptr *npp;
      case_node_ptr left;

      /* Count the number of entries on branch.  Also count the ranges.  */

      while (np)
	{
	  if (!tree_int_cst_equal (np->low, np->high))
	    {
	      ranges++;
	      if (use_cost_table)
		cost += COST_TABLE (TREE_INT_CST_LOW (np->high));
	    }

	  if (use_cost_table)
	    cost += COST_TABLE (TREE_INT_CST_LOW (np->low));

	  i++;
	  np = np->right;
	}

      if (i > 2)
	{
	  /* Split this list if it is long enough for that to help.  */
	  npp = head;
	  left = *npp;
	  if (use_cost_table)
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 Here I gets half the total cost.  */
	      int n_moved = 0;
	      i = (cost + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.
		     A range contributes the cost of both of its end points.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->high));
		  i -= COST_TABLE (TREE_INT_CST_LOW ((*npp)->low));
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		  n_moved += 1;
		}
	      if (n_moved == 0)
		{
		  /* The very first node already holds half the cost:
		     there is no useful pivot.  Leave this branch lopsided,
		     but optimize left-hand side and fill in `parent'
		     fields for right-hand side.  */
		  np = *head;
		  np->parent = parent;
		  balance_case_nodes (&np->left, np);
		  for (; np->right; np = np->right)
		    np->right->parent = np;
		  return;
		}
	    }
	  /* If there are just three nodes, split at the middle one.  */
	  else if (i == 3)
	    npp = &(*npp)->right;
	  else
	    {
	      /* Find the place in the list that bisects the list's total cost,
		 where ranges count as 2.
		 Here I gets half the total cost.  */
	      i = (i + ranges + 1) / 2;
	      while (1)
		{
		  /* Skip nodes while their cost does not reach that amount.  */
		  if (!tree_int_cst_equal ((*npp)->low, (*npp)->high))
		    i--;
		  i--;
		  if (i <= 0)
		    break;
		  npp = &(*npp)->right;
		}
	    }
	  /* NPP now points at the link leading to the pivot.  Make the
	     pivot the root: sever the first half of the list (it becomes
	     the left subtree) and recurse on both halves.  */
	  *head = np = *npp;
	  *npp = 0;
	  np->parent = parent;
	  np->left = left;

	  /* Optimize each of the two split parts.  */
	  balance_case_nodes (&np->left, np);
	  balance_case_nodes (&np->right, np);
	}
      else
	{
	  /* Else leave this branch as one level,
	     but fill in `parent' fields.  */
	  np = *head;
	  np->parent = parent;
	  for (; np->right; np = np->right)
	    np->right->parent = np;
	}
    }
}
4836\f
4837/* Search the parent sections of the case node tree
4838 to see if a test for the lower bound of NODE would be redundant.
4839 INDEX_TYPE is the type of the index expression.
4840
4841 The instructions to generate the case decision tree are
4842 output in the same order as nodes are processed so it is
4843 known that if a parent node checks the range of the current
4844 node minus one that the current node is bounded at its lower
4845 span. Thus the test would be redundant. */
4846
4847static int
46c5ad27 4848node_has_low_bound (case_node_ptr node, tree index_type)
28d81abb
RK
4849{
4850 tree low_minus_one;
4851 case_node_ptr pnode;
4852
4853 /* If the lower bound of this node is the lowest value in the index type,
4854 we need not test it. */
4855
4856 if (tree_int_cst_equal (node->low, TYPE_MIN_VALUE (index_type)))
4857 return 1;
4858
4859 /* If this node has a left branch, the value at the left must be less
4860 than that at this node, so it cannot be bounded at the bottom and
4861 we need not bother testing any further. */
4862
4863 if (node->left)
4864 return 0;
4865
4866 low_minus_one = fold (build (MINUS_EXPR, TREE_TYPE (node->low),
4867 node->low, integer_one_node));
4868
4869 /* If the subtraction above overflowed, we can't verify anything.
4870 Otherwise, look for a parent that tests our value - 1. */
4871
4872 if (! tree_int_cst_lt (low_minus_one, node->low))
4873 return 0;
4874
4875 for (pnode = node->parent; pnode; pnode = pnode->parent)
4876 if (tree_int_cst_equal (low_minus_one, pnode->high))
4877 return 1;
4878
4879 return 0;
4880}
4881
4882/* Search the parent sections of the case node tree
4883 to see if a test for the upper bound of NODE would be redundant.
4884 INDEX_TYPE is the type of the index expression.
4885
4886 The instructions to generate the case decision tree are
4887 output in the same order as nodes are processed so it is
4888 known that if a parent node checks the range of the current
4889 node plus one that the current node is bounded at its upper
4890 span. Thus the test would be redundant. */
4891
4892static int
46c5ad27 4893node_has_high_bound (case_node_ptr node, tree index_type)
28d81abb
RK
4894{
4895 tree high_plus_one;
4896 case_node_ptr pnode;
4897
e1ee5cdc
RH
4898 /* If there is no upper bound, obviously no test is needed. */
4899
4900 if (TYPE_MAX_VALUE (index_type) == NULL)
4901 return 1;
4902
28d81abb
RK
4903 /* If the upper bound of this node is the highest value in the type
4904 of the index expression, we need not test against it. */
4905
4906 if (tree_int_cst_equal (node->high, TYPE_MAX_VALUE (index_type)))
4907 return 1;
4908
4909 /* If this node has a right branch, the value at the right must be greater
4910 than that at this node, so it cannot be bounded at the top and
4911 we need not bother testing any further. */
4912
4913 if (node->right)
4914 return 0;
4915
4916 high_plus_one = fold (build (PLUS_EXPR, TREE_TYPE (node->high),
4917 node->high, integer_one_node));
4918
4919 /* If the addition above overflowed, we can't verify anything.
4920 Otherwise, look for a parent that tests our value + 1. */
4921
4922 if (! tree_int_cst_lt (node->high, high_plus_one))
4923 return 0;
4924
4925 for (pnode = node->parent; pnode; pnode = pnode->parent)
4926 if (tree_int_cst_equal (high_plus_one, pnode->low))
4927 return 1;
4928
4929 return 0;
4930}
4931
4932/* Search the parent sections of the
4933 case node tree to see if both tests for the upper and lower
4934 bounds of NODE would be redundant. */
4935
4936static int
46c5ad27 4937node_is_bounded (case_node_ptr node, tree index_type)
28d81abb
RK
4938{
4939 return (node_has_low_bound (node, index_type)
4940 && node_has_high_bound (node, index_type));
4941}
4942
4943/* Emit an unconditional jump to LABEL unless it would be dead code. */
4944
4945static void
46c5ad27 4946emit_jump_if_reachable (rtx label)
28d81abb
RK
4947{
4948 if (GET_CODE (get_last_insn ()) != BARRIER)
4949 emit_jump (label);
4950}
4951\f
4952/* Emit step-by-step code to select a case for the value of INDEX.
4953 The thus generated decision tree follows the form of the
4954 case-node binary tree NODE, whose nodes represent test conditions.
4955 INDEX_TYPE is the type of the index of the switch.
4956
4957 Care is taken to prune redundant tests from the decision tree
4958 by detecting any boundary conditions already checked by
4959 emitted rtx. (See node_has_high_bound, node_has_low_bound
4960 and node_is_bounded, above.)
4961
4962 Where the test conditions can be shown to be redundant we emit
4963 an unconditional jump to the target code. As a further
4964 optimization, the subordinates of a tree node are examined to
4965 check for bounded nodes. In this case conditional and/or
4966 unconditional jumps as a result of the boundary check for the
4967 current node are arranged to target the subordinates associated
38e01259 4968 code for out of bound conditions on the current node.
28d81abb 4969
f72aed24 4970 We can assume that when control reaches the code generated here,
28d81abb
RK
4971 the index value has already been compared with the parents
4972 of this node, and determined to be on the same side of each parent
4973 as this node is. Thus, if this node tests for the value 51,
4974 and a parent tested for 52, we don't need to consider
4975 the possibility of a value greater than 51. If another parent
4976 tests for the value 50, then this node need not test anything. */
4977
static void
emit_case_nodes (rtx index, case_node_ptr node, rtx default_label,
		 tree index_type)
{
  /* If INDEX has an unsigned type, we must make unsigned branches.  */
  int unsignedp = TYPE_UNSIGNED (index_type);
  /* MODE is the mode INDEX is in; IMODE is the mode implied by its tree
     type.  Case constants are expanded in IMODE and converted to MODE
     before each comparison.  */
  enum machine_mode mode = GET_MODE (index);
  enum machine_mode imode = TYPE_MODE (index_type);

  /* See if our parents have already tested everything for us.
     If they have, emit an unconditional jump for this node.  */
  if (node_is_bounded (node, index_type))
    emit_jump (label_rtx (node->code_label));

  else if (tree_int_cst_equal (node->low, node->high))
    {
      /* Node is single valued.  First see if the index expression matches
	 this node and then check our children, if any.  Note that a leaf
	 (no children) needs nothing beyond this equality test; control
	 simply falls through to a parent's code afterwards.  */

      do_jump_if_equal (index,
			convert_modes (mode, imode,
				       expand_expr (node->low, NULL_RTX,
						    VOIDmode, 0),
				       unsignedp),
			label_rtx (node->code_label), unsignedp);

      if (node->right != 0 && node->left != 0)
	{
	  /* This node has children on both sides.
	     Dispatch to one side or the other
	     by comparing the index value with this node's value.
	     If one subtree is bounded, check that one first,
	     so we can avoid real branches in the tree.  */

	  if (node_is_bounded (node->right, index_type))
	    {
	      /* Right subtree is bounded: INDEX > NODE->high can jump
		 straight to its code label.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->right->code_label));
	      emit_case_nodes (index, node->left, default_label, index_type);
	    }

	  else if (node_is_bounded (node->left, index_type))
	    {
	      /* Symmetrically for a bounded left subtree.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       label_rtx (node->left->code_label));
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }

	  /* If both children are single-valued cases with no
	     children, finish up all the work.  This way, we can save
	     one ordered comparison.  */
	  else if (tree_int_cst_equal (node->right->low, node->right->high)
		   && node->right->left == 0
		   && node->right->right == 0
		   && tree_int_cst_equal (node->left->low, node->left->high)
		   && node->left->left == 0
		   && node->left->right == 0)
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      /* See if the value matches what the right hand side
		 wants.  */
	      do_jump_if_equal (index,
				convert_modes (mode, imode,
					       expand_expr (node->right->low,
							    NULL_RTX,
							    VOIDmode, 0),
					       unsignedp),
				label_rtx (node->right->code_label),
				unsignedp);

	      /* See if the value matches what the left hand side
		 wants.  */
	      do_jump_if_equal (index,
				convert_modes (mode, imode,
					       expand_expr (node->left->low,
							    NULL_RTX,
							    VOIDmode, 0),
					       unsignedp),
				label_rtx (node->left->code_label),
				unsignedp);
	    }

	  else
	    {
	      /* Neither node is bounded.  First distinguish the two sides;
		 then emit the code for one side at a time.  */

	      tree test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);

	      /* See if the value is on the right.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));

	      /* Value must be on the left.
		 Handle the left-hand subtree.  */
	      emit_case_nodes (index, node->left, default_label, index_type);
	      /* If left-hand subtree does nothing,
		 go to default.  */
	      emit_jump_if_reachable (default_label);

	      /* Code branches here for the right-hand subtree.  */
	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Here we have a right child but no left so we issue a conditional
	     branch to default and process the right child.

	     Omit the conditional branch to default if the right child is a
	     single leaf value; it costs too much space to save so little
	     time.  */

	  if (node->right->right || node->right->left
	      || !tree_int_cst_equal (node->right->low, node->right->high))
	    {
	      if (!node_has_low_bound (node, index_type))
		{
		  /* INDEX < NODE->high (== NODE->low here) goes to
		     default: everything smaller was not matched above.  */
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   LT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	  else
	    /* We cannot process node->right normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->right explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->right->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->right->code_label), unsignedp);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Just one subtree, on the left.  Mirror image of the case
	     above.  */
	  if (node->left->left || node->left->right
	      || !tree_int_cst_equal (node->left->low, node->left->high))
	    {
	      if (!node_has_high_bound (node, index_type))
		{
		  emit_cmp_and_jump_insns (index,
					   convert_modes
					   (mode, imode,
					    expand_expr (node->high, NULL_RTX,
							 VOIDmode, 0),
					    unsignedp),
					   GT, NULL_RTX, mode, unsignedp,
					   default_label);
		}

	      emit_case_nodes (index, node->left, default_label, index_type);
	    }
	  else
	    /* We cannot process node->left normally
	       since we haven't ruled out the numbers less than
	       this node's value.  So handle node->left explicitly.  */
	    do_jump_if_equal (index,
			      convert_modes
			      (mode, imode,
			       expand_expr (node->left->low, NULL_RTX,
					    VOIDmode, 0),
			       unsignedp),
			      label_rtx (node->left->code_label), unsignedp);
	}
    }
  else
    {
      /* Node is a range.  These cases are very similar to those for a single
	 value, except that we do not start by testing whether this node
	 is the one to branch to.  */

      if (node->right != 0 && node->left != 0)
	{
	  /* Node has subtrees on both sides.
	     If the right-hand subtree is bounded,
	     test for it first, since we can go straight there.
	     Otherwise, we need to make a branch in the control structure,
	     then handle the two subtrees.  */
	  tree test_label = 0;

	  if (node_is_bounded (node->right, index_type))
	    /* Right hand node is fully bounded so we can eliminate any
	       testing and branch directly to the target code.  */
	    emit_cmp_and_jump_insns (index,
				     convert_modes
				     (mode, imode,
				      expand_expr (node->high, NULL_RTX,
						   VOIDmode, 0),
				      unsignedp),
				     GT, NULL_RTX, mode, unsignedp,
				     label_rtx (node->right->code_label));
	  else
	    {
	      /* Right hand node requires testing.
		 Branch to a label where we will handle it later.  */

	      test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       label_rtx (test_label));
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  /* Handle the left-hand subtree.  */
	  emit_case_nodes (index, node->left, default_label, index_type);

	  /* If right node had to be handled later, do that now.  */

	  if (test_label)
	    {
	      /* If the left-hand subtree fell through,
		 don't let it fall into the right-hand subtree.  */
	      emit_jump_if_reachable (default_label);

	      expand_label (test_label);
	      emit_case_nodes (index, node->right, default_label, index_type);
	    }
	}

      else if (node->right != 0 && node->left == 0)
	{
	  /* Deal with values to the left of this node,
	     if they are possible.  */
	  if (!node_has_low_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the right-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->high, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   LE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->right, default_label, index_type);
	}

      else if (node->right == 0 && node->left != 0)
	{
	  /* Deal with values to the right of this node,
	     if they are possible.  */
	  if (!node_has_high_bound (node, index_type))
	    {
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  /* Value belongs to this node or to the left-hand subtree.  */

	  emit_cmp_and_jump_insns (index,
				   convert_modes
				   (mode, imode,
				    expand_expr (node->low, NULL_RTX,
						 VOIDmode, 0),
				    unsignedp),
				   GE, NULL_RTX, mode, unsignedp,
				   label_rtx (node->code_label));

	  emit_case_nodes (index, node->left, default_label, index_type);
	}

      else
	{
	  /* Node has no children so we check low and high bounds to remove
	     redundant tests.  Only one of the bounds can exist,
	     since otherwise this node is bounded--a case tested already.  */
	  int high_bound = node_has_high_bound (node, index_type);
	  int low_bound = node_has_low_bound (node, index_type);

	  if (!high_bound && low_bound)
	    {
	      /* Only the upper end is open: one GT test to default.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->high, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       GT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }

	  else if (!low_bound && high_bound)
	    {
	      /* Only the lower end is open: one LT test to default.  */
	      emit_cmp_and_jump_insns (index,
				       convert_modes
				       (mode, imode,
					expand_expr (node->low, NULL_RTX,
						     VOIDmode, 0),
					unsignedp),
				       LT, NULL_RTX, mode, unsignedp,
				       default_label);
	    }
	  else if (!low_bound && !high_bound)
	    {
	      /* Widen LOW and HIGH to the same width as INDEX.  */
	      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
	      tree low = build1 (CONVERT_EXPR, type, node->low);
	      tree high = build1 (CONVERT_EXPR, type, node->high);
	      rtx low_rtx, new_index, new_bound;

	      /* Instead of doing two branches, emit one unsigned branch for
		 (index-low) > (high-low).  */
	      low_rtx = expand_expr (low, NULL_RTX, mode, 0);
	      new_index = expand_simple_binop (mode, MINUS, index, low_rtx,
					       NULL_RTX, unsignedp,
					       OPTAB_WIDEN);
	      new_bound = expand_expr (fold (build (MINUS_EXPR, type,
						    high, low)),
				       NULL_RTX, mode, 0);

	      emit_cmp_and_jump_insns (new_index, new_bound, GT, NULL_RTX,
				       mode, 1, default_label);
	    }

	  /* Any value not diverted to default above belongs to NODE.  */
	  emit_jump (label_rtx (node->code_label));
	}
    }
}
e2500fed
GK
5361
5362#include "gt-stmt.h"