]>
Commit | Line | Data |
---|---|---|
e9a25f70 | 1 | /* Subroutines used for code generation on the Argonaut ARC cpu. |
c6b97fac | 2 | Copyright (C) 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2003 |
ba4828e0 | 3 | Free Software Foundation, Inc. |
e90d5e57 | 4 | |
7ec022b2 | 5 | This file is part of GCC. |
e90d5e57 | 6 | |
7ec022b2 | 7 | GCC is free software; you can redistribute it and/or modify |
e90d5e57 JL |
8 | it under the terms of the GNU General Public License as published by |
9 | the Free Software Foundation; either version 2, or (at your option) | |
10 | any later version. | |
11 | ||
7ec022b2 | 12 | GCC is distributed in the hope that it will be useful, |
e90d5e57 JL |
13 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
14 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |
15 | GNU General Public License for more details. | |
16 | ||
17 | You should have received a copy of the GNU General Public License | |
7ec022b2 | 18 | along with GCC; see the file COPYING. If not, write to |
e90d5e57 JL |
19 | the Free Software Foundation, 59 Temple Place - Suite 330, |
20 | Boston, MA 02111-1307, USA. */ | |
21 | ||
22 | /* ??? This is an old port, and is undoubtedly suffering from bit rot. */ | |
23 | ||
e90d5e57 | 24 | #include "config.h" |
c5c76735 | 25 | #include "system.h" |
4977bab6 ZW |
26 | #include "coretypes.h" |
27 | #include "tm.h" | |
e90d5e57 JL |
28 | #include "tree.h" |
29 | #include "rtl.h" | |
30 | #include "regs.h" | |
31 | #include "hard-reg-set.h" | |
32 | #include "real.h" | |
33 | #include "insn-config.h" | |
34 | #include "conditions.h" | |
e90d5e57 JL |
35 | #include "output.h" |
36 | #include "insn-attr.h" | |
37 | #include "flags.h" | |
49ad7cfa | 38 | #include "function.h" |
e90d5e57 JL |
39 | #include "expr.h" |
40 | #include "recog.h" | |
2b046bda KG |
41 | #include "toplev.h" |
42 | #include "tm_p.h" | |
672a6f42 NB |
43 | #include "target.h" |
44 | #include "target-def.h" | |
e90d5e57 JL |
45 | |
/* Which cpu we're compiling for (NULL(=base), ???).  */
const char *arc_cpu_string;
/* Numeric id of the selected cpu; set to 0 for "base" in arc_init.  */
int arc_cpu_type;

/* Name of mangle string to add to symbols to separate code compiled for each
   cpu (or NULL).  */
const char *arc_mangle_cpu;

/* Save the operands last given to a compare for use when we
   generate a scc or bcc insn.  */
rtx arc_compare_op0, arc_compare_op1;

/* Name of text, data, and rodata sections, as specified on command line.
   Selected by -m{text,data,rodata} flags.  */
const char *arc_text_string = ARC_DEFAULT_TEXT_SECTION;
const char *arc_data_string = ARC_DEFAULT_DATA_SECTION;
const char *arc_rodata_string = ARC_DEFAULT_RODATA_SECTION;

/* Name of text, data, and rodata sections used in varasm.c.
   Built in arc_init from the strings above and ARC_SECTION_FORMAT.  */
const char *arc_text_section;
const char *arc_data_section;
const char *arc_rodata_section;

/* Array of valid operand punctuation characters.
   Initialized in arc_init; indexed by character for
   PRINT_OPERAND_PUNCT_VALID_P.  */
char arc_punct_chars[256];

/* Variables used by arc_final_prescan_insn to implement conditional
   execution.  */
static int arc_ccfsm_state;
static int arc_ccfsm_current_cc;
static rtx arc_ccfsm_target_insn;
static int arc_ccfsm_target_label;

/* The maximum number of insns skipped which will be conditionalised if
   possible.  */
#define MAX_INSNS_SKIPPED 3

/* A nop is needed between a 4 byte insn that sets the condition codes and
   a branch that uses them (the same isn't true for an 8 byte insn that sets
   the condition codes).  Set by arc_final_prescan_insn.  Used by
   arc_print_operand.  */
static int last_insn_set_cc_p;
static int current_insn_set_cc_p;
/* Forward declarations for static helpers and target-hook
   implementations defined later in this file.  */
static void record_cc_ref PARAMS ((rtx));
static void arc_init_reg_tables PARAMS ((void));
static int get_arc_condition_code PARAMS ((rtx));
/* Attribute table; definition appears after the mode tables below.  */
const struct attribute_spec arc_attribute_table[];
static tree arc_handle_interrupt_attribute PARAMS ((tree *, tree, tree, int, bool *));
static bool arc_assemble_integer PARAMS ((rtx, unsigned int, int));
static void arc_output_function_prologue PARAMS ((FILE *, HOST_WIDE_INT));
static void arc_output_function_epilogue PARAMS ((FILE *, HOST_WIDE_INT));
static void arc_file_start PARAMS ((void));
static void arc_internal_label PARAMS ((FILE *, const char *, unsigned long));
static bool arc_rtx_costs PARAMS ((rtx, int, int, int *));
static int arc_address_cost PARAMS ((rtx));
672a6f42 NB |
101 | \f |
/* Initialize the GCC target structure.  */
/* Assembler pseudo-ops for emitting aligned integer data.  */
#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER arc_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE arc_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE arc_output_function_epilogue
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START arc_file_start
#undef TARGET_ATTRIBUTE_TABLE
#define TARGET_ATTRIBUTE_TABLE arc_attribute_table
#undef TARGET_ASM_INTERNAL_LABEL
#define TARGET_ASM_INTERNAL_LABEL arc_internal_label

#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS arc_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST arc_address_cost

/* The one and only target hook vector for this back end.  */
struct gcc_target targetm = TARGET_INITIALIZER;
672a6f42 | 127 | \f |
e90d5e57 JL |
/* Called by OVERRIDE_OPTIONS to initialize various things.
   Validates -mcpu, builds the section pseudo-op strings, fills the
   register/mode tables and the operand punctuation table.  */

void
arc_init ()
{
  char *tmp;

  if (arc_cpu_string == 0
      || !strcmp (arc_cpu_string, "base"))
    {
      /* Ensure we have a printable value for the .cpu pseudo-op.  */
      arc_cpu_string = "base";
      arc_cpu_type = 0;
      arc_mangle_cpu = NULL;
    }
  else if (ARC_EXTENSION_CPU (arc_cpu_string))
    ; /* nothing to do */
  else
    {
      /* Unknown cpu: diagnose, then fall back to "base" so the rest of
	 compilation can proceed.  */
      error ("bad value (%s) for -mcpu switch", arc_cpu_string);
      arc_cpu_string = "base";
      arc_cpu_type = 0;
      arc_mangle_cpu = NULL;
    }

  /* Set the pseudo-ops for the various standard sections.
     The extra byte beyond sizeof (ARC_SECTION_FORMAT) leaves slack for
     the formatted name; xmalloc aborts on failure so no checks here.  */
  arc_text_section = tmp = xmalloc (strlen (arc_text_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_text_string);
  arc_data_section = tmp = xmalloc (strlen (arc_data_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_data_string);
  arc_rodata_section = tmp = xmalloc (strlen (arc_rodata_string) + sizeof (ARC_SECTION_FORMAT) + 1);
  sprintf (tmp, ARC_SECTION_FORMAT, arc_rodata_string);

  arc_init_reg_tables ();

  /* Initialize array for PRINT_OPERAND_PUNCT_VALID_P.  */
  memset (arc_punct_chars, 0, sizeof (arc_punct_chars));
  arc_punct_chars['#'] = 1;
  arc_punct_chars['*'] = 1;
  arc_punct_chars['?'] = 1;
  arc_punct_chars['!'] = 1;
  arc_punct_chars['~'] = 1;
}
171 | \f | |
/* The condition codes of the ARC, and the inverse function.
   Indexed by the value returned by get_arc_condition_code; the NULL
   slots are codes with no valid comparison.  Entries are laid out so
   that each code and its inverse differ only in the low bit.  */
static const char *const arc_condition_codes[] =
{
  "al", 0, "eq", "ne", "p", "n", "c", "nc", "v", "nv",
  "gt", "le", "ge", "lt", "hi", "ls", "pnz", 0
};

/* Invert a condition-code index by flipping the low bit (see table
   layout above).  */
#define ARC_INVERSE_CONDITION_CODE(X)  ((X) ^ 1)
180 | ||
181 | /* Returns the index of the ARC condition code string in | |
182 | `arc_condition_codes'. COMPARISON should be an rtx like | |
183 | `(eq (...) (...))'. */ | |
184 | ||
185 | static int | |
186 | get_arc_condition_code (comparison) | |
187 | rtx comparison; | |
188 | { | |
189 | switch (GET_CODE (comparison)) | |
190 | { | |
191 | case EQ : return 2; | |
192 | case NE : return 3; | |
193 | case GT : return 10; | |
194 | case LE : return 11; | |
195 | case GE : return 12; | |
196 | case LT : return 13; | |
197 | case GTU : return 14; | |
198 | case LEU : return 15; | |
199 | case LTU : return 6; | |
200 | case GEU : return 7; | |
201 | default : abort (); | |
202 | } | |
203 | /*NOTREACHED*/ | |
204 | return (42); | |
205 | } | |
206 | ||
207 | /* Given a comparison code (EQ, NE, etc.) and the first operand of a COMPARE, | |
208 | return the mode to be used for the comparison. */ | |
209 | ||
210 | enum machine_mode | |
211 | arc_select_cc_mode (op, x, y) | |
212 | enum rtx_code op; | |
2b046bda | 213 | rtx x, y ATTRIBUTE_UNUSED; |
e90d5e57 JL |
214 | { |
215 | switch (op) | |
216 | { | |
217 | case EQ : | |
218 | case NE : | |
219 | return CCZNmode; | |
220 | default : | |
221 | switch (GET_CODE (x)) | |
222 | { | |
223 | case AND : | |
224 | case IOR : | |
225 | case XOR : | |
226 | case SIGN_EXTEND : | |
227 | case ZERO_EXTEND : | |
228 | return CCZNmode; | |
229 | case ASHIFT : | |
230 | case ASHIFTRT : | |
231 | case LSHIFTRT : | |
232 | return CCZNCmode; | |
2b046bda KG |
233 | default: |
234 | break; | |
e90d5e57 JL |
235 | } |
236 | } | |
237 | return CCmode; | |
238 | } | |
239 | \f | |
/* Vectors to keep interesting information about registers where it can easily
   be got.  We use to use the actual mode value as the bit number, but there
   is (or may be) more than 32 modes now.  Instead we use two tables: one
   indexed by hard register number, and one indexed by mode.  */

/* The purpose of arc_mode_class is to shrink the range of modes so that
   they all fit (as bit numbers) in a 32 bit word (again).  Each real mode is
   mapped into one arc_mode_class mode.  */

enum arc_mode_class {
  C_MODE,
  S_MODE, D_MODE, T_MODE, O_MODE,
  SF_MODE, DF_MODE, TF_MODE, OF_MODE
};

/* Modes for condition codes.  */
#define C_MODES (1 << (int) C_MODE)

/* Modes for single-word and smaller quantities.  */
#define S_MODES ((1 << (int) S_MODE) | (1 << (int) SF_MODE))

/* Modes for double-word and smaller quantities.
   (Cast added to DF_MODE for consistency with the sibling macros;
   the value is unchanged.)  */
#define D_MODES (S_MODES | (1 << (int) D_MODE) | (1 << (int) DF_MODE))

/* Modes for quad-word and smaller quantities.  */
#define T_MODES (D_MODES | (1 << (int) T_MODE) | (1 << (int) TF_MODE))
266 | ||
/* Value is 1 if register/mode pair is acceptable on arc.
   Indexed by hard register number; each entry is a bit mask of the
   arc_mode_class bits (C_MODES/S_MODES/D_MODES/T_MODES) allowed there.
   Regs 0-22 take up to quad-word values; reg 23-24 double-word;
   the rest single-word; the last entry (reg 61) is the CC register.  */

const unsigned int arc_hard_regno_mode_ok[] = {
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES,
  T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, T_MODES, D_MODES,
  D_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,

  /* ??? Leave these as S_MODES for now.  */
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, S_MODES,
  S_MODES, S_MODES, S_MODES, S_MODES, S_MODES, C_MODES
};

/* Per-mode arc_mode_class bit; filled in by arc_init_reg_tables.  */
unsigned int arc_mode_class [NUM_MACHINE_MODES];

/* Register class of each hard register; filled in by arc_init_reg_tables.  */
enum reg_class arc_regno_reg_class[FIRST_PSEUDO_REGISTER];
285 | ||
/* Fill in arc_mode_class (bit per machine mode) and
   arc_regno_reg_class (class per hard register).  Called from arc_init.  */
static void
arc_init_reg_tables ()
{
  int i;

  /* Classify every machine mode into one arc_mode_class bit by its
     class and size.  */
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      switch (GET_MODE_CLASS (i))
	{
	case MODE_INT:
	case MODE_PARTIAL_INT:
	case MODE_COMPLEX_INT:
	  if (GET_MODE_SIZE (i) <= 4)
	    arc_mode_class[i] = 1 << (int) S_MODE;
	  else if (GET_MODE_SIZE (i) == 8)
	    arc_mode_class[i] = 1 << (int) D_MODE;
	  else if (GET_MODE_SIZE (i) == 16)
	    arc_mode_class[i] = 1 << (int) T_MODE;
	  else if (GET_MODE_SIZE (i) == 32)
	    arc_mode_class[i] = 1 << (int) O_MODE;
	  else
	    arc_mode_class[i] = 0;
	  break;
	case MODE_FLOAT:
	case MODE_COMPLEX_FLOAT:
	  if (GET_MODE_SIZE (i) <= 4)
	    arc_mode_class[i] = 1 << (int) SF_MODE;
	  else if (GET_MODE_SIZE (i) == 8)
	    arc_mode_class[i] = 1 << (int) DF_MODE;
	  else if (GET_MODE_SIZE (i) == 16)
	    arc_mode_class[i] = 1 << (int) TF_MODE;
	  else if (GET_MODE_SIZE (i) == 32)
	    arc_mode_class[i] = 1 << (int) OF_MODE;
	  else
	    arc_mode_class[i] = 0;
	  break;
	case MODE_CC:
	default:
	  /* mode_class hasn't been initialized yet for EXTRA_CC_MODES, so
	     we must explicitly check for them here.  */
	  if (i == (int) CCmode || i == (int) CCZNmode || i == (int) CCZNCmode)
	    arc_mode_class[i] = 1 << (int) C_MODE;
	  else
	    arc_mode_class[i] = 0;
	  break;
	}
    }

  /* Regs 0-59 are general; 60 is the loop counter; 61 is the CC reg.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (i < 60)
	arc_regno_reg_class[i] = GENERAL_REGS;
      else if (i == 60)
	arc_regno_reg_class[i] = LPCOUNT_REG;
      else if (i == 61)
	arc_regno_reg_class[i] = NO_REGS /* CC_REG: must be NO_REGS */;
      else
	arc_regno_reg_class[i] = NO_REGS;
    }
}
346 | \f | |
/* ARC specific attribute support.

   The ARC has these attributes:
   interrupt - for interrupt functions
*/

/* Table of machine attributes; the NULL-named entry terminates it.  */
const struct attribute_spec arc_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "interrupt", 1, 1, true,  false, false, arc_handle_interrupt_attribute },
  { NULL,        0, 0, false, false, false, NULL }
};
e90d5e57 | 359 | |
91d231cb JM |
/* Handle an "interrupt" attribute; arguments as in
   struct attribute_spec.handler.
   The single argument must be the string "ilink1" or "ilink2"
   (the interrupt link register to use); otherwise warn and drop
   the attribute by setting *NO_ADD_ATTRS.  Always returns NULL_TREE.  */
static tree
arc_handle_interrupt_attribute (node, name, args, flags, no_add_attrs)
     tree *node ATTRIBUTE_UNUSED;
     tree name;
     tree args;
     int flags ATTRIBUTE_UNUSED;
     bool *no_add_attrs;
{
  tree value = TREE_VALUE (args);

  if (TREE_CODE (value) != STRING_CST)
    {
      warning ("argument of `%s' attribute is not a string constant",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else if (strcmp (TREE_STRING_POINTER (value), "ilink1")
	   && strcmp (TREE_STRING_POINTER (value), "ilink2"))
    {
      /* Non-zero strcmp on both alternatives: the string matched neither.  */
      warning ("argument of `%s' attribute is not \"ilink1\" or \"ilink2\"",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
388 | ||
e90d5e57 JL |
389 | \f |
390 | /* Acceptable arguments to the call insn. */ | |
391 | ||
392 | int | |
393 | call_address_operand (op, mode) | |
394 | rtx op; | |
395 | enum machine_mode mode; | |
396 | { | |
397 | return (symbolic_operand (op, mode) | |
398 | || (GET_CODE (op) == CONST_INT && LEGITIMATE_CONSTANT_P (op)) | |
399 | || (GET_CODE (op) == REG)); | |
400 | } | |
401 | ||
402 | int | |
403 | call_operand (op, mode) | |
404 | rtx op; | |
405 | enum machine_mode mode; | |
406 | { | |
407 | if (GET_CODE (op) != MEM) | |
408 | return 0; | |
409 | op = XEXP (op, 0); | |
410 | return call_address_operand (op, mode); | |
411 | } | |
412 | ||
413 | /* Returns 1 if OP is a symbol reference. */ | |
414 | ||
415 | int | |
416 | symbolic_operand (op, mode) | |
417 | rtx op; | |
2b046bda | 418 | enum machine_mode mode ATTRIBUTE_UNUSED; |
e90d5e57 JL |
419 | { |
420 | switch (GET_CODE (op)) | |
421 | { | |
422 | case SYMBOL_REF: | |
423 | case LABEL_REF: | |
424 | case CONST : | |
425 | return 1; | |
426 | default: | |
427 | return 0; | |
428 | } | |
429 | } | |
430 | ||
431 | /* Return truth value of statement that OP is a symbolic memory | |
432 | operand of mode MODE. */ | |
433 | ||
434 | int | |
435 | symbolic_memory_operand (op, mode) | |
436 | rtx op; | |
2b046bda | 437 | enum machine_mode mode ATTRIBUTE_UNUSED; |
e90d5e57 JL |
438 | { |
439 | if (GET_CODE (op) == SUBREG) | |
440 | op = SUBREG_REG (op); | |
441 | if (GET_CODE (op) != MEM) | |
442 | return 0; | |
443 | op = XEXP (op, 0); | |
444 | return (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == CONST | |
445 | || GET_CODE (op) == LABEL_REF); | |
446 | } | |
447 | ||
448 | /* Return true if OP is a short immediate (shimm) value. */ | |
449 | ||
450 | int | |
451 | short_immediate_operand (op, mode) | |
452 | rtx op; | |
2b046bda | 453 | enum machine_mode mode ATTRIBUTE_UNUSED; |
e90d5e57 JL |
454 | { |
455 | if (GET_CODE (op) != CONST_INT) | |
456 | return 0; | |
457 | return SMALL_INT (INTVAL (op)); | |
458 | } | |
459 | ||
/* Return true if OP will require a long immediate (limm) value.
   This is currently only used when calculating length attributes.  */

int
long_immediate_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      /* Symbolic addresses always need a limm.  */
      return 1;
    case CONST_INT :
      /* Integers need one only when outside the shimm range.  */
      return !SMALL_INT (INTVAL (op));
    case CONST_DOUBLE :
      /* These can happen because large unsigned 32 bit constants are
	 represented this way (the multiplication patterns can cause these
	 to be generated).  They also occur for SFmode values.  */
      return 1;
    default:
      break;
    }
  return 0;
}
486 | ||
/* Return true if OP is a MEM that when used as a load or store address will
   require an 8 byte insn.
   Load and store instructions don't allow the same possibilities but they're
   similar enough that this one function will do.
   This is currently only used when calculating length attributes.  */

int
long_immediate_loadstore_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  if (GET_CODE (op) != MEM)
    return 0;

  /* Classify by the address expression inside the MEM.  */
  op = XEXP (op, 0);
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    case CONST_INT :
      /* This must be handled as "st c,[limm]".  Ditto for load.
	 Technically, the assembler could translate some possibilities to
	 "st c,[limm/2 + limm/2]" if limm/2 will fit in a shimm, but we don't
	 assume that it does.  */
      return 1;
    case CONST_DOUBLE :
      /* These can happen because large unsigned 32 bit constants are
	 represented this way (the multiplication patterns can cause these
	 to be generated).  They also occur for SFmode values.  */
      return 1;
    case REG :
      /* A plain register address fits in the short form.  */
      return 0;
    case PLUS :
      /* reg+offset needs a limm only when the offset exceeds shimm range.  */
      if (GET_CODE (XEXP (op, 1)) == CONST_INT
	  && !SMALL_INT (INTVAL (XEXP (op, 1))))
	return 1;
      return 0;
    default:
      break;
    }
  return 0;
}
531 | ||
/* Return true if OP is an acceptable argument for a single word
   move source.  */

int
move_src_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF :
    case LABEL_REF :
    case CONST :
      return 1;
    case CONST_INT :
      /* Must fit the 32-bit large-immediate range.  */
      return (LARGE_INT (INTVAL (op)));
    case CONST_DOUBLE :
      /* We can handle DImode integer constants in SImode if the value
	 (signed or unsigned) will fit in 32 bits.  This is needed because
	 large unsigned 32 bit constants are represented as CONST_DOUBLEs.  */
      if (mode == SImode)
	return arc_double_limm_p (op);
      /* We can handle 32 bit floating point constants.  */
      if (mode == SFmode)
	return GET_MODE (op) == SFmode;
      return 0;
    case REG :
      return register_operand (op, mode);
    case SUBREG :
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
	 pseudo-reg and is now a stack slot.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM)
	return address_operand (XEXP (SUBREG_REG (op), 0), mode);
      else
	return register_operand (op, mode);
    case MEM :
      return address_operand (XEXP (op, 0), mode);
    default :
      return 0;
    }
}
573 | ||
/* Return true if OP is an acceptable argument for a double word
   move source.  */

int
move_double_src_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  switch (GET_CODE (op))
    {
    case REG :
      return register_operand (op, mode);
    case SUBREG :
      /* (subreg (mem ...) ...) can occur here if the inner part was once a
	 pseudo-reg and is now a stack slot.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM)
	return move_double_src_operand (SUBREG_REG (op), mode);
      else
	return register_operand (op, mode);
    case MEM :
      /* Disallow auto inc/dec for now.  */
      if (GET_CODE (XEXP (op, 0)) == PRE_DEC
	  || GET_CODE (XEXP (op, 0)) == PRE_INC)
	return 0;
      return address_operand (XEXP (op, 0), mode);
    case CONST_INT :
    case CONST_DOUBLE :
      /* Any constant is acceptable here; splitting is handled elsewhere.  */
      return 1;
    default :
      return 0;
    }
}
606 | ||
607 | /* Return true if OP is an acceptable argument for a move destination. */ | |
608 | ||
609 | int | |
610 | move_dest_operand (op, mode) | |
611 | rtx op; | |
612 | enum machine_mode mode; | |
613 | { | |
614 | switch (GET_CODE (op)) | |
615 | { | |
616 | case REG : | |
617 | return register_operand (op, mode); | |
618 | case SUBREG : | |
619 | /* (subreg (mem ...) ...) can occur here if the inner part was once a | |
620 | pseudo-reg and is now a stack slot. */ | |
621 | if (GET_CODE (SUBREG_REG (op)) == MEM) | |
622 | return address_operand (XEXP (SUBREG_REG (op), 0), mode); | |
623 | else | |
624 | return register_operand (op, mode); | |
625 | case MEM : | |
626 | return address_operand (XEXP (op, 0), mode); | |
627 | default : | |
628 | return 0; | |
629 | } | |
630 | } | |
631 | ||
632 | /* Return true if OP is valid load with update operand. */ | |
633 | ||
634 | int | |
635 | load_update_operand (op, mode) | |
636 | rtx op; | |
637 | enum machine_mode mode; | |
638 | { | |
639 | if (GET_CODE (op) != MEM | |
640 | || GET_MODE (op) != mode) | |
641 | return 0; | |
642 | op = XEXP (op, 0); | |
643 | if (GET_CODE (op) != PLUS | |
644 | || GET_MODE (op) != Pmode | |
645 | || !register_operand (XEXP (op, 0), Pmode) | |
646 | || !nonmemory_operand (XEXP (op, 1), Pmode)) | |
647 | return 0; | |
648 | return 1; | |
649 | } | |
650 | ||
651 | /* Return true if OP is valid store with update operand. */ | |
652 | ||
653 | int | |
654 | store_update_operand (op, mode) | |
655 | rtx op; | |
656 | enum machine_mode mode; | |
657 | { | |
658 | if (GET_CODE (op) != MEM | |
659 | || GET_MODE (op) != mode) | |
660 | return 0; | |
661 | op = XEXP (op, 0); | |
662 | if (GET_CODE (op) != PLUS | |
663 | || GET_MODE (op) != Pmode | |
664 | || !register_operand (XEXP (op, 0), Pmode) | |
665 | || !(GET_CODE (XEXP (op, 1)) == CONST_INT | |
666 | && SMALL_INT (INTVAL (XEXP (op, 1))))) | |
667 | return 0; | |
668 | return 1; | |
669 | } | |
670 | ||
671 | /* Return true if OP is a non-volatile non-immediate operand. | |
672 | Volatile memory refs require a special "cache-bypass" instruction | |
673 | and only the standard movXX patterns are set up to handle them. */ | |
674 | ||
675 | int | |
676 | nonvol_nonimm_operand (op, mode) | |
677 | rtx op; | |
678 | enum machine_mode mode; | |
679 | { | |
680 | if (GET_CODE (op) == MEM && MEM_VOLATILE_P (op)) | |
681 | return 0; | |
682 | return nonimmediate_operand (op, mode); | |
683 | } | |
684 | ||
685 | /* Accept integer operands in the range -0x80000000..0x7fffffff. We have | |
686 | to check the range carefully since this predicate is used in DImode | |
687 | contexts. */ | |
688 | ||
689 | int | |
690 | const_sint32_operand (op, mode) | |
691 | rtx op; | |
2b046bda | 692 | enum machine_mode mode ATTRIBUTE_UNUSED; |
e90d5e57 JL |
693 | { |
694 | /* All allowed constants will fit a CONST_INT. */ | |
695 | return (GET_CODE (op) == CONST_INT | |
696 | && (INTVAL (op) >= (-0x7fffffff - 1) && INTVAL (op) <= 0x7fffffff)); | |
697 | } | |
698 | ||
/* Accept integer operands in the range 0..0xffffffff.  We have to check the
   range carefully since this predicate is used in DImode contexts.  Also, we
   need some extra crud to make it work when hosted on 64-bit machines.  */

int
const_uint32_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* All allowed constants will fit a CONST_INT.  */
  return (GET_CODE (op) == CONST_INT
	  && (INTVAL (op) >= 0 && INTVAL (op) <= 0xffffffffL));
#else
  /* On a 32-bit host, values above 0x7fffffff are carried as
     CONST_DOUBLEs with a zero high word.  */
  return ((GET_CODE (op) == CONST_INT && INTVAL (op) >= 0)
	  || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
#endif
}
717 | ||
718 | /* Return 1 if OP is a comparison operator valid for the mode of CC. | |
719 | This allows the use of MATCH_OPERATOR to recognize all the branch insns. | |
720 | ||
721 | Some insns only set a few bits in the condition code. So only allow those | |
722 | comparisons that use the bits that are valid. */ | |
723 | ||
724 | int | |
725 | proper_comparison_operator (op, mode) | |
726 | rtx op; | |
2b046bda | 727 | enum machine_mode mode ATTRIBUTE_UNUSED; |
e90d5e57 JL |
728 | { |
729 | enum rtx_code code = GET_CODE (op); | |
730 | ||
731 | if (GET_RTX_CLASS (code) != '<') | |
732 | return 0; | |
733 | ||
734 | if (GET_MODE (XEXP (op, 0)) == CCZNmode) | |
735 | return (code == EQ || code == NE); | |
736 | if (GET_MODE (XEXP (op, 0)) == CCZNCmode) | |
737 | return (code == EQ || code == NE | |
738 | || code == LTU || code == GEU || code == GTU || code == LEU); | |
739 | return 1; | |
740 | } | |
741 | \f | |
/* Misc. utilities.  */

/* X and Y are two things to compare using CODE.  Emit the compare insn and
   return the rtx for the cc reg in the proper mode.  */

rtx
gen_compare_reg (code, x, y)
     enum rtx_code code;
     rtx x, y;
{
  enum machine_mode mode = SELECT_CC_MODE (code, x, y);
  rtx cc_reg;

  /* Hard register 61 is the condition-code register (see
     arc_regno_reg_class initialization).  */
  cc_reg = gen_rtx_REG (mode, 61);

  emit_insn (gen_rtx_SET (VOIDmode, cc_reg,
			  gen_rtx_COMPARE (mode, x, y)));

  return cc_reg;
}
762 | ||
/* Return 1 if VALUE, a const_double, will fit in a limm (4 byte number).
   We assume the value can be either signed or unsigned.  */

int
arc_double_limm_p (value)
     rtx value;
{
  HOST_WIDE_INT low, high;

  if (GET_CODE (value) != CONST_DOUBLE)
    abort ();

  low = CONST_DOUBLE_LOW (value);
  high = CONST_DOUBLE_HIGH (value);

  if (low & 0x80000000)
    {
      /* Bit 31 set: accept either an unsigned 32-bit value (high word 0)
	 or a sign-extended negative 32-bit value (all bits above bit 31
	 set in LOW, high word all ones).  */
      return (((unsigned HOST_WIDE_INT) low <= 0xffffffff && high == 0)
	      || (((low & - (unsigned HOST_WIDE_INT) 0x80000000)
		   == - (unsigned HOST_WIDE_INT) 0x80000000)
		  && high == -1));
    }
  else
    {
      /* Bit 31 clear: a non-negative value fits if it is within 31 bits
	 and the high word is zero.  */
      return (unsigned HOST_WIDE_INT) low <= 0x7fffffff && high == 0;
    }
}
790 | \f | |
/* Do any needed setup for a variadic function.  For the ARC, we must
   create a register parameter block, and then copy any anonymous arguments
   in registers to memory.

   CUM has not been updated for the last named argument which has type TYPE
   and mode MODE, and we rely on this fact.

   We do things a little weird here.  We're supposed to only allocate space
   for the anonymous arguments.  However we need to keep the stack eight byte
   aligned.  So we round the space up if necessary, and leave it to va_start
   to compensate.  */

void
arc_setup_incoming_varargs (cum, mode, type, pretend_size, no_rtl)
     CUMULATIVE_ARGS *cum;
     enum machine_mode mode;
     tree type ATTRIBUTE_UNUSED;
     int *pretend_size;
     int no_rtl;
{
  int first_anon_arg;

  /* All BLKmode values are passed by reference.  */
  if (mode == BLKmode)
    abort ();

  /* Index of the first parameter register holding an anonymous argument:
     CUM plus the (word-rounded) size of the last named argument.  */
  first_anon_arg = *cum + ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1)
			   / UNITS_PER_WORD);

  if (first_anon_arg < MAX_ARC_PARM_REGS && !no_rtl)
    {
      /* Note that first_reg_offset < MAX_ARC_PARM_REGS.  */
      int first_reg_offset = first_anon_arg;
      /* Size in words to "pretend" allocate.  */
      int size = MAX_ARC_PARM_REGS - first_reg_offset;
      /* Extra slop to keep stack eight byte aligned.  */
      int align_slop = size & 1;
      rtx regblock;

      regblock = gen_rtx_MEM (BLKmode,
			      plus_constant (arg_pointer_rtx,
					     FIRST_PARM_OFFSET (0)
					     + align_slop * UNITS_PER_WORD));
      set_mem_alias_set (regblock, get_varargs_alias_set ());
      set_mem_align (regblock, BITS_PER_WORD);
      /* Spill the remaining parameter registers into the block.  */
      move_block_from_reg (first_reg_offset, regblock,
			   MAX_ARC_PARM_REGS - first_reg_offset);

      *pretend_size = ((MAX_ARC_PARM_REGS - first_reg_offset + align_slop)
		       * UNITS_PER_WORD);
    }
}
843 | \f | |
/* Cost functions.  */

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
arc_rtx_costs (x, code, outer_code, total)
     rtx x;
     int code;
     int outer_code ATTRIBUTE_UNUSED;
     int *total;
{
  switch (code)
    {
      /* Small integers are as cheap as registers.  4 byte values can
	 be fetched as immediate constants - let's give that the cost
	 of an extra insn.  */
    case CONST_INT:
      if (SMALL_INT (INTVAL (x)))
	{
	  *total = 0;
	  return true;
	}
      /* FALLTHRU */

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = COSTS_N_INSNS (1);
      return true;

    case CONST_DOUBLE:
      {
	/* One insn per half that does not fit in the shimm range.  */
	rtx high, low;
	split_double (x, &high, &low);
	*total = COSTS_N_INSNS (!SMALL_INT (INTVAL (high))
				+ !SMALL_INT (INTVAL (low)));
	return true;
      }

    /* Encourage synth_mult to find a synthetic multiply when reasonable.
       If we need more than 12 insns to do a multiply, then go out-of-line,
       since the call overhead will be < 10% of the cost of the multiply.  */
    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      if (TARGET_SHIFTER)
	*total = COSTS_N_INSNS (1);
      else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	/* Variable shift without a barrel shifter: worst-case loop.  */
	*total = COSTS_N_INSNS (16);
      else
	/* Shift by constant: one insn per shifted bit.  */
	*total = COSTS_N_INSNS (INTVAL (XEXP ((x), 1)));
      return false;

    default:
      return false;
    }
}
903 | ||
904 | ||
e90d5e57 JL |
905 | /* Provide the costs of an addressing mode that contains ADDR. |
906 | If ADDR is not a valid address, its cost is irrelevant. */ | |
907 | ||
dcefdf67 | 908 | static int |
e90d5e57 JL |
909 | arc_address_cost (addr) |
910 | rtx addr; | |
911 | { | |
912 | switch (GET_CODE (addr)) | |
913 | { | |
914 | case REG : | |
e90d5e57 JL |
915 | return 1; |
916 | ||
917 | case LABEL_REF : | |
918 | case SYMBOL_REF : | |
919 | case CONST : | |
920 | return 2; | |
921 | ||
922 | case PLUS : | |
923 | { | |
924 | register rtx plus0 = XEXP (addr, 0); | |
925 | register rtx plus1 = XEXP (addr, 1); | |
926 | ||
927 | if (GET_CODE (plus0) != REG) | |
928 | break; | |
929 | ||
930 | switch (GET_CODE (plus1)) | |
931 | { | |
932 | case CONST_INT : | |
933 | return SMALL_INT (plus1) ? 1 : 2; | |
934 | case CONST : | |
935 | case SYMBOL_REF : | |
936 | case LABEL_REF : | |
937 | return 2; | |
938 | default: | |
939 | break; | |
940 | } | |
941 | break; | |
942 | } | |
2b046bda KG |
943 | default: |
944 | break; | |
e90d5e57 JL |
945 | } |
946 | ||
947 | return 4; | |
948 | } | |
949 | \f | |
950 | /* Function prologue/epilogue handlers. */ | |
951 | ||
952 | /* ARC stack frames look like: | |
953 | ||
954 | Before call After call | |
955 | +-----------------------+ +-----------------------+ | |
956 | | | | | | |
957 | high | local variables, | | local variables, | | |
958 | mem | reg save area, etc. | | reg save area, etc. | | |
959 | | | | | | |
960 | +-----------------------+ +-----------------------+ | |
961 | | | | | | |
962 | | arguments on stack. | | arguments on stack. | | |
963 | | | | | | |
964 | SP+16->+-----------------------+FP+48->+-----------------------+ | |
965 | | 4 word save area for | | reg parm save area, | | |
966 | | return addr, prev %fp | | only created for | | |
967 | SP+0->+-----------------------+ | variable argument | | |
968 | | functions | | |
969 | FP+16->+-----------------------+ | |
970 | | 4 word save area for | | |
971 | | return addr, prev %fp | | |
972 | FP+0->+-----------------------+ | |
973 | | | | |
974 | | local variables | | |
975 | | | | |
976 | +-----------------------+ | |
977 | | | | |
978 | | register save area | | |
979 | | | | |
980 | +-----------------------+ | |
981 | | | | |
982 | | alloca allocations | | |
983 | | | | |
984 | +-----------------------+ | |
985 | | | | |
986 | | arguments on stack | | |
987 | | | | |
988 | SP+16->+-----------------------+ | |
989 | low | 4 word save area for | | |
990 | memory | return addr, prev %fp | | |
991 | SP+0->+-----------------------+ | |
992 | ||
993 | Notes: | |
994 | 1) The "reg parm save area" does not exist for non variable argument fns. | |
995 | The "reg parm save area" can be eliminated completely if we created our | |
996 | own va-arc.h, but that has tradeoffs as well (so it's not done). */ | |
997 | ||
998 | /* Structure to be filled in by arc_compute_frame_size with register | |
999 | save masks, and offsets for the current function. */ | |
1000 | struct arc_frame_info | |
1001 | { | |
1002 | unsigned int total_size; /* # bytes that the entire frame takes up. */ | |
1003 | unsigned int extra_size; /* # bytes of extra stuff. */ | |
1004 | unsigned int pretend_size; /* # bytes we push and pretend caller did. */ | |
1005 | unsigned int args_size; /* # bytes that outgoing arguments take up. */ | |
1006 | unsigned int reg_size; /* # bytes needed to store regs. */ | |
1007 | unsigned int var_size; /* # bytes that variables take up. */ | |
1008 | unsigned int reg_offset; /* Offset from new sp to store regs. */ | |
1009 | unsigned int gmask; /* Mask of saved gp registers. */ | |
1010 | int initialized; /* Nonzero if frame size already calculated. */ | |
1011 | }; | |
1012 | ||
1013 | /* Current frame information calculated by arc_compute_frame_size. */ | |
1014 | static struct arc_frame_info current_frame_info; | |
1015 | ||
1016 | /* Zero structure to initialize current_frame_info. */ | |
1017 | static struct arc_frame_info zero_frame_info; | |
1018 | ||
1019 | /* Type of function DECL. | |
1020 | ||
1021 | The result is cached. To reset the cache at the end of a function, | |
1022 | call with DECL = NULL_TREE. */ | |
1023 | ||
1024 | enum arc_function_type | |
1025 | arc_compute_function_type (decl) | |
1026 | tree decl; | |
1027 | { | |
1028 | tree a; | |
1029 | /* Cached value. */ | |
1030 | static enum arc_function_type fn_type = ARC_FUNCTION_UNKNOWN; | |
1031 | /* Last function we were called for. */ | |
1032 | static tree last_fn = NULL_TREE; | |
1033 | ||
1034 | /* Resetting the cached value? */ | |
1035 | if (decl == NULL_TREE) | |
1036 | { | |
1037 | fn_type = ARC_FUNCTION_UNKNOWN; | |
1038 | last_fn = NULL_TREE; | |
1039 | return fn_type; | |
1040 | } | |
1041 | ||
1042 | if (decl == last_fn && fn_type != ARC_FUNCTION_UNKNOWN) | |
1043 | return fn_type; | |
1044 | ||
1045 | /* Assume we have a normal function (not an interrupt handler). */ | |
1046 | fn_type = ARC_FUNCTION_NORMAL; | |
1047 | ||
1048 | /* Now see if this is an interrupt handler. */ | |
91d231cb | 1049 | for (a = DECL_ATTRIBUTES (current_function_decl); |
e90d5e57 JL |
1050 | a; |
1051 | a = TREE_CHAIN (a)) | |
1052 | { | |
1053 | tree name = TREE_PURPOSE (a), args = TREE_VALUE (a); | |
1054 | ||
1055 | if (name == get_identifier ("__interrupt__") | |
1056 | && list_length (args) == 1 | |
1057 | && TREE_CODE (TREE_VALUE (args)) == STRING_CST) | |
1058 | { | |
1059 | tree value = TREE_VALUE (args); | |
1060 | ||
1061 | if (!strcmp (TREE_STRING_POINTER (value), "ilink1")) | |
1062 | fn_type = ARC_FUNCTION_ILINK1; | |
1063 | else if (!strcmp (TREE_STRING_POINTER (value), "ilink2")) | |
1064 | fn_type = ARC_FUNCTION_ILINK2; | |
1065 | else | |
1066 | abort (); | |
1067 | break; | |
1068 | } | |
1069 | } | |
1070 | ||
1071 | last_fn = decl; | |
1072 | return fn_type; | |
1073 | } | |
1074 | ||
1075 | #define ILINK1_REGNUM 29 | |
1076 | #define ILINK2_REGNUM 30 | |
1077 | #define RETURN_ADDR_REGNUM 31 | |
1078 | #define FRAME_POINTER_MASK (1 << (FRAME_POINTER_REGNUM)) | |
1079 | #define RETURN_ADDR_MASK (1 << (RETURN_ADDR_REGNUM)) | |
1080 | ||
1081 | /* Tell prologue and epilogue if register REGNO should be saved / restored. | |
1082 | The return address and frame pointer are treated separately. | |
1083 | Don't consider them here. */ | |
1084 | #define MUST_SAVE_REGISTER(regno, interrupt_p) \ | |
1085 | ((regno) != RETURN_ADDR_REGNUM && (regno) != FRAME_POINTER_REGNUM \ | |
1086 | && (regs_ever_live[regno] && (!call_used_regs[regno] || interrupt_p))) | |
1087 | ||
1088 | #define MUST_SAVE_RETURN_ADDR (regs_ever_live[RETURN_ADDR_REGNUM]) | |
1089 | ||
1090 | /* Return the bytes needed to compute the frame pointer from the current | |
1091 | stack pointer. | |
1092 | ||
1093 | SIZE is the size needed for local variables. */ | |
1094 | ||
1095 | unsigned int | |
1096 | arc_compute_frame_size (size) | |
1097 | int size; /* # of var. bytes allocated. */ | |
1098 | { | |
1099 | int regno; | |
1100 | unsigned int total_size, var_size, args_size, pretend_size, extra_size; | |
1101 | unsigned int reg_size, reg_offset; | |
1102 | unsigned int gmask; | |
1103 | enum arc_function_type fn_type; | |
1104 | int interrupt_p; | |
1105 | ||
1106 | var_size = size; | |
1107 | args_size = current_function_outgoing_args_size; | |
1108 | pretend_size = current_function_pretend_args_size; | |
1109 | extra_size = FIRST_PARM_OFFSET (0); | |
1110 | total_size = extra_size + pretend_size + args_size + var_size; | |
1111 | reg_offset = FIRST_PARM_OFFSET(0) + current_function_outgoing_args_size; | |
1112 | reg_size = 0; | |
1113 | gmask = 0; | |
1114 | ||
1115 | /* See if this is an interrupt handler. Call used registers must be saved | |
1116 | for them too. */ | |
1117 | fn_type = arc_compute_function_type (current_function_decl); | |
1118 | interrupt_p = ARC_INTERRUPT_P (fn_type); | |
1119 | ||
1120 | /* Calculate space needed for registers. | |
1121 | ??? We ignore the extension registers for now. */ | |
1122 | ||
1123 | for (regno = 0; regno <= 31; regno++) | |
1124 | { | |
1125 | if (MUST_SAVE_REGISTER (regno, interrupt_p)) | |
1126 | { | |
1127 | reg_size += UNITS_PER_WORD; | |
1128 | gmask |= 1 << regno; | |
1129 | } | |
1130 | } | |
1131 | ||
1132 | total_size += reg_size; | |
1133 | ||
1134 | /* If the only space to allocate is the fp/blink save area this is an | |
1135 | empty frame. However, if we'll be making a function call we need to | |
1136 | allocate a stack frame for our callee's fp/blink save area. */ | |
1137 | if (total_size == extra_size | |
1138 | && !MUST_SAVE_RETURN_ADDR) | |
1139 | total_size = extra_size = 0; | |
1140 | ||
1141 | total_size = ARC_STACK_ALIGN (total_size); | |
1142 | ||
1143 | /* Save computed information. */ | |
1144 | current_frame_info.total_size = total_size; | |
1145 | current_frame_info.extra_size = extra_size; | |
1146 | current_frame_info.pretend_size = pretend_size; | |
1147 | current_frame_info.var_size = var_size; | |
1148 | current_frame_info.args_size = args_size; | |
1149 | current_frame_info.reg_size = reg_size; | |
1150 | current_frame_info.reg_offset = reg_offset; | |
1151 | current_frame_info.gmask = gmask; | |
1152 | current_frame_info.initialized = reload_completed; | |
1153 | ||
1154 | /* Ok, we're done. */ | |
1155 | return total_size; | |
1156 | } | |
1157 | \f | |
1158 | /* Common code to save/restore registers. */ | |
1159 | ||
1160 | void | |
1161 | arc_save_restore (file, base_reg, offset, gmask, op) | |
1162 | FILE *file; | |
99f44eba | 1163 | const char *base_reg; |
e90d5e57 JL |
1164 | unsigned int offset; |
1165 | unsigned int gmask; | |
99f44eba | 1166 | const char *op; |
e90d5e57 JL |
1167 | { |
1168 | int regno; | |
1169 | ||
1170 | if (gmask == 0) | |
1171 | return; | |
1172 | ||
1173 | for (regno = 0; regno <= 31; regno++) | |
1174 | { | |
1175 | if ((gmask & (1L << regno)) != 0) | |
1176 | { | |
1177 | fprintf (file, "\t%s %s,[%s,%d]\n", | |
1178 | op, reg_names[regno], base_reg, offset); | |
1179 | offset += UNITS_PER_WORD; | |
1180 | } | |
1181 | } | |
1182 | } | |
1183 | \f | |
301d03af RS |
1184 | /* Target hook to assemble an integer object. The ARC version needs to |
1185 | emit a special directive for references to labels and function | |
1186 | symbols. */ | |
1187 | ||
1188 | static bool | |
1189 | arc_assemble_integer (x, size, aligned_p) | |
1190 | rtx x; | |
1191 | unsigned int size; | |
1192 | int aligned_p; | |
1193 | { | |
1194 | if (size == UNITS_PER_WORD && aligned_p | |
8cbd5ce0 | 1195 | && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x)) |
301d03af RS |
1196 | || GET_CODE (x) == LABEL_REF)) |
1197 | { | |
1198 | fputs ("\t.word\t%st(", asm_out_file); | |
1199 | output_addr_const (asm_out_file, x); | |
1200 | fputs (")\n", asm_out_file); | |
1201 | return true; | |
1202 | } | |
1203 | return default_assemble_integer (x, size, aligned_p); | |
1204 | } | |
1205 | \f | |
e90d5e57 JL |
1206 | /* Set up the stack and frame pointer (if desired) for the function. */ |
1207 | ||
08c148a8 | 1208 | static void |
e90d5e57 JL |
1209 | arc_output_function_prologue (file, size) |
1210 | FILE *file; | |
08c148a8 | 1211 | HOST_WIDE_INT size; |
e90d5e57 | 1212 | { |
99f44eba KG |
1213 | const char *sp_str = reg_names[STACK_POINTER_REGNUM]; |
1214 | const char *fp_str = reg_names[FRAME_POINTER_REGNUM]; | |
e90d5e57 JL |
1215 | unsigned int gmask = current_frame_info.gmask; |
1216 | enum arc_function_type fn_type = arc_compute_function_type (current_function_decl); | |
1217 | ||
1218 | /* If this is an interrupt handler, set up our stack frame. | |
1219 | ??? Optimize later. */ | |
1220 | if (ARC_INTERRUPT_P (fn_type)) | |
1221 | { | |
1222 | fprintf (file, "\t%s interrupt handler\n", | |
1223 | ASM_COMMENT_START); | |
1224 | fprintf (file, "\tsub %s,%s,16\n", sp_str, sp_str); | |
1225 | } | |
1226 | ||
1227 | /* This is only for the human reader. */ | |
1228 | fprintf (file, "\t%s BEGIN PROLOGUE %s vars= %d, regs= %d, args= %d, extra= %d\n", | |
1229 | ASM_COMMENT_START, ASM_COMMENT_START, | |
1230 | current_frame_info.var_size, | |
1231 | current_frame_info.reg_size / 4, | |
1232 | current_frame_info.args_size, | |
1233 | current_frame_info.extra_size); | |
1234 | ||
1235 | size = ARC_STACK_ALIGN (size); | |
1236 | size = (! current_frame_info.initialized | |
1237 | ? arc_compute_frame_size (size) | |
1238 | : current_frame_info.total_size); | |
1239 | ||
1240 | /* These cases shouldn't happen. Catch them now. */ | |
1241 | if (size == 0 && gmask) | |
1242 | abort (); | |
1243 | ||
956d6950 | 1244 | /* Allocate space for register arguments if this is a variadic function. */ |
e90d5e57 JL |
1245 | if (current_frame_info.pretend_size != 0) |
1246 | fprintf (file, "\tsub %s,%s,%d\n", | |
1247 | sp_str, sp_str, current_frame_info.pretend_size); | |
1248 | ||
1249 | /* The home-grown ABI says link register is saved first. */ | |
1250 | if (MUST_SAVE_RETURN_ADDR) | |
1251 | fprintf (file, "\tst %s,[%s,%d]\n", | |
1252 | reg_names[RETURN_ADDR_REGNUM], sp_str, UNITS_PER_WORD); | |
1253 | ||
1254 | /* Set up the previous frame pointer next (if we need to). */ | |
1255 | if (frame_pointer_needed) | |
1256 | { | |
1257 | fprintf (file, "\tst %s,[%s]\n", fp_str, sp_str); | |
1258 | fprintf (file, "\tmov %s,%s\n", fp_str, sp_str); | |
1259 | } | |
1260 | ||
1261 | /* ??? We don't handle the case where the saved regs are more than 252 | |
1262 | bytes away from sp. This can be handled by decrementing sp once, saving | |
1263 | the regs, and then decrementing it again. The epilogue doesn't have this | |
1264 | problem as the `ld' insn takes reg+limm values (though it would be more | |
1265 | efficient to avoid reg+limm). */ | |
1266 | ||
1267 | /* Allocate the stack frame. */ | |
1268 | if (size - current_frame_info.pretend_size > 0) | |
15a5dddc | 1269 | fprintf (file, "\tsub %s,%s," HOST_WIDE_INT_PRINT_DEC "\n", |
e90d5e57 JL |
1270 | sp_str, sp_str, size - current_frame_info.pretend_size); |
1271 | ||
1272 | /* Save any needed call-saved regs (and call-used if this is an | |
1273 | interrupt handler). */ | |
1274 | arc_save_restore (file, sp_str, current_frame_info.reg_offset, | |
1275 | /* The zeroing of these two bits is unnecessary, | |
1276 | but leave this in for clarity. */ | |
1277 | gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK), | |
1278 | "st"); | |
1279 | ||
1280 | fprintf (file, "\t%s END PROLOGUE\n", ASM_COMMENT_START); | |
1281 | } | |
1282 | \f | |
1283 | /* Do any necessary cleanup after a function to restore stack, frame, | |
6b857ce3 | 1284 | and regs. */ |
e90d5e57 | 1285 | |
08c148a8 | 1286 | static void |
e90d5e57 JL |
1287 | arc_output_function_epilogue (file, size) |
1288 | FILE *file; | |
08c148a8 | 1289 | HOST_WIDE_INT size; |
e90d5e57 JL |
1290 | { |
1291 | rtx epilogue_delay = current_function_epilogue_delay_list; | |
1292 | int noepilogue = FALSE; | |
1293 | enum arc_function_type fn_type = arc_compute_function_type (current_function_decl); | |
1294 | ||
1295 | /* This is only for the human reader. */ | |
1296 | fprintf (file, "\t%s EPILOGUE\n", ASM_COMMENT_START); | |
1297 | ||
1298 | size = ARC_STACK_ALIGN (size); | |
1299 | size = (!current_frame_info.initialized | |
1300 | ? arc_compute_frame_size (size) | |
1301 | : current_frame_info.total_size); | |
1302 | ||
1303 | if (size == 0 && epilogue_delay == 0) | |
1304 | { | |
1305 | rtx insn = get_last_insn (); | |
1306 | ||
1307 | /* If the last insn was a BARRIER, we don't have to write any code | |
1308 | because a jump (aka return) was put there. */ | |
1309 | if (GET_CODE (insn) == NOTE) | |
1310 | insn = prev_nonnote_insn (insn); | |
1311 | if (insn && GET_CODE (insn) == BARRIER) | |
1312 | noepilogue = TRUE; | |
1313 | } | |
1314 | ||
1315 | if (!noepilogue) | |
1316 | { | |
1317 | unsigned int pretend_size = current_frame_info.pretend_size; | |
1318 | unsigned int frame_size = size - pretend_size; | |
1319 | int restored, fp_restored_p; | |
1320 | int can_trust_sp_p = !current_function_calls_alloca; | |
99f44eba KG |
1321 | const char *sp_str = reg_names[STACK_POINTER_REGNUM]; |
1322 | const char *fp_str = reg_names[FRAME_POINTER_REGNUM]; | |
e90d5e57 JL |
1323 | |
1324 | /* ??? There are lots of optimizations that can be done here. | |
1325 | EG: Use fp to restore regs if it's closer. | |
1326 | Maybe in time we'll do them all. For now, always restore regs from | |
1327 | sp, but don't restore sp if we don't have to. */ | |
1328 | ||
1329 | if (!can_trust_sp_p) | |
1330 | { | |
1331 | if (!frame_pointer_needed) | |
1332 | abort (); | |
1333 | fprintf (file,"\tsub %s,%s,%d\t\t%s sp not trusted here\n", | |
1334 | sp_str, fp_str, frame_size, ASM_COMMENT_START); | |
1335 | } | |
1336 | ||
1337 | /* Restore any saved registers. */ | |
1338 | arc_save_restore (file, sp_str, current_frame_info.reg_offset, | |
1339 | /* The zeroing of these two bits is unnecessary, | |
1340 | but leave this in for clarity. */ | |
1341 | current_frame_info.gmask & ~(FRAME_POINTER_MASK | RETURN_ADDR_MASK), | |
1342 | "ld"); | |
1343 | ||
1344 | if (MUST_SAVE_RETURN_ADDR) | |
1345 | fprintf (file, "\tld %s,[%s,%d]\n", | |
1346 | reg_names[RETURN_ADDR_REGNUM], | |
1347 | frame_pointer_needed ? fp_str : sp_str, | |
1348 | UNITS_PER_WORD + (frame_pointer_needed ? 0 : frame_size)); | |
1349 | ||
1350 | /* Keep track of how much of the stack pointer we've restored. | |
1351 | It makes the following a lot more readable. */ | |
1352 | restored = 0; | |
1353 | fp_restored_p = 0; | |
1354 | ||
1355 | /* We try to emit the epilogue delay slot insn right after the load | |
1356 | of the return address register so that it can execute with the | |
1357 | stack intact. Secondly, loads are delayed. */ | |
1358 | /* ??? If stack intactness is important, always emit now. */ | |
1359 | if (MUST_SAVE_RETURN_ADDR && epilogue_delay != NULL_RTX) | |
1360 | { | |
1361 | final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1); | |
1362 | epilogue_delay = NULL_RTX; | |
1363 | } | |
1364 | ||
1365 | if (frame_pointer_needed) | |
1366 | { | |
1367 | /* Try to restore the frame pointer in the delay slot. We can't, | |
1368 | however, if any of these is true. */ | |
1369 | if (epilogue_delay != NULL_RTX | |
1370 | || !SMALL_INT (frame_size) | |
1371 | || pretend_size | |
1372 | || ARC_INTERRUPT_P (fn_type)) | |
1373 | { | |
1374 | /* Note that we restore fp and sp here! */ | |
1375 | fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size); | |
1376 | restored += frame_size; | |
1377 | fp_restored_p = 1; | |
1378 | } | |
1379 | } | |
1380 | else if (!SMALL_INT (size /* frame_size + pretend_size */) | |
1381 | || ARC_INTERRUPT_P (fn_type)) | |
1382 | { | |
1383 | fprintf (file, "\tadd %s,%s,%d\n", sp_str, sp_str, frame_size); | |
1384 | restored += frame_size; | |
1385 | } | |
1386 | ||
1387 | /* These must be done before the return insn because the delay slot | |
1388 | does the final stack restore. */ | |
1389 | if (ARC_INTERRUPT_P (fn_type)) | |
1390 | { | |
1391 | if (epilogue_delay) | |
1392 | { | |
1393 | final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1); | |
1394 | } | |
1395 | } | |
1396 | ||
1397 | /* Emit the return instruction. */ | |
1398 | { | |
8b60264b | 1399 | static const int regs[4] = { |
e90d5e57 JL |
1400 | 0, RETURN_ADDR_REGNUM, ILINK1_REGNUM, ILINK2_REGNUM |
1401 | }; | |
1402 | fprintf (file, "\tj.d %s\n", reg_names[regs[fn_type]]); | |
1403 | } | |
1404 | ||
1405 | /* If the only register saved is the return address, we need a | |
1406 | nop, unless we have an instruction to put into it. Otherwise | |
1407 | we don't since reloading multiple registers doesn't reference | |
1408 | the register being loaded. */ | |
1409 | ||
1410 | if (ARC_INTERRUPT_P (fn_type)) | |
1411 | fprintf (file, "\tadd %s,%s,16\n", sp_str, sp_str); | |
1412 | else if (epilogue_delay != NULL_RTX) | |
1413 | { | |
1414 | if (frame_pointer_needed && !fp_restored_p) | |
1415 | abort (); | |
1416 | if (restored < size) | |
1417 | abort (); | |
1418 | final_scan_insn (XEXP (epilogue_delay, 0), file, 1, -2, 1); | |
1419 | } | |
1420 | else if (frame_pointer_needed && !fp_restored_p) | |
1421 | { | |
1422 | if (!SMALL_INT (frame_size)) | |
1423 | abort (); | |
1424 | /* Note that we restore fp and sp here! */ | |
1425 | fprintf (file, "\tld.a %s,[%s,%d]\n", fp_str, sp_str, frame_size); | |
1426 | } | |
1427 | else if (restored < size) | |
1428 | { | |
1429 | if (!SMALL_INT (size - restored)) | |
1430 | abort (); | |
15a5dddc | 1431 | fprintf (file, "\tadd %s,%s," HOST_WIDE_INT_PRINT_DEC "\n", |
e90d5e57 JL |
1432 | sp_str, sp_str, size - restored); |
1433 | } | |
1434 | else | |
1435 | fprintf (file, "\tnop\n"); | |
1436 | } | |
1437 | ||
1438 | /* Reset state info for each function. */ | |
1439 | current_frame_info = zero_frame_info; | |
1440 | arc_compute_function_type (NULL_TREE); | |
1441 | } | |
1442 | \f | |
1443 | /* Define the number of delay slots needed for the function epilogue. | |
1444 | ||
1445 | Interrupt handlers can't have any epilogue delay slots (it's always needed | |
1446 | for something else, I think). For normal functions, we have to worry about | |
1447 | using call-saved regs as they'll be restored before the delay slot insn. | |
1448 | Functions with non-empty frames already have enough choices for the epilogue | |
1449 | delay slot so for now we only consider functions with empty frames. */ | |
1450 | ||
1451 | int | |
1452 | arc_delay_slots_for_epilogue () | |
1453 | { | |
1454 | if (arc_compute_function_type (current_function_decl) != ARC_FUNCTION_NORMAL) | |
1455 | return 0; | |
1456 | if (!current_frame_info.initialized) | |
1457 | (void) arc_compute_frame_size (get_frame_size ()); | |
1458 | if (current_frame_info.total_size == 0) | |
1459 | return 1; | |
1460 | return 0; | |
1461 | } | |
1462 | ||
1463 | /* Return true if TRIAL is a valid insn for the epilogue delay slot. | |
1464 | Any single length instruction which doesn't reference the stack or frame | |
1465 | pointer or any call-saved register is OK. SLOT will always be 0. */ | |
1466 | ||
1467 | int | |
1468 | arc_eligible_for_epilogue_delay (trial, slot) | |
1469 | rtx trial; | |
1470 | int slot; | |
1471 | { | |
1472 | if (slot != 0) | |
1473 | abort (); | |
1474 | ||
1475 | if (get_attr_length (trial) == 1 | |
1476 | /* If registers where saved, presumably there's more than enough | |
1477 | possibilities for the delay slot. The alternative is something | |
1478 | more complicated (of course, if we expanded the epilogue as rtl | |
1479 | this problem would go away). */ | |
1480 | /* ??? Note that this will always be true since only functions with | |
1481 | empty frames have epilogue delay slots. See | |
1482 | arc_delay_slots_for_epilogue. */ | |
1483 | && current_frame_info.gmask == 0 | |
1484 | && ! reg_mentioned_p (stack_pointer_rtx, PATTERN (trial)) | |
1485 | && ! reg_mentioned_p (frame_pointer_rtx, PATTERN (trial))) | |
1486 | return 1; | |
1487 | return 0; | |
1488 | } | |
1489 | \f | |
1490 | /* PIC */ | |
1491 | ||
e90d5e57 JL |
/* Emit special PIC prologues and epilogues.  */

void
arc_finalize_pic ()
{
  /* Nothing to do on this target.  */
}
1499 | \f | |
1500 | /* Return true if OP is a shift operator. */ | |
1501 | ||
1502 | int | |
1503 | shift_operator (op, mode) | |
1504 | rtx op; | |
2b046bda | 1505 | enum machine_mode mode ATTRIBUTE_UNUSED; |
e90d5e57 JL |
1506 | { |
1507 | switch (GET_CODE (op)) | |
1508 | { | |
1509 | case ASHIFTRT: | |
1510 | case LSHIFTRT: | |
1511 | case ASHIFT: | |
1512 | return 1; | |
1513 | default: | |
1514 | return 0; | |
1515 | } | |
1516 | } | |
1517 | ||
1518 | /* Output the assembler code for doing a shift. | |
1519 | We go to a bit of trouble to generate efficient code as the ARC only has | |
1520 | single bit shifts. This is taken from the h8300 port. We only have one | |
1521 | mode of shifting and can't access individual bytes like the h8300 can, so | |
1522 | this is greatly simplified (at the expense of not generating hyper- | |
1523 | efficient code). | |
1524 | ||
1525 | This function is not used if the variable shift insns are present. */ | |
1526 | ||
1527 | /* ??? We assume the output operand is the same as operand 1. | |
1528 | This can be optimized (deleted) in the case of 1 bit shifts. */ | |
1529 | /* ??? We use the loop register here. We don't use it elsewhere (yet) and | |
1530 | using it here will give us a chance to play with it. */ | |
1531 | ||
2b046bda | 1532 | const char * |
e90d5e57 JL |
1533 | output_shift (operands) |
1534 | rtx *operands; | |
1535 | { | |
e90d5e57 JL |
1536 | rtx shift = operands[3]; |
1537 | enum machine_mode mode = GET_MODE (shift); | |
1538 | enum rtx_code code = GET_CODE (shift); | |
2b046bda | 1539 | const char *shift_one; |
e90d5e57 JL |
1540 | |
1541 | if (mode != SImode) | |
1542 | abort (); | |
1543 | ||
1544 | switch (code) | |
1545 | { | |
1546 | case ASHIFT: shift_one = "asl %0,%0"; break; | |
1547 | case ASHIFTRT: shift_one = "asr %0,%0"; break; | |
1548 | case LSHIFTRT: shift_one = "lsr %0,%0"; break; | |
1549 | default: abort (); | |
1550 | } | |
1551 | ||
1552 | if (GET_CODE (operands[2]) != CONST_INT) | |
1553 | { | |
1554 | if (optimize) | |
1555 | output_asm_insn ("mov lp_count,%2", operands); | |
1556 | else | |
1557 | output_asm_insn ("mov %4,%2", operands); | |
1558 | goto shiftloop; | |
1559 | } | |
1560 | else | |
1561 | { | |
1562 | int n = INTVAL (operands[2]); | |
1563 | ||
1564 | /* If the count is negative, make it 0. */ | |
1565 | if (n < 0) | |
1566 | n = 0; | |
1567 | /* If the count is too big, truncate it. | |
1568 | ANSI says shifts of GET_MODE_BITSIZE are undefined - we choose to | |
1569 | do the intuitive thing. */ | |
1570 | else if (n > GET_MODE_BITSIZE (mode)) | |
1571 | n = GET_MODE_BITSIZE (mode); | |
1572 | ||
1573 | /* First see if we can do them inline. */ | |
1574 | if (n <= 8) | |
1575 | { | |
1576 | while (--n >= 0) | |
1577 | output_asm_insn (shift_one, operands); | |
1578 | } | |
1579 | /* See if we can use a rotate/and. */ | |
1580 | else if (n == BITS_PER_WORD - 1) | |
1581 | { | |
1582 | switch (code) | |
1583 | { | |
1584 | case ASHIFT : | |
1585 | output_asm_insn ("and %0,%0,1\n\tror %0,%0", operands); | |
1586 | break; | |
1587 | case ASHIFTRT : | |
1588 | /* The ARC doesn't have a rol insn. Use something else. */ | |
1589 | output_asm_insn ("asl.f 0,%0\n\tsbc %0,0,0", operands); | |
1590 | break; | |
1591 | case LSHIFTRT : | |
1592 | /* The ARC doesn't have a rol insn. Use something else. */ | |
1593 | output_asm_insn ("asl.f 0,%0\n\tadc %0,0,0", operands); | |
1594 | break; | |
2b046bda KG |
1595 | default: |
1596 | break; | |
e90d5e57 JL |
1597 | } |
1598 | } | |
1599 | /* Must loop. */ | |
1600 | else | |
1601 | { | |
1602 | char buf[100]; | |
1603 | ||
1604 | if (optimize) | |
1605 | output_asm_insn ("mov lp_count,%c2", operands); | |
1606 | else | |
1607 | output_asm_insn ("mov %4,%c2", operands); | |
1608 | shiftloop: | |
1609 | if (optimize) | |
1610 | { | |
1611 | if (flag_pic) | |
2b046bda | 1612 | sprintf (buf, "lr %%4,[status]\n\tadd %%4,%%4,6\t%s single insn loop start", |
e90d5e57 JL |
1613 | ASM_COMMENT_START); |
1614 | else | |
1615 | sprintf (buf, "mov %%4,%%%%st(1f)\t%s (single insn loop start) >> 2", | |
1616 | ASM_COMMENT_START); | |
1617 | output_asm_insn (buf, operands); | |
1618 | output_asm_insn ("sr %4,[lp_start]", operands); | |
1619 | output_asm_insn ("add %4,%4,1", operands); | |
1620 | output_asm_insn ("sr %4,[lp_end]", operands); | |
1621 | output_asm_insn ("nop\n\tnop", operands); | |
1622 | if (flag_pic) | |
761c70aa KG |
1623 | fprintf (asm_out_file, "\t%s single insn loop\n", |
1624 | ASM_COMMENT_START); | |
e90d5e57 | 1625 | else |
761c70aa KG |
1626 | fprintf (asm_out_file, "1:\t%s single insn loop\n", |
1627 | ASM_COMMENT_START); | |
e90d5e57 JL |
1628 | output_asm_insn (shift_one, operands); |
1629 | } | |
1630 | else | |
1631 | { | |
761c70aa KG |
1632 | fprintf (asm_out_file, "1:\t%s begin shift loop\n", |
1633 | ASM_COMMENT_START); | |
e90d5e57 JL |
1634 | output_asm_insn ("sub.f %4,%4,1", operands); |
1635 | output_asm_insn ("nop", operands); | |
1636 | output_asm_insn ("bn.nd 2f", operands); | |
1637 | output_asm_insn (shift_one, operands); | |
1638 | output_asm_insn ("b.nd 1b", operands); | |
761c70aa KG |
1639 | fprintf (asm_out_file, "2:\t%s end shift loop\n", |
1640 | ASM_COMMENT_START); | |
e90d5e57 JL |
1641 | } |
1642 | } | |
1643 | } | |
1644 | ||
1645 | return ""; | |
1646 | } | |
1647 | \f | |
1648 | /* Nested function support. */ | |
1649 | ||
1650 | /* Emit RTL insns to initialize the variable parts of a trampoline. | |
1651 | FNADDR is an RTX for the address of the function's pure code. | |
1652 | CXT is an RTX for the static chain value for the function. */ | |
1653 | ||
1654 | void | |
1655 | arc_initialize_trampoline (tramp, fnaddr, cxt) | |
2b046bda | 1656 | rtx tramp ATTRIBUTE_UNUSED, fnaddr ATTRIBUTE_UNUSED, cxt ATTRIBUTE_UNUSED; |
e90d5e57 JL |
1657 | { |
1658 | } | |
1659 | \f | |
1660 | /* Set the cpu type and print out other fancy things, | |
1661 | at the top of the file. */ | |
1662 | ||
1bc7c5b6 ZW |
1663 | static void |
1664 | arc_file_start () | |
e90d5e57 | 1665 | { |
1bc7c5b6 ZW |
1666 | default_file_start (); |
1667 | fprintf (asm_out_file, "\t.cpu %s\n", arc_cpu_string); | |
e90d5e57 JL |
1668 | } |
1669 | \f | |
/* Print operand X (an rtx) in assembler syntax to file FILE.
   CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
   For `%' followed by punctuation, CODE is the punctuation and X is null.

   The CODE letters handled here are the ARC-specific operand modifiers;
   anything not recognized by the first switch falls through to the
   generic REG/MEM/constant output at the bottom.  */

void
arc_print_operand (file, x, code)
     FILE *file;
     rtx x;
     int code;
{
  switch (code)
    {
    case '#' :
      /* Conditional branches.  For now these are equivalent.  */
    case '*' :
      /* Unconditional branches.  Output the appropriate delay slot suffix.  */
      if (!final_sequence || XVECLEN (final_sequence, 0) == 1)
	{
	  /* There's nothing in the delay slot.  */
	  fputs (".nd", file);
	}
      else
	{
	  rtx jump = XVECEXP (final_sequence, 0, 0);
	  rtx delay = XVECEXP (final_sequence, 0, 1);
	  /* Annulled branches execute the delay insn only on one path;
	     ".jd" when it comes from the target, ".nd" otherwise.  */
	  if (INSN_ANNULLED_BRANCH_P (jump))
	    fputs (INSN_FROM_TARGET_P (delay) ? ".jd" : ".nd", file);
	  else
	    fputs (".d", file);
	}
      return;
    case '?' : /* with leading "." */
    case '!' : /* without leading "." */
      /* This insn can be conditionally executed.  See if the ccfsm machinery
	 says it should be conditionalized.  */
      if (arc_ccfsm_state == 3 || arc_ccfsm_state == 4)
	{
	  /* Is this insn in a delay slot?  */
	  if (final_sequence && XVECLEN (final_sequence, 0) == 2)
	    {
	      rtx insn = XVECEXP (final_sequence, 0, 1);

	      /* If the insn is annulled and is from the target path, we need
		 to inverse the condition test.  */
	      if (INSN_ANNULLED_BRANCH_P (insn))
		{
		  if (INSN_FROM_TARGET_P (insn))
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc)]);
		  else
		    fprintf (file, "%s%s",
			     code == '?' ? "." : "",
			     arc_condition_codes[arc_ccfsm_current_cc]);
		}
	      else
		{
		  /* This insn is executed for either path, so don't
		     conditionalize it at all.  */
		  ; /* nothing to do */
		}
	    }
	  else
	    {
	      /* This insn isn't in a delay slot.  */
	      fprintf (file, "%s%s",
		       code == '?' ? "." : "",
		       arc_condition_codes[arc_ccfsm_current_cc]);
	    }
	}
      return;
    case '~' :
      /* Output a nop if we're between a set of the condition codes,
	 and a conditional branch.  */
      if (last_insn_set_cc_p)
	fputs ("nop\n\t", file);
      return;
    case 'd' :
      /* Condition code of comparison X, as a mnemonic suffix.  */
      fputs (arc_condition_codes[get_arc_condition_code (x)], file);
      return;
    case 'D' :
      /* Inverse of the 'd' condition.  */
      fputs (arc_condition_codes[ARC_INVERSE_CONDITION_CODE
				 (get_arc_condition_code (x))],
	     file);
      return;
    case 'R' :
      /* Write second word of DImode or DFmode reference,
	 register or memory.  */
      if (GET_CODE (x) == REG)
	fputs (reg_names[REGNO (x)+1], file);
      else if (GET_CODE (x) == MEM)
	{
	  fputc ('[', file);
	  /* Handle possible auto-increment.  Since it is pre-increment and
	     we have already done it, we can just use an offset of four.  */
	  /* ??? This is taken from rs6000.c I think.  I don't think it is
	     currently necessary, but keep it around.  */
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    output_address (plus_constant (XEXP (XEXP (x, 0), 0), 4));
	  else
	    output_address (plus_constant (XEXP (x, 0), 4));
	  fputc (']', file);
	}
      else
	output_operand_lossage ("invalid operand to %%R code");
      return;
    case 'S' :
      /* Function or label reference: wrap it in %st(...) so the assembler
	 emits the short-immediate relocation.  */
      if ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
	  || GET_CODE (x) == LABEL_REF)
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, x);
	  fprintf (file, ")");
	  return;
	}
      break;
    case 'H' :
    case 'L' :
      if (GET_CODE (x) == REG)
	{
	  /* L = least significant word, H = most significant word */
	  if ((TARGET_BIG_ENDIAN != 0) ^ (code == 'L'))
	    fputs (reg_names[REGNO (x)], file);
	  else
	    fputs (reg_names[REGNO (x)+1], file);
	}
      else if (GET_CODE (x) == CONST_INT
	       || GET_CODE (x) == CONST_DOUBLE)
	{
	  rtx first, second;

	  split_double (x, &first, &second);
	  fprintf (file, "0x%08lx",
		   (long)(code == 'L' ? INTVAL (first) : INTVAL (second)));
	}
      else
	output_operand_lossage ("invalid operand to %%H/%%L code");
      return;
    case 'A' :
      {
	/* Print a floating-point constant in decimal.  */
	char str[30];

	if (GET_CODE (x) != CONST_DOUBLE
	    || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
	  abort ();

	real_to_decimal (str, CONST_DOUBLE_REAL_VALUE (x), sizeof (str), 0, 1);
	fprintf (file, "%s", str);
	return;
      }
    case 'U' :
      /* Output a load/store with update indicator if appropriate.  */
      if (GET_CODE (x) == MEM)
	{
	  if (GET_CODE (XEXP (x, 0)) == PRE_INC
	      || GET_CODE (XEXP (x, 0)) == PRE_DEC)
	    fputs (".a", file);
	}
      else
	output_operand_lossage ("invalid operand to %%U code");
      return;
    case 'V' :
      /* Output cache bypass indicator for a load/store insn.  Volatile memory
	 refs are defined to use the cache bypass mechanism.  */
      if (GET_CODE (x) == MEM)
	{
	  if (MEM_VOLATILE_P (x))
	    fputs (".di", file);
	}
      else
	output_operand_lossage ("invalid operand to %%V code");
      return;
    case 0 :
      /* Do nothing special.  */
      break;
    default :
      /* Unknown flag.  */
      output_operand_lossage ("invalid operand output code");
    }

  /* Generic output for codes that fell through (0, or 'S' on a
     non-function symbol).  */
  switch (GET_CODE (x))
    {
    case REG :
      fputs (reg_names[REGNO (x)], file);
      break;
    case MEM :
      fputc ('[', file);
      /* Pre-inc/dec addresses have already been performed by the time the
	 operand is printed, so print the adjusted address explicitly.  */
      if (GET_CODE (XEXP (x, 0)) == PRE_INC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       GET_MODE_SIZE (GET_MODE (x))));
      else if (GET_CODE (XEXP (x, 0)) == PRE_DEC)
	output_address (plus_constant (XEXP (XEXP (x, 0), 0),
				       - GET_MODE_SIZE (GET_MODE (x))));
      else
	output_address (XEXP (x, 0));
      fputc (']', file);
      break;
    case CONST_DOUBLE :
      /* We handle SFmode constants here as output_addr_const doesn't.  */
      if (GET_MODE (x) == SFmode)
	{
	  REAL_VALUE_TYPE d;
	  long l;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  REAL_VALUE_TO_TARGET_SINGLE (d, l);
	  fprintf (file, "0x%08lx", l);
	  break;
	}
      /* Fall through.  Let output_addr_const deal with it.  */
    default :
      output_addr_const (file, x);
      break;
    }
}
1886 | ||
/* Print a memory address as an operand to reference that memory location.
   ADDR is the address RTX (the XEXP (mem, 0), not the MEM itself).
   Handles plain registers, symbols, and reg+offset / reg+reg /
   reg+symbol sums; PRE_INC/PRE_DEC must have been stripped by the
   caller (arc_print_operand) since the mode is unknown here.  */

void
arc_print_operand_address (file, addr)
     FILE *file;
     rtx addr;
{
  register rtx base, index = 0;
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case REG :
      fputs (reg_names[REGNO (addr)], file);
      break;
    case SYMBOL_REF :
      /* ??? The %st() branch is disabled ("0 &&"); function symbols are
	 currently printed like any other symbol.  */
      if (/*???*/ 0 && SYMBOL_REF_FUNCTION_P (addr))
	{
	  fprintf (file, "%%st(");
	  output_addr_const (file, addr);
	  fprintf (file, ")");
	}
      else
	output_addr_const (file, addr);
      break;
    case PLUS :
      /* Split the sum into BASE plus either a constant OFFSET or an
	 INDEX rtx; the base must end up being a register.  */
      if (GET_CODE (XEXP (addr, 0)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 0)), base = XEXP (addr, 1);
      else if (GET_CODE (XEXP (addr, 1)) == CONST_INT)
	offset = INTVAL (XEXP (addr, 1)), base = XEXP (addr, 0);
      else
	base = XEXP (addr, 0), index = XEXP (addr, 1);
      if (GET_CODE (base) != REG)
	abort ();
      fputs (reg_names[REGNO (base)], file);
      if (index == 0)
	{
	  if (offset != 0)
	    fprintf (file, ",%d", offset);
	}
      else if (GET_CODE (index) == REG)
	fprintf (file, ",%s", reg_names[REGNO (index)]);
      else if (GET_CODE (index) == SYMBOL_REF)
	fputc (',', file), output_addr_const (file, index);
      else
	abort ();
      break;
    case PRE_INC :
    case PRE_DEC :
      /* We shouldn't get here as we've lost the mode of the memory object
	 (which says how much to inc/dec by).  */
      abort ();
      break;
    default :
      output_addr_const (file, addr);
      break;
    }
}
1945 | ||
1946 | /* Update compare/branch separation marker. */ | |
1947 | ||
1948 | static void | |
1949 | record_cc_ref (insn) | |
1950 | rtx insn; | |
1951 | { | |
1952 | last_insn_set_cc_p = current_insn_set_cc_p; | |
1953 | ||
1954 | switch (get_attr_cond (insn)) | |
1955 | { | |
1956 | case COND_SET : | |
1957 | case COND_SET_ZN : | |
1958 | case COND_SET_ZNC : | |
1959 | if (get_attr_length (insn) == 1) | |
1960 | current_insn_set_cc_p = 1; | |
1961 | else | |
1962 | current_insn_set_cc_p = 0; | |
1963 | break; | |
1964 | default : | |
1965 | current_insn_set_cc_p = 0; | |
1966 | break; | |
1967 | } | |
1968 | } | |
1969 | \f | |
/* Conditional execution support.

   This is based on the ARM port but for now is much simpler.

   A finite state machine takes care of noticing whether or not instructions
   can be conditionally executed, and thus decrease execution time and code
   size by deleting branch instructions.  The fsm is controlled by
   final_prescan_insn, and controls the actions of PRINT_OPERAND.  The patterns
   in the .md file for the branch insns also have a hand in this.  */

/* The state of the fsm controlling condition codes are:
   0: normal, do nothing special
   1: don't output this insn
   2: don't output this insn
   3: make insns conditional
   4: make insns conditional

   State transitions (state->state by whom, under what condition):
   0 -> 1 final_prescan_insn, if insn is conditional branch
   0 -> 2 final_prescan_insn, if the `target' is an unconditional branch
   1 -> 3 branch patterns, after having not output the conditional branch
   2 -> 4 branch patterns, after having not output the conditional branch
   3 -> 0 (*targetm.asm_out.internal_label), if the `target' label is reached
          (the target label has CODE_LABEL_NUMBER equal to
          arc_ccfsm_target_label).
   4 -> 0 final_prescan_insn, if `target' unconditional branch is reached

   If the jump clobbers the conditions then we use states 2 and 4.

   A similar thing can be done with conditional return insns.

   We also handle separating branches from sets of the condition code.
   This is done here because knowledge of the ccfsm state is required,
   we may not be outputting the branch.  */

/* Called by final before each insn is output.  Drives the ccfsm described
   above: decides whether INSN is a conditional branch whose skipped insns
   can all be conditionalized instead, and if so primes the fsm state and
   arc_ccfsm_current_cc for arc_print_operand.  OPVEC/NOPERANDS are the
   standard FINAL_PRESCAN_INSN arguments, unused here.  */

void
arc_final_prescan_insn (insn, opvec, noperands)
     rtx insn;
     rtx *opvec ATTRIBUTE_UNUSED;
     int noperands ATTRIBUTE_UNUSED;
{
  /* BODY will hold the body of INSN.  */
  register rtx body = PATTERN (insn);

  /* This will be 1 if trying to repeat the trick (ie: do the `else' part of
     an if/then/else), and things need to be reversed.  */
  int reverse = 0;

  /* If we start with a return insn, we only succeed if we find another one. */
  int seeking_return = 0;

  /* START_INSN will hold the insn from where we start looking.  This is the
     first insn after the following code_label if REVERSE is true.  */
  rtx start_insn = insn;

  /* Update compare/branch separation marker.  */
  record_cc_ref (insn);

  /* Allow -mdebug-ccfsm to turn this off so we can see how well it does.
     We can't do this in macro FINAL_PRESCAN_INSN because its called from
     final_scan_insn which has `optimize' as a local.  */
  if (optimize < 2 || TARGET_NO_COND_EXEC)
    return;

  /* If in state 4, check if the target branch is reached, in order to
     change back to state 0.  */
  if (arc_ccfsm_state == 4)
    {
      if (insn == arc_ccfsm_target_insn)
	{
	  arc_ccfsm_target_insn = NULL;
	  arc_ccfsm_state = 0;
	}
      return;
    }

  /* If in state 3, it is possible to repeat the trick, if this insn is an
     unconditional branch to a label, and immediately following this branch
     is the previous target label which is only used once, and the label this
     branch jumps to is not too far off.  Or in other words "we've done the
     `then' part, see if we can do the `else' part."  */
  if (arc_ccfsm_state == 3)
    {
      if (simplejump_p (insn))
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    {
	      /* ??? Isn't this always a barrier?  */
	      start_insn = next_nonnote_insn (start_insn);
	    }
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    reverse = TRUE;
	  else
	    return;
	}
      else if (GET_CODE (body) == RETURN)
	{
	  start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == BARRIER)
	    start_insn = next_nonnote_insn (start_insn);
	  if (GET_CODE (start_insn) == CODE_LABEL
	      && CODE_LABEL_NUMBER (start_insn) == arc_ccfsm_target_label
	      && LABEL_NUSES (start_insn) == 1)
	    {
	      reverse = TRUE;
	      seeking_return = 1;
	    }
	  else
	    return;
	}
      else
	return;
    }

  if (GET_CODE (insn) != JUMP_INSN)
    return;

  /* This jump might be paralleled with a clobber of the condition codes,
     the jump should always come first.  */
  if (GET_CODE (body) == PARALLEL && XVECLEN (body, 0) > 0)
    body = XVECEXP (body, 0, 0);

  if (reverse
      || (GET_CODE (body) == SET && GET_CODE (SET_DEST (body)) == PC
	  && GET_CODE (SET_SRC (body)) == IF_THEN_ELSE))
    {
      int insns_skipped = 0, fail = FALSE, succeed = FALSE;
      /* Flag which part of the IF_THEN_ELSE is the LABEL_REF.  */
      int then_not_else = TRUE;
      /* Nonzero if next insn must be the target label.  */
      int next_must_be_target_label_p;
      rtx this_insn = start_insn, label = 0;

      /* Register the insn jumped to.  */
      if (reverse)
	{
	  if (!seeking_return)
	    label = XEXP (SET_SRC (body), 0);
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == LABEL_REF)
	label = XEXP (XEXP (SET_SRC (body), 1), 0);
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == LABEL_REF)
	{
	  label = XEXP (XEXP (SET_SRC (body), 2), 0);
	  then_not_else = FALSE;
	}
      else if (GET_CODE (XEXP (SET_SRC (body), 1)) == RETURN)
	seeking_return = 1;
      else if (GET_CODE (XEXP (SET_SRC (body), 2)) == RETURN)
	{
	  seeking_return = 1;
	  then_not_else = FALSE;
	}
      else
	abort ();

      /* See how many insns this branch skips, and what kind of insns.  If all
	 insns are okay, and the label or unconditional branch to the same
	 label is not too far away, succeed.  */
      for (insns_skipped = 0, next_must_be_target_label_p = FALSE;
	   !fail && !succeed && insns_skipped < MAX_INSNS_SKIPPED;
	   insns_skipped++)
	{
	  rtx scanbody;

	  this_insn = next_nonnote_insn (this_insn);
	  if (!this_insn)
	    break;

	  if (next_must_be_target_label_p)
	    {
	      if (GET_CODE (this_insn) == BARRIER)
		continue;
	      if (GET_CODE (this_insn) == CODE_LABEL
		  && this_insn == label)
		{
		  arc_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;
	    }

	  scanbody = PATTERN (this_insn);

	  switch (GET_CODE (this_insn))
	    {
	    case CODE_LABEL:
	      /* Succeed if it is the target label, otherwise fail since
		 control falls in from somewhere else.  */
	      if (this_insn == label)
		{
		  arc_ccfsm_state = 1;
		  succeed = TRUE;
		}
	      else
		fail = TRUE;
	      break;

	    case BARRIER:
	      /* Succeed if the following insn is the target label.
		 Otherwise fail.
		 If return insns are used then the last insn in a function
		 will be a barrier.  */
	      next_must_be_target_label_p = TRUE;
	      break;

	    case CALL_INSN:
	      /* Can handle a call insn if there are no insns after it.
		 IE: The next "insn" is the target label.  We don't have to
		 worry about delay slots as such insns are SEQUENCE's inside
		 INSN's.  ??? It is possible to handle such insns though.  */
	      if (get_attr_cond (this_insn) == COND_CANUSE)
		next_must_be_target_label_p = TRUE;
	      else
		fail = TRUE;
	      break;

	    case JUMP_INSN:
	      /* If this is an unconditional branch to the same label, succeed.
		 If it is to another label, do nothing.  If it is conditional,
		 fail.  */
	      /* ??? Probably, the test for the SET and the PC are unnecessary. */

	      if (GET_CODE (scanbody) == SET
		  && GET_CODE (SET_DEST (scanbody)) == PC)
		{
		  if (GET_CODE (SET_SRC (scanbody)) == LABEL_REF
		      && XEXP (SET_SRC (scanbody), 0) == label && !reverse)
		    {
		      arc_ccfsm_state = 2;
		      succeed = TRUE;
		    }
		  else if (GET_CODE (SET_SRC (scanbody)) == IF_THEN_ELSE)
		    fail = TRUE;
		}
	      else if (GET_CODE (scanbody) == RETURN
		       && seeking_return)
		{
		  arc_ccfsm_state = 2;
		  succeed = TRUE;
		}
	      else if (GET_CODE (scanbody) == PARALLEL)
		{
		  if (get_attr_cond (this_insn) != COND_CANUSE)
		    fail = TRUE;
		}
	      break;

	    case INSN:
	      /* We can only do this with insns that can use the condition
		 codes (and don't set them).  */
	      if (GET_CODE (scanbody) == SET
		  || GET_CODE (scanbody) == PARALLEL)
		{
		  if (get_attr_cond (this_insn) != COND_CANUSE)
		    fail = TRUE;
		}
	      /* We can't handle other insns like sequences.  */
	      else
		fail = TRUE;
	      break;

	    default:
	      break;
	    }
	}

      if (succeed)
	{
	  if ((!seeking_return) && (arc_ccfsm_state == 1 || reverse))
	    arc_ccfsm_target_label = CODE_LABEL_NUMBER (label);
	  else if (seeking_return || arc_ccfsm_state == 2)
	    {
	      /* Skip trailing USE insns to find the real end-of-function
		 return/barrier that marks where conditionalizing stops.  */
	      while (this_insn && GET_CODE (PATTERN (this_insn)) == USE)
		{
		  this_insn = next_nonnote_insn (this_insn);
		  if (this_insn && (GET_CODE (this_insn) == BARRIER
				    || GET_CODE (this_insn) == CODE_LABEL))
		    abort ();
		}
	      if (!this_insn)
		{
		  /* Oh dear! we ran off the end, give up.  */
		  extract_insn_cached (insn);
		  arc_ccfsm_state = 0;
		  arc_ccfsm_target_insn = NULL;
		  return;
		}
	      arc_ccfsm_target_insn = this_insn;
	    }
	  else
	    abort ();

	  /* If REVERSE is true, ARC_CCFSM_CURRENT_CC needs to be inverted
	     from what it was.  */
	  if (!reverse)
	    arc_ccfsm_current_cc = get_arc_condition_code (XEXP (SET_SRC (body),
								 0));

	  if (reverse || then_not_else)
	    arc_ccfsm_current_cc = ARC_INVERSE_CONDITION_CODE (arc_ccfsm_current_cc);
	}

      /* Restore recog_data.  Getting the attributes of other insns can
	 destroy this array, but final.c assumes that it remains intact
	 across this call.  */
      extract_insn_cached (insn);
    }
}
2284 | ||
2285 | /* Record that we are currently outputting label NUM with prefix PREFIX. | |
2286 | It it's the label we're looking for, reset the ccfsm machinery. | |
2287 | ||
4977bab6 | 2288 | Called from (*targetm.asm_out.internal_label). */ |
e90d5e57 JL |
2289 | |
2290 | void | |
2291 | arc_ccfsm_at_label (prefix, num) | |
2b046bda | 2292 | const char *prefix; |
e90d5e57 JL |
2293 | int num; |
2294 | { | |
2295 | if (arc_ccfsm_state == 3 && arc_ccfsm_target_label == num | |
2296 | && !strcmp (prefix, "L")) | |
2297 | { | |
2298 | arc_ccfsm_state = 0; | |
2299 | arc_ccfsm_target_insn = NULL_RTX; | |
2300 | } | |
2301 | } | |
2302 | ||
2303 | /* See if the current insn, which is a conditional branch, is to be | |
2304 | deleted. */ | |
2305 | ||
2306 | int | |
2307 | arc_ccfsm_branch_deleted_p () | |
2308 | { | |
2309 | if (arc_ccfsm_state == 1 || arc_ccfsm_state == 2) | |
2310 | return 1; | |
2311 | return 0; | |
2312 | } | |
2313 | ||
/* Record a branch isn't output because subsequent insns can be
   conditionalized.  Advances the ccfsm from state 1->3 or 2->4 (see the
   state table above) and repairs the compare/branch separation marker.  */

void
arc_ccfsm_record_branch_deleted ()
{
  /* Indicate we're conditionalizing insns now.  */
  arc_ccfsm_state += 2;

  /* If the next insn is a subroutine call, we still need a nop between the
     cc setter and user.  We need to undo the effect of calling record_cc_ref
     for the just deleted branch.  */
  current_insn_set_cc_p = last_insn_set_cc_p;
}
965eb474 RH |
2328 | \f |
/* Implement va_start for the ARC: VALIST is the va_list tree, NEXTARG the
   address of the first anonymous argument.  If an odd number of argument
   registers has been used, skip one word so doubleword-aligned stack
   arguments line up (mirrors arc_setup_incoming_varargs).  */

void
arc_va_start (valist, nextarg)
     tree valist;
     rtx nextarg;
{
  /* See arc_setup_incoming_varargs for reasons for this oddity.
     NOTE(review): current_function_args_info is compared as a plain int
     here (< 8 registers used, odd count) — assumes CUMULATIVE_ARGS is a
     simple register counter for this port.  */
  if (current_function_args_info < 8
      && (current_function_args_info & 1))
    nextarg = plus_constant (nextarg, UNITS_PER_WORD);

  std_expand_builtin_va_start (valist, nextarg);
}
2341 | ||
/* Implement va_arg for the ARC: return an rtx holding the address of the
   next argument of type TYPE from the va_list VALIST, and emit code to
   advance VALIST past it.  Aggregates and scalars larger than 8 bytes are
   passed by reference, so for those the va_list slot holds a pointer.  */

rtx
arc_va_arg (valist, type)
     tree valist, type;
{
  rtx addr_rtx;
  tree addr, incr;
  tree type_ptr = build_pointer_type (type);

  /* All aggregates are passed by reference.  All scalar types larger
     than 8 bytes are passed by reference.  */

  if (AGGREGATE_TYPE_P (type) || int_size_in_bytes (type) > 8)
    {
      tree type_ptr_ptr = build_pointer_type (type_ptr);

      /* The slot holds a TYPE*; load it through a TYPE** view of AP.  */
      addr = build (INDIRECT_REF, type_ptr,
		    build (NOP_EXPR, type_ptr_ptr, valist));

      /* Advance AP past the one-word pointer slot.  */
      incr = build (PLUS_EXPR, TREE_TYPE (valist),
		    valist, build_int_2 (UNITS_PER_WORD, 0));
    }
  else
    {
      HOST_WIDE_INT align, rounded_size;

      /* Compute the rounded size of the type.  */
      align = PARM_BOUNDARY / BITS_PER_UNIT;
      rounded_size = (((TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT
			+ align - 1) / align) * align);

      /* Align 8 byte operands.  */
      addr = valist;
      if (TYPE_ALIGN (type) > BITS_PER_WORD)
	{
	  /* AP = (TYPE *)(((int)AP + 7) & -8) */

	  addr = build (NOP_EXPR, integer_type_node, valist);
	  addr = fold (build (PLUS_EXPR, integer_type_node, addr,
			      build_int_2 (7, 0)));
	  addr = fold (build (BIT_AND_EXPR, integer_type_node, addr,
			      build_int_2 (-8, 0)));
	  addr = fold (build (NOP_EXPR, TREE_TYPE (valist), addr));
	}

      /* The increment is always rounded_size past the aligned pointer.  */
      incr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
			  build_int_2 (rounded_size, 0)));

      /* Adjust the pointer in big-endian mode: a small value occupies the
	 high-address end of its padded slot.  */
      if (BYTES_BIG_ENDIAN)
	{
	  HOST_WIDE_INT adj;
	  adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
	  if (rounded_size > align)
	    adj = rounded_size;

	  addr = fold (build (PLUS_EXPR, TREE_TYPE (addr), addr,
			      build_int_2 (rounded_size - adj, 0)));
	}
    }

  /* Evaluate the data address.  */
  addr_rtx = expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr_rtx = copy_to_reg (addr_rtx);

  /* Compute new value for AP.  */
  incr = build (MODIFY_EXPR, TREE_TYPE (valist), valist, incr);
  TREE_SIDE_EFFECTS (incr) = 1;
  expand_expr (incr, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return addr_rtx;
}
fb49053f | 2414 | |
4977bab6 ZW |
/* This is how to output a definition of an internal numbered label where
   PREFIX is the class of label and NUM is the number within the class.

   Implements TARGET_ASM_INTERNAL_LABEL: notify the ccfsm machinery (so a
   state-3 target label resets it), then emit the label normally.  */

static void
arc_internal_label (stream, prefix, labelno)
     FILE *stream;
     const char *prefix;
     unsigned long labelno;
{
  arc_ccfsm_at_label (prefix, labelno);
  default_internal_label (stream, prefix, labelno);
}