diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index 9f9289bfad55..162d3f301f30 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -1,6 +1,6 @@
 /* Emit RTL for the GCC expander.
    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
-   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
@@ -69,17 +69,6 @@ enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
 
 static GTY(()) int label_num = 1;
 
-/* Highest label number in current function.
-   Zero means use the value of label_num instead.
-   This is nonzero only when belatedly compiling an inline function.  */
-
-static int last_label_num;
-
-/* Value label_num had when set_new_last_label_num was called.
-   If label_num has not changed since then, last_label_num is valid.  */
-
-static int base_label_num;
-
 /* Nonzero means do not generate NOTEs for source line numbers.  */
 
 static int no_line_numbers;
@@ -194,8 +183,7 @@ static hashval_t reg_attrs_htab_hash (const void *);
 static int reg_attrs_htab_eq (const void *, const void *);
 static reg_attrs *get_reg_attrs (tree, int);
 static tree component_ref_for_mem_expr (tree);
-static rtx gen_const_vector_0 (enum machine_mode);
-static rtx gen_complex_constant_part (enum machine_mode, rtx, int);
+static rtx gen_const_vector (enum machine_mode, int);
 static void copy_rtx_if_shared_1 (rtx *orig);
 
 /* Probability of the conditional branch currently proceeded by try_split.
@@ -618,20 +606,98 @@ gen_const_mem (enum machine_mode mode, rtx addr)
   return mem;
 }
 
-rtx
-gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
+/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
+   this construct would be valid, and false otherwise.  */
+
+bool
+validate_subreg (enum machine_mode omode, enum machine_mode imode,
+		 rtx reg, unsigned int offset)
 {
-  /* This is the most common failure type.
-     Catch it early so we can see who does it.  */
-  gcc_assert (!(offset % GET_MODE_SIZE (mode)));
+  unsigned int isize = GET_MODE_SIZE (imode);
+  unsigned int osize = GET_MODE_SIZE (omode);
 
-  /* This check isn't usable right now because combine will
-     throw arbitrary crap like a CALL into a SUBREG in
-     gen_lowpart_for_combine so we must just eat it.  */
-#if 0
-  /* Check for this too.  */
-  gcc_assert (offset < GET_MODE_SIZE (GET_MODE (reg)));
+  /* All subregs must be aligned.  */
+  if (offset % osize != 0)
+    return false;
+
+  /* The subreg offset cannot be outside the inner object.  */
+  if (offset >= isize)
+    return false;
+
+  /* ??? This should not be here.  Temporarily continue to allow word_mode
+     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
+     Generally, backends are doing something sketchy but it'll take time to
+     fix them all.  */
+  if (omode == word_mode)
+    ;
+  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
+     is the culprit here, and not the backends.  */
+  else if (osize >= UNITS_PER_WORD && isize >= osize)
+    ;
+  /* Allow component subregs of complex and vector.  Though given the below
+     extraction rules, it's not always clear what that means.  */
+  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
+	   && GET_MODE_INNER (imode) == omode)
+    ;
+  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
+     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
+     represent this.  It's questionable if this ought to be represented at
+     all -- why can't this all be hidden in post-reload splitters that make
+     arbitrary mode changes to the registers themselves.  */
+  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
+    ;
+  /* Subregs involving floating point modes are not allowed to
+     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
+     (subreg:SI (reg:DF) 0) isn't.  */
+  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
+    {
+      if (isize != osize)
+	return false;
+    }
+
+  /* Paradoxical subregs must have offset zero.  */
+  if (osize > isize)
+    return offset == 0;
+
+  /* This is a normal subreg.  Verify that the offset is representable.  */
+
+  /* For hard registers, we already have most of these rules collected in
+     subreg_offset_representable_p.  */
+  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
+    {
+      unsigned int regno = REGNO (reg);
+
+#ifdef CANNOT_CHANGE_MODE_CLASS
+      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
+	  && GET_MODE_INNER (imode) == omode)
+	;
+      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
+	return false;
 #endif
+
+      return subreg_offset_representable_p (regno, imode, offset, omode);
+    }
+
+  /* For pseudo registers, we want most of the same checks.  Namely:
+     If the register is no larger than a word, the subreg must be lowpart.
+     If the register is larger than a word, the subreg must be the lowpart
+     of a subword.  A subreg does *not* perform arbitrary bit extraction.
+     Given that we've already checked mode/offset alignment, we only have
+     to check subword subregs here.  */
+  if (osize < UNITS_PER_WORD)
+    {
+      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
+      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
+      if (offset % UNITS_PER_WORD != low_off)
+	return false;
+    }
+  return true;
+}
+
+rtx
+gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
+{
+  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
   return gen_rtx_raw_SUBREG (mode, reg, offset);
 }
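The two unconditional checks at the top of the new validate_subreg do most of
the day-to-day filtering; everything after them is mode-class special cases.
A minimal standalone sketch of just those two rules, with plain byte sizes
standing in for GCC's machine modes (subreg_offset_ok and the example sizes
are illustrative names for this note, not GCC API):

#include <stdbool.h>
#include <stdio.h>

/* ISIZE/OSIZE are the inner and outer mode sizes in bytes, OFFSET the
   subreg byte offset, mirroring the first two tests in validate_subreg.  */
static bool
subreg_offset_ok (unsigned int isize, unsigned int osize, unsigned int offset)
{
  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;
  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;
  return true;
}

int
main (void)
{
  /* (subreg:SI (reg:DI) 4): aligned and in range.  */
  printf ("%d\n", subreg_offset_ok (8, 4, 4));  /* prints 1 */
  /* (subreg:SI (reg:DI) 2): offset not a multiple of the outer size.  */
  printf ("%d\n", subreg_offset_ok (8, 4, 2));  /* prints 0 */
  /* (subreg:SI (reg:SF) 4): offset falls outside the inner object.  */
  printf ("%d\n", subreg_offset_ok (4, 4, 4));  /* prints 0 */
  return 0;
}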
@@ -1007,8 +1073,6 @@ max_reg_num (void)
 int
 max_label_num (void)
 {
-  if (last_label_num && label_num == base_label_num)
-    return last_label_num;
   return label_num;
 }
 
@@ -1031,34 +1095,6 @@ maybe_set_first_label_num (rtx x)
     first_label_num = CODE_LABEL_NUMBER (x);
 }
 
-/* Return the final regno of X, which is a SUBREG of a hard
-   register.  */
-int
-subreg_hard_regno (rtx x, int check_mode)
-{
-  enum machine_mode mode = GET_MODE (x);
-  unsigned int byte_offset, base_regno, final_regno;
-  rtx reg = SUBREG_REG (x);
-
-  /* This is where we attempt to catch illegal subregs
-     created by the compiler.  */
-  gcc_assert (GET_CODE (x) == SUBREG && REG_P (reg));
-  base_regno = REGNO (reg);
-  gcc_assert (base_regno < FIRST_PSEUDO_REGISTER);
-  gcc_assert (!check_mode || HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)));
-#ifdef ENABLE_CHECKING
-  gcc_assert (subreg_offset_representable_p (REGNO (reg), GET_MODE (reg),
-					     SUBREG_BYTE (x), mode));
-#endif
-  /* Catch non-congruent offsets too.  */
-  byte_offset = SUBREG_BYTE (x);
-  gcc_assert (!(byte_offset % GET_MODE_SIZE (mode)));
-
-  final_regno = subreg_regno (x);
-
-  return final_regno;
-}
-
 /* Return a value representing some low-order bits of X, where the
    number of low-order bits is given by MODE.  Note that no conversion is done
    between floating-point and fixed-point values, rather, the bit
@@ -1132,81 +1168,6 @@ gen_lowpart_common (enum machine_mode mode, rtx x)
   return 0;
 }
 
-/* Return the constant real or imaginary part (which has mode MODE)
-   of a complex value X.  The IMAGPART_P argument determines whether
-   the real or complex component should be returned.  This function
-   returns NULL_RTX if the component isn't a constant.  */
-
-static rtx
-gen_complex_constant_part (enum machine_mode mode, rtx x, int imagpart_p)
-{
-  tree decl, part;
-
-  if (MEM_P (x)
-      && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
-    {
-      decl = SYMBOL_REF_DECL (XEXP (x, 0));
-      if (decl != NULL_TREE && TREE_CODE (decl) == COMPLEX_CST)
-	{
-	  part = imagpart_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
-	  if (TREE_CODE (part) == REAL_CST
-	      || TREE_CODE (part) == INTEGER_CST)
-	    return expand_expr (part, NULL_RTX, mode, 0);
-	}
-    }
-  return NULL_RTX;
-}
-
-/* Return the real part (which has mode MODE) of a complex value X.
-   This always comes at the low address in memory.  */
-
-rtx
-gen_realpart (enum machine_mode mode, rtx x)
-{
-  rtx part;
-
-  /* Handle complex constants.  */
-  part = gen_complex_constant_part (mode, x, 0);
-  if (part != NULL_RTX)
-    return part;
-
-  if (WORDS_BIG_ENDIAN
-      && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
-      && REG_P (x)
-      && REGNO (x) < FIRST_PSEUDO_REGISTER)
-    internal_error
-      ("can't access real part of complex value in hard register");
-  else if (WORDS_BIG_ENDIAN)
-    return gen_highpart (mode, x);
-  else
-    return gen_lowpart (mode, x);
-}
-
-/* Return the imaginary part (which has mode MODE) of a complex value X.
-   This always comes at the high address in memory.  */
-
-rtx
-gen_imagpart (enum machine_mode mode, rtx x)
-{
-  rtx part;
-
-  /* Handle complex constants.  */
-  part = gen_complex_constant_part (mode, x, 1);
-  if (part != NULL_RTX)
-    return part;
-
-  if (WORDS_BIG_ENDIAN)
-    return gen_lowpart (mode, x);
-  else if (! WORDS_BIG_ENDIAN
-	   && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
-	   && REG_P (x)
-	   && REGNO (x) < FIRST_PSEUDO_REGISTER)
-    internal_error
-      ("can't access imaginary part of complex value in hard register");
-  else
-    return gen_highpart (mode, x);
-}
-
 rtx
 gen_highpart (enum machine_mode mode, rtx x)
 {
@@ -1369,9 +1330,10 @@ operand_subword (rtx op, unsigned int offset, int validate_address, enum machine
   return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
 }
 
-/* Similar to `operand_subword', but never return 0.  If we can't extract
-   the required subword, put OP into a register and try again.  If that fails,
-   abort.  We always validate the address in this case.
+/* Similar to `operand_subword', but never return 0.  If we can't
+   extract the required subword, put OP into a register and try again.
+   The second attempt must succeed.  We always validate the address in
+   this case.
 
    MODE is the mode of OP, in case it is CONST_INT.  */
 
@@ -1399,38 +1361,6 @@ operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
   return result;
 }
 
-/* Given a compare instruction, swap the operands.
-   A test instruction is changed into a compare of 0 against the operand.  */
-
-void
-reverse_comparison (rtx insn)
-{
-  rtx body = PATTERN (insn);
-  rtx comp;
-
-  if (GET_CODE (body) == SET)
-    comp = SET_SRC (body);
-  else
-    comp = SET_SRC (XVECEXP (body, 0, 0));
-
-  if (GET_CODE (comp) == COMPARE)
-    {
-      rtx op0 = XEXP (comp, 0);
-      rtx op1 = XEXP (comp, 1);
-      XEXP (comp, 0) = op1;
-      XEXP (comp, 1) = op0;
-    }
-  else
-    {
-      rtx new = gen_rtx_COMPARE (VOIDmode,
-				 CONST0_RTX (GET_MODE (comp)), comp);
-      if (GET_CODE (body) == SET)
-	SET_SRC (body) = new;
-      else
-	SET_SRC (XVECEXP (body, 0, 0)) = new;
-    }
-}
-
 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
    or (2) a component ref of something variable.  Represent the later with
    a NULL expression.  */
@@ -1485,7 +1415,7 @@ mem_expr_equal_p (tree expr1, tree expr2)
 	   && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
 				TREE_OPERAND (expr2, 1));
 
-  if (TREE_CODE (expr1) == INDIRECT_REF)
+  if (INDIRECT_REF_P (expr1))
     return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
 			     TREE_OPERAND (expr2, 0));
 
@@ -1546,8 +1476,19 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 
       /* We can set the alignment from the type if we are making an object,
 	 this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
-      if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
+      if (objectp || TREE_CODE (t) == INDIRECT_REF
+	  || TREE_CODE (t) == ALIGN_INDIRECT_REF
+	  || TYPE_ALIGN_OK (type))
 	align = MAX (align, TYPE_ALIGN (type));
+      else
+	if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
+	  {
+	    if (integer_zerop (TREE_OPERAND (t, 1)))
+	      /* We don't know anything about the alignment.  */
+	      align = BITS_PER_UNIT;
+	    else
+	      align = tree_low_cst (TREE_OPERAND (t, 1), 1);
+	  }
 
       /* If the size is known, we can set that.  */
       if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
@@ -1561,7 +1502,12 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
       if (base && DECL_P (base)
 	  && TREE_READONLY (base)
 	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
-	MEM_READONLY_P (ref) = 1;
+	{
+	  tree base_type = TREE_TYPE (base);
+	  gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
+		      || DECL_ARTIFICIAL (base));
+	  MEM_READONLY_P (ref) = 1;
+	}
 
       if (TREE_THIS_VOLATILE (t))
 	MEM_VOLATILE_P (ref) = 1;
@@ -1574,9 +1520,9 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 	     || TREE_CODE (t) == SAVE_EXPR)
 	t = TREE_OPERAND (t, 0);
 
-      /* If this expression can't be addressed (e.g., it contains a reference
-	 to a non-addressable field), show we don't change its alias set.  */
-      if (! can_address_p (t))
+      /* If this expression uses its parent's alias set, mark it such
+	 that we won't change it.  */
+      if (component_uses_parent_alias_set (t))
 	MEM_KEEP_ALIAS_SET_P (ref) = 1;
 
       /* If this is a decl, set the attributes of the MEM from it.  */
@@ -1592,7 +1538,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 	}
 
       /* If this is a constant, we know the alignment.  */
-      else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
+      else if (CONSTANT_CLASS_P (t))
 	{
 	  align = TYPE_ALIGN (type);
 #ifdef CONSTANT_ALIGNMENT
@@ -1633,8 +1579,8 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 		     index, then convert to sizetype and multiply by the size of the
 		     array element.  */
 		  if (! integer_zerop (low_bound))
-		    index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
-					  index, low_bound));
+		    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
+					 index, low_bound);
 
 		  off_tree = size_binop (PLUS_EXPR,
 					 size_binop (MULT_EXPR, convert (sizetype,
 									 index),
 						     unit_size),
 					 off_tree);
@@ -1672,7 +1618,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
 	     the size we got from the type?  */
 	}
       else if (flag_argument_noalias > 1
-	       && TREE_CODE (t2) == INDIRECT_REF
+	       && (INDIRECT_REF_P (t2))
 	       && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
 	{
 	  expr = t2;
@@ -1683,7 +1629,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
       /* If this is a Fortran indirect argument reference, record the
 	 parameter decl.  */
       else if (flag_argument_noalias > 1
-	       && TREE_CODE (t) == INDIRECT_REF
+	       && (INDIRECT_REF_P (t))
 	       && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
 	{
 	  expr = t;
@@ -1701,6 +1647,14 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
       size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
     }
 
+  if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
+    {
+      /* Force EXPR and OFFSET to NULL, since we don't know exactly what
+	 we're overlapping.  */
+      offset = NULL;
+      expr = NULL;
+    }
+
   /* Now set the attributes we computed above.  */
   MEM_ATTRS (ref)
     = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
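The alignment logic added to set_mem_attributes_minus_bitpos now has three
outcomes: trust the type's alignment for objects and (ALIGN_)INDIRECT_REFs,
trust the explicit operand of a MISALIGNED_INDIRECT_REF, and fall back to
byte alignment when that operand is zero.  A compact model of the decision,
with the tree predicates flattened to booleans (choose_alignment and its
parameters are illustrative names; the claimed value is assumed to carry the
alignment the tree records, as tree_low_cst reads it):

#include <stdbool.h>

#define BITS_PER_UNIT 8

static unsigned int
choose_alignment (bool objectp, bool indirectp, bool misalignedp,
                  unsigned long claimed, unsigned int type_align,
                  unsigned int align)
{
  if (objectp || indirectp)
    {
      /* A trustworthy reference: the type's alignment is a lower bound.  */
      if (type_align > align)
        align = type_align;
    }
  else if (misalignedp)
    /* A zero operand means nothing is known, so assume byte alignment;
       otherwise use the recorded value.  */
    align = claimed ? (unsigned int) claimed : BITS_PER_UNIT;

  return align;
}

Note the asymmetry: the first branch can only raise the alignment, while the
misaligned branch replaces it outright, matching the code above.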
@@ -2116,25 +2070,6 @@ set_new_first_and_last_insn (rtx first, rtx last)
   cur_insn_uid++;
 }
 
-
-/* Set the last label number found in the current function.
-   This is used when belatedly compiling an inline function.  */
-
-void
-set_new_last_label_num (int last)
-{
-  base_label_num = label_num;
-  last_label_num = last;
-}
-
-/* Restore all variables describing the current status from the structure *P.
-   This is used after a nested function.  */
-
-void
-restore_emit_status (struct function *p ATTRIBUTE_UNUSED)
-{
-  last_label_num = 0;
-}
 
 /* Go through all the RTL insn bodies and copy any invalid shared
    structure.  This routine should only be called once.  */
@@ -2260,15 +2195,18 @@ verify_rtx_sharing (rtx orig, rtx insn)
 
   /* This rtx may not be shared.  If it has already been seen,
      replace it with a copy of itself.  */
-
+#ifdef ENABLE_CHECKING
   if (RTX_FLAG (x, used))
     {
       error ("Invalid rtl sharing found in the insn");
       debug_rtx (insn);
       error ("Shared rtx");
       debug_rtx (x);
-      fatal_error ("Internal consistency failure");
+      internal_error ("Internal consistency failure");
     }
+#endif
+  gcc_assert (!RTX_FLAG (x, used));
+
   RTX_FLAG (x, used) = 1;
 
   /* Now scan the subexpressions recursively.  */
@@ -2291,9 +2229,11 @@ verify_rtx_sharing (rtx orig, rtx insn)
 
 	  for (j = 0; j < len; j++)
 	    {
-	      /* We allow sharing of ASM_OPERANDS inside single instruction.  */
+	      /* We allow sharing of ASM_OPERANDS inside single
+		 instruction.  */
 	      if (j && GET_CODE (XVECEXP (x, i, j)) == SET
-		  && GET_CODE (SET_SRC (XVECEXP (x, i, j))) == ASM_OPERANDS)
+		  && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
+		      == ASM_OPERANDS))
 		verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
 	      else
 		verify_rtx_sharing (XVECEXP (x, i, j), insn);
@@ -2379,105 +2319,6 @@ reset_used_decls (tree blk)
     reset_used_decls (t);
 }
 
-/* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
-   placed in the result directly, rather than being copied.  MAY_SHARE is
-   either a MEM of an EXPR_LIST of MEMs.  */
-
-rtx
-copy_most_rtx (rtx orig, rtx may_share)
-{
-  rtx copy;
-  int i, j;
-  RTX_CODE code;
-  const char *format_ptr;
-
-  if (orig == may_share
-      || (GET_CODE (may_share) == EXPR_LIST
-	  && in_expr_list_p (may_share, orig)))
-    return orig;
-
-  code = GET_CODE (orig);
-
-  switch (code)
-    {
-    case REG:
-    case CONST_INT:
-    case CONST_DOUBLE:
-    case CONST_VECTOR:
-    case SYMBOL_REF:
-    case CODE_LABEL:
-    case PC:
-    case CC0:
-      return orig;
-    default:
-      break;
-    }
-
-  copy = rtx_alloc (code);
-  PUT_MODE (copy, GET_MODE (orig));
-  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
-  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
-  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);
-  RTX_FLAG (copy, frame_related) = RTX_FLAG (orig, frame_related);
-  RTX_FLAG (copy, return_val) = RTX_FLAG (orig, return_val);
-
-  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
-
-  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
-    {
-      switch (*format_ptr++)
-	{
-	case 'e':
-	  XEXP (copy, i) = XEXP (orig, i);
-	  if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
-	    XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
-	  break;
-
-	case 'u':
-	  XEXP (copy, i) = XEXP (orig, i);
-	  break;
-
-	case 'E':
-	case 'V':
-	  XVEC (copy, i) = XVEC (orig, i);
-	  if (XVEC (orig, i) != NULL)
-	    {
-	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
-	      for (j = 0; j < XVECLEN (copy, i); j++)
-		XVECEXP (copy, i, j)
-		  = copy_most_rtx (XVECEXP (orig, i, j), may_share);
-	    }
-	  break;
-
-	case 'w':
-	  XWINT (copy, i) = XWINT (orig, i);
-	  break;
-
-	case 'n':
-	case 'i':
-	  XINT (copy, i) = XINT (orig, i);
-	  break;
-
-	case 't':
-	  XTREE (copy, i) = XTREE (orig, i);
-	  break;
-
-	case 's':
-	case 'S':
-	  XSTR (copy, i) = XSTR (orig, i);
-	  break;
-
-	case '0':
-	  X0ANY (copy, i) = X0ANY (orig, i);
-	  break;
-
-	default:
-	  gcc_unreachable ();
-	}
-    }
-  return copy;
-}
-
 /* Mark ORIG as in use, and return a copy of it if it was already
    in use.  Recursively does the same for subexpressions.  Uses
    copy_rtx_if_shared_1 to reduce stack space.  */
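verify_rtx_sharing relies on a single scratch bit per node: the first visit
sets RTX_FLAG (x, used), so any second path into the same node -- that is,
any sharing -- trips the assertion.  The same discipline on a toy binary
expression tree (struct node and verify_no_sharing are illustrative; the
real walker also dispatches on rtx format strings and whitelists
ASM_OPERANDS sharing):

#include <assert.h>
#include <stddef.h>

struct node
{
  int used;             /* the RTX_FLAG (x, used) analogue */
  struct node *op[2];   /* subexpressions, NULL when absent */
};

static void
verify_no_sharing (struct node *x)
{
  if (x == NULL)
    return;
  /* A shared node is reached twice; the flag set on the first visit
     triggers the assertion on the second.  */
  assert (!x->used);
  x->used = 1;
  verify_no_sharing (x->op[0]);
  verify_no_sharing (x->op[1]);
}

int
main (void)
{
  struct node leaf = { 0, { NULL, NULL } };
  struct node root = { 0, { &leaf, &leaf } };  /* LEAF is shared */
  verify_no_sharing (&root);                   /* fires the assert */
  return 0;
}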
@@ -2847,11 +2688,19 @@ get_first_nonnote_insn (void)
 {
   rtx insn = first_insn;
 
-  while (insn)
+  if (insn)
     {
-      insn = next_insn (insn);
-      if (insn == 0 || !NOTE_P (insn))
-	break;
+      if (NOTE_P (insn))
+	for (insn = next_insn (insn);
+	     insn && NOTE_P (insn);
+	     insn = next_insn (insn))
+	  continue;
+      else
+	{
+	  if (NONJUMP_INSN_P (insn)
+	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
+	    insn = XVECEXP (PATTERN (insn), 0, 0);
+	}
     }
 
   return insn;
@@ -2865,11 +2714,20 @@ get_last_nonnote_insn (void)
 {
   rtx insn = last_insn;
 
-  while (insn)
+  if (insn)
     {
-      insn = previous_insn (insn);
-      if (insn == 0 || !NOTE_P (insn))
-	break;
+      if (NOTE_P (insn))
+	for (insn = previous_insn (insn);
+	     insn && NOTE_P (insn);
+	     insn = previous_insn (insn))
+	  continue;
+      else
+	{
+	  if (NONJUMP_INSN_P (insn)
+	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
+	    insn = XVECEXP (PATTERN (insn), 0,
+			    XVECLEN (PATTERN (insn), 0) - 1);
+	}
     }
 
   return insn;
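The old get_first_nonnote_insn / get_last_nonnote_insn loops stepped past the
head of the list before ever testing it; the rewrites test the head first,
then skip the whole leading run of notes, and additionally look inside a
SEQUENCE.  The control shape on a toy insn list (struct insn and
first_nonnote are illustrative names; the SEQUENCE case is omitted):

#include <stdbool.h>
#include <stddef.h>

struct insn
{
  bool is_note;
  struct insn *next;
};

struct insn *
first_nonnote (struct insn *insn)
{
  /* Only scan forward when the head itself is a note, mirroring the
     new "if (insn) { if (NOTE_P (insn)) for (...) ... }" structure.  */
  if (insn && insn->is_note)
    for (insn = insn->next; insn && insn->is_note; insn = insn->next)
      continue;
  return insn;  /* NULL when the list is empty or all notes */
}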
@@ -3317,7 +3175,6 @@ try_split (rtx pat, rtx trial, int last)
 
 	case REG_NORETURN:
 	case REG_SETJMP:
-	case REG_ALWAYS_RETURN:
 	  insn = insn_last;
 	  while (insn != NULL_RTX)
 	    {
@@ -3409,7 +3266,7 @@ make_insn_raw (rtx pattern)
 	  || (GET_CODE (insn) == SET
 	      && SET_DEST (insn) == pc_rtx)))
     {
-      warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
+      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
       debug_rtx (insn);
     }
 #endif
@@ -3589,7 +3446,7 @@ add_insn_before (rtx insn, rtx before)
       if (INSN_P (insn))
 	bb->flags |= BB_DIRTY;
       /* Should not happen as first in the BB is always either NOTE or
-	 LABEl.  */
+	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
 		  /* Avoid clobbering of structure when creating new BB.  */
 		  || BARRIER_P (insn)
@@ -3797,7 +3654,6 @@ find_line_note (rtx insn)
 void
 remove_unnecessary_notes (void)
 {
-  rtx block_stack = NULL_RTX;
   rtx eh_stack = NULL_RTX;
   rtx insn;
  rtx next;
@@ -3836,66 +3692,17 @@ remove_unnecessary_notes (void)
 	  break;
 
 	case NOTE_INSN_BLOCK_BEG:
-	  /* By now, all notes indicating lexical blocks should have
-	     NOTE_BLOCK filled in.  */
-	  gcc_assert (NOTE_BLOCK (insn));
-	  block_stack = alloc_INSN_LIST (insn, block_stack);
-	  break;
-
 	case NOTE_INSN_BLOCK_END:
-	  /* Too many end notes.  */
-	  gcc_assert (block_stack);
-	  /* Mismatched nesting.  */
-	  gcc_assert (NOTE_BLOCK (XEXP (block_stack, 0)) == NOTE_BLOCK (insn));
-	  tmp = block_stack;
-	  block_stack = XEXP (block_stack, 1);
-	  free_INSN_LIST_node (tmp);
+	  /* BLOCK_END and BLOCK_BEG notes only exist in the `final' pass.  */
+	  gcc_unreachable ();
 
-	  /* Scan back to see if there are any non-note instructions
-	     between INSN and the beginning of this block.  If not,
-	     then there is no PC range in the generated code that will
-	     actually be in this block, so there's no point in
-	     remembering the existence of the block.  */
-	  for (tmp = PREV_INSN (insn); tmp; tmp = PREV_INSN (tmp))
-	    {
-	      /* This block contains a real instruction.  Note that we
-		 don't include labels; if the only thing in the block
-		 is a label, then there are still no PC values that
-		 lie within the block.  */
-	      if (INSN_P (tmp))
-		break;
-
-	      /* We're only interested in NOTEs.  */
-	      if (!NOTE_P (tmp))
-		continue;
-
-	      if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
-		{
-		  /* We just verified that this BLOCK matches us with
		     the block_stack check above.  Never delete the
-		     BLOCK for the outermost scope of the function; we
-		     can refer to names from that scope even if the
-		     block notes are messed up.  */
-		  if (! is_body_block (NOTE_BLOCK (insn))
-		      && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
-		    {
-		      remove_insn (tmp);
-		      remove_insn (insn);
-		    }
-		  break;
-		}
-	      else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
-		/* There's a nested block.  We need to leave the
-		   current block in place since otherwise the debugger
-		   wouldn't be able to show symbols from our block in
-		   the nested block.  */
-		break;
-	    }
+
+	default:
+	  break;
 	}
     }
 
-  /* Too many begin notes.  */
-  gcc_assert (!block_stack && !eh_stack);
+  /* Too many EH_REGION_BEG notes.  */
+  gcc_assert (!eh_stack);
 }
 
 
@@ -3927,7 +3734,7 @@ remove_unnecessary_notes (void)
 /* Make X be output before the instruction BEFORE.  */
 
 rtx
-emit_insn_before (rtx x, rtx before)
+emit_insn_before_noloc (rtx x, rtx before)
 {
   rtx last = before;
   rtx insn;
@@ -3974,7 +3781,7 @@
    and output it before the instruction BEFORE.  */
 
 rtx
-emit_jump_insn_before (rtx x, rtx before)
+emit_jump_insn_before_noloc (rtx x, rtx before)
 {
   rtx insn, last = NULL_RTX;
@@ -4017,7 +3824,7 @@
    and output it before the instruction BEFORE.  */
 
 rtx
-emit_call_insn_before (rtx x, rtx before)
+emit_call_insn_before_noloc (rtx x, rtx before)
 {
   rtx last = NULL_RTX, insn;
@@ -4147,7 +3954,7 @@ emit_insn_after_1 (rtx first, rtx after)
 /* Make X be output after the insn AFTER.  */
 
 rtx
-emit_insn_after (rtx x, rtx after)
+emit_insn_after_noloc (rtx x, rtx after)
 {
   rtx last = after;
@@ -4203,7 +4010,7 @@ emit_insn_after_with_line_notes (rtx x, rtx after, rtx from)
    and output it after the insn AFTER.  */
 
 rtx
-emit_jump_insn_after (rtx x, rtx after)
+emit_jump_insn_after_noloc (rtx x, rtx after)
 {
   rtx last;
@@ -4239,7 +4046,7 @@
    and output it after the instruction AFTER.  */
 
 rtx
-emit_call_insn_after (rtx x, rtx after)
+emit_call_insn_after_noloc (rtx x, rtx after)
 {
   rtx last;
@@ -4340,19 +4147,19 @@ emit_note_copy_after (rtx orig, rtx after)
   return note;
 }
 
-/* Like emit_insn_after, but set INSN_LOCATOR according to SCOPE.  */
+/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
 rtx
 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
 {
-  rtx last = emit_insn_after (pattern, after);
+  rtx last = emit_insn_after_noloc (pattern, after);
 
-  if (pattern == NULL_RTX)
+  if (pattern == NULL_RTX || !loc)
     return last;
 
   after = NEXT_INSN (after);
   while (1)
     {
-      if (active_insn_p (after))
+      if (active_insn_p (after) && !INSN_LOCATOR (after))
 	INSN_LOCATOR (after) = loc;
       if (after == last)
 	break;
@@ -4361,19 +4168,29 @@ emit_insn_after_setloc (rtx pattern, rtx after, int loc)
   return last;
 }
 
-/* Like emit_jump_insn_after, but set INSN_LOCATOR according to SCOPE.  */
+/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
+rtx
+emit_insn_after (rtx pattern, rtx after)
+{
+  if (INSN_P (after))
+    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
+  else
+    return emit_insn_after_noloc (pattern, after);
+}
+
+/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
 rtx
 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
 {
-  rtx last = emit_jump_insn_after (pattern, after);
+  rtx last = emit_jump_insn_after_noloc (pattern, after);
 
-  if (pattern == NULL_RTX)
+  if (pattern == NULL_RTX || !loc)
    return last;
 
   after = NEXT_INSN (after);
   while (1)
     {
-      if (active_insn_p (after))
+      if (active_insn_p (after) && !INSN_LOCATOR (after))
 	INSN_LOCATOR (after) = loc;
       if (after == last)
 	break;
@@ -4382,19 +4199,29 @@ emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
   return last;
 }
 
-/* Like emit_call_insn_after, but set INSN_LOCATOR according to SCOPE.  */
+/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
+rtx
+emit_jump_insn_after (rtx pattern, rtx after)
+{
+  if (INSN_P (after))
+    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
+  else
+    return emit_jump_insn_after_noloc (pattern, after);
+}
+
+/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE.  */
 rtx
 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
 {
-  rtx last = emit_call_insn_after (pattern, after);
+  rtx last = emit_call_insn_after_noloc (pattern, after);
 
-  if (pattern == NULL_RTX)
+  if (pattern == NULL_RTX || !loc)
     return last;
 
   after = NEXT_INSN (after);
   while (1)
     {
-      if (active_insn_p (after))
+      if (active_insn_p (after) && !INSN_LOCATOR (after))
 	INSN_LOCATOR (after) = loc;
       if (after == last)
 	break;
@@ -4403,12 +4230,54 @@ emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
   return last;
 }
 
-/* Like emit_insn_before, but set INSN_LOCATOR according to SCOPE.  */
+/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
+rtx
+emit_call_insn_after (rtx pattern, rtx after)
+{
+  if (INSN_P (after))
+    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
+  else
+    return emit_call_insn_after_noloc (pattern, after);
+}
+
+/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
 rtx
 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
 {
   rtx first = PREV_INSN (before);
-  rtx last = emit_insn_before (pattern, before);
+  rtx last = emit_insn_before_noloc (pattern, before);
+
+  if (pattern == NULL_RTX || !loc)
+    return last;
+
+  first = NEXT_INSN (first);
+  while (1)
+    {
+      if (active_insn_p (first) && !INSN_LOCATOR (first))
+	INSN_LOCATOR (first) = loc;
+      if (first == last)
+	break;
+      first = NEXT_INSN (first);
+    }
+  return last;
+}
+
+/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
+rtx
+emit_insn_before (rtx pattern, rtx before)
+{
+  if (INSN_P (before))
+    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
+  else
+    return emit_insn_before_noloc (pattern, before);
+}
+
+/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
+rtx
+emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
+{
+  rtx first = PREV_INSN (before);
+  rtx last = emit_jump_insn_before_noloc (pattern, before);
 
   if (pattern == NULL_RTX)
     return last;
@@ -4416,7 +4285,7 @@ emit_insn_before_setloc (rtx pattern, rtx before, int loc)
   first = NEXT_INSN (first);
   while (1)
     {
-      if (active_insn_p (first))
+      if (active_insn_p (first) && !INSN_LOCATOR (first))
 	INSN_LOCATOR (first) = loc;
       if (first == last)
 	break;
@@ -4424,6 +4293,49 @@ emit_insn_before_setloc (rtx pattern, rtx before, int loc)
       first = NEXT_INSN (first);
     }
   return last;
 }
+
+/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to
+   BEFORE.  */
+rtx
+emit_jump_insn_before (rtx pattern, rtx before)
+{
+  if (INSN_P (before))
+    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
+  else
+    return emit_jump_insn_before_noloc (pattern, before);
+}
+
+/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE.  */
+rtx
+emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
+{
+  rtx first = PREV_INSN (before);
+  rtx last = emit_call_insn_before_noloc (pattern, before);
+
+  if (pattern == NULL_RTX)
+    return last;
+
+  first = NEXT_INSN (first);
+  while (1)
+    {
+      if (active_insn_p (first) && !INSN_LOCATOR (first))
+	INSN_LOCATOR (first) = loc;
+      if (first == last)
+	break;
+      first = NEXT_INSN (first);
+    }
+  return last;
+}
+
+/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to
+   BEFORE.  */
+rtx
+emit_call_insn_before (rtx pattern, rtx before)
+{
+  if (INSN_P (before))
+    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
+  else
+    return emit_call_insn_before_noloc (pattern, before);
+}
 
 /* Take X and emit it at the end of the doubly-linked
    INSN list.
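All six emit_*_before/after entry points above now share one shape: the old
name becomes a thin wrapper that derives a locator from its anchor insn and
delegates to a _setloc variant, which stamps only active insns that do not
already carry a location.  The pattern in miniature, on a toy insn type
(stamp_range and stamp_range_setloc are illustrative names, not GCC API):

#include <stddef.h>

struct insn
{
  int locator;        /* 0 means "no location recorded" */
  int is_active;
  struct insn *next;
};

static void
stamp_range_setloc (struct insn *first, struct insn *last, int loc)
{
  struct insn *i;

  /* Mirror of emit_insn_after_setloc's loop: do nothing when LOC is
     absent, and never overwrite an insn that already has a locator.  */
  if (first == NULL || !loc)
    return;
  for (i = first; i != NULL; i = i->next)
    {
      if (i->is_active && !i->locator)
	i->locator = loc;
      if (i == last)
	break;
    }
}

static void
stamp_range (struct insn *first, struct insn *last, struct insn *after)
{
  /* The emit_insn_after analogue: inherit the anchor's locator.  */
  stamp_range_setloc (first, last, after ? after->locator : 0);
}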
@@ -4710,7 +4622,7 @@ set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
 /* Return an indication of which type of insn should have X as a body.
    The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */
 
-enum rtx_code
+static enum rtx_code
 classify_insn (rtx x)
 {
   if (LABEL_P (x))
@@ -4822,18 +4734,6 @@ push_to_sequence (rtx first)
   last_insn = last;
 }
 
-/* Set up the insn chain from a chain stort in FIRST to LAST.  */
-
-void
-push_to_full_sequence (rtx first, rtx last)
-{
-  start_sequence ();
-  first_insn = first;
-  last_insn = last;
-  /* We really should have the end of the insn chain here.  */
-  gcc_assert (!last || !NEXT_INSN (last));
-}
-
 /* Set up the outer-level insn chain
    as the current sequence, saving the previously current one.  */
 
@@ -5104,7 +5004,6 @@ init_emit (void)
   reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
   last_location = UNKNOWN_LOCATION;
   first_label_num = label_num;
-  last_label_num = 0;
   seq_stack = NULL;
 
   /* Init the tables that describe all the pseudo regs.  */
@@ -5157,10 +5056,10 @@ init_emit (void)
 #endif
 }
 
-/* Generate the constant 0.  */
+/* Generate a vector constant for mode MODE and constant value CONSTANT.  */
 
 static rtx
-gen_const_vector_0 (enum machine_mode mode)
+gen_const_vector (enum machine_mode mode, int constant)
 {
   rtx tem;
   rtvec v;
@@ -5172,28 +5071,44 @@ gen_const_vector_0 (enum machine_mode mode)
 
   v = rtvec_alloc (units);
 
-  /* We need to call this function after we to set CONST0_RTX first.  */
-  gcc_assert (CONST0_RTX (inner));
+  /* We need to call this function after we set the scalar const_tiny_rtx
+     entries.  */
+  gcc_assert (const_tiny_rtx[constant][(int) inner]);
 
   for (i = 0; i < units; ++i)
-    RTVEC_ELT (v, i) = CONST0_RTX (inner);
+    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
 
   tem = gen_rtx_raw_CONST_VECTOR (mode, v);
   return tem;
 }
 
 /* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
-   all elements are zero.  */
+   all elements are zero, and the one vector when all elements are one.  */
 rtx
 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
 {
-  rtx inner_zero = CONST0_RTX (GET_MODE_INNER (mode));
+  enum machine_mode inner = GET_MODE_INNER (mode);
+  int nunits = GET_MODE_NUNITS (mode);
+  rtx x;
   int i;
 
-  for (i = GET_MODE_NUNITS (mode) - 1; i >= 0; i--)
-    if (RTVEC_ELT (v, i) != inner_zero)
-      return gen_rtx_raw_CONST_VECTOR (mode, v);
-  return CONST0_RTX (mode);
+  /* Check to see if all of the elements have the same value.  */
+  x = RTVEC_ELT (v, nunits - 1);
+  for (i = nunits - 2; i >= 0; i--)
+    if (RTVEC_ELT (v, i) != x)
+      break;
+
+  /* If the values are all the same, check to see if we can use one of the
+     standard constant vectors.  */
+  if (i == -1)
+    {
+      if (x == CONST0_RTX (inner))
+	return CONST0_RTX (mode);
+      else if (x == CONST1_RTX (inner))
+	return CONST1_RTX (mode);
+    }
+
+  return gen_rtx_raw_CONST_VECTOR (mode, v);
 }
 
 /* Create some permanent unique rtl objects shared between all functions.
@@ -5347,12 +5262,18 @@ init_emit_once (int line_numbers)
   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
        mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
-    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
+    {
+      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+    }
 
   for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
-    const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
+    {
+      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
+      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
+    }
 
   for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
     if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
@@ -5430,6 +5351,11 @@ emit_copy_of_insn_after (rtx insn, rtx after)
 
   INSN_LOCATOR (new) = INSN_LOCATOR (insn);
 
+  /* If the old insn is frame related, then so is the new one.  This is
+     primarily needed for IA-64 unwind info which marks epilogue insns,
+     which may be duplicated by the basic block reordering code.  */
+  RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
+
  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
     make them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
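The new gen_rtx_CONST_VECTOR avoids allocating a fresh CONST_VECTOR whenever
every element is the shared scalar zero or one, returning the canonical
CONST0_RTX/CONST1_RTX vector instead.  The same scan on a plain int vector
(classify_vector and the enum are illustrative; GCC compares rtx pointers
against const_tiny_rtx entries rather than element values):

#include <stdio.h>

enum canon { CANON_NONE, CANON_ZERO, CANON_ONE };

static enum canon
classify_vector (const int *v, int nunits)
{
  int i, x = v[nunits - 1];

  /* Check whether all of the elements have the same value.  */
  for (i = nunits - 2; i >= 0; i--)
    if (v[i] != x)
      return CANON_NONE;

  /* All equal: see if one of the standard constant vectors applies.  */
  if (x == 0)
    return CANON_ZERO;
  if (x == 1)
    return CANON_ONE;
  return CANON_NONE;
}

int
main (void)
{
  int zeros[4] = { 0, 0, 0, 0 };
  int mixed[4] = { 0, 1, 0, 0 };
  /* Prints "1 0": the zero vector canonicalizes, the mixed one does not.  */
  printf ("%d %d\n", classify_vector (zeros, 4), classify_vector (mixed, 4));
  return 0;
}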