extern void compute_record_mode (tree);
extern void finish_bitfield_layout (tree);
extern void finish_record_layout (record_layout_info, int);
-extern unsigned int element_precision (const_tree);
extern void finalize_size_functions (void);
extern void fixup_unsigned_type (tree);
extern void initialize_sizetypes (void);
|| gimple_could_trap_p (stmt))
return MOVE_PRESERVE_EXECUTION;
+  if (is_gimple_assign (stmt))
+    {
+      auto code = gimple_assign_rhs_code (stmt);
+      tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
+      /* For shifts and rotates with possibly out-of-bound shift operands
+         we currently cannot rewrite them into something unconditionally
+         well-defined, so require execution to be preserved.  The amount
+         is possibly out of bounds when it is not a compile-time constant
+         or when the constant is >= the precision of the shifted type
+         (shifting by >= precision is undefined behavior).  */
+      if ((code == LSHIFT_EXPR
+           || code == RSHIFT_EXPR
+           || code == LROTATE_EXPR
+           || code == RROTATE_EXPR)
+          && (TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST
+              /* We cannot use ranges at 'stmt' here.  */
+              || wi::geu_p (wi::to_wide (gimple_assign_rhs2 (stmt)),
+                            element_precision (type))))
+        ret = MOVE_PRESERVE_EXECUTION;
+    }
+
/* Non local loads in a transaction cannot be hoisted out. Well,
unless the load happens on every path out of the loop, but we
don't take this into account yet. */
#define SET_TYPE_MODE(NODE, MODE) \
(TYPE_CHECK (NODE)->type_common.mode = (MODE))
+extern unsigned int element_precision (const_tree);
extern machine_mode element_mode (const_tree);
extern machine_mode vector_type_mode (const_tree);
extern unsigned int vector_element_bits (const_tree);