bits2);
}
-/* Rotate the bits in BITS SHIFT bits to the left if SHIFT is positive, or ABS
- (SHIFT) bits to the right if SHIFT is negative.
+/* Shift the bits in BITS according to the value of SHIFT:
- A run-time error is raised if the count overflows the BITS value. */
+ - If ABS(SHIFT) >= bits_width, the result is all bits clear.
+ - If SHIFT is positive, BITS gets shifted SHIFT bits to the right.
+ - If SHIFT is negative, BITS gets shifted ABS(SHIFT) bits to the left.
+*/
tree
-a68_bits_shift (tree shift, tree bits)
+a68_bits_shift (NODE_T *p, tree shift, tree bits)
{
shift = save_expr (shift);
bits = save_expr (bits);
- return fold_build3 (COND_EXPR,
- TREE_TYPE (bits),
- fold_build2 (GE_EXPR, TREE_TYPE (shift),
- shift, build_int_cst (TREE_TYPE (shift), 0)),
- fold_build2 (LSHIFT_EXPR, TREE_TYPE (bits),
- bits, shift),
- fold_build2 (RSHIFT_EXPR, TREE_TYPE (bits),
- bits,
- fold_build1 (ABS_EXPR, TREE_TYPE (shift), shift)));
+
+ tree shift_type = TREE_TYPE (shift);
+ tree bits_type = TREE_TYPE (bits);
+  tree abs_shift = save_expr (fold_build1 (ABS_EXPR, shift_type, shift));
+
+ tree shifted_right = fold_build2 (RSHIFT_EXPR, bits_type, bits, abs_shift);
+ tree shifted_left = fold_build2 (LSHIFT_EXPR, bits_type, bits, abs_shift);
+
+  tree shifted_bits = fold_build3 (COND_EXPR, bits_type,
+                                   fold_build2 (GE_EXPR, shift_type,
+                                                shift, build_zero_cst (shift_type)),
+                                   shifted_right, shifted_left);
+
+  return fold_build3_loc (a68_get_node_location (p),
+                          COND_EXPR,
+                          bits_type,
+                          fold_build2 (LT_EXPR, shift_type,
+                                       abs_shift, a68_bits_width (bits_type)),
+                          shifted_bits, build_zero_cst (bits_type));
}
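A minimal C model of the expression built above, assuming a 32-bit BITS
type (the helper name and the fixed width are illustrative assumptions,
not part of the front end):

  /* Sketch of the value computed by the tree a68_bits_shift builds: a
     positive count shifts right, a negative count shifts left, and a
     count whose magnitude reaches the width clears every bit.  */
  #include <stdint.h>
  #include <stdlib.h>

  static uint32_t
  bits_shift_model (int shift, uint32_t bits)
  {
    const unsigned int width = 32;
    unsigned int count = (unsigned int) abs (shift);

    if (count >= width)
      return 0;
    return shift >= 0 ? bits >> count : bits << count;
  }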
/* Given two bits values, build an expression that calculates whether A = B. */
{
tree bits = a68_lower_tree (SUB (p), ctx);
tree shift = a68_lower_tree (NEXT (NEXT (SUB (p))), ctx);
- return a68_bits_shift (shift, bits);
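+  /* Positive counts now shift right in a68_bits_shift, so negate the
+     count here to keep this operator shifting left for positive N.  */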
+ return a68_bits_shift (p,
+ fold_build1 (NEGATE_EXPR, TREE_TYPE (shift), shift),
+ bits);
}
tree
{
tree bits = a68_lower_tree (SUB (p), ctx);
tree shift = a68_lower_tree (NEXT (NEXT (SUB (p))), ctx);
- return a68_bits_shift (fold_build1 (NEGATE_EXPR,
- TREE_TYPE (shift), shift),
- bits);
+ return a68_bits_shift (p, shift, bits);
}
tree
tree a68_bits_xor (tree bits1, tree bits2);
tree a68_bits_elem (NODE_T *p, tree pos, tree bits);
tree a68_bits_subset (tree bits1, tree bits2);
-tree a68_bits_shift (tree shift, tree bits);
+tree a68_bits_shift (NODE_T *p, tree shift, tree bits);
tree a68_bits_eq (tree a, tree b, location_t loc = UNKNOWN_LOCATION);
tree a68_bits_ne (tree a, tree b, location_t loc = UNKNOWN_LOCATION);
tree a68_bits_set (MOID_T *m, tree bits, tree numbit, location_t loc = UNKNOWN_LOCATION);
@deftypefn Operator {} {@B{SHL}} {= (@B{l} @B{bits} a, @B{int} n) @B{l} @B{bits}}
@deftypefnx Operator {} {@B{UP}} {= (@B{l} @B{bits} a, @B{int} n) @B{l} @B{bits}}
-Dyadic operator that yields the given bits operand shifted @code{n}
-positions to the left. Extra elements introduced on the right are
-initialized to @code{@B{false}}.
+Dyadic operator that yields the given bits operand shifted
+@code{ABS n} positions to the left if @code{n >= 0}, or @code{ABS n}
+positions to the right if @code{n < 0}. Extra elements introduced on
+the right or left, respectively, are initialized to @code{@B{false}}.
+If @code{ABS n >= L_bits_width} then the resulting bits value has all
+bits set to @code{false}.
@end deftypefn
@deftypefn Operator {} {@B{SHR}} {= (@B{l} @B{bits} a, @B{int} n) @B{l} @B{bits}}
@deftypefnx Operator {} {@B{DOWN}} {= (@B{l} @B{bits} a, @B{int} n) @B{l} @B{bits}}
-Dyadic operator that yields the given bits operand shifted @code{n}
-positions to the right. Extra elements introduced on the left are
-initialized to @code{@B{false}}.
+Dyadic operator that yields the given bits operand shifted
+@code{ABS n} positions to the right if @code{n >= 0}, or @code{ABS n}
+positions to the left if @code{n < 0}. Extra elements introduced on
+the left or right, respectively, are initialized to @code{@B{false}}.
+If @code{ABS n >= L_bits_width} then the resulting bits value has all
+bits set to @code{false}.
@end deftypefn
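A minimal C model of SHL and SHR as described above, assuming a 32-bit
bits width (the function names, the width and the worked values are
illustrative assumptions):

  /* Sketch: SHL and SHR on a 32-bit bits value.  Counts whose magnitude
     reaches the width yield the all-false value; the explicit guard is
     needed because C leaves such shifts undefined.  */
  #include <assert.h>
  #include <stdint.h>
  #include <stdlib.h>

  static uint32_t
  shl (uint32_t a, int n)
  {
    unsigned int k = (unsigned int) abs (n);
    if (k >= 32)
      return 0;
    return n >= 0 ? a << k : a >> k;
  }

  static uint32_t
  shr (uint32_t a, int n)
  {
    return shl (a, -n);  /* SHR is SHL with the count negated.  */
  }

  int
  main (void)
  {
    assert (shl (0x0000ffffu, 4) == 0x000ffff0u);   /* 16rffff SHL 4 */
    assert (shr (0x0000ffffu, 4) == 0x00000fffu);   /* 16rffff SHR 4 */
    assert (shl (0x0000ffffu, -4) == shr (0x0000ffffu, 4));
    assert (shr (0x0000ffffu, 32) == 0);            /* count = width */
    return 0;
  }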
@subsection Relational
--- /dev/null
+begin int first_bit = 2**31;
+ int myshift = 29;
+
+ bits b1 = BIN (first_bit) SHR 29;
+ bits b2 = BIN (first_bit) SHR myshift;
+
+ assert (b1 = 2r100);
+ assert (b2 = 2r100)
+end
--- /dev/null
+{ Shifting by L_bits_width results in all bits being zero. }
+
+begin assert (short short 16rffff SHR short_short_bits_width = short short 16r0);
+ assert (short 16rffff SHR short_bits_width = short 16r0);
+ assert (16rffff SHR bits_width = 16r0);
+ assert (long 16rffff SHR long_bits_width = long 16r0);
+ assert (long long 16rffff SHR long_long_bits_width = long long 16r0);
+
+ assert (short short 16rffff SHR -short_short_bits_width = short short 16r0);
+ assert (short 16rffff SHR -short_bits_width = short 16r0);
+ assert (16rffff SHR -bits_width = 16r0);
+ assert (long 16rffff SHR -long_bits_width = long 16r0);
+ assert (long long 16rffff SHR -long_long_bits_width = long long 16r0);
+
+ assert (short short 16rffff SHL short_short_bits_width = short short 16r0);
+ assert (short 16rffff SHL short_bits_width = short 16r0);
+ assert (16rffff SHL bits_width = 16r0);
+ assert (long 16rffff SHL long_bits_width = long 16r0);
+ assert (long long 16rffff SHL long_long_bits_width = long long 16r0);
+
+ assert (short short 16rffff SHL -short_short_bits_width = short short 16r0);
+ assert (short 16rffff SHL -short_bits_width = short 16r0);
+ assert (16rffff SHL -bits_width = 16r0);
+ assert (long 16rffff SHL -long_bits_width = long 16r0);
+ assert (long long 16rffff SHL -long_long_bits_width = long long 16r0)
+end
--- /dev/null
+{ Shifting by more than L_bits_width results in all bits being zero. }
+
+begin assert (short short 16rffff SHR (short_short_bits_width + 1) = short short 16r0);
+ assert (short 16rffff SHR (short_bits_width + 1) = short 16r0);
+ assert (16rffff SHR (bits_width + 1) = 16r0);
+ assert (long 16rffff SHR (long_bits_width + 1) = long 16r0);
+ assert (long long 16rffff SHR (long_long_bits_width + 1) = long long 16r0);
+
+ assert (short short 16rffff SHR -(short_short_bits_width + 1) = short short 16r0);
+ assert (short 16rffff SHR -(short_bits_width + 1) = short 16r0);
+ assert (16rffff SHR -(bits_width + 1) = 16r0);
+ assert (long 16rffff SHR -(long_bits_width + 1) = long 16r0);
+ assert (long long 16rffff SHR -(long_long_bits_width + 1) = long long 16r0);
+
+ assert (short short 16rffff SHL (short_short_bits_width + 1) = short short 16r0);
+ assert (short 16rffff SHL (short_bits_width + 1) = short 16r0);
+ assert (16rffff SHL (bits_width + 1) = 16r0);
+ assert (long 16rffff SHL (long_bits_width + 1) = long 16r0);
+ assert (long long 16rffff SHL (long_long_bits_width + 1) = long long 16r0);
+
+ assert (short short 16rffff SHL -(short_short_bits_width + 1) = short short 16r0);
+ assert (short 16rffff SHL -(short_bits_width + 1) = short 16r0);
+ assert (16rffff SHL -(bits_width + 1) = 16r0);
+ assert (long 16rffff SHL -(long_bits_width + 1) = long 16r0);
+ assert (long long 16rffff SHL -(long_long_bits_width + 1) = long long 16r0)
+end