In Standard Algol 68:

- ABS (b) < bits_width yields the expected result.
- ABS (b) = bits_width yields a value with all bits set to zero.
- ABS (b) > bits_width is undefined.

In GNU Algol 68:

- ABS (b) < bits_width yields the expected result.
- ABS (b) >= bits_width yields a value with all bits set to zero.
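
For example, with the default BITS mode (a small sketch mirroring the new
tests added below):

  begin assert (16rffff SHR bits_width = 16r0);
        assert (16rffff SHR (bits_width + 1) = 16r0);
        assert (16rffff SHL -(bits_width + 1) = 16r0)
  end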

Our behavior is:

1. Conformant with the standard and backwards compatible.

2. Consistent with the similar extension by which SKIP of integral, bits,
   real, etc. values always yields zeroes (see the sketch after this list).

3. Intuitive.
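
For illustration, a hypothetical snippet relying on that SKIP extension
(not part of this patch; it assumes SKIP of a bits value yields all-zero
bits, as described in point 2 above):

  begin bits b = skip;
        assert (b = 16r0)
  end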

This patch fixes the lowering of the standard operators SHR and SHL
accordingly, and includes an update to the manual and a few new tests.

Signed-off-by: Jose E. Marchesi <[email protected]>

gcc/algol68/ChangeLog

        PR algol68/123959
        * a68.h: Expand prototype of a68_bits_shift to get a node argument
        for location purposes.
        * a68-low-bits.cc (a68_bits_shift): Implement RR compatible
        semantics.
        * a68-low-prelude.cc (a68_lower_shl3): Fix call to a68_bits_shift.
        (a68_lower_shr3): Likewise.
        * ga68.texi (Bits operators): Fix documentation of SHR and SHL.

gcc/testsuite/ChangeLog

        PR algol68/123959
        * algol68/execute/bits-shift-1.a68: New test.
        * algol68/execute/bits-shift-2.a68: Likewise.
        * algol68/execute/bits-shift-3.a68: Likewise.
---
 gcc/algol68/a68-low-bits.cc                   | 38 ++++++++++++-------
 gcc/algol68/a68-low-prelude.cc                |  8 ++--
 gcc/algol68/a68.h                             |  2 +-
 gcc/algol68/ga68.texi                         | 18 ++++++---
 .../algol68/execute/bits-shift-1.a68          |  9 +++++
 .../algol68/execute/bits-shift-2.a68          | 26 +++++++++++++
 .../algol68/execute/bits-shift-3.a68          | 26 +++++++++++++
 7 files changed, 103 insertions(+), 24 deletions(-)
 create mode 100644 gcc/testsuite/algol68/execute/bits-shift-1.a68
 create mode 100644 gcc/testsuite/algol68/execute/bits-shift-2.a68
 create mode 100644 gcc/testsuite/algol68/execute/bits-shift-3.a68

diff --git a/gcc/algol68/a68-low-bits.cc b/gcc/algol68/a68-low-bits.cc
index cfe84fbff33..7ec058ee1b0 100644
--- a/gcc/algol68/a68-low-bits.cc
+++ b/gcc/algol68/a68-low-bits.cc
@@ -258,25 +258,37 @@ a68_bits_subset (tree bits1, tree bits2)
                      bits2);
 }
 
-/* Rotate the bits in BITS SHIFT bits to the left if SHIFT is positive, or ABS
-   (SHIFT) bits to the right if SHIFT is negative.
+/* Shift the bits in BITS according to the value of SHIFT:
 
-   A run-time error is raised if the count overflows the BITS value.  */
+   - If ABS(SHIFT) >= bits_width, the result is all bits clear.
+   - If SHIFT is positive, BITS gets shifted SHIFT bits to the right.
+   - If SHIFT is negative, BITS gets shifted ABS(SHIFT) bits to the left.
+*/
 
 tree
-a68_bits_shift (tree shift, tree bits)
+a68_bits_shift (NODE_T *p, tree shift, tree bits)
 {
   shift = save_expr (shift);
   bits = save_expr (bits);
-  return fold_build3 (COND_EXPR,
-                     TREE_TYPE (bits),
-                     fold_build2 (GE_EXPR, TREE_TYPE (shift),
-                                  shift, build_int_cst (TREE_TYPE (shift), 0)),
-                     fold_build2 (LSHIFT_EXPR, TREE_TYPE (bits),
-                                  bits, shift),
-                     fold_build2 (RSHIFT_EXPR, TREE_TYPE (bits),
-                                  bits,
-                                  fold_build1 (ABS_EXPR, TREE_TYPE (shift), shift)));
+
+  tree shift_type = TREE_TYPE (shift);
+  tree bits_type = TREE_TYPE (bits);
+  tree abs_shift = save_expr (fold_build1 (ABS_EXPR, TREE_TYPE (shift), shift));
+
+  tree shifted_right = fold_build2 (RSHIFT_EXPR, bits_type, bits, abs_shift);
+  tree shifted_left = fold_build2 (LSHIFT_EXPR, bits_type, bits, abs_shift);
+
+  tree shifted_bits = fold_build3 (COND_EXPR, TREE_TYPE (bits),
+                                  fold_build2 (GE_EXPR, shift_type,
+                                               shift, build_zero_cst (shift_type)),
+                                  shifted_right, shifted_left);
+
+  return fold_build3_loc (a68_get_node_location (p),
+                         COND_EXPR,
+                         TREE_TYPE (bits),
+                         fold_build2 (LT_EXPR, TREE_TYPE (abs_shift),
+                                      abs_shift, a68_bits_width (bits_type)),
+                         shifted_bits, build_zero_cst (bits_type));
 }
 
 /* Given two bits values, build an expression that calculates whether A = B.  */
diff --git a/gcc/algol68/a68-low-prelude.cc b/gcc/algol68/a68-low-prelude.cc
index 9bc03cff48f..44abd5fe74d 100644
--- a/gcc/algol68/a68-low-prelude.cc
+++ b/gcc/algol68/a68-low-prelude.cc
@@ -1085,7 +1085,9 @@ a68_lower_shl3 (NODE_T *p, LOW_CTX_T ctx)
 {
   tree bits = a68_lower_tree (SUB (p), ctx);
   tree shift = a68_lower_tree (NEXT (NEXT (SUB (p))), ctx);
-  return a68_bits_shift (shift, bits);
+  return a68_bits_shift (p,
+                        fold_build1 (NEGATE_EXPR, TREE_TYPE (shift), shift),
+                        bits);
 }
 
 tree
@@ -1093,9 +1095,7 @@ a68_lower_shr3 (NODE_T *p, LOW_CTX_T ctx)
 {
   tree bits = a68_lower_tree (SUB (p), ctx);
   tree shift = a68_lower_tree (NEXT (NEXT (SUB (p))), ctx);
-  return a68_bits_shift (fold_build1 (NEGATE_EXPR,
-                                     TREE_TYPE (shift), shift),
-                        bits);
+  return a68_bits_shift (p, shift, bits);
 }
 
 tree
diff --git a/gcc/algol68/a68.h b/gcc/algol68/a68.h
index 3e3442f668a..9dcb14600a2 100644
--- a/gcc/algol68/a68.h
+++ b/gcc/algol68/a68.h
@@ -534,7 +534,7 @@ tree a68_bits_ior (tree bits1, tree bits2);
 tree a68_bits_xor (tree bits1, tree bits2);
 tree a68_bits_elem (NODE_T *p, tree pos, tree bits);
 tree a68_bits_subset (tree bits1, tree bits2);
-tree a68_bits_shift (tree shift, tree bits);
+tree a68_bits_shift (NODE_T *p, tree shift, tree bits);
 tree a68_bits_eq (tree a, tree b, location_t loc = UNKNOWN_LOCATION);
 tree a68_bits_ne (tree a, tree b, location_t loc = UNKNOWN_LOCATION);
 tree a68_bits_set (MOID_T *m, tree bits, tree numbit, location_t loc = UNKNOWN_LOCATION);
diff --git a/gcc/algol68/ga68.texi b/gcc/algol68/ga68.texi
index 1d40530c3f4..811f34f7d9f 100644
--- a/gcc/algol68/ga68.texi
+++ b/gcc/algol68/ga68.texi
@@ -2799,16 +2799,22 @@ in the elements of the given bits operands.
 
 @deftypefn Operator {} {@B{SHL}} {= (@B{l} @B{bits} a, @B{int} n) @B{l} @B{bits}}
 @deftypefnx Operator {} {@B{UP}} {= (@B{l} @B{bits} a, @B{int} n) @B{l} @B{bits}}
-Dyadic operator that yields the given bits operand shifted @code{n}
-positions to the left.  Extra elements introduced on the right are
-initialized to @code{@B{false}}.
+Dyadic operator that yields the given bits operand shifted @code{ABS
+n} positions to the left if @code{n >= 0} or @code{ABS n} positions to
+the right if @code{n < 0}.  Extra elements introduced on the right or
+left are initialized to @code{@B{false}}.  If @code{ABS n >
+L_bits_width} then the resulting bits value has all bits set to
+@code{false}.
 @end deftypefn
 
 @deftypefn Operator {} {@B{SHR}} {= (@B{l} @B{bits} a, @B{int} n) @B{l} @B{bits}}
 @deftypefnx Operator {} {@B{DOWN}} {= (@B{l} @B{bits} a, @B{int} n) @B{l} @B{bits}}
-Dyadic operator that yields the given bits operand shifted @code{n}
-positions to the right.  Extra elements introduced on the left are
-initialized to @code{@B{false}}.
+Dyadic operator that yields the given bits operand shifted @code{ABS
+n} positions to the right if @code{n >= 0} or @code{ABS n} positions
+to the left if @code{n < 0}.  Extra elements introduced on the right
+or left are initialized to @code{@B{false}}.  If @code{ABS n >
+L_bits_width} then the resulting bits value has all bits set to
+@code{false}.
 @end deftypefn
 
 @subsection Relational
diff --git a/gcc/testsuite/algol68/execute/bits-shift-1.a68 b/gcc/testsuite/algol68/execute/bits-shift-1.a68
new file mode 100644
index 00000000000..9e3220d9190
--- /dev/null
+++ b/gcc/testsuite/algol68/execute/bits-shift-1.a68
@@ -0,0 +1,9 @@
+begin int first_bit = 2**31;
+      int myshift = 29;
+
+      bits b1 = BIN (first_bit) SHR 29;
+      bits b2 = BIN (first_bit) SHR myshift;
+
+      assert (b1 = 2r100);
+      assert (b2 = 2r100)
+end
diff --git a/gcc/testsuite/algol68/execute/bits-shift-2.a68 b/gcc/testsuite/algol68/execute/bits-shift-2.a68
new file mode 100644
index 00000000000..e946b978728
--- /dev/null
+++ b/gcc/testsuite/algol68/execute/bits-shift-2.a68
@@ -0,0 +1,26 @@
+{ Shifting by L_bits_width results in all bits being zero. }
+
+begin assert (short short 16rffff SHR short_short_bits_width = short short 16r0);
+      assert (short 16rffff SHR short_bits_width = short 16r0);
+      assert (16rffff SHR bits_width = 16r0);
+      assert (long 16rffff SHR long_bits_width = long 16r0);
+      assert (long long 16rffff SHR long_long_bits_width = long long 16r0);
+
+      assert (short short 16rffff SHR -short_short_bits_width = short short 16r0);
+      assert (short 16rffff SHR -short_bits_width = short 16r0);
+      assert (16rffff SHR -bits_width = 16r0);
+      assert (long 16rffff SHR -long_bits_width = long 16r0);
+      assert (long long 16rffff SHR -long_long_bits_width = long long 16r0);
+
+      assert (short short 16rffff SHL short_short_bits_width = short short 16r0);
+      assert (short 16rffff SHL short_bits_width = short 16r0);
+      assert (16rffff SHL bits_width = 16r0);
+      assert (long 16rffff SHL long_bits_width = long 16r0);
+      assert (long long 16rffff SHL long_long_bits_width = long long 16r0);
+
+      assert (short short 16rffff SHL -short_short_bits_width = short short 16r0);
+      assert (short 16rffff SHL -short_bits_width = short 16r0);
+      assert (16rffff SHL -bits_width = 16r0);
+      assert (long 16rffff SHL -long_bits_width = long 16r0);
+      assert (long long 16rffff SHL -long_long_bits_width = long long 16r0)
+end
diff --git a/gcc/testsuite/algol68/execute/bits-shift-3.a68 b/gcc/testsuite/algol68/execute/bits-shift-3.a68
new file mode 100644
index 00000000000..b4536dc4368
--- /dev/null
+++ b/gcc/testsuite/algol68/execute/bits-shift-3.a68
@@ -0,0 +1,26 @@
+{ Shifting by > L_bits_width results in all bits being zero. }
+
+begin assert (short short 16rffff SHR (short_short_bits_width + 1) = short short 16r0);
+      assert (short 16rffff SHR (short_bits_width + 1) = short 16r0);
+      assert (16rffff SHR (bits_width + 1) = 16r0);
+      assert (long 16rffff SHR (long_bits_width + 1) = long 16r0);
+      assert (long long 16rffff SHR (long_long_bits_width + 1) = long long 16r0);
+
+      assert (short short 16rffff SHR -(short_short_bits_width + 1) = short short 16r0);
+      assert (short 16rffff SHR -(short_bits_width + 1) = short 16r0);
+      assert (16rffff SHR -(bits_width + 1) = 16r0);
+      assert (long 16rffff SHR -(long_bits_width + 1) = long 16r0);
+      assert (long long 16rffff SHR -(long_long_bits_width + 1) = long long 16r0);
+
+      assert (short short 16rffff SHL (short_short_bits_width + 1) = short short 16r0);
+      assert (short 16rffff SHL (short_bits_width + 1) = short 16r0);
+      assert (16rffff SHL (bits_width + 1) = 16r0);
+      assert (long 16rffff SHL (long_bits_width + 1) = long 16r0);
+      assert (long long 16rffff SHL (long_long_bits_width + 1) = long long 16r0);
+
+      assert (short short 16rffff SHL -(short_short_bits_width + 1) = short short 16r0);
+      assert (short 16rffff SHL -(short_bits_width + 1) = short 16r0);
+      assert (16rffff SHL -(bits_width + 1) = 16r0);
+      assert (long 16rffff SHL -(long_bits_width + 1) = long 16r0);
+      assert (long long 16rffff SHL -(long_long_bits_width + 1) = long long 16r0)
+end
-- 
2.39.5
