This is Richi's prototype patch from
https://gcc.gnu.org/bugzilla/show_bug.cgi?id=36043#c23, with fixes for
blocks larger than one reg, for big-endian, and for BLOCK_REG_PADDING.
I also removed the operand_subword_force call, since we may as well let
narrow_bit_field_mem in extract_bit_field do that work for us.  The
BLOCK_REG_PADDING shift has to be done after the block has been loaded;
the alternative would be to repeat the bit-field extraction for that
case.
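
To make the new path concrete, here's the arithmetic for one case
(sizes picked purely for illustration): a 13-byte struct on a 64-bit
target, so nregs == 2.

struct t { char c[13]; };	/* size = 13, UNITS_PER_WORD = 8 */
/* size % UNITS_PER_WORD = 5, so unless mem happens to be word aligned
   we take the new path:
     move_block_to_reg copies the first word (bytes 0..7),
     bitoff  = (nregs - 1) * BITS_PER_WORD = 64,
     bitsize = size * BITS_PER_UNIT - bitoff = 104 - 64 = 40,
   and extract_bit_field fetches bytes 8..12 without touching
   bytes 13..15.  On big-endian the result is then shifted left by
   BITS_PER_WORD - bitsize = 24 bits so it ends up where a full-word
   load would have put it.  */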

Bootstrapped and regression tested (-m32 and -m64) on x86_64-linux and
powerpc64-linux.  OK to apply?

I'll also throw together a testcase or three.  For the execute tests
I'm thinking of using sbrk to place an odd-sized struct so that any
access past its end segfaults, rather than the mmap/munmap approach
used in the pr36043 testcase.  Does that sound reasonable?
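
Roughly the shape I have in mind for the execute test (a sketch only;
the struct size, the noinline/noclone decoration, and the fallback when
brk fails are all up for discussion, and it assumes the page just above
the break is unmapped, which is the usual situation):

#include <unistd.h>

struct S { char c[5]; };

__attribute__ ((noinline, noclone)) void
consume (struct S s)
{
  if (s.c[0] != 1 || s.c[4] != 5)
    __builtin_abort ();
}

int
main (void)
{
  long page = sysconf (_SC_PAGESIZE);
  /* Round the break up to a page boundary; everything below it is
     then valid heap, and the byte at 'top' should not be.  */
  char *top = (char *) (((unsigned long) sbrk (0) + page - 1) & -page);
  if (brk (top) != 0)
    return 0;			/* Can't set up the test, pass vacuously.  */

  /* Place the struct so it ends exactly at the break, misaligned.  */
  struct S *p = (struct S *) (top - sizeof (struct S));
  for (int i = 0; i < 5; i++)
    p->c[i] = i + 1;
  /* Passing *p by value must not load the byte at 'top'.  */
  consume (*p);
  return 0;
}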

        PR target/65408
        PR target/58744
        PR middle-end/36043
        * calls.c (load_register_parameters): Don't load past end of
        mem unless suitably aligned.

Index: gcc/calls.c
===================================================================
--- gcc/calls.c (revision 221435)
+++ gcc/calls.c (working copy)
@@ -2090,6 +2090,26 @@ load_register_parameters (struct arg_data *args, i
                                           (XEXP (args[i].value, 0), size)))
                *sibcall_failure = 1;
 
+             if (size % UNITS_PER_WORD == 0
+                 || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
+               move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
+             else
+               {
+                 if (nregs > 1)
+                   move_block_to_reg (REGNO (reg), mem, nregs - 1,
+                                      args[i].mode);
+                 rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
+                 unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
+                 unsigned int bitsize = size * BITS_PER_UNIT - bitoff;
+                 rtx x = extract_bit_field (mem, bitsize, bitoff, 1,
+                                            dest, word_mode, word_mode);
+                 if (BYTES_BIG_ENDIAN)
+                   x = expand_shift (LSHIFT_EXPR, word_mode, x,
+                                     BITS_PER_WORD - bitsize, dest, 1);
+                 if (x != dest)
+                   emit_move_insn (dest, x);
+               }
+
              /* Handle a BLKmode that needs shifting.  */
              if (nregs == 1 && size < UNITS_PER_WORD
 #ifdef BLOCK_REG_PADDING
@@ -2097,22 +2117,18 @@ load_register_parameters (struct arg_data *args, i
 #else
                  && BYTES_BIG_ENDIAN
 #endif
-                )
+                 )
                {
-                 rtx tem = operand_subword_force (mem, 0, args[i].mode);
-                 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
-                 rtx x = gen_reg_rtx (word_mode);
+                 rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
                  int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
-                 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
-                                                       : LSHIFT_EXPR;
+                 enum tree_code dir = (BYTES_BIG_ENDIAN
+                                       ? RSHIFT_EXPR : LSHIFT_EXPR);
+                 rtx x;
 
-                 emit_move_insn (x, tem);
-                 x = expand_shift (dir, word_mode, x, shift, ri, 1);
-                 if (x != ri)
-                   emit_move_insn (ri, x);
+                 x = expand_shift (dir, word_mode, dest, shift, dest, 1);
+                 if (x != dest)
+                   emit_move_insn (dest, x);
                }
-             else
-               move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
            }
 
          /* When a parameter is a block, and perhaps in other cases, it is

-- 
Alan Modra
Australia Development Lab, IBM
