Hello!

Oh well... some broken assemblers get confused by the "movq %reg, %xmm"
DImode move mnemonic. "movd" works correctly as well, so use that form
instead.
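
For reference, both spellings denote the same instruction; with a fixed
GNU as the two lines below assemble to an identical byte sequence (the
encoding bytes are shown for illustration, GP-to-XMM direction):

        movq    %rax, %xmm0     # 66 48 0f 6e c0
        movd    %rax, %xmm0     # 66 48 0f 6e c0, same encoding

The broken assemblers mishandle only the movq spelling, so emitting movd
avoids the problem without changing the generated code.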

2011-05-04  Uros Bizjak  <ubiz...@gmail.com>

        PR target/48860
        * config/i386/i386.md (*movdi_internal_rex64): Use %vmovd
        for reg<->xmm moves.
        * config/i386/sse.md (*vec_concatv2di_rex64_sse4_1): Ditto.
        (*vec_concatv2di_rex64_sse): Ditto.
        (*sse2_storeq_rex64): Do not emit %v prefix for mov{q} mnemonic.
        (*vec_extractv2di_1_rex64): Ditto.

        Revert:
        2011-05-02  Uros Bizjak  <ubiz...@gmail.com>

        * config/i386/mmx.md (*mov<mode>_internal_rex64): Use %vmovq for
        reg<->xmm moves.
        (*movv2sf_internal_rex64): Use %vmovq for reg<->xmm moves.

Tested on x86_64-pc-linux-gnu, committed to mainline SVN. I will audit
movq usage in the release branches as well.
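
A note on the mov{q} hunks in sse.md: the %v template prefix expands to
"v" when AVX is enabled, which is only correct for SSE/AVX instructions.
The affected alternatives are plain integer moves (type "imov"), so under
-mavx the old templates could have emitted an invalid spelling along the
lines of (operands are illustrative):

        vmovq   8(%rdi), %rax   # wrong: no AVX form loads memory into a GP reg

Dropping %v and setting the "prefix" attribute to "orig" keeps these as
plain integer movq.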

Uros.
Index: config/i386/i386.md
===================================================================
--- config/i386/i386.md (revision 173353)
+++ config/i386/i386.md (working copy)
@@ -2001,11 +2001,13 @@
     case TYPE_SSEMOV:
       if (get_attr_mode (insn) == MODE_TI)
        return "%vmovdqa\t{%1, %0|%0, %1}";
+      /* Handle broken assemblers that require movd instead of movq.  */
+      if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
+       return "%vmovd\t{%1, %0|%0, %1}";
       return "%vmovq\t{%1, %0|%0, %1}";
 
     case TYPE_MMXMOV:
-      /* Moves from and into integer register is done using movd
-        opcode with REX prefix.  */
+      /* Handle broken assemblers that require movd instead of movq.  */
       if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
        return "movd\t{%1, %0|%0, %1}";
       return "movq\t{%1, %0|%0, %1}";
@@ -3018,7 +3020,8 @@
 
     case 11:
     case 12:
-    return "%vmovd\t{%1, %0|%0, %1}";
+      /* Handle broken assemblers that require movd instead of movq.  */
+      return "%vmovd\t{%1, %0|%0, %1}";
 
     default:
       gcc_unreachable();
@@ -3494,12 +3497,13 @@
 
     case 9: case 10: case 14: case 15:
       return "movd\t{%1, %0|%0, %1}";
-    case 12: case 13:
-      return "%vmovd\t{%1, %0|%0, %1}";
 
     case 11:
       return "movq\t{%1, %0|%0, %1}";
 
+    case 12: case 13:
+      return "%vmovd\t{%1, %0|%0, %1}";
+
     default:
       gcc_unreachable ();
     }
Index: config/i386/mmx.md
===================================================================
--- config/i386/mmx.md  (revision 173353)
+++ config/i386/mmx.md  (working copy)
@@ -63,6 +63,7 @@
   DONE;
 })
 
+;; movd instead of movq is required to handle broken assemblers.
 (define_insn "*mov<mode>_internal_rex64"
   [(set (match_operand:MMXMODEI8 0 "nonimmediate_operand"
         "=rm,r,!?y,!y,!?y,m  ,!y ,*Y2,x,x ,m,r ,Yi")
@@ -82,8 +83,8 @@
     %vpxor\t%0, %d0
     %vmovq\t{%1, %0|%0, %1}
     %vmovq\t{%1, %0|%0, %1}
-    %vmovq\t{%1, %0|%0, %1}
-    %vmovq\t{%1, %0|%0, %1}"
+    %vmovd\t{%1, %0|%0, %1}
+    %vmovd\t{%1, %0|%0, %1}"
   [(set_attr "type" 
"imov,imov,mmx,mmxmov,mmxmov,mmxmov,ssecvt,ssecvt,sselog1,ssemov,ssemov,ssemov,ssemov")
    (set_attr "unit" "*,*,*,*,*,*,mmx,mmx,*,*,*,*,*")
    (set_attr "prefix_rep" "*,*,*,*,*,*,1,1,*,1,*,*,*")
@@ -155,6 +156,7 @@
   DONE;
 })
 
+;; movd instead of movq is required to handle broken assemblers.
 (define_insn "*movv2sf_internal_rex64"
   [(set (match_operand:V2SF 0 "nonimmediate_operand"
         "=rm,r,!?y,!y,!?y,m  ,!y ,*Y2,x,x,x,m,r ,Yi")
@@ -175,8 +177,8 @@
     %vmovaps\t{%1, %0|%0, %1}
     %vmovlps\t{%1, %d0|%d0, %1}
     %vmovlps\t{%1, %0|%0, %1}
-    %vmovq\t{%1, %0|%0, %1}
-    %vmovq\t{%1, %0|%0, %1}"
+    %vmovd\t{%1, %0|%0, %1}
+    %vmovd\t{%1, %0|%0, %1}"
   [(set_attr "type" 
"imov,imov,mmx,mmxmov,mmxmov,mmxmov,ssecvt,ssecvt,ssemov,sselog1,ssemov,ssemov,ssemov,ssemov")
    (set_attr "unit" "*,*,*,*,*,*,mmx,mmx,*,*,*,*,*,*")
    (set_attr "prefix_rep" "*,*,*,*,*,*,1,1,*,*,*,*,*,*")
Index: config/i386/sse.md
===================================================================
--- config/i386/sse.md  (revision 173353)
+++ config/i386/sse.md  (working copy)
@@ -6479,9 +6479,8 @@
   "@
    #
    #
-   %vmov{q}\t{%1, %0|%0, %1}"
+   mov{q}\t{%1, %0|%0, %1}"
   [(set_attr "type" "*,*,imov")
-   (set_attr "prefix" "*,*,maybe_vex")
    (set_attr "mode" "*,*,DI")])
 
 (define_insn "*sse2_storeq"
@@ -6516,12 +6515,12 @@
    psrldq\t{$8, %0|%0, 8}
    vpsrldq\t{$8, %1, %0|%0, %1, 8}
    %vmovq\t{%H1, %0|%0, %H1}
-   %vmov{q}\t{%H1, %0|%0, %H1}"
+   mov{q}\t{%H1, %0|%0, %H1}"
   [(set_attr "isa" "base,noavx,avx,base,base")
    (set_attr "type" "ssemov,sseishft1,sseishft1,ssemov,imov")
    (set_attr "length_immediate" "*,1,1,*,*")
    (set_attr "memory" "*,none,none,*,*")
-   (set_attr "prefix" "maybe_vex,orig,vex,maybe_vex,maybe_vex")
+   (set_attr "prefix" "maybe_vex,orig,vex,maybe_vex,orig")
    (set_attr "mode" "V2SF,TI,TI,TI,DI")])
 
 (define_insn "*vec_extractv2di_1_sse2"
@@ -6687,6 +6686,7 @@
   [(set_attr "type" "sselog,ssemov,ssemov")
    (set_attr "mode" "TI,V4SF,V2SF")])
 
+;; movd instead of movq is required to handle broken assemblers.
 (define_insn "*vec_concatv2di_rex64_sse4_1"
   [(set (match_operand:V2DI 0 "register_operand"
          "=x, x, x,Yi,!x,x,x,x,x")
@@ -6700,7 +6700,7 @@
    pinsrq\t{$0x1, %2, %0|%0, %2, 0x1}
    vpinsrq\t{$0x1, %2, %1, %0|%0, %1, %2, 0x1}
    %vmovq\t{%1, %0|%0, %1}
-   %vmovq\t{%1, %0|%0, %1}
+   %vmovd\t{%1, %0|%0, %1}
    movq2dq\t{%1, %0|%0, %1}
    punpcklqdq\t{%2, %0|%0, %2}
    vpunpcklqdq\t{%2, %1, %0|%0, %1, %2}
@@ -6719,6 +6719,7 @@
    (set_attr "prefix" "orig,vex,maybe_vex,maybe_vex,orig,orig,vex,orig,vex")
    (set_attr "mode" "TI,TI,TI,TI,TI,TI,TI,V2SF,V2SF")])
 
+;; movd instead of movq is required to handle broken assemblers.
 (define_insn "*vec_concatv2di_rex64_sse"
   [(set (match_operand:V2DI 0 "register_operand"     "=Y2,Yi,!Y2,Y2,x,x")
        (vec_concat:V2DI
@@ -6727,7 +6728,7 @@
   "TARGET_64BIT && TARGET_SSE"
   "@
    movq\t{%1, %0|%0, %1}
-   movq\t{%1, %0|%0, %1}
+   movd\t{%1, %0|%0, %1}
    movq2dq\t{%1, %0|%0, %1}
    punpcklqdq\t{%2, %0|%0, %2}
    movlhps\t{%2, %0|%0, %2}
