2012-10-15  Easwaran Raman  <eraman@google.com>

	* tree-ssa-reassoc.c (build_and_add_sum): Assign UIDs to moved
	statements.
	(linearize_expr): Likewise.
	(ensure_ops_are_available): New function.
	(rewrite_expr_tree): Do not move statements beyond what is
	necessary.  Remove call to swap_ops_for_binary_stmt ...
	(reassociate_bb): ... and move it here.
	(do_reassoc): Assign UIDs to gimple statements.

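This patch makes tree reassociation avoid unnecessary statement motion.
Previously, rewrite_expr_tree moved the defining statements of the
linearized chain to just before the statement being rewritten, even when
they were already in acceptable positions.  With this patch, the statement
being rewritten is instead moved only as far as needed for its operands to
be available (ensure_ops_are_available), using statement UIDs, assigned in
do_reassoc and kept up to date when statements are created or moved, to
compare statement order within a basic block cheaply.  The call to
swap_ops_for_binary_stmt moves from rewrite_expr_tree to reassociate_bb,
where it is applied once to the last three operands.

As a minimal sketch (not part of the patch; the exact GIMPLE depends on
options and compiler version), consider a chain of adds that the pass
linearizes and rewrites in rank order:

/* Hypothetical example.  If the statements defining t1 and t2 were
   separated from the final add by other code, the old code would move
   them down next to it; with this patch they stay in place, and the
   statement being rewritten moves only when an operand would otherwise
   not be available.  */
int
f (int a, int b, int c, int d)
{
  int t1 = a + b;
  int t2 = t1 + c;
  return t2 + d;
}
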
Index: gcc/tree-ssa-reassoc.c
===================================================================
--- gcc/tree-ssa-reassoc.c	(revision 192487)
+++ gcc/tree-ssa-reassoc.c	(working copy)
@@ -1167,6 +1167,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
       && (!op2def || gimple_nop_p (op2def)))
     {
       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
+      gimple_set_uid (sum, gimple_uid (gsi_stmt (gsi)));
       gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
     }
   else if ((!op1def || gimple_nop_p (op1def))
@@ -1176,6 +1177,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
       if (gimple_code (op2def) == GIMPLE_PHI)
 	{
 	  gsi = gsi_after_labels (gimple_bb (op2def));
+          gimple_set_uid (sum, gimple_uid (gsi_stmt (gsi)));
 	  gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
 	}
       else
@@ -1183,6 +1185,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
 	  if (!stmt_ends_bb_p (op2def))
 	    {
 	      gsi = gsi_for_stmt (op2def);
+              gimple_set_uid (sum, gimple_uid (op2def));
 	      gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
 	    }
 	  else
@@ -1201,6 +1204,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
       if (gimple_code (op1def) == GIMPLE_PHI)
 	{
 	  gsi = gsi_after_labels (gimple_bb (op1def));
+          gimple_set_uid (sum, gimple_uid (gsi_stmt (gsi)));
 	  gsi_insert_before (&gsi, sum, GSI_NEW_STMT);
 	}
       else
@@ -1208,6 +1212,7 @@ build_and_add_sum (tree type, tree op1, tree op2,
 	  if (!stmt_ends_bb_p (op1def))
 	    {
 	      gsi = gsi_for_stmt (op1def);
+              gimple_set_uid (sum, gimple_uid (op1def));
 	      gsi_insert_after (&gsi, sum, GSI_NEW_STMT);
 	    }
 	  else
@@ -2250,6 +2255,78 @@ swap_ops_for_binary_stmt (VEC(operand_entry_t, hea
     }
 }
 
+/* Ensure that operands in the OPS vector starting from the OPINDEXth entry
+   are available at STMT.  This is accomplished by moving STMT if needed.  */
+
+static void
+ensure_ops_are_available (gimple stmt, VEC(operand_entry_t, heap) *ops, int opindex)
+{
+  int i;
+  int len = VEC_length (operand_entry_t, ops);
+  gimple insert_stmt = stmt;
+  basic_block insert_bb = gimple_bb (stmt);
+  gimple_stmt_iterator gsi_insert, gsistmt;
+  for (i = opindex; i < len; i++)
+    {
+      operand_entry_t oe = VEC_index (operand_entry_t, ops, i);
+      gimple def_stmt;
+      basic_block def_bb;
+      /* Ignore constants and operands with default definitions.  */
+      if (TREE_CODE (oe->op) != SSA_NAME
+          || SSA_NAME_IS_DEFAULT_DEF (oe->op))
+        continue;
+      def_stmt = SSA_NAME_DEF_STMT (oe->op);
+      def_bb = gimple_bb (def_stmt);
+      if (def_bb != insert_bb
+          && !dominated_by_p (CDI_DOMINATORS, insert_bb, def_bb))
+        {
+          insert_bb = def_bb;
+          insert_stmt = def_stmt;
+        }
+      else if (def_bb == insert_bb
+               && gimple_uid (insert_stmt) < gimple_uid (def_stmt))
+        insert_stmt = def_stmt;
+    }
+  if (insert_stmt == stmt)
+    return;
+  gsistmt = gsi_for_stmt (stmt);
+  /* If INSERT_STMT is a PHI node, we cannot insert just after it.  Instead,
+     find the first non-label gimple statement in INSERT_BB and insert before
+     that.  */
+  if (gimple_code (insert_stmt) == GIMPLE_PHI)
+    {
+      gsi_insert = gsi_after_labels (insert_bb);
+      gsi_move_before (&gsistmt, &gsi_insert);
+    }
+  /* A statement that can throw internally must end its basic block, so we
+     cannot insert a (non-throwing) statement immediately after it.  */
+  else if (stmt_can_throw_internal (insert_stmt))
+    {
+      edge e, succ_edge = NULL;
+      edge_iterator ei;
+
+      /* There should be exactly one normal edge out of the basic block.  */
+      FOR_EACH_EDGE (e, ei, insert_bb->succs)
+        {
+          if (!(e->flags & EDGE_COMPLEX))
+            {
+              gcc_assert (succ_edge == NULL);
+              succ_edge = e;
+            }
+        }
+      /* Insert STMT at the beginning of the successor basic block.  */
+      insert_bb = succ_edge->dest;
+      gsi_insert = gsi_after_labels (insert_bb);
+      gsi_move_before (&gsistmt, &gsi_insert);
+    }
+  else
+    {
+      gsi_insert = gsi_for_stmt (insert_stmt);
+      gsi_move_after (&gsistmt, &gsi_insert);
+    }
+
+}
+
 /* Recursively rewrite our linearized statements so that the operators
    match those in OPS[OPINDEX], putting the computation in rank
    order.  */
@@ -2262,11 +2339,6 @@ rewrite_expr_tree (gimple stmt, unsigned int opind
   tree rhs2 = gimple_assign_rhs2 (stmt);
   operand_entry_t oe;
 
-  /* If we have three operands left, then we want to make sure the ones
-     that get the double binary op are chosen wisely.  */
-  if (opindex + 3 == VEC_length (operand_entry_t, ops))
-    swap_ops_for_binary_stmt (ops, opindex, stmt);
-
   /* The final recursion case for this function is that you have
      exactly two operations left.
      If we had one exactly one op in the entire list to start with, we
@@ -2312,19 +2384,17 @@ rewrite_expr_tree (gimple stmt, unsigned int opind
     {
       if (!moved)
 	{
-	  gimple_stmt_iterator gsinow, gsirhs1;
-	  gimple stmt1 = stmt, stmt2;
-	  unsigned int count;
+	  gimple stmt1 = stmt;
+	  unsigned int count, i = 1;
 
-	  gsinow = gsi_for_stmt (stmt);
 	  count = VEC_length (operand_entry_t, ops) - opindex - 2;
-	  while (count-- != 0)
+	  while (i <= count)
 	    {
-	      stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
-	      gsirhs1 = gsi_for_stmt (stmt2);
-	      gsi_move_before (&gsirhs1, &gsinow);
-	      gsi_prev (&gsinow);
-	      stmt1 = stmt2;
+	      stmt1 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
+              /* Ensure that STMT1 is moved to a place where all operands in
+                 OPS[OPINDEX + I, ...] are available.  */
+              ensure_ops_are_available (stmt1, ops, opindex + i);
+              i++;
 	    }
 	  moved = true;
 	}
@@ -2537,6 +2607,7 @@ linearize_expr (gimple stmt)
   gsinow = gsi_for_stmt (stmt);
   gsirhs = gsi_for_stmt (binrhs);
   gsi_move_before (&gsirhs, &gsinow);
+  gimple_set_uid (binrhs, gimple_uid (stmt));
 
   gimple_assign_set_rhs2 (stmt, gimple_assign_rhs1 (binrhs));
   gimple_assign_set_rhs1 (binrhs, gimple_assign_lhs (binlhs));
@@ -3542,8 +3613,17 @@ reassociate_bb (basic_block bb)
 		      && VEC_length (operand_entry_t, ops) > 3)
 		    rewrite_expr_tree_parallel (stmt, width, ops);
 		  else
-		    rewrite_expr_tree (stmt, 0, ops, false);
+                    {
+                      /* When at least three operands are left, make
+                         sure the ones that get the final double
+                         binary op are chosen wisely.  */
+                      int len = VEC_length (operand_entry_t, ops);
+                      if (len >= 3)
+                        swap_ops_for_binary_stmt (ops, len - 3, stmt);
 
+		      rewrite_expr_tree (stmt, 0, ops, false);
+                    }
+
 		  /* If we combined some repeated factors into a 
 		     __builtin_powi call, multiply that result by the
 		     reassociated operands.  */
@@ -3603,6 +3683,7 @@ static void
 do_reassoc (void)
 {
   break_up_subtract_bb (ENTRY_BLOCK_PTR);
+  renumber_gimple_stmt_uids ();
   reassociate_bb (EXIT_BLOCK_PTR);
 }
 
