Index: toplev.c
===================================================================
--- toplev.c	(revision 155343)
+++ toplev.c	(working copy)
@@ -287,7 +287,7 @@
 /* Set the default region and algorithm for the integrated register
    allocator.  */
 
-enum ira_algorithm flag_ira_algorithm = IRA_ALGORITHM_CB;
+enum ira_algorithm flag_ira_algorithm = IRA_ALGORITHM_PRIORITY;
 enum ira_region flag_ira_region = IRA_REGION_MIXED;
 
 /* Set the default value for -fira-verbose.  */
Index: ira-int.h
===================================================================
--- ira-int.h	(revision 155343)
+++ ira-int.h	(working copy)
@@ -418,6 +418,9 @@
   ira_allocno_t prev_bucket_allocno;
   /* Used for temporary purposes.  */
   int temp;
+  /* Make a note that this allocno can only use a subset of ALL_REGS,
+     e.g. BOTTOM_REGS.  */
+  int requires_register_subset;
 };
 
 /* All members of the allocno structures should be accessed only
@@ -481,6 +484,7 @@
 #define ALLOCNO_MIN(A) ((A)->min)
 #define ALLOCNO_MAX(A) ((A)->max)
 #define ALLOCNO_CONFLICT_ID(A) ((A)->conflict_id)
+#define ALLOCNO_REQUIRES_REGISTER_SUBSET(A) ((A)->requires_register_subset)
 
 /* Map regno -> allocnos with given regno (see comments for
    allocno member `next_regno_allocno').  */
Index: ira-color.c
===================================================================
--- ira-color.c	(revision 155343)
+++ ira-color.c	(working copy)
@@ -429,6 +429,65 @@
     }
 }
 
+
+/*
+  Calculates the maximum ALLOCNO BOTTOM_REG CONFLICT DEPTH.
+
+  Examine the entire live range for this allocno and look for one or more points
+  where taking a BOTTOM_REG for this allocno will deprive a yet-to-be-colored
+  allocno that needs BOTTOM_REGS. I identify these points by calculating the
+  "Allocno BOTTOM_REG Conflict Depth" (ABCD), which is the sum of the number of
+  yet-to-be-colored allocnos that require BOTTOM_REGS and the number of
+  BOTTOM_REGS already allocated to allocnos that conflict with this one.
+
+  If the max ABCD across the live range is 15 or greater (the number of non-fixed
+  BOTTOM_REGS) then we should not take a BOTTOM_REG.
+*/
+static int
+determine_max_abcd (ira_allocno_t a)
+{
+  allocno_live_range_t r;
+  ira_allocno_t conflict_allocno;
+  ira_allocno_conflict_iterator aci;
+  int max = 0;
+
+  /* If this allocno itself needs BOTTOM_REGS, the ABCD is irrelevant.  */
+  if (ALLOCNO_REQUIRES_REGISTER_SUBSET (a))
+    return 0;
+
+  for (r = ALLOCNO_LIVE_RANGES (a); r != NULL; r = r->next)
+    {
+      int prog_point;
+
+      for (prog_point = r->finish; prog_point >= r->start; prog_point--)
+        {
+          int depth = 0;
+
+          FOR_EACH_ALLOCNO_CONFLICT (a, conflict_allocno, aci)
+            {
+              allocno_live_range_t s;
+
+              if (ALLOCNO_REQUIRES_REGISTER_SUBSET (conflict_allocno)
+                  || (ALLOCNO_ASSIGNED_P (conflict_allocno)
+                      && ALLOCNO_HARD_REGNO (conflict_allocno) >= 0
+                      && ALLOCNO_HARD_REGNO (conflict_allocno) < 16))
+                for (s = ALLOCNO_LIVE_RANGES (conflict_allocno); s != NULL;
+                     s = s->next)
+                  if (s->start <= prog_point && prog_point <= s->finish)
+                    {
+                      depth++;
+                      break;
+                    }
+            }
+          if (max < depth)
+            max = depth;
+        }
+    }
+
+  return max;
+}
+
+
 /* Choose a hard register for ALLOCNO (or for all coalesced allocnos
    represented by ALLOCNO).  If RETRY_P is TRUE, it means that the
    function called from function `ira_reassign_conflict_allocnos' and
@@ -452,6 +511,7 @@
 #ifdef STACK_REGS
   bool no_stack_reg_p;
 #endif
+  int leave_bottom = 0;
 
   ira_assert (! ALLOCNO_ASSIGNED_P (allocno));
   cover_class = ALLOCNO_COVER_CLASS (allocno);
@@ -494,6 +554,17 @@
 	    costs[i] += cost;
 	    full_costs[i] += cost;
 	  }
+
+      /* Determine the maximum allocno BOTTOM_REG conflict depth (ABCD).
+         If it is 15 or more -- the number of non-fixed BOTTOM_REGS --
+         then we must not take a BOTTOM_REG for this allocno.  */
+      if (determine_max_abcd (a) >= 15)
+        {
+          leave_bottom = 1;
+          if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
+            fprintf (ira_dump_file, "[ABCD optimisation triggered for a%d]\n",
+                     ALLOCNO_NUM (a));
+        }
       /* Take preferences of conflicting allocnos into account.  */
       FOR_EACH_ALLOCNO_CONFLICT (a, conflict_allocno, aci)
 	/* Reload can give another class so we need to check all
@@ -502,7 +573,8 @@
 				     ALLOCNO_NUM (conflict_allocno)))
 	  {
 	    conflict_cover_class = ALLOCNO_COVER_CLASS (conflict_allocno);
-	    ira_assert (ira_reg_classes_intersect_p
+
+            ira_assert (ira_reg_classes_intersect_p
 			[cover_class][conflict_cover_class]);
 	    if (allocno_coalesced_p)
 	      {
@@ -600,6 +672,15 @@
 	}
       if (min_cost > cost)
 	min_cost = cost;
+
+      /* Nudge the cost of this reg up because someone else wants this one. */
+      if (leave_bottom && hard_regno < 16)
+        {
+          /* We need to add on the add_cost so that caller-save BOTTOM_REGS
+             end up more expensive than callee-save BOTTOM_REGS.  */
+          full_cost += (1 + add_cost);
+        }
+
       if (min_full_cost > full_cost)
 	{
 	  min_full_cost = full_cost;
@@ -607,6 +688,15 @@
 	  ira_assert (hard_regno >= 0);
 	}
     }
+
+  /* Detect cases where what we added to the full_cost was not enough to
+     force BOTTOM_REG to be avoided.  This can be caused by cost
+     adjustments due to hard reg moves and allocno copies.  */
+  if (leave_bottom && best_hard_regno >= 0 && best_hard_regno < 16
+      && internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
+    fprintf (ira_dump_file, "[ABCD optimisation failed to force TOP_CREG usage (a%d got %d)]\n",
+             ALLOCNO_NUM (allocno), best_hard_regno);
+
   if (min_full_cost > mem_cost)
     {
       if (! retry_p && internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
@@ -2853,14 +2943,42 @@
 ira_reassign_pseudos (int *spilled_pseudo_regs, int num,
 		      HARD_REG_SET bad_spill_regs,
 		      HARD_REG_SET *pseudo_forbidden_regs,
-		      HARD_REG_SET *pseudo_previous_regs,  bitmap spilled)
+		      HARD_REG_SET *pseudo_previous_regs,
+		      bitmap spilled)
 {
   int i, m, n, regno;
   bool changed_p;
   ira_allocno_t a, conflict_a;
   HARD_REG_SET forbidden_regs;
   ira_allocno_conflict_iterator aci;
+  bitmap temp = BITMAP_ALLOC (NULL);
 
+  /* Add pseudos which conflict with pseudos already in
+     SPILLED_PSEUDO_REGS to SPILLED_PSEUDO_REGS.  This is preferable
+     to allocating in two steps as some of the conflicts might have
+     a higher priority than the pseudos passed in SPILLED_PSEUDO_REGS.  */
+  for (i = 0; i < num; i++)
+    bitmap_set_bit (temp, spilled_pseudo_regs[i]);
+
+  /* NUM may grow as conflicting pseudos are appended below; N bounds
+     the original set so the new entries are not rescanned here.  */
+  for (i = 0, n = num; i < n; i++)
+    {
+      regno = spilled_pseudo_regs[i];
+      a = ira_regno_allocno_map[regno];
+      FOR_EACH_ALLOCNO_CONFLICT (a, conflict_a, aci)
+	if (ALLOCNO_HARD_REGNO (conflict_a) < 0
+	    && ! ALLOCNO_DONT_REASSIGN_P (conflict_a)
+	    && ! bitmap_bit_p (temp, ALLOCNO_REGNO (conflict_a)))
+	  {
+	    spilled_pseudo_regs[num++] = ALLOCNO_REGNO (conflict_a);
+	    bitmap_set_bit (temp, ALLOCNO_REGNO (conflict_a));
+	    /* ?!? This seems wrong.  */
+	    bitmap_set_bit (consideration_allocno_bitmap,
+			    ALLOCNO_NUM (conflict_a));
+	  }
+    }
+
   if (num > 1)
     qsort (spilled_pseudo_regs, num, sizeof (int), pseudo_reg_compare);
   changed_p = false;
@@ -2878,7 +2996,7 @@
       ira_assert (reg_renumber[regno] < 0);
       if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
 	fprintf (ira_dump_file,
-		 "      Spill %d(a%d), cost=%d", regno, ALLOCNO_NUM (a),
+		 "      Try Assign %d(a%d), cost=%d", regno, ALLOCNO_NUM (a),
 		 ALLOCNO_MEMORY_COST (a)
 		 - ALLOCNO_COVER_CLASS_COST (a));
       allocno_reload_assign (a, forbidden_regs);
@@ -2887,60 +3005,8 @@
 	  CLEAR_REGNO_REG_SET (spilled, regno);
 	  changed_p = true;
 	}
-      else
-	spilled_pseudo_regs[m++] = regno;
     }
-  if (m == 0)
-    return changed_p;
-  if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
-    {
-      fprintf (ira_dump_file, "      Spilled regs");
-      for (i = 0; i < m; i++)
-	fprintf (ira_dump_file, " %d", spilled_pseudo_regs[i]);
-      fprintf (ira_dump_file, "\n");
-    }
-  /* Try to assign hard registers to pseudos conflicting with ones
-     from SPILLED_PSEUDO_REGS.  */
-  for (i = n = 0; i < m; i++)
-    {
-      regno = spilled_pseudo_regs[i];
-      a = ira_regno_allocno_map[regno];
-      FOR_EACH_ALLOCNO_CONFLICT (a, conflict_a, aci)
-	if (ALLOCNO_HARD_REGNO (conflict_a) < 0
-	    && ! ALLOCNO_DONT_REASSIGN_P (conflict_a)
-	    && ! bitmap_bit_p (consideration_allocno_bitmap,
-			       ALLOCNO_NUM (conflict_a)))
-	  {
-	    sorted_allocnos[n++] = conflict_a;
-	    bitmap_set_bit (consideration_allocno_bitmap,
-			    ALLOCNO_NUM (conflict_a));
-	  }
-    }
-  if (n != 0)
-    {
-      setup_allocno_priorities (sorted_allocnos, n);
-      qsort (sorted_allocnos, n, sizeof (ira_allocno_t),
-	     allocno_priority_compare_func);
-      for (i = 0; i < n; i++)
-	{
-	  a = sorted_allocnos[i];
-	  regno = ALLOCNO_REGNO (a);
-	  COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
-	  IOR_HARD_REG_SET (forbidden_regs, pseudo_forbidden_regs[regno]);
-	  IOR_HARD_REG_SET (forbidden_regs, pseudo_previous_regs[regno]);
-	  if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
-	    fprintf (ira_dump_file,
-		     "        Try assign %d(a%d), cost=%d",
-		     regno, ALLOCNO_NUM (a),
-		     ALLOCNO_MEMORY_COST (a)
-		     - ALLOCNO_COVER_CLASS_COST (a));
-	  if (allocno_reload_assign (a, forbidden_regs))
-	    {
-	      changed_p = true;
-	      bitmap_clear_bit (spilled, regno);
-	    }
-	}
-    }
+  BITMAP_FREE (temp);
   return changed_p;
 }
 
Index: ira-build.c
===================================================================
--- ira-build.c	(revision 155343)
+++ ira-build.c	(working copy)
@@ -482,6 +482,7 @@
   ALLOCNO_LIVE_RANGES (a) = NULL;
   ALLOCNO_MIN (a) = INT_MAX;
   ALLOCNO_MAX (a) = -1;
+  ALLOCNO_REQUIRES_REGISTER_SUBSET (a) = 0;
   ALLOCNO_CONFLICT_ID (a) = ira_allocnos_num;
   VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
   ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
Index: ira-costs.c
===================================================================
--- ira-costs.c	(revision 155343)
+++ ira-costs.c	(working copy)
@@ -637,6 +637,14 @@
 	    struct costs *pp = op_costs[i], *qq = this_op_costs[i];
 	    int scale = 1 + (recog_data.operand_type[i] == OP_INOUT);
 
+            /* Catch any allocnos that want BOTTOM_REGS; record for later.  */
+            if (allocno_pref
+                && (allocno_pref[ALLOCNO_NUM (ira_curr_regno_allocno_map
+                                              [REGNO (ops[i])])]
+                    == BOTTOM_REGS))
+              ALLOCNO_REQUIRES_REGISTER_SUBSET
+                (ira_curr_regno_allocno_map[REGNO (ops[i])]) = 1;
+
 	    pp->mem_cost = MIN (pp->mem_cost,
 				(qq->mem_cost + op_cost_add) * scale);
 
Index: reload1.c
===================================================================
--- reload1.c	(revision 155343)
+++ reload1.c	(working copy)
@@ -48,6 +48,7 @@
 #include "df.h"
 #include "target.h"
 #include "emit-rtl.h"
+#include "ira-int.h"
 
 /* This file contains the reload pass of the compiler, which is
    run after register allocation has been done.  It checks that
@@ -5720,6 +5721,48 @@
   return 0;
 }
 
+/* Return nonzero if REGNO is a particularly bad choice for reloading X.
+   (Part of the Jeff Law hack.)  */
+static int
+ira_bad_reload_regno_1 (int regno, rtx x)
+{
+  int x_regno;
+  ira_allocno_t a;
+  enum reg_class pref;
+
+  /* We only deal with pseudo regs.  */
+  if (! x || GET_CODE (x) != REG)
+    return 0;
+
+  x_regno = REGNO (x);
+  if (x_regno < FIRST_PSEUDO_REGISTER)
+    return 0;
+
+  /* If the pseudo prefers REGNO explicitly, then do not consider
+     REGNO a bad spill choice.  */
+  pref = reg_preferred_class (x_regno);
+  if (reg_class_size[pref] == 1
+      && TEST_HARD_REG_BIT (reg_class_contents[pref], regno))
+    return 0;
+
+  /* If the pseudo conflicts with REGNO, then we consider REGNO a
+     poor choice for a reload regno.  */
+  a = ira_regno_allocno_map[x_regno];
+  if (TEST_HARD_REG_BIT (ALLOCNO_TOTAL_CONFLICT_HARD_REGS (a), regno))
+    return 1;
+
+  return 0;
+}
+
+/* Return nonzero if REGNO is a particularly bad choice for reloading
+   IN or OUT.  (Part of the Jeff Law hack.)  */
+int
+ira_bad_reload_regno (int regno, rtx in, rtx out)
+{
+  return (ira_bad_reload_regno_1 (regno, in)
+          || ira_bad_reload_regno_1 (regno, out));
+}
+
 /* Find a spill register to use as a reload register for reload R.
    LAST_RELOAD is nonzero if this is the last reload for the insn being
    processed.
@@ -5761,8 +5804,11 @@
      run out of reload regs.  Suppose we have three reloads, and
      reloads A and B can share regs.  These need two regs.
      Suppose A and B are given different regs.
-     That leaves none for C.  */
-  for (pass = 0; pass < 2; pass++)
+     That leaves none for C.
+
+     NOTE: There are now three passes in accordance with the Jeff
+     Law hack.  */
+  for (pass = 0; pass < 3; pass++)
     {
       /* I is the index in spill_regs.
 	 We advance it round-robin between insns to use all spill regs
@@ -5801,6 +5847,11 @@
 		      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
 					      regnum))))
 	    {
+              /* BEGIN: Jeff Law's hack */
+              if (pass == 1
+                  && ira_bad_reload_regno (regnum, rld[r].in, rld[r].out))
+                continue;
+              /* END: Jeff Law's hack */
 	      int nr = hard_regno_nregs[regnum][rld[r].mode];
 	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
 		 (on 68000) got us two FP regs.  If NR is 1,
