The kernel builds with -fno-var-tracking-assignments.  For MIPS (and
likely other delay slot targets) this results in DEBUG_INSNs appearing
in the IL and neither reorg.c nor resource.c is prepared for that.

I don't doubt there are more of these problems lurking, but this patch is
enough to get the MIPS ports bootstrapping with
-fno-var-tracking-assignments and Linux kernels to build.

Committing to the trunk.

Jeff
        * reorg.c (stop_search_p): Handle DEBUG_INSN.
        (redundant_insn, fill_simple_delay_slots): Likewise.
        (fill_slots_from_thread, relax_delay_slots): Likewise.
        * resource.c (mark_referenced_resources): Likewise.
        (mark_set_resources, find_dead_or_set_registers): Likewise.

diff --git a/gcc/reorg.c b/gcc/reorg.c
index ecdc3752af3..904d91ec9e8 100644
--- a/gcc/reorg.c
+++ b/gcc/reorg.c
@@ -276,6 +276,7 @@ stop_search_p (rtx_insn *insn, int labels_p)
     {
     case NOTE:
     case CALL_INSN:
+    case DEBUG_INSN:
       return 0;
 
     case CODE_LABEL:
@@ -1493,6 +1494,9 @@ redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list)
       if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;
 
+      if (GET_CODE (trial) == DEBUG_INSN)
+       continue;
+
       if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
        {
          /* Stop for a CALL and its delay slots because it is difficult to
@@ -1588,6 +1592,9 @@ redundant_insn (rtx insn, rtx_insn *target, const vec<rtx_insn *> &delay_list)
       if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;
 
+      if (GET_CODE (trial) == DEBUG_INSN)
+       continue;
+
       if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (pat))
        {
          bool annul_p = false;
@@ -2020,6 +2027,10 @@ fill_simple_delay_slots (int non_jumps_p)
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
                continue;
 
+             /* And DEBUG_INSNs never go into delay slots.  */
+             if (GET_CODE (trial) == DEBUG_INSN)
+               continue;
+
              /* Check for resource conflict first, to avoid unnecessary
                 splitting.  */
              if (! insn_references_resource_p (trial, &set, true)
@@ -2142,6 +2153,10 @@ fill_simple_delay_slots (int non_jumps_p)
              if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
                continue;
 
+             /* And DEBUG_INSNs do not go in delay slots.  */
+             if (GET_CODE (trial) == DEBUG_INSN)
+               continue;
+
              /* If this already has filled delay slots, get the insn needing
                 the delay slots.  */
              if (GET_CODE (pat) == SEQUENCE)
@@ -2211,8 +2226,8 @@ fill_simple_delay_slots (int non_jumps_p)
              && ! can_throw_internal (trial))
            {
              /* See comment in relax_delay_slots about necessity of using
-                next_real_insn here.  */
-             rtx_insn *new_label = next_real_insn (next_trial);
+                next_real_nondebug_insn here.  */
+             rtx_insn *new_label = next_real_nondebug_insn (next_trial);
 
              if (new_label != 0)
                new_label = get_label_before (new_label, JUMP_LABEL (trial));
@@ -2406,6 +2421,9 @@ fill_slots_from_thread (rtx_jump_insn *insn, rtx condition,
       if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
        continue;
 
+      if (GET_CODE (trial) == DEBUG_INSN)
+       continue;
+
       /* If TRIAL conflicts with the insns ahead of it, we lose.  Also,
         don't separate or copy insns that set and use CC0.  */
       if (! insn_references_resource_p (trial, &set, true)
@@ -3309,10 +3327,10 @@ relax_delay_slots (rtx_insn *first)
 
       /* If the first insn at TARGET_LABEL is redundant with a previous
         insn, redirect the jump to the following insn and process again.
-        We use next_real_insn instead of next_active_insn so we
+        We use next_real_nondebug_insn instead of next_active_insn so we
         don't skip USE-markers, or we'll end up with incorrect
         liveness info.  */
-      trial = next_real_insn (target_label);
+      trial = next_real_nondebug_insn (target_label);
       if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
          && redundant_insn (trial, insn, vNULL)
          && ! can_throw_internal (trial))
@@ -3327,7 +3345,7 @@ relax_delay_slots (rtx_insn *first)
            {
              /* Insert the special USE insn and update dataflow info.
                 We know "trial" is an insn here as it is the output of
-                next_real_insn () above.  */
+                next_real_nondebug_insn () above.  */
              update_block (as_a <rtx_insn *> (trial), tmp);
              
              /* Now emit a label before the special USE insn, and
diff --git a/gcc/resource.c b/gcc/resource.c
index ff194bb0be0..0822daebde7 100644
--- a/gcc/resource.c
+++ b/gcc/resource.c
@@ -212,6 +212,7 @@ mark_referenced_resources (rtx x, struct resources *res,
     case PC:
     case SYMBOL_REF:
     case LABEL_REF:
+    case DEBUG_INSN:
       return;
 
     case SUBREG:
@@ -451,6 +452,7 @@ find_dead_or_set_registers (rtx_insn *target, struct resources *res,
 
        case BARRIER:
        case NOTE:
+       case DEBUG_INSN:
          continue;
 
        case INSN:
@@ -639,6 +641,7 @@ mark_set_resources (rtx x, struct resources *res, int in_dest,
     case SYMBOL_REF:
     case CONST:
     case PC:
+    case DEBUG_INSN:
       /* These don't set any resources.  */
       return;
 

Reply via email to