This adds a struct-function-taking variant of
pre_and_rev_post_order_compute that also drops the assert that
all blocks were visited (consistent with the function comment).
This makes graph.c no longer ICE on CFGs with unreachable blocks.
graph.c already tries to handle such CFGs, but it did not account
for the RPO array being filled from the end, and
pre_and_rev_post_order_compute ICEs anyway whenever the number of
visited blocks differs from n_basic_blocks_for_function.

The reverse filling is a good reason to keep the assert in
the pre_and_rev_post_order_compute variant that now wraps
pre_and_rev_post_order_compute_fn.

Bootstrap and regtest running on x86_64-unknown-linux-gnu.

Richard.

2013-10-30  Richard Biener  <rguent...@suse.de>

        PR middle-end/57100
        * basic-block.h (pre_and_rev_post_order_compute_fn): Declare.
        * cfganal.c (pre_and_rev_post_order_compute_fn): New function
        as worker for ...
        (pre_and_rev_post_order_compute): ... which now wraps it.
        * graph.c (draw_cfg_nodes_no_loops): Use
        pre_and_rev_post_order_compute_fn to avoid ICEing and dependence
        on cfun.

Index: gcc/basic-block.h
===================================================================
*** gcc/basic-block.h   (revision 204202)
--- gcc/basic-block.h   (working copy)
*************** extern void connect_infinite_loops_to_ex
*** 795,800 ****
--- 795,802 ----
  extern int post_order_compute (int *, bool, bool);
  extern basic_block dfs_find_deadend (basic_block);
  extern int inverted_post_order_compute (int *);
+ extern int pre_and_rev_post_order_compute_fn (struct function *,
+                                             int *, int *, bool);
  extern int pre_and_rev_post_order_compute (int *, int *, bool);
  extern int dfs_enumerate_from (basic_block, int,
                               bool (*)(const_basic_block, const void *),
Index: gcc/cfganal.c
===================================================================
*** gcc/cfganal.c       (revision 204202)
--- gcc/cfganal.c       (working copy)
*************** inverted_post_order_compute (int *post_o
*** 878,897 ****
    return post_order_num;
  }
  
! /* Compute the depth first search order and store in the array
!   PRE_ORDER if nonzero, marking the nodes visited in VISITED.  If
!   REV_POST_ORDER is nonzero, return the reverse completion number for each
!   node.  Returns the number of nodes visited.  A depth first search
!   tries to get as far away from the starting point as quickly as
!   possible.
! 
!   pre_order is a really a preorder numbering of the graph.
!   rev_post_order is really a reverse postorder numbering of the graph.
!  */
  
  int
! pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
!                               bool include_entry_exit)
  {
    edge_iterator *stack;
    int sp;
--- 878,899 ----
    return post_order_num;
  }
  
! /* Compute the depth first search order of FN and store in the array
!    PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
!    reverse completion number for each node.  Returns the number of nodes
!    visited.  A depth first search tries to get as far away from the starting
!    point as quickly as possible.
! 
!    In case the function has unreachable blocks the number of nodes
!    visited does not include them.
! 
!    pre_order is really a preorder numbering of the graph.
!    rev_post_order is really a reverse postorder numbering of the graph.  */
  
  int
! pre_and_rev_post_order_compute_fn (struct function *fn,
!                                  int *pre_order, int *rev_post_order,
!                                  bool include_entry_exit)
  {
    edge_iterator *stack;
    int sp;
*************** pre_and_rev_post_order_compute (int *pre
*** 921,927 ****
    bitmap_clear (visited);
  
    /* Push the first edge on to the stack.  */
!   stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs);
  
    while (sp)
      {
--- 923,929 ----
    bitmap_clear (visited);
  
    /* Push the first edge on to the stack.  */
!   stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)->succs);
  
    while (sp)
      {
*************** pre_and_rev_post_order_compute (int *pre
*** 935,941 ****
        dest = ei_edge (ei)->dest;
  
        /* Check if the edge destination has been visited yet.  */
!       if (dest != EXIT_BLOCK_PTR && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);
--- 937,944 ----
        dest = ei_edge (ei)->dest;
  
        /* Check if the edge destination has been visited yet.  */
!       if (dest != EXIT_BLOCK_PTR_FOR_FUNCTION (fn)
!         && ! bitmap_bit_p (visited, dest->index))
        {
          /* Mark that we have visited the destination.  */
          bitmap_set_bit (visited, dest->index);
*************** pre_and_rev_post_order_compute (int *pre
*** 956,962 ****
        }
        else
        {
!         if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR
              && rev_post_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
--- 959,966 ----
        }
        else
        {
!         if (ei_one_before_end_p (ei)
!             && src != ENTRY_BLOCK_PTR_FOR_FUNCTION (fn)
              && rev_post_order)
            /* There are no more successors for the SRC node
               so assign its reverse completion number.  */
*************** pre_and_rev_post_order_compute (int *pre
*** 979,987 ****
        pre_order_num++;
        if (rev_post_order)
        rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
-       /* The number of nodes visited should be the number of blocks.  */
-       gcc_assert (pre_order_num == n_basic_blocks);
      }
    else
      /* The number of nodes visited should be the number of blocks minus
         the entry and exit blocks which are not visited here.  */
--- 983,1006 ----
        pre_order_num++;
        if (rev_post_order)
        rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
      }
+ 
+   return pre_order_num;
+ }
+ 
+ /* Like pre_and_rev_post_order_compute_fn but operating on the
+    current function and asserting that all nodes were visited.  */
+ 
+ int
+ pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
+                               bool include_entry_exit)
+ {
+   int pre_order_num
+     = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
+                                        include_entry_exit);
+   if (include_entry_exit)
+     /* The number of nodes visited should be the number of blocks.  */
+     gcc_assert (pre_order_num == n_basic_blocks);
    else
      /* The number of nodes visited should be the number of blocks minus
         the entry and exit blocks which are not visited here.  */
Index: gcc/graph.c
===================================================================
*** gcc/graph.c (revision 204202)
--- gcc/graph.c (working copy)
*************** draw_cfg_nodes_no_loops (pretty_printer
*** 160,168 ****
    visited = sbitmap_alloc (last_basic_block);
    bitmap_clear (visited);
  
!   /* FIXME: pre_and_rev_post_order_compute only works if fun == cfun.  */
!   n = pre_and_rev_post_order_compute (NULL, rpo, true);
!   for (i = 0; i < n; i++)
      {
        basic_block bb = BASIC_BLOCK (rpo[i]);
        draw_cfg_node (pp, fun->funcdef_no, bb);
--- 160,168 ----
    visited = sbitmap_alloc (last_basic_block);
    bitmap_clear (visited);
  
!   n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, true);
!   for (i = n_basic_blocks_for_function (fun) - n;
!        i < n_basic_blocks_for_function (fun); i++)
      {
        basic_block bb = BASIC_BLOCK (rpo[i]);
        draw_cfg_node (pp, fun->funcdef_no, bb);

Reply via email to