>Number:         153298
>Category:       gnu
>Synopsis:       Update base gcc with latest GPL2 patches (FSF 4.2.2 prerelease)
>Confidential:   no
>Severity:       non-critical
>Priority:       medium
>Responsible:    freebsd-bugs
>State:          open
>Quarter:        
>Keywords:       
>Date-Required:
>Class:          update
>Submitter-Id:   current-users
>Arrival-Date:   Sun Dec 19 22:00:22 UTC 2010
>Closed-Date:
>Last-Modified:
>Originator:     Pedro F. Giffuni
>Release:        8.2-BETA1
>Organization:
>Environment:
FreeBSD mogwai.giffuni.net 8.2-BETA1 FreeBSD 8.2-BETA1 #0: Sun Dec  5 02:13:37 
UTC 2010     [email protected]:/usr/obj/usr/src/sys/GENERIC  i386

>Description:
The latest revision of the FSF gcc-4.2 under the GPL2 was r127959.
In order to avoid confusion with the gcc-4.2.2 release (under the
GPL3), I updated the base system with all the changes up to
2007-07-25, right before the prerelease version bump.

After this, I applied the fixes for the following GCC PRs:
middle-end/32563
debug/32610
c++/31337 
rtl-optimization/33148

Everything is under the GPL2. GCC still works and builds ports ;).
>How-To-Repeat:

>Fix:
Patch attached.

Patch attached with submission follows:

diff -ru gcc.orig/ChangeLog gcc/ChangeLog
--- gcc.orig/ChangeLog  2010-12-19 14:08:26.000000000 +0000
+++ gcc/ChangeLog       2010-12-19 16:25:49.000000000 +0000
@@ -1,3 +1,54 @@
+2007-07-25  Steve Ellcey  <[email protected]>
+
+       PR target/32218
+       * tree-vect-patterns.c (vect_pattern_recog_1): Check for valid type.
+
+2007-07-25  Dorit Nuzman  <[email protected]>
+           Devang Patel  <[email protected]>
+
+       PR tree-optimization/25413
+       * targhooks.c (default_builtin_vector_alignment_reachable): New.
+       * targhooks.h (default_builtin_vector_alignment_reachable): New.
+       * tree.h (contains_packed_reference): New.
+       * expr.c (contains_packed_reference): New.
+       * tree-vect-analyze.c (vector_alignment_reachable_p): New.
+       (vect_enhance_data_refs_alignment): Call
+       vector_alignment_reachable_p.
+       * target.h (vector_alignment_reachable): New builtin.
+       * target-def.h (TARGET_VECTOR_ALIGNMENT_REACHABLE): New.
+       * config/rs6000/rs6000.c (rs6000_vector_alignment_reachable): New.
+       (TARGET_VECTOR_ALIGNMENT_REACHABLE): Define.
+
+2007-07-24  Richard Guenther  <[email protected]>
+
+       Backport from mainline:
+       2007-07-16  Richard Guenther  <[email protected]>
+                   Uros Bizjak  <[email protected]>
+
+       * tree-if-conv.c (find_phi_replacement_condition): Unshare "*cond"
+       before forcing it to gimple operand.
+
+2007-07-24  Richard Guenther  <[email protected]>
+
+       PR tree-optimization/32723
+       Backport from mainline:
+       2007-03-09  Daniel Berlin  <[email protected]>
+
+       * tree-ssa-structalias.c (shared_bitmap_info_t): New structure.
+       (shared_bitmap_table): New variable.
+       (shared_bitmap_hash): New function.
+       (shared_bitmap_eq): Ditto
+       (shared_bitmap_lookup): Ditto.
+       (shared_bitmap_add): Ditto.
+       (find_what_p_points_to): Rewrite to use shared bitmap hashtable.
+       (init_alias_vars): Init shared bitmap hashtable.
+       (delete_points_to_sets): Delete shared bitmap hashtable.
+
+2007-07-23  Bernd Schmidt  <[email protected]>
+
+       * reload1.c (choose_reload_regs): Set reload_spill_index for regs
+       chosen during find_reloads.
+
 2007-07-19  Release Manager
 
        * GCC 4.2.1 released.
diff -ru gcc.orig/config/rs6000/rs6000.c gcc/config/rs6000/rs6000.c
--- gcc.orig/config/rs6000/rs6000.c     2010-12-19 14:08:18.000000000 +0000
+++ gcc/config/rs6000/rs6000.c  2010-12-19 15:14:32.000000000 +0000
@@ -664,6 +664,7 @@
 static tree rs6000_builtin_mask_for_load (void);
 
 static void def_builtin (int, const char *, tree, int);
+static bool rs6000_vector_alignment_reachable (tree, bool);
 static void rs6000_init_builtins (void);
 static rtx rs6000_expand_unop_builtin (enum insn_code, tree, rtx);
 static rtx rs6000_expand_binop_builtin (enum insn_code, tree, rtx);
@@ -915,6 +916,9 @@
 #undef TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD
 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD rs6000_builtin_mask_for_load
 
+#undef TARGET_VECTOR_ALIGNMENT_REACHABLE
+#define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
+
 #undef TARGET_INIT_BUILTINS
 #define TARGET_INIT_BUILTINS rs6000_init_builtins
 
@@ -1584,6 +1588,37 @@
     return 0;
 }
 
+
+/* Return true iff, data reference of TYPE can reach vector alignment (16)
+   after applying N number of iterations.  This routine does not determine
+   how may iterations are required to reach desired alignment.  */
+
+static bool
+rs6000_vector_alignment_reachable (tree type ATTRIBUTE_UNUSED, bool is_packed)
+{
+  if (is_packed)
+    return false;
+
+  if (TARGET_32BIT)
+    {
+      if (rs6000_alignment_flags == MASK_ALIGN_NATURAL)
+        return true;
+
+      if (rs6000_alignment_flags ==  MASK_ALIGN_POWER)
+        return true;
+
+      return false;
+    }
+  else
+    {
+      if (TARGET_MACHO)
+        return false;
+
+      /* Assuming that all other types are naturally aligned. CHECKME!  */
+      return true;
+    }
+}
+
 /* Handle generic options of the form -mfoo=yes/no.
    NAME is the option name.
    VALUE is the option value.
Only in gcc: dwarf2aout.c
diff -ru gcc.orig/dwarf2out.c gcc/dwarf2out.c
--- gcc.orig/dwarf2out.c        2010-12-19 14:08:23.000000000 +0000
+++ gcc/dwarf2out.c     2010-12-19 16:51:06.000000000 +0000
@@ -10065,6 +10065,43 @@
   else if (initializer_constant_valid_p (init, type)
           && ! walk_tree (&init, reference_to_unused, NULL, NULL))
     {
+      /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
+        possible.  */
+      if (TREE_CODE (type) == VECTOR_TYPE)
+       switch (TREE_CODE (init))
+         {
+         case VECTOR_CST:
+           break;
+         case CONSTRUCTOR:
+           if (TREE_CONSTANT (init))
+             {
+               VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (init);
+               bool constant_p = true;
+               tree value;
+               unsigned HOST_WIDE_INT ix;
+
+               /* Even when ctor is constant, it might contain non-*_CST
+                  elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
+                  belong into VECTOR_CST nodes.  */
+               FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
+                 if (!CONSTANT_CLASS_P (value))
+                   {
+                     constant_p = false;
+                     break;
+                   }
+
+               if (constant_p)
+                 {
+                   init = build_vector_from_ctor (type, elts);
+                   break;
+                 }
+             }
+           /* FALLTHRU */
+
+         default:
+           return NULL;
+         }
+
       rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
 
       /* If expand_expr returns a MEM, it wasn't immediate.  */
@@ -13197,7 +13234,8 @@
         was generated within the original definition of an inline function) we
         have to generate a special (abbreviated) DW_TAG_structure_type,
         DW_TAG_union_type, or DW_TAG_enumeration_type DIE here.  */
-      if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
+      if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE
+         && is_tagged_type (TREE_TYPE (decl)))
        {
          gen_tagged_type_instantiation_die (TREE_TYPE (decl), context_die);
          break;
diff -ru gcc.orig/expr.c gcc/expr.c
--- gcc.orig/expr.c     2010-12-19 14:08:23.000000000 +0000
+++ gcc/expr.c  2010-12-19 16:17:49.000000000 +0000
@@ -5654,7 +5654,6 @@
   enum machine_mode mode = VOIDmode;
   tree offset = size_zero_node;
   tree bit_offset = bitsize_zero_node;
-  tree tem;
 
   /* First get the mode, signedness, and size.  We do this from just the
      outermost expression.  */
@@ -5690,6 +5689,8 @@
        *pbitsize = tree_low_cst (size_tree, 1);
     }
 
+  *pmode = mode;
+
   /* Compute cumulative bit-offset for nested component-refs and array-refs,
      and find the ultimate containing object.  */
   while (1)
@@ -5774,21 +5775,69 @@
  done:
 
   /* If OFFSET is constant, see if we can return the whole thing as a
-     constant bit position.  Otherwise, split it up.  */
-  if (host_integerp (offset, 0)
-      && 0 != (tem = size_binop (MULT_EXPR,
-                                fold_convert (bitsizetype, offset),
-                                bitsize_unit_node))
-      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
-      && host_integerp (tem, 0))
-    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
-  else
-    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
+     constant bit position.  Make sure to handle overflow during
+     this conversion.  */
+  if (host_integerp (offset, 0))
+    {
+      double_int tem = double_int_mul (tree_to_double_int (offset),
+                                      uhwi_to_double_int (BITS_PER_UNIT));
+      tem = double_int_add (tem, tree_to_double_int (bit_offset));
+      if (double_int_fits_in_shwi_p (tem))
+       {
+         *pbitpos = double_int_to_shwi (tem);
+         *poffset = NULL_TREE;
+         return exp;
+       }
+    }
+
+  /* Otherwise, split it up.  */
+  *pbitpos = tree_low_cst (bit_offset, 0);
+  *poffset = offset;
 
-  *pmode = mode;
   return exp;
 }
 
+/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
+   look for whether EXP or any nested component-refs within EXP is marked
+   as PACKED.  */
+
+bool
+contains_packed_reference (tree exp)
+{
+  bool packed_p = false;
+
+  while (1)
+    {
+      switch (TREE_CODE (exp))
+       {
+       case COMPONENT_REF:
+         {
+           tree field = TREE_OPERAND (exp, 1);
+           packed_p = DECL_PACKED (field) 
+                      || TYPE_PACKED (TREE_TYPE (field))
+                      || TYPE_PACKED (TREE_TYPE (exp));
+           if (packed_p)
+             goto done;
+         }
+         break;
+
+       case BIT_FIELD_REF:
+       case ARRAY_REF:
+       case ARRAY_RANGE_REF:
+       case REALPART_EXPR:
+       case IMAGPART_EXPR:
+       case VIEW_CONVERT_EXPR:
+         break;
+
+       default:
+         goto done;
+       }
+      exp = TREE_OPERAND (exp, 0);
+    }
+ done:
+  return packed_p;
+}
+
 /* Return a tree of sizetype representing the size, in bytes, of the element
    of EXP, an ARRAY_REF.  */
 
diff -ru gcc.orig/gimplify.c gcc/gimplify.c
--- gcc.orig/gimplify.c 2010-12-19 14:08:24.000000000 +0000
+++ gcc/gimplify.c      2010-12-19 16:29:44.000000000 +0000
@@ -3532,8 +3532,16 @@
   gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
              || TREE_CODE (*expr_p) == INIT_EXPR);
 
-  /* For zero sized types only gimplify the left hand side and right hand side
-     as statements and throw away the assignment.  */
+  /* See if any simplifications can be done based on what the RHS is.  */
+  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
+                                 want_value);
+  if (ret != GS_UNHANDLED)
+    return ret;
+
+  /* For zero sized types only gimplify the left hand side and right hand
+     side as statements and throw away the assignment.  Do this after
+     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
+     types properly.  */
   if (zero_sized_type (TREE_TYPE (*from_p)))
     {
       gimplify_stmt (from_p);
@@ -3544,12 +3552,6 @@
       return GS_ALL_DONE;
     }
 
-  /* See if any simplifications can be done based on what the RHS is.  */
-  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
-                                 want_value);
-  if (ret != GS_UNHANDLED)
-    return ret;
-
   /* If the value being copied is of variable width, compute the length
      of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
      before gimplifying any of the operands so that we can resolve any
diff -ru gcc.orig/reload1.c gcc/reload1.c
--- gcc.orig/reload1.c  2010-12-19 14:08:24.000000000 +0000
+++ gcc/reload1.c       2010-12-19 14:26:26.000000000 +0000
@@ -5451,7 +5451,14 @@
   for (j = 0; j < n_reloads; j++)
     {
       reload_order[j] = j;
-      reload_spill_index[j] = -1;
+      if (rld[j].reg_rtx != NULL_RTX)
+       {
+         gcc_assert (REG_P (rld[j].reg_rtx)
+                     && HARD_REGISTER_P (rld[j].reg_rtx));
+         reload_spill_index[j] = REGNO (rld[j].reg_rtx);
+       }
+      else
+       reload_spill_index[j] = -1;
 
       if (rld[j].nregs > 1)
        {
diff -ru gcc.orig/target-def.h gcc/target-def.h
--- gcc.orig/target-def.h       2010-12-19 14:08:25.000000000 +0000
+++ gcc/target-def.h    2010-12-19 15:10:58.000000000 +0000
@@ -337,9 +337,12 @@
    TARGET_SCHED_SET_SCHED_FLAGS}
 
 #define TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD 0
+#define TARGET_VECTOR_ALIGNMENT_REACHABLE \
+  default_builtin_vector_alignment_reachable
 
 #define TARGET_VECTORIZE                                                \
-  {TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD}
+  {TARGET_VECTORIZE_BUILTIN_MASK_FOR_LOAD,                             \
+   TARGET_VECTOR_ALIGNMENT_REACHABLE}
 
 #define TARGET_DEFAULT_TARGET_FLAGS 0
 
diff -ru gcc.orig/target.h gcc/target.h
--- gcc.orig/target.h   2010-12-19 14:08:25.000000000 +0000
+++ gcc/target.h        2010-12-19 15:10:28.000000000 +0000
@@ -375,6 +375,10 @@
        by the vectorizer, and return the decl of the target builtin
        function.  */
     tree (* builtin_mask_for_load) (void);
+
+    /* Return true if vector alignment is reachable (by peeling N
+      interations) for the given type.  */
+     bool (* vector_alignment_reachable) (tree, bool);
   } vectorize;
 
   /* The initial value of target_flags.  */
diff -ru gcc.orig/targhooks.c gcc/targhooks.c
--- gcc.orig/targhooks.c        2010-12-19 14:08:25.000000000 +0000
+++ gcc/targhooks.c     2010-12-19 15:05:00.000000000 +0000
@@ -604,4 +604,20 @@
   return flag_pic ? 3 : 0;
 }
 
+bool
+default_builtin_vector_alignment_reachable (tree type, bool is_packed)
+{
+  if (is_packed)
+    return false;
+
+  /* Assuming that types whose size is > pointer-size are not guaranteed to be
+     naturally aligned.  */
+  if (tree_int_cst_compare (TYPE_SIZE (type), bitsize_int (POINTER_SIZE)) > 0)
+    return false;
+
+  /* Assuming that types whose size is <= pointer-size
+     are naturally aligned.  */
+  return true;
+}
+
 #include "gt-targhooks.h"
diff -ru gcc.orig/targhooks.h gcc/targhooks.h
--- gcc.orig/targhooks.h        2010-12-19 14:08:25.000000000 +0000
+++ gcc/targhooks.h     2010-12-19 15:05:37.000000000 +0000
@@ -57,6 +57,8 @@
 
 extern bool default_narrow_bitfield (void);
 
+extern bool default_builtin_vector_alignment_reachable (tree, bool);
+
 /* These are here, and not in hooks.[ch], because not all users of
    hooks.h include tm.h, and thus we don't have CUMULATIVE_ARGS.  */
 
diff -ru gcc.orig/tree-if-conv.c gcc/tree-if-conv.c
--- gcc.orig/tree-if-conv.c     2010-12-19 14:08:25.000000000 +0000
+++ gcc/tree-if-conv.c  2010-12-19 14:32:44.000000000 +0000
@@ -743,7 +743,7 @@
       if (TREE_CODE (*cond) == TRUTH_NOT_EXPR)
        /* We can be smart here and choose inverted
           condition without switching bbs.  */
-         *cond = invert_truthvalue (*cond);
+       *cond = invert_truthvalue (*cond);
       else
        /* Select non loop header bb.  */
        first_edge = second_edge;
@@ -762,9 +762,11 @@
 
   /* Create temp. for the condition. Vectorizer prefers to have gimple
      value as condition. Various targets use different means to communicate
-     condition in vector compare operation. Using gimple value allows compiler
     to emit vector compare and select RTL without exposing compare's result.  */
-  *cond = force_gimple_operand (*cond, &new_stmts, false, NULL_TREE);
+     condition in vector compare operation. Using gimple value allows
+     compiler to emit vector compare and select RTL without exposing
+     compare's result.  */
+  *cond = force_gimple_operand (unshare_expr (*cond), &new_stmts,
+                               false, NULL_TREE);
   if (new_stmts)
     bsi_insert_before (bsi, new_stmts, BSI_SAME_STMT);
   if (!is_gimple_reg (*cond) && !is_gimple_condexpr (*cond))
diff -ru gcc.orig/tree-ssa-structalias.c gcc/tree-ssa-structalias.c
--- gcc.orig/tree-ssa-structalias.c     2010-12-19 14:08:25.000000000 +0000
+++ gcc/tree-ssa-structalias.c  2010-12-19 14:30:17.000000000 +0000
@@ -4350,6 +4350,75 @@
   process_constraint (new_constraint (lhs, rhs));
 }
 
+/* Structure used to put solution bitmaps in a hashtable so they can
+   be shared among variables with the same points-to set.  */
+
+typedef struct shared_bitmap_info
+{
+  bitmap pt_vars;
+  hashval_t hashcode;
+} *shared_bitmap_info_t;
+
+static htab_t shared_bitmap_table;
+
+/* Hash function for a shared_bitmap_info_t */
+
+static hashval_t
+shared_bitmap_hash (const void *p)
+{
+  const shared_bitmap_info_t bi = (shared_bitmap_info_t) p;
+  return bi->hashcode;
+}
+
+/* Equality function for two shared_bitmap_info_t's. */
+
+static int
+shared_bitmap_eq (const void *p1, const void *p2)
+{
+  const shared_bitmap_info_t sbi1 = (shared_bitmap_info_t) p1;
+  const shared_bitmap_info_t sbi2 = (shared_bitmap_info_t) p2;
+  return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
+}
+
+/* Lookup a bitmap in the shared bitmap hashtable, and return an already
+   existing instance if there is one, NULL otherwise.  */
+
+static bitmap
+shared_bitmap_lookup (bitmap pt_vars)
+{
+  void **slot;
+  struct shared_bitmap_info sbi;
+
+  sbi.pt_vars = pt_vars;
+  sbi.hashcode = bitmap_hash (pt_vars);
+  
+  slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
+                                  sbi.hashcode, NO_INSERT);
+  if (!slot)
+    return NULL;
+  else
+    return ((shared_bitmap_info_t) *slot)->pt_vars;
+}
+
+
+/* Add a bitmap to the shared bitmap hashtable.  */
+
+static void
+shared_bitmap_add (bitmap pt_vars)
+{
+  void **slot;
+  shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
+  
+  sbi->pt_vars = pt_vars;
+  sbi->hashcode = bitmap_hash (pt_vars);
+  
+  slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
+                                  sbi->hashcode, INSERT);
+  gcc_assert (!*slot);
+  *slot = (void *) sbi;
+}
+
+
 /* Set bits in INTO corresponding to the variable uids in solution set
    FROM, which came from variable PTR.
    For variables that are actually dereferenced, we also use type
@@ -4460,7 +4529,9 @@
          struct ptr_info_def *pi = get_ptr_info (p);
          unsigned int i;
          bitmap_iterator bi;
-
+         bitmap finished_solution;
+         bitmap result;
+         
          /* This variable may have been collapsed, let's get the real
             variable.  */
          vi = get_varinfo (find (vi->id));
@@ -4492,10 +4563,20 @@
          if (pi->pt_anything)
            return false;
 
-         if (!pi->pt_vars)
-           pi->pt_vars = BITMAP_GGC_ALLOC ();
+         finished_solution = BITMAP_GGC_ALLOC ();
+         set_uids_in_ptset (vi->decl, finished_solution, vi->solution);
+         result = shared_bitmap_lookup (finished_solution);
 
-         set_uids_in_ptset (vi->decl, pi->pt_vars, vi->solution);
+         if (!result)
+           {
+             shared_bitmap_add (finished_solution);
+             pi->pt_vars = finished_solution;
+           }
+         else
+           {
+             pi->pt_vars = result;
+             bitmap_clear (finished_solution);
+           }
 
          if (bitmap_empty_p (pi->pt_vars))
            pi->pt_vars = NULL;
@@ -4691,6 +4772,8 @@
   vi_for_tree = pointer_map_create ();
 
   memset (&stats, 0, sizeof (stats));
+  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
+                                    shared_bitmap_eq, free);
   init_base_vars ();
 }
 
@@ -4923,6 +5006,7 @@
   varinfo_t v;
   int i;
 
+  htab_delete (shared_bitmap_table);
   if (dump_file && (dump_flags & TDF_STATS))
     fprintf (dump_file, "Points to sets created:%d\n",
             stats.points_to_sets_created);
diff -ru gcc.orig/tree-vect-analyze.c gcc/tree-vect-analyze.c
--- gcc.orig/tree-vect-analyze.c        2010-12-19 14:08:25.000000000 +0000
+++ gcc/tree-vect-analyze.c     2010-12-19 15:16:30.000000000 +0000
@@ -25,6 +25,7 @@
 #include "tm.h"
 #include "ggc.h"
 #include "tree.h"
+#include "target.h"
 #include "basic-block.h"
 #include "diagnostic.h"
 #include "tree-flow.h"
@@ -911,6 +912,57 @@
 }
 
 
+/* Function vector_alignment_reachable_p
+
+   Return true if vector alignment for DR is reachable by peeling
+   a few loop iterations.  Return false otherwise.  */
+
+static bool
+vector_alignment_reachable_p (struct data_reference *dr)
+{
+  tree stmt = DR_STMT (dr);
+  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
+  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
+
+  /* If misalignment is known at the compile time then allow peeling
+     only if natural alignment is reachable through peeling.  */
+  if (known_alignment_for_access_p (dr) && !aligned_access_p (dr))
+    {
+      HOST_WIDE_INT elmsize = 
+               int_cst_value (TYPE_SIZE_UNIT (TREE_TYPE (vectype)));
+      if (vect_print_dump_info (REPORT_DETAILS))
+       {
+         fprintf (vect_dump, "data size =" HOST_WIDE_INT_PRINT_DEC, elmsize);
+         fprintf (vect_dump, ". misalignment = %d. ", DR_MISALIGNMENT (dr));
+       }
+      if (DR_MISALIGNMENT (dr) % elmsize)
+       {
+         if (vect_print_dump_info (REPORT_DETAILS))
+           fprintf (vect_dump, "data size does not divide the misalignment.\n");
+         return false;
+       }
+    }
+
+  if (!known_alignment_for_access_p (dr))
+    {
+      tree type = (TREE_TYPE (DR_REF (dr)));
+      tree ba = DR_BASE_OBJECT (dr);
+      bool is_packed = false;
+
+      if (ba)
+       is_packed = contains_packed_reference (ba);
+
+      if (vect_print_dump_info (REPORT_DETAILS))
+       fprintf (vect_dump, "Unknown misalignment, is_packed = %d",is_packed);
+      if (targetm.vectorize.vector_alignment_reachable (type, is_packed))
+       return true;
+      else
+       return false;
+    }
+
+  return true;
+}
+
 /* Function vect_enhance_data_refs_alignment
 
    This pass will use loop versioning and loop peeling in order to enhance
@@ -1056,8 +1108,11 @@
   for (i = 0; VEC_iterate (data_reference_p, datarefs, i, dr); i++)
     if (!DR_IS_READ (dr) && !aligned_access_p (dr))
       {
-       dr0 = dr;
-       do_peeling = true;
+        do_peeling = vector_alignment_reachable_p (dr);
+        if (do_peeling)
+          dr0 = dr;
+        if (!do_peeling && vect_print_dump_info (REPORT_DETAILS))
+          fprintf (vect_dump, "vector alignment may not be reachable");
        break;
       }
 
diff -ru gcc.orig/tree.c gcc/tree.c
--- gcc.orig/tree.c     2010-12-19 14:08:25.000000000 +0000
+++ gcc/tree.c  2010-12-19 16:05:48.000000000 +0000
@@ -4540,7 +4540,8 @@
               && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) >= 0)
              || (! pos && TREE_INT_CST_HIGH (t) == -1
                  && (HOST_WIDE_INT) TREE_INT_CST_LOW (t) < 0
-                 && !TYPE_UNSIGNED (TREE_TYPE (t)))
+                 && (!TYPE_UNSIGNED (TREE_TYPE (t))
+                     || TYPE_IS_SIZETYPE (TREE_TYPE (t))))
              || (pos && TREE_INT_CST_HIGH (t) == 0)));
 }
 
diff -ru gcc.orig/tree.h gcc/tree.h
--- gcc.orig/tree.h     2010-12-19 14:08:25.000000000 +0000
+++ gcc/tree.h  2010-12-19 15:06:19.000000000 +0000
@@ -4112,6 +4112,12 @@
                                 tree *, enum machine_mode *, int *, int *,
                                 bool);
 
+/* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
+   look for whether EXP or any nested component-refs within EXP is marked
+   as PACKED.  */
+
+extern bool contains_packed_reference (tree exp);
+
 /* Return 1 if T is an expression that get_inner_reference handles.  */
 
 extern int handled_component_p (tree);


>Release-Note:
>Audit-Trail:
>Unformatted:
_______________________________________________
[email protected] mailing list
http://lists.freebsd.org/mailman/listinfo/freebsd-bugs
To unsubscribe, send any mail to "[email protected]"

Reply via email to