[PATCH] Unify CCP helper routines

Richard Guenther <rguenther@suse.de>
Tue Mar 22 14:04:00 GMT 2011


This follows up http://gcc.gnu.org/ml/gcc-patches/2011-03/msg01023.html
by moving the CCP-related functions to gimple-fold.[ch] instead.  It
doesn't integrate with the gimple-fold proposal yet, but I figured
doing that incrementally is easier and allows bisecting this patch.

Bootstrapped and tested on x86_64-unknown-linux-gnu; I will apply this
tomorrow if there are no complaints.

Richard.

2011-03-17  Richard Guenther  <rguenther@suse.de>

	PR tree-optimization/46562
	* tree.c (build_invariant_address): New function.
	* tree.h (build_invariant_address): Declare.
	* tree-dfa.c (get_addr_base_and_unit_offset): Wrap around
	a renamed function moved ...
	* tree-flow-inline.h (get_addr_base_and_unit_offset_1): ... here.
	Take valueization callback parameter.
	* tree-flow.h (gimple_fold_stmt_to_constant): Declare.
	* gimple-fold.h: New file.
	* tree-ssa-ccp.c (ccp_fold): Use gimple_fold_stmt_to_constant_1.
	(ccp_fold, fold_const_aggregate_ref,
	fold_ctor_reference, fold_nonarray_ctor_reference,
	fold_array_ctor_reference, fold_string_cst_ctor_reference,
	get_base_constructor): Move ...
	* gimple-fold.c: ... here.
	(gimple_fold_stmt_to_constant_1): New function
	split out from ccp_fold.  Take a valueization callback parameter.
	Valueize all operands.
	(gimple_fold_stmt_to_constant): New wrapper function.
	(fold_const_aggregate_ref_1): New function split out from
	fold_const_aggregate_ref.  Take a valueization callback parameter.
	(fold_const_aggregate_ref): Wrap fold_const_aggregate_ref_1.
	* tree-ssa-sccvn.c (simplify_binary_expression): Simplify
	invariant POINTER_PLUS_EXPRs to invariant form.
	(vn_valueize): New function.
	(try_to_simplify): Simplify by using gimple_fold_stmt_to_constant.
	* tree-vrp.c (vrp_valueize): New function.
	(vrp_visit_assignment_or_call): Use gimple_fold_stmt_to_constant
	to fold statements to constants.
	* tree-ssa-pre.c (eliminate): Properly guard propagation of
	function declarations.
	* Makefile.in (tree-ssa-sccvn.o, tree-vrp.o, gimple-fold.o,
	tree-ssa-ccp.o): Add gimple-fold.h dependencies.

	* c-c++-common/pr46562-2.c: New testcase.
	* c-c++-common/pr46562.c: Likewise.
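
For reference, the new entry points take a valueization callback so that
each pass can plug in its own lattice lookup.  A minimal usage sketch
follows (not part of the patch; my_lattice_value is a hypothetical
lookup -- vn_valueize and vrp_valueize below are the real instances of
this pattern):

  /* Return the lattice value for NAME if it is known constant,
     otherwise NAME itself.  The callback must always return a
     valid tree.  */

  static tree
  my_valueize (tree name)
  {
    if (TREE_CODE (name) == SSA_NAME)
      {
	tree val = my_lattice_value (name);  /* hypothetical lookup */
	if (val && is_gimple_min_invariant (val))
	  return val;
      }
    return name;
  }

  ...
  /* In the pass, fold STMT against the current lattice.  */
  tree val = gimple_fold_stmt_to_constant (stmt, my_valueize);
  if (val)
    /* STMT is known to compute the constant VAL.  */
    ...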


Index: gcc/gimple-fold.h
===================================================================
*** /dev/null	1970-01-01 00:00:00.000000000 +0000
--- gcc/gimple-fold.h	2011-03-22 13:07:21.000000000 +0100
***************
*** 0 ****
--- 1,33 ----
+ /* Gimple folding definitions.
+ 
+    Copyright 2011 Free Software Foundation, Inc.
+    Contributed by Richard Guenther <rguenther@suse.de>
+ 
+ This file is part of GCC.
+ 
+ GCC is free software; you can redistribute it and/or modify it under
+ the terms of the GNU General Public License as published by the Free
+ Software Foundation; either version 3, or (at your option) any later
+ version.
+ 
+ GCC is distributed in the hope that it will be useful, but WITHOUT ANY
+ WARRANTY; without even the implied warranty of MERCHANTABILITY or
+ FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
+ for more details.
+ 
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3.  If not see
+ <http://www.gnu.org/licenses/>.  */
+ 
+ #ifndef GCC_GIMPLE_FOLD_H
+ #define GCC_GIMPLE_FOLD_H
+ 
+ #include "coretypes.h"
+ 
+ tree fold_const_aggregate_ref_1 (tree, tree (*) (tree));
+ tree fold_const_aggregate_ref (tree);
+ 
+ tree gimple_fold_stmt_to_constant_1 (gimple, tree (*) (tree));
+ tree gimple_fold_stmt_to_constant (gimple, tree (*) (tree));
+ 
+ #endif  /* GCC_GIMPLE_FOLD_H */
Index: gcc/gimple-fold.c
===================================================================
*** gcc/gimple-fold.c.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/gimple-fold.c	2011-03-22 13:15:00.000000000 +0100
*************** along with GCC; see the file COPYING3.
*** 30,35 ****
--- 30,36 ----
  #include "tree-pass.h"
  #include "tree-ssa-propagate.h"
  #include "target.h"
+ #include "gimple-fold.h"
  
  /* Return true when DECL can be referenced from current unit.
     We can get declarations that are not possible to reference for
*************** maybe_fold_or_comparisons (enum tree_cod
*** 2665,2667 ****
--- 2666,3304 ----
    else
      return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
  }
+ 
+ 
+ /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
+ 
+    Returns either NULL_TREE, a simplified but non-constant tree,
+    or a constant.
+ 
+    ???  This should go into a gimple-fold-inline.h file to be eventually
+    privatized with the single valueize function used in the various TUs
+    to avoid the indirect function call overhead.  */
+ 
+ tree
+ gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
+ {
+   location_t loc = gimple_location (stmt);
+   switch (gimple_code (stmt))
+     {
+     case GIMPLE_ASSIGN:
+       {
+         enum tree_code subcode = gimple_assign_rhs_code (stmt);
+ 
+         switch (get_gimple_rhs_class (subcode))
+           {
+           case GIMPLE_SINGLE_RHS:
+             {
+               tree rhs = gimple_assign_rhs1 (stmt);
+               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
+ 
+               if (TREE_CODE (rhs) == SSA_NAME)
+                 {
+                   /* If the RHS is an SSA_NAME, return its known constant value,
+                      if any.  */
+                   return (*valueize) (rhs);
+                 }
+ 	      /* Handle propagating invariant addresses into address
+ 		 operations.  */
+ 	      else if (TREE_CODE (rhs) == ADDR_EXPR
+ 		       && !is_gimple_min_invariant (rhs))
+ 		{
+ 		  HOST_WIDE_INT offset;
+ 		  tree base;
+ 		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
+ 							  &offset,
+ 							  valueize);
+ 		  if (base
+ 		      && (CONSTANT_CLASS_P (base)
+ 			  || decl_address_invariant_p (base)))
+ 		    return build_invariant_address (TREE_TYPE (rhs),
+ 						    base, offset);
+ 		}
+ 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
+ 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
+ 		       && (CONSTRUCTOR_NELTS (rhs)
+ 			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
+ 		{
+ 		  unsigned i;
+ 		  tree val, list;
+ 
+ 		  list = NULL_TREE;
+ 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
+ 		    {
+ 		      val = (*valueize) (val);
+ 		      if (TREE_CODE (val) == INTEGER_CST
+ 			  || TREE_CODE (val) == REAL_CST
+ 			  || TREE_CODE (val) == FIXED_CST)
+ 			list = tree_cons (NULL_TREE, val, list);
+ 		      else
+ 			return NULL_TREE;
+ 		    }
+ 
+ 		  return build_vector (TREE_TYPE (rhs), nreverse (list));
+ 		}
+ 
+               if (kind == tcc_reference)
+ 		{
+ 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
+ 		       || TREE_CODE (rhs) == REALPART_EXPR
+ 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
+ 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+ 		    {
+ 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
+ 		      return fold_unary_loc (EXPR_LOCATION (rhs),
+ 					     TREE_CODE (rhs),
+ 					     TREE_TYPE (rhs), val);
+ 		    }
+ 		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
+ 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+ 		    {
+ 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
+ 		      return fold_ternary_loc (EXPR_LOCATION (rhs),
+ 					       TREE_CODE (rhs),
+ 					       TREE_TYPE (rhs), val,
+ 					       TREE_OPERAND (rhs, 1),
+ 					       TREE_OPERAND (rhs, 2));
+ 		    }
+ 		  else if (TREE_CODE (rhs) == MEM_REF
+ 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+ 		    {
+ 		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
+ 		      if (TREE_CODE (val) == ADDR_EXPR
+ 			  && is_gimple_min_invariant (val))
+ 			{
+ 			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
+ 						  unshare_expr (val),
+ 						  TREE_OPERAND (rhs, 1));
+ 			  if (tem)
+ 			    rhs = tem;
+ 			}
+ 		    }
+ 		  return fold_const_aggregate_ref_1 (rhs, valueize);
+ 		}
+               else if (kind == tcc_declaration)
+                 return get_symbol_constant_value (rhs);
+               return rhs;
+             }
+ 
+           case GIMPLE_UNARY_RHS:
+             {
+               /* Handle unary operators that can appear in GIMPLE form.
+                  Note that we know the single operand must be a constant,
+                  so this should almost always return a simplified RHS.  */
+               tree lhs = gimple_assign_lhs (stmt);
+               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
+ 
+ 	      /* Conversions are useless for CCP purposes if they are
+ 		 value-preserving.  Thus the restrictions that
+ 		 useless_type_conversion_p places for pointer type conversions
+ 		 do not apply here.  Substitution later will only substitute to
+ 		 allowed places.  */
+ 	      if (CONVERT_EXPR_CODE_P (subcode)
+ 		  && POINTER_TYPE_P (TREE_TYPE (lhs))
+ 		  && POINTER_TYPE_P (TREE_TYPE (op0)))
+ 		{
+ 		  tree tem;
+ 		  /* Try to re-construct array references on-the-fly.  */
+ 		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
+ 						  TREE_TYPE (op0))
+ 		      && ((tem = maybe_fold_offset_to_address
+ 			   (loc,
+ 			    op0, integer_zero_node, TREE_TYPE (lhs)))
+ 			  != NULL_TREE))
+ 		    return tem;
+ 		  return op0;
+ 		}
+ 
+               return
+ 		fold_unary_ignore_overflow_loc (loc, subcode,
+ 						gimple_expr_type (stmt), op0);
+             }
+ 
+           case GIMPLE_BINARY_RHS:
+             {
+               /* Handle binary operators that can appear in GIMPLE form.  */
+               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
+               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
+ 
+ 	      /* Translate &x + CST into an invariant form suitable for
+ 	         further propagation.  */
+ 	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+ 		  && TREE_CODE (op0) == ADDR_EXPR
+ 		  && TREE_CODE (op1) == INTEGER_CST)
+ 		{
+ 		  tree off = fold_convert (ptr_type_node, op1);
+ 		  return build_fold_addr_expr
+ 			   (fold_build2 (MEM_REF,
+ 					 TREE_TYPE (TREE_TYPE (op0)),
+ 					 unshare_expr (op0), off));
+ 		}
+ 
+               return fold_binary_loc (loc, subcode,
+ 				      gimple_expr_type (stmt), op0, op1);
+             }
+ 
+           case GIMPLE_TERNARY_RHS:
+             {
+               /* Handle ternary operators that can appear in GIMPLE form.  */
+               tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
+               tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
+               tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
+ 
+               return fold_ternary_loc (loc, subcode,
+ 				       gimple_expr_type (stmt), op0, op1, op2);
+             }
+ 
+           default:
+             gcc_unreachable ();
+           }
+       }
+ 
+     case GIMPLE_CALL:
+       {
+ 	tree fn = (*valueize) (gimple_call_fn (stmt));
+ 	if (TREE_CODE (fn) == ADDR_EXPR
+ 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
+ 	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
+ 	  {
+ 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
+ 	    tree call, retval;
+ 	    unsigned i;
+ 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ 	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
+ 	    call = build_call_array_loc (loc,
+ 					 gimple_call_return_type (stmt),
+ 					 fn, gimple_call_num_args (stmt), args);
+ 	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
+ 	    if (retval)
+ 	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
+ 	      STRIP_NOPS (retval);
+ 	    return retval;
+ 	  }
+ 	return NULL_TREE;
+       }
+ 
+     default:
+       return NULL_TREE;
+     }
+ }
+ 
+ /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
+    Returns NULL_TREE if folding to a constant is not possible, otherwise
+    returns a constant according to is_gimple_min_invariant.  */
+ 
+ tree
+ gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
+ {
+   tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
+   if (res && is_gimple_min_invariant (res))
+     return res;
+   return NULL_TREE;
+ }
+ 
+ 
+ /* The following set of functions are supposed to fold references using
+    their constant initializers.  */
+ 
+ static tree fold_ctor_reference (tree type, tree ctor,
+ 				 unsigned HOST_WIDE_INT offset,
+ 				 unsigned HOST_WIDE_INT size);
+ 
+ /* See if we can find a constructor defining the value of BASE.
+    When we know the constructor with a constant offset (such as when
+    BASE is array[40] and we know the constructor of array), then
+    BIT_OFFSET is adjusted accordingly.
+ 
+    As a special case, return error_mark_node when the constructor
+    is not explicitly available but is known to be zero, such as
+    for 'static const int a;'.  */
+ static tree
+ get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
+ 		      tree (*valueize)(tree))
+ {
+   HOST_WIDE_INT bit_offset2, size, max_size;
+   if (TREE_CODE (base) == MEM_REF)
+     {
+       if (!integer_zerop (TREE_OPERAND (base, 1)))
+ 	{
+ 	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
+ 	    return NULL_TREE;
+ 	  *bit_offset += (mem_ref_offset (base).low
+ 			  * BITS_PER_UNIT);
+ 	}
+ 
+       if (valueize
+ 	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
+ 	base = valueize (TREE_OPERAND (base, 0));
+       if (!base || TREE_CODE (base) != ADDR_EXPR)
+         return NULL_TREE;
+       base = TREE_OPERAND (base, 0);
+     }
+ 
+   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
+      DECL_INITIAL.  If BASE is a nested reference into another
+      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
+      the inner reference.  */
+   switch (TREE_CODE (base))
+     {
+     case VAR_DECL:
+       if (!const_value_known_p (base))
+ 	return NULL_TREE;
+ 
+       /* Fallthru.  */
+     case CONST_DECL:
+       if (!DECL_INITIAL (base)
+ 	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
+         return error_mark_node;
+       return DECL_INITIAL (base);
+ 
+     case ARRAY_REF:
+     case COMPONENT_REF:
+       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
+       if (max_size == -1 || size != max_size)
+ 	return NULL_TREE;
+       *bit_offset +=  bit_offset2;
+       return get_base_constructor (base, bit_offset, valueize);
+ 
+     case STRING_CST:
+     case CONSTRUCTOR:
+       return base;
+ 
+     default:
+       return NULL_TREE;
+     }
+ }
+ 
+ /* CTOR is a STRING_CST.  Fold a reference of type TYPE and size SIZE
+    to the memory at bit OFFSET.
+ 
+    We only do the simple job of folding byte accesses.  */
+ 
+ static tree
+ fold_string_cst_ctor_reference (tree type, tree ctor,
+ 				unsigned HOST_WIDE_INT offset,
+ 				unsigned HOST_WIDE_INT size)
+ {
+   if (INTEGRAL_TYPE_P (type)
+       && (TYPE_MODE (type)
+ 	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+       && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+ 	  == MODE_INT)
+       && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
+       && size == BITS_PER_UNIT
+       && !(offset % BITS_PER_UNIT))
+     {
+       offset /= BITS_PER_UNIT;
+       if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
+ 	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
+ 				   [offset]));
+       /* Folding
+ 	 const char a[20]="hello";
+ 	 return a[10];
+ 
+ 	 might lead to an offset greater than the string length.  In this
+ 	 case we know the value is either initialized to 0 or out of bounds.
+ 	 Return 0 in both cases.  */
+       return build_zero_cst (type);
+     }
+   return NULL_TREE;
+ }
+ 
+ /* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
+    and size SIZE to the memory at bit OFFSET.  */
+ 
+ static tree
+ fold_array_ctor_reference (tree type, tree ctor,
+ 			   unsigned HOST_WIDE_INT offset,
+ 			   unsigned HOST_WIDE_INT size)
+ {
+   unsigned HOST_WIDE_INT cnt;
+   tree cfield, cval;
+   double_int low_bound, elt_size;
+   double_int index, max_index;
+   double_int access_index;
+   tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
+   HOST_WIDE_INT inner_offset;
+ 
+   /* Compute low bound and elt size.  */
+   if (domain_type && TYPE_MIN_VALUE (domain_type))
+     {
+       /* Static constructors for variably sized objects make no sense.  */
+       gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
+       low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
+     }
+   else
+     low_bound = double_int_zero;
+   /* Static constructors for variably sized objects make no sense.  */
+   gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
+ 	      == INTEGER_CST);
+   elt_size =
+     tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
+ 
+ 
+   /* We can handle only constant-sized accesses that are known to not
+      be larger than the size of an array element.  */
+   if (!TYPE_SIZE_UNIT (type)
+       || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
+       || double_int_cmp (elt_size,
+ 			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
+     return NULL_TREE;
+ 
+   /* Compute the array index we look for.  */
+   access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
+ 				  elt_size, TRUNC_DIV_EXPR);
+   access_index = double_int_add (access_index, low_bound);
+ 
+   /* And offset within the access.  */
+   inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
+ 
+   /* See if the array field is large enough to span the whole access.  We do
+      not care to fold accesses spanning multiple array indexes.  */
+   if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
+     return NULL_TREE;
+ 
+   index = double_int_sub (low_bound, double_int_one);
+   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
+     {
+       /* An array constructor might explicitly set the index, specify a
+ 	 range, or leave the index NULL, meaning it is the next index
+ 	 after the previous one.  */
+       if (cfield)
+ 	{
+ 	  if (TREE_CODE (cfield) == INTEGER_CST)
+ 	    max_index = index = tree_to_double_int (cfield);
+ 	  else
+ 	    {
+ 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
+ 	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
+ 	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
+ 	    }
+ 	}
+       else
+ 	max_index = index = double_int_add (index, double_int_one);
+ 
+       /* Do we have a match?  */
+       if (double_int_cmp (access_index, index, 1) >= 0
+ 	  && double_int_cmp (access_index, max_index, 1) <= 0)
+ 	return fold_ctor_reference (type, cval, inner_offset, size);
+     }
+   /* When memory is not explicitly mentioned in the constructor,
+      it is 0 (or out of range).  */
+   return build_zero_cst (type);
+ }
+ 
+ /* CTOR is a CONSTRUCTOR of an aggregate or vector.  Fold a reference of
+    type TYPE and size SIZE to the memory at bit OFFSET.  */
+ 
+ static tree
+ fold_nonarray_ctor_reference (tree type, tree ctor,
+ 			      unsigned HOST_WIDE_INT offset,
+ 			      unsigned HOST_WIDE_INT size)
+ {
+   unsigned HOST_WIDE_INT cnt;
+   tree cfield, cval;
+ 
+   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
+ 			    cval)
+     {
+       tree byte_offset = DECL_FIELD_OFFSET (cfield);
+       tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
+       tree field_size = DECL_SIZE (cfield);
+       double_int bitoffset;
+       double_int byte_offset_cst = tree_to_double_int (byte_offset);
+       double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
+       double_int bitoffset_end;
+ 
+       /* Variable sized objects in static constructors make no sense,
+ 	 but field_size can be NULL for flexible array members.  */
+       gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
+ 		  && TREE_CODE (byte_offset) == INTEGER_CST
+ 		  && (field_size != NULL_TREE
+ 		      ? TREE_CODE (field_size) == INTEGER_CST
+ 		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
+ 
+       /* Compute bit offset of the field.  */
+       bitoffset = double_int_add (tree_to_double_int (field_offset),
+ 				  double_int_mul (byte_offset_cst,
+ 						  bits_per_unit_cst));
+       /* Compute bit offset where the field ends.  */
+       if (field_size != NULL_TREE)
+ 	bitoffset_end = double_int_add (bitoffset,
+ 					tree_to_double_int (field_size));
+       else
+ 	bitoffset_end = double_int_zero;
+ 
+       /* Is OFFSET in the range [BITOFFSET, BITOFFSET_END)?  */
+       if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
+ 	  && (field_size == NULL_TREE
+ 	      || double_int_cmp (uhwi_to_double_int (offset),
+ 				 bitoffset_end, 0) < 0))
+ 	{
+ 	  double_int access_end = double_int_add (uhwi_to_double_int (offset),
+ 						  uhwi_to_double_int (size));
+ 	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
+ 						    bitoffset);
+ 	  /* We do have overlap.  Now see if field is large enough to
+ 	     cover the access.  Give up for accesses spanning multiple
+ 	     fields.  */
+ 	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
+ 	    return NULL_TREE;
+ 	  return fold_ctor_reference (type, cval,
+ 				      double_int_to_uhwi (inner_offset), size);
+ 	}
+     }
+   /* When memory is not explicitly mentioned in the constructor, it is 0.  */
+   return build_zero_cst (type);
+ }
+ 
+ /* CTOR is a value initializing memory; fold a reference of type TYPE and
+    size SIZE to the memory at bit OFFSET.  */
+ 
+ static tree
+ fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
+ 		     unsigned HOST_WIDE_INT size)
+ {
+   tree ret;
+ 
+   /* We found the field with an exact match.  */
+   if (useless_type_conversion_p (type, TREE_TYPE (ctor))
+       && !offset)
+     return canonicalize_constructor_val (ctor);
+ 
+   /* We are at the end of the walk; see if we can view-convert the
+      result.  */
+   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
+       /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
+       && operand_equal_p (TYPE_SIZE (type),
+ 			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
+     {
+       ret = canonicalize_constructor_val (ctor);
+       ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
+       if (ret)
+ 	STRIP_NOPS (ret);
+       return ret;
+     }
+   if (TREE_CODE (ctor) == STRING_CST)
+     return fold_string_cst_ctor_reference (type, ctor, offset, size);
+   if (TREE_CODE (ctor) == CONSTRUCTOR)
+     {
+       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
+ 	return fold_array_ctor_reference (type, ctor, offset, size);
+       else
+ 	return fold_nonarray_ctor_reference (type, ctor, offset, size);
+     }
+ 
+   return NULL_TREE;
+ }
+ 
+ /* Return the tree representing the element referenced by T if T is an
+    ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
+    names using VALUEIZE.  Return NULL_TREE otherwise.  */
+ 
+ tree
+ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
+ {
+   tree ctor, idx, base;
+   HOST_WIDE_INT offset, size, max_size;
+   tree tem;
+ 
+   if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
+     return get_symbol_constant_value (t);
+ 
+   tem = fold_read_from_constant_string (t);
+   if (tem)
+     return tem;
+ 
+   switch (TREE_CODE (t))
+     {
+     case ARRAY_REF:
+     case ARRAY_RANGE_REF:
+       /* Constant indexes are handled well by get_base_constructor.
+ 	 Only special-case variable offsets.
+ 	 FIXME: This code can't handle nested references with variable indexes
+ 	 (they will be handled only by iteration of ccp).  Perhaps we can bring
+ 	 get_ref_base_and_extent here and make it use a valueize callback.  */
+       if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
+ 	  && valueize
+ 	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
+ 	  && host_integerp (idx, 0))
+ 	{
+ 	  tree low_bound, unit_size;
+ 
+ 	  /* If the resulting bit-offset is constant, track it.  */
+ 	  if ((low_bound = array_ref_low_bound (t),
+ 	       host_integerp (low_bound, 0))
+ 	      && (unit_size = array_ref_element_size (t),
+ 		  host_integerp (unit_size, 1)))
+ 	    {
+ 	      offset = TREE_INT_CST_LOW (idx);
+ 	      offset -= TREE_INT_CST_LOW (low_bound);
+ 	      offset *= TREE_INT_CST_LOW (unit_size);
+ 	      offset *= BITS_PER_UNIT;
+ 
+ 	      base = TREE_OPERAND (t, 0);
+ 	      ctor = get_base_constructor (base, &offset, valueize);
+ 	      /* Empty constructor.  Always fold to 0.  */
+ 	      if (ctor == error_mark_node)
+ 		return build_zero_cst (TREE_TYPE (t));
+ 	      /* Out-of-bounds array access.  The value is undefined,
+ 		 but don't fold.  */
+ 	      if (offset < 0)
+ 		return NULL_TREE;
+ 	      /* We cannot determine the constructor.  */
+ 	      if (!ctor)
+ 		return NULL_TREE;
+ 	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
+ 					  TREE_INT_CST_LOW (unit_size)
+ 					  * BITS_PER_UNIT);
+ 	    }
+ 	}
+       /* Fallthru.  */
+ 
+     case COMPONENT_REF:
+     case BIT_FIELD_REF:
+     case TARGET_MEM_REF:
+     case MEM_REF:
+       base = get_ref_base_and_extent (t, &offset, &size, &max_size);
+       ctor = get_base_constructor (base, &offset, valueize);
+ 
+       /* Empty constructor.  Always fold to 0.  */
+       if (ctor == error_mark_node)
+ 	return build_zero_cst (TREE_TYPE (t));
+       /* We do not know the precise address.  */
+       if (max_size == -1 || max_size != size)
+ 	return NULL_TREE;
+       /* We cannot determine the constructor.  */
+       if (!ctor)
+ 	return NULL_TREE;
+ 
+       /* Out-of-bounds array access.  The value is undefined, but don't fold.  */
+       if (offset < 0)
+ 	return NULL_TREE;
+ 
+       return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
+ 
+     case REALPART_EXPR:
+     case IMAGPART_EXPR:
+       {
+ 	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
+ 	if (c && TREE_CODE (c) == COMPLEX_CST)
+ 	  return fold_build1_loc (EXPR_LOCATION (t),
+ 			      TREE_CODE (t), TREE_TYPE (t), c);
+ 	break;
+       }
+ 
+     default:
+       break;
+     }
+ 
+   return NULL_TREE;
+ }
+ 
+ /* Return the tree representing the element referenced by T if T is an
+    ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
+    NULL_TREE otherwise.  */
+ 
+ tree
+ fold_const_aggregate_ref (tree t)
+ {
+   return fold_const_aggregate_ref_1 (t, NULL);
+ }
Index: gcc/tree-ssa-sccvn.c
===================================================================
*** gcc/tree-ssa-sccvn.c.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree-ssa-sccvn.c	2011-03-22 13:08:36.000000000 +0100
*************** along with GCC; see the file COPYING3.
*** 44,49 ****
--- 44,50 ----
  #include "params.h"
  #include "tree-ssa-propagate.h"
  #include "tree-ssa-sccvn.h"
+ #include "gimple-fold.h"
  
  /* This algorithm is based on the SCC algorithm presented by Keith
     Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
*************** simplify_binary_expression (gimple stmt)
*** 2770,2775 ****
--- 2771,2786 ----
  	op1 = SSA_VAL (op1);
      }
  
+   /* A pointer plus a constant can be represented as an invariant
+      address.  Do so to allow further propagation; see also tree
+      forwprop.  */
+   if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+       && host_integerp (op1, 1)
+       && TREE_CODE (op0) == ADDR_EXPR
+       && is_gimple_min_invariant (op0))
+     return build_invariant_address (TREE_TYPE (op0),
+ 				    TREE_OPERAND (op0, 0),
+ 				    TREE_INT_CST_LOW (op1));
+ 
    /* Avoid folding if nothing changed.  */
    if (op0 == gimple_assign_rhs1 (stmt)
        && op1 == gimple_assign_rhs2 (stmt))
*************** simplify_unary_expression (gimple stmt)
*** 2849,2854 ****
--- 2860,2878 ----
    return NULL_TREE;
  }
  
+ /* Valueize NAME if it is an SSA name, otherwise just return it.  */
+ 
+ static inline tree
+ vn_valueize (tree name)
+ {
+   if (TREE_CODE (name) == SSA_NAME)
+     {
+       tree tem = SSA_VAL (name);
+       return tem == VN_TOP ? name : tem;
+     }
+   return name;
+ }
+ 
  /* Try to simplify RHS using equivalences and constant folding.  */
  
  static tree
*************** try_to_simplify (gimple stmt)
*** 2862,2882 ****
        && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
      return NULL_TREE;
  
    switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
      {
-     case tcc_declaration:
-       tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
-       if (tem)
- 	return tem;
-       break;
- 
      case tcc_reference:
-       /* Do not do full-blown reference lookup here, but simplify
- 	 reads from constant aggregates.  */
-       tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
-       if (tem)
- 	return tem;
- 
        /* Fallthrough for some codes that can operate on registers.  */
        if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
  	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
--- 2886,2900 ----
        && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
      return NULL_TREE;
  
+   /* First try constant folding based on our current lattice.  */
+   tem = gimple_fold_stmt_to_constant (stmt, vn_valueize);
+   if (tem)
+     return tem;
+ 
+   /* If that didn't work, try combining multiple statements.  */
    switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
      {
      case tcc_reference:
        /* Fallthrough for some codes that can operate on registers.  */
        if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
  	    || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
*************** try_to_simplify (gimple stmt)
*** 2886,2896 ****
  	 into binary ops, but it's debatable whether it is worth it. */
      case tcc_unary:
        return simplify_unary_expression (stmt);
!       break;
      case tcc_comparison:
      case tcc_binary:
        return simplify_binary_expression (stmt);
!       break;
      default:
        break;
      }
--- 2904,2914 ----
  	 into binary ops, but it's debatable whether it is worth it. */
      case tcc_unary:
        return simplify_unary_expression (stmt);
! 
      case tcc_comparison:
      case tcc_binary:
        return simplify_binary_expression (stmt);
! 
      default:
        break;
      }
Index: gcc/Makefile.in
===================================================================
*** gcc/Makefile.in.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/Makefile.in	2011-03-22 13:10:06.000000000 +0100
*************** tree-ssa-sccvn.o : tree-ssa-sccvn.c $(TR
*** 2487,2498 ****
     $(TM_H) coretypes.h $(TREE_DUMP_H) $(TREE_PASS_H) $(FLAGS_H) $(CFGLOOP_H) \
     alloc-pool.h $(BASIC_BLOCK_H) $(BITMAP_H) langhooks.h $(HASHTAB_H) $(GIMPLE_H) \
     $(TREE_INLINE_H) tree-iterator.h tree-ssa-propagate.h tree-ssa-sccvn.h \
!    $(PARAMS_H) tree-pretty-print.h gimple-pretty-print.h
  tree-vrp.o : tree-vrp.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
     $(TREE_FLOW_H) $(TREE_PASS_H) $(TREE_DUMP_H) $(DIAGNOSTIC_H) $(GGC_H) \
     $(BASIC_BLOCK_H) tree-ssa-propagate.h $(FLAGS_H) $(TREE_DUMP_H) \
     $(CFGLOOP_H) $(SCEV_H) $(TIMEVAR_H) intl.h tree-pretty-print.h \
!    gimple-pretty-print.h
  tree-cfg.o : tree-cfg.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
     $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) $(FLAGS_H) output.h \
     $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
--- 2487,2498 ----
     $(TM_H) coretypes.h $(TREE_DUMP_H) $(TREE_PASS_H) $(FLAGS_H) $(CFGLOOP_H) \
     alloc-pool.h $(BASIC_BLOCK_H) $(BITMAP_H) langhooks.h $(HASHTAB_H) $(GIMPLE_H) \
     $(TREE_INLINE_H) tree-iterator.h tree-ssa-propagate.h tree-ssa-sccvn.h \
!    $(PARAMS_H) tree-pretty-print.h gimple-pretty-print.h gimple-fold.h
  tree-vrp.o : tree-vrp.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TREE_H) \
     $(TREE_FLOW_H) $(TREE_PASS_H) $(TREE_DUMP_H) $(DIAGNOSTIC_H) $(GGC_H) \
     $(BASIC_BLOCK_H) tree-ssa-propagate.h $(FLAGS_H) $(TREE_DUMP_H) \
     $(CFGLOOP_H) $(SCEV_H) $(TIMEVAR_H) intl.h tree-pretty-print.h \
!    gimple-pretty-print.h gimple-fold.h
  tree-cfg.o : tree-cfg.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
     $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) $(FLAGS_H) output.h \
     $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
*************** gimple-fold.o : gimple-fold.c $(TREE_FLO
*** 2640,2646 ****
     $(SYSTEM_H) $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) output.h \
     $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
     $(TREE_DUMP_H) $(BASIC_BLOCK_H) $(TREE_PASS_H) langhooks.h \
!    tree-ssa-propagate.h value-prof.h $(FLAGS_H) $(TARGET_H)
  gimple-low.o : gimple-low.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
     $(DIAGNOSTIC_H) $(GIMPLE_H) $(TREE_INLINE_H) langhooks.h \
     $(LANGHOOKS_DEF_H) $(TREE_FLOW_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
--- 2640,2646 ----
     $(SYSTEM_H) $(RTL_H) $(TREE_H) $(TM_P_H) $(EXPR_H) $(GGC_H) output.h \
     $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
     $(TREE_DUMP_H) $(BASIC_BLOCK_H) $(TREE_PASS_H) langhooks.h \
!    tree-ssa-propagate.h value-prof.h $(FLAGS_H) $(TARGET_H) gimple-fold.h
  gimple-low.o : gimple-low.c $(CONFIG_H) $(SYSTEM_H) $(TREE_H) \
     $(DIAGNOSTIC_H) $(GIMPLE_H) $(TREE_INLINE_H) langhooks.h \
     $(LANGHOOKS_DEF_H) $(TREE_FLOW_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
*************** tree-ssa-ccp.o : tree-ssa-ccp.c $(TREE_F
*** 3118,3124 ****
     $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
     $(TREE_DUMP_H) $(BASIC_BLOCK_H) $(TREE_PASS_H) langhooks.h \
     tree-ssa-propagate.h value-prof.h $(FLAGS_H) $(TARGET_H) $(DIAGNOSTIC_CORE_H) \
!    $(DBGCNT_H) tree-pretty-print.h gimple-pretty-print.h
  tree-sra.o : tree-sra.c $(CONFIG_H) $(SYSTEM_H) coretypes.h alloc-pool.h \
     $(TM_H) $(TREE_H) $(GIMPLE_H) $(CGRAPH_H) $(TREE_FLOW_H) \
     $(IPA_PROP_H) $(DIAGNOSTIC_H) statistics.h $(TREE_DUMP_H) $(TIMEVAR_H) \
--- 3118,3124 ----
     $(DIAGNOSTIC_H) $(FUNCTION_H) $(TIMEVAR_H) $(TM_H) coretypes.h \
     $(TREE_DUMP_H) $(BASIC_BLOCK_H) $(TREE_PASS_H) langhooks.h \
     tree-ssa-propagate.h value-prof.h $(FLAGS_H) $(TARGET_H) $(DIAGNOSTIC_CORE_H) \
!    $(DBGCNT_H) tree-pretty-print.h gimple-pretty-print.h gimple-fold.h
  tree-sra.o : tree-sra.c $(CONFIG_H) $(SYSTEM_H) coretypes.h alloc-pool.h \
     $(TM_H) $(TREE_H) $(GIMPLE_H) $(CGRAPH_H) $(TREE_FLOW_H) \
     $(IPA_PROP_H) $(DIAGNOSTIC_H) statistics.h $(TREE_DUMP_H) $(TIMEVAR_H) \
Index: gcc/tree.c
===================================================================
*** gcc/tree.c.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree.c	2011-03-22 13:06:43.000000000 +0100
*************** reference_alias_ptr_type (const_tree t)
*** 4013,4018 ****
--- 4013,4032 ----
      return build_pointer_type (TYPE_MAIN_VARIANT (TREE_TYPE (base)));
  }
  
+ /* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
+    offset by OFFSET units.  */
+ 
+ tree
+ build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
+ {
+   tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
+ 			  build_fold_addr_expr (base),
+ 			  build_int_cst (ptr_type_node, offset));
+   tree addr = build1 (ADDR_EXPR, type, ref);
+   recompute_tree_invariant_for_addr_expr (addr);
+   return addr;
+ }
+ 
  /* Similar except don't specify the TREE_TYPE
     and leave the TREE_SIDE_EFFECTS as 0.
     It is permissible for arguments to be null,
Index: gcc/tree.h
===================================================================
*** gcc/tree.h.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree.h	2011-03-22 13:06:43.000000000 +0100
*************** extern tree build_simple_mem_ref_loc (lo
*** 5096,5101 ****
--- 5096,5102 ----
  	build_simple_mem_ref_loc (UNKNOWN_LOCATION, T)
  extern double_int mem_ref_offset (const_tree);
  extern tree reference_alias_ptr_type (const_tree);
+ extern tree build_invariant_address (tree, tree, HOST_WIDE_INT);
  extern tree constant_boolean_node (int, tree);
  extern tree div_if_zero_remainder (enum tree_code, const_tree, const_tree);
  
Index: gcc/tree-flow-inline.h
===================================================================
*** gcc/tree-flow-inline.h.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree-flow-inline.h	2011-03-22 13:06:43.000000000 +0100
*************** make_ssa_name (tree var, gimple stmt)
*** 1227,1230 ****
--- 1227,1365 ----
    return make_ssa_name_fn (cfun, var, stmt);
  }
  
+ /* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
+    denotes the starting address of the memory access EXP.
+    Returns NULL_TREE if the offset is not constant or any component
+    is not BITS_PER_UNIT-aligned.
+    VALUEIZE, if non-NULL, is used to valueize SSA names.  It should
+    return its argument or a constant if the argument is known to be
+    constant.  */
+ 
+ static inline tree
+ get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
+ 				 tree (*valueize) (tree))
+ {
+   HOST_WIDE_INT byte_offset = 0;
+ 
+   /* Compute cumulative byte-offset for nested component-refs and array-refs,
+      and find the ultimate containing object.  */
+   while (1)
+     {
+       switch (TREE_CODE (exp))
+ 	{
+ 	case BIT_FIELD_REF:
+ 	  return NULL_TREE;
+ 
+ 	case COMPONENT_REF:
+ 	  {
+ 	    tree field = TREE_OPERAND (exp, 1);
+ 	    tree this_offset = component_ref_field_offset (exp);
+ 	    HOST_WIDE_INT hthis_offset;
+ 
+ 	    if (!this_offset
+ 		|| TREE_CODE (this_offset) != INTEGER_CST
+ 		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
+ 		    % BITS_PER_UNIT))
+ 	      return NULL_TREE;
+ 
+ 	    hthis_offset = TREE_INT_CST_LOW (this_offset);
+ 	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
+ 			     / BITS_PER_UNIT);
+ 	    byte_offset += hthis_offset;
+ 	  }
+ 	  break;
+ 
+ 	case ARRAY_REF:
+ 	case ARRAY_RANGE_REF:
+ 	  {
+ 	    tree index = TREE_OPERAND (exp, 1);
+ 	    tree low_bound, unit_size;
+ 
+ 	    if (valueize
+ 		&& TREE_CODE (index) == SSA_NAME)
+ 	      index = (*valueize) (index);
+ 
+ 	    /* If the resulting bit-offset is constant, track it.  */
+ 	    if (TREE_CODE (index) == INTEGER_CST
+ 		&& (low_bound = array_ref_low_bound (exp),
+ 		    TREE_CODE (low_bound) == INTEGER_CST)
+ 		&& (unit_size = array_ref_element_size (exp),
+ 		    TREE_CODE (unit_size) == INTEGER_CST))
+ 	      {
+ 		HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);
+ 
+ 		hindex -= TREE_INT_CST_LOW (low_bound);
+ 		hindex *= TREE_INT_CST_LOW (unit_size);
+ 		byte_offset += hindex;
+ 	      }
+ 	    else
+ 	      return NULL_TREE;
+ 	  }
+ 	  break;
+ 
+ 	case REALPART_EXPR:
+ 	  break;
+ 
+ 	case IMAGPART_EXPR:
+ 	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
+ 	  break;
+ 
+ 	case VIEW_CONVERT_EXPR:
+ 	  break;
+ 
+ 	case MEM_REF:
+ 	  {
+ 	    tree base = TREE_OPERAND (exp, 0);
+ 	    if (valueize
+ 		&& TREE_CODE (base) == SSA_NAME)
+ 	      base = (*valueize) (base);
+ 
+ 	    /* Hand back the decl for MEM[&decl, off].  */
+ 	    if (TREE_CODE (base) == ADDR_EXPR)
+ 	      {
+ 		if (!integer_zerop (TREE_OPERAND (exp, 1)))
+ 		  {
+ 		    double_int off = mem_ref_offset (exp);
+ 		    gcc_assert (off.high == -1 || off.high == 0);
+ 		    byte_offset += double_int_to_shwi (off);
+ 		  }
+ 		exp = TREE_OPERAND (base, 0);
+ 	      }
+ 	    goto done;
+ 	  }
+ 
+ 	case TARGET_MEM_REF:
+ 	  {
+ 	    tree base = TREE_OPERAND (exp, 0);
+ 	    if (valueize
+ 		&& TREE_CODE (base) == SSA_NAME)
+ 	      base = (*valueize) (base);
+ 
+ 	    /* Hand back the decl for MEM[&decl, off].  */
+ 	    if (TREE_CODE (base) == ADDR_EXPR)
+ 	      {
+ 		if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
+ 		  return NULL_TREE;
+ 		if (!integer_zerop (TMR_OFFSET (exp)))
+ 		  {
+ 		    double_int off = mem_ref_offset (exp);
+ 		    gcc_assert (off.high == -1 || off.high == 0);
+ 		    byte_offset += double_int_to_shwi (off);
+ 		  }
+ 		exp = TREE_OPERAND (base, 0);
+ 	      }
+ 	    goto done;
+ 	  }
+ 
+ 	default:
+ 	  goto done;
+ 	}
+ 
+       exp = TREE_OPERAND (exp, 0);
+     }
+ done:
+ 
+   *poffset = byte_offset;
+   return exp;
+ }
+ 
  #endif /* _TREE_FLOW_INLINE_H  */
Index: gcc/tree-dfa.c
===================================================================
*** gcc/tree-dfa.c.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree-dfa.c	2011-03-22 13:06:43.000000000 +0100
*************** get_ref_base_and_extent (tree exp, HOST_
*** 963,1072 ****
  tree
  get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
  {
!   HOST_WIDE_INT byte_offset = 0;
! 
!   /* Compute cumulative byte-offset for nested component-refs and array-refs,
!      and find the ultimate containing object.  */
!   while (1)
!     {
!       switch (TREE_CODE (exp))
! 	{
! 	case BIT_FIELD_REF:
! 	  return NULL_TREE;
! 
! 	case COMPONENT_REF:
! 	  {
! 	    tree field = TREE_OPERAND (exp, 1);
! 	    tree this_offset = component_ref_field_offset (exp);
! 	    HOST_WIDE_INT hthis_offset;
! 
! 	    if (!this_offset
! 		|| TREE_CODE (this_offset) != INTEGER_CST
! 		|| (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
! 		    % BITS_PER_UNIT))
! 	      return NULL_TREE;
! 
! 	    hthis_offset = TREE_INT_CST_LOW (this_offset);
! 	    hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
! 			     / BITS_PER_UNIT);
! 	    byte_offset += hthis_offset;
! 	  }
! 	  break;
! 
! 	case ARRAY_REF:
! 	case ARRAY_RANGE_REF:
! 	  {
! 	    tree index = TREE_OPERAND (exp, 1);
! 	    tree low_bound, unit_size;
! 
! 	    /* If the resulting bit-offset is constant, track it.  */
! 	    if (TREE_CODE (index) == INTEGER_CST
! 		&& (low_bound = array_ref_low_bound (exp),
! 		    TREE_CODE (low_bound) == INTEGER_CST)
! 		&& (unit_size = array_ref_element_size (exp),
! 		    TREE_CODE (unit_size) == INTEGER_CST))
! 	      {
! 		HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);
! 
! 		hindex -= TREE_INT_CST_LOW (low_bound);
! 		hindex *= TREE_INT_CST_LOW (unit_size);
! 		byte_offset += hindex;
! 	      }
! 	    else
! 	      return NULL_TREE;
! 	  }
! 	  break;
! 
! 	case REALPART_EXPR:
! 	  break;
! 
! 	case IMAGPART_EXPR:
! 	  byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
! 	  break;
! 
! 	case VIEW_CONVERT_EXPR:
! 	  break;
! 
! 	case MEM_REF:
! 	  /* Hand back the decl for MEM[&decl, off].  */
! 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
! 	    {
! 	      if (!integer_zerop (TREE_OPERAND (exp, 1)))
! 		{
! 		  double_int off = mem_ref_offset (exp);
! 		  gcc_assert (off.high == -1 || off.high == 0);
! 		  byte_offset += double_int_to_shwi (off);
! 		}
! 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
! 	    }
! 	  goto done;
! 
! 	case TARGET_MEM_REF:
! 	  /* Hand back the decl for MEM[&decl, off].  */
! 	  if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR)
! 	    {
! 	      if (TMR_INDEX (exp) || TMR_INDEX2 (exp))
! 		return NULL_TREE;
! 	      if (!integer_zerop (TMR_OFFSET (exp)))
! 		{
! 		  double_int off = mem_ref_offset (exp);
! 		  gcc_assert (off.high == -1 || off.high == 0);
! 		  byte_offset += double_int_to_shwi (off);
! 		}
! 	      exp = TREE_OPERAND (TMR_BASE (exp), 0);
! 	    }
! 	  goto done;
! 
! 	default:
! 	  goto done;
! 	}
! 
!       exp = TREE_OPERAND (exp, 0);
!     }
! done:
! 
!   *poffset = byte_offset;
!   return exp;
  }
  
  /* Returns true if STMT references an SSA_NAME that has
--- 963,969 ----
  tree
  get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
  {
!   return get_addr_base_and_unit_offset_1 (exp, poffset, NULL);
  }
  
  /* Returns true if STMT references an SSA_NAME that has
Index: gcc/tree-ssa-ccp.c
===================================================================
*** gcc/tree-ssa-ccp.c.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree-ssa-ccp.c	2011-03-22 13:06:43.000000000 +0100
*************** along with GCC; see the file COPYING3.
*** 132,137 ****
--- 132,138 ----
  #include "target.h"
  #include "diagnostic-core.h"
  #include "dbgcnt.h"
+ #include "gimple-fold.h"
  
  
  /* Possible lattice values.  */
*************** static prop_value_t *const_val;
*** 167,175 ****
  
  static void canonicalize_float_value (prop_value_t *);
  static bool ccp_fold_stmt (gimple_stmt_iterator *);
- static tree fold_ctor_reference (tree type, tree ctor,
- 				 unsigned HOST_WIDE_INT offset,
- 				 unsigned HOST_WIDE_INT size);
  
  /* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */
  
--- 168,173 ----
*************** ccp_fold (gimple stmt)
*** 1098,1317 ****
    location_t loc = gimple_location (stmt);
    switch (gimple_code (stmt))
      {
-     case GIMPLE_ASSIGN:
-       {
-         enum tree_code subcode = gimple_assign_rhs_code (stmt);
- 
-         switch (get_gimple_rhs_class (subcode))
-           {
-           case GIMPLE_SINGLE_RHS:
-             {
-               tree rhs = gimple_assign_rhs1 (stmt);
-               enum tree_code_class kind = TREE_CODE_CLASS (subcode);
- 
-               if (TREE_CODE (rhs) == SSA_NAME)
-                 {
-                   /* If the RHS is an SSA_NAME, return its known constant value,
-                      if any.  */
-                   return get_constant_value (rhs);
-                 }
- 	      /* Handle propagating invariant addresses into address operations.
- 		 The folding we do here matches that in tree-ssa-forwprop.c.  */
- 	      else if (TREE_CODE (rhs) == ADDR_EXPR)
- 		{
- 		  tree *base;
- 		  base = &TREE_OPERAND (rhs, 0);
- 		  while (handled_component_p (*base))
- 		    base = &TREE_OPERAND (*base, 0);
- 		  if (TREE_CODE (*base) == MEM_REF
- 		      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
- 		    {
- 		      tree val = get_constant_value (TREE_OPERAND (*base, 0));
- 		      if (val
- 			  && TREE_CODE (val) == ADDR_EXPR)
- 			{
- 			  tree ret, save = *base;
- 			  tree new_base;
- 			  new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
- 						  unshare_expr (val),
- 						  TREE_OPERAND (*base, 1));
- 			  /* We need to return a new tree, not modify the IL
- 			     or share parts of it.  So play some tricks to
- 			     avoid manually building it.  */
- 			  *base = new_base;
- 			  ret = unshare_expr (rhs);
- 			  recompute_tree_invariant_for_addr_expr (ret);
- 			  *base = save;
- 			  return ret;
- 			}
- 		    }
- 		}
- 	      else if (TREE_CODE (rhs) == CONSTRUCTOR
- 		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
- 		       && (CONSTRUCTOR_NELTS (rhs)
- 			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
- 		{
- 		  unsigned i;
- 		  tree val, list;
- 
- 		  list = NULL_TREE;
- 		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
- 		    {
- 		      val = valueize_op (val);
- 		      if (TREE_CODE (val) == INTEGER_CST
- 			  || TREE_CODE (val) == REAL_CST
- 			  || TREE_CODE (val) == FIXED_CST)
- 			list = tree_cons (NULL_TREE, val, list);
- 		      else
- 			return NULL_TREE;
- 		    }
- 
- 		  return build_vector (TREE_TYPE (rhs), nreverse (list));
- 		}
- 
-               if (kind == tcc_reference)
- 		{
- 		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
- 		       || TREE_CODE (rhs) == REALPART_EXPR
- 		       || TREE_CODE (rhs) == IMAGPART_EXPR)
- 		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
- 		    {
- 		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
- 		      if (val)
- 			return fold_unary_loc (EXPR_LOCATION (rhs),
- 					       TREE_CODE (rhs),
- 					       TREE_TYPE (rhs), val);
- 		    }
- 		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
- 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
- 		    {
- 		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
- 		      if (val)
- 			return fold_ternary_loc (EXPR_LOCATION (rhs),
- 						 TREE_CODE (rhs),
- 						 TREE_TYPE (rhs), val,
- 						 TREE_OPERAND (rhs, 1),
- 						 TREE_OPERAND (rhs, 2));
- 		    }
- 		  else if (TREE_CODE (rhs) == MEM_REF
- 			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
- 		    {
- 		      tree val = get_constant_value (TREE_OPERAND (rhs, 0));
- 		      if (val
- 			  && TREE_CODE (val) == ADDR_EXPR)
- 			{
- 			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
- 						  unshare_expr (val),
- 						  TREE_OPERAND (rhs, 1));
- 			  if (tem)
- 			    rhs = tem;
- 			}
- 		    }
- 		  return fold_const_aggregate_ref (rhs);
- 		}
-               else if (kind == tcc_declaration)
-                 return get_symbol_constant_value (rhs);
-               return rhs;
-             }
- 
-           case GIMPLE_UNARY_RHS:
-             {
-               /* Handle unary operators that can appear in GIMPLE form.
-                  Note that we know the single operand must be a constant,
-                  so this should almost always return a simplified RHS.  */
-               tree lhs = gimple_assign_lhs (stmt);
-               tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
- 
- 	      /* Conversions are useless for CCP purposes if they are
- 		 value-preserving.  Thus the restrictions that
- 		 useless_type_conversion_p places for pointer type conversions
- 		 do not apply here.  Substitution later will only substitute to
- 		 allowed places.  */
- 	      if (CONVERT_EXPR_CODE_P (subcode)
- 		  && POINTER_TYPE_P (TREE_TYPE (lhs))
- 		  && POINTER_TYPE_P (TREE_TYPE (op0)))
- 		{
- 		  tree tem;
- 		  /* Try to re-construct array references on-the-fly.  */
- 		  if (!useless_type_conversion_p (TREE_TYPE (lhs),
- 						  TREE_TYPE (op0))
- 		      && ((tem = maybe_fold_offset_to_address
- 			   (loc,
- 			    op0, integer_zero_node, TREE_TYPE (lhs)))
- 			  != NULL_TREE))
- 		    return tem;
- 		  return op0;
- 		}
- 
-               return
- 		fold_unary_ignore_overflow_loc (loc, subcode,
- 						gimple_expr_type (stmt), op0);
-             }
- 
-           case GIMPLE_BINARY_RHS:
-             {
-               /* Handle binary operators that can appear in GIMPLE form.  */
-               tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
-               tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
- 
- 	      /* Translate &x + CST into an invariant form suitable for
- 	         further propagation.  */
- 	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
- 		  && TREE_CODE (op0) == ADDR_EXPR
- 		  && TREE_CODE (op1) == INTEGER_CST)
- 		{
- 		  tree off = fold_convert (ptr_type_node, op1);
- 		  return build_fold_addr_expr
- 			   (fold_build2 (MEM_REF,
- 					 TREE_TYPE (TREE_TYPE (op0)),
- 					 unshare_expr (op0), off));
- 		}
- 
-               return fold_binary_loc (loc, subcode,
- 				      gimple_expr_type (stmt), op0, op1);
-             }
- 
-           case GIMPLE_TERNARY_RHS:
-             {
-               /* Handle ternary operators that can appear in GIMPLE form.  */
-               tree op0 = valueize_op (gimple_assign_rhs1 (stmt));
-               tree op1 = valueize_op (gimple_assign_rhs2 (stmt));
-               tree op2 = valueize_op (gimple_assign_rhs3 (stmt));
- 
-               return fold_ternary_loc (loc, subcode,
- 				       gimple_expr_type (stmt), op0, op1, op2);
-             }
- 
-           default:
-             gcc_unreachable ();
-           }
-       }
-       break;
- 
-     case GIMPLE_CALL:
-       {
- 	tree fn = valueize_op (gimple_call_fn (stmt));
- 	if (TREE_CODE (fn) == ADDR_EXPR
- 	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
- 	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
- 	  {
- 	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
- 	    tree call, retval;
- 	    unsigned i;
- 	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
- 	      args[i] = valueize_op (gimple_call_arg (stmt, i));
- 	    call = build_call_array_loc (loc,
- 					 gimple_call_return_type (stmt),
- 					 fn, gimple_call_num_args (stmt), args);
- 	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
- 	    if (retval)
- 	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
- 	      STRIP_NOPS (retval);
- 	    return retval;
- 	  }
- 	return NULL_TREE;
-       }
- 
      case GIMPLE_COND:
        {
          /* Handle comparison operators that can appear in GIMPLE form.  */
--- 1096,1101 ----
*************** ccp_fold (gimple stmt)
*** 1327,1721 ****
          return valueize_op (gimple_switch_index (stmt));
        }
  
!     default:
!       gcc_unreachable ();
!     }
! }
! 
! /* See if we can find constructor defining value of BASE.
!    When we know the consructor with constant offset (such as
!    base is array[40] and we do know constructor of array), then
!    BIT_OFFSET is adjusted accordingly.
! 
!    As a special case, return error_mark_node when constructor
!    is not explicitly available, but it is known to be zero
!    such as 'static const int a;'.  */
! static tree
! get_base_constructor (tree base, HOST_WIDE_INT *bit_offset)
! {
!   HOST_WIDE_INT bit_offset2, size, max_size;
!   if (TREE_CODE (base) == MEM_REF)
!     {
!       if (!integer_zerop (TREE_OPERAND (base, 1)))
! 	{
! 	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
! 	    return NULL_TREE;
! 	  *bit_offset += (mem_ref_offset (base).low
! 			  * BITS_PER_UNIT);
! 	}
! 
!       base = get_constant_value (TREE_OPERAND (base, 0));
!       if (!base || TREE_CODE (base) != ADDR_EXPR)
!         return NULL_TREE;
!       base = TREE_OPERAND (base, 0);
!     }
! 
!   /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
!      DECL_INITIAL.  If BASE is a nested reference into another
!      ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
!      the inner reference.  */
!   switch (TREE_CODE (base))
!     {
!     case VAR_DECL:
!       if (!const_value_known_p (base))
! 	return NULL_TREE;
! 
!       /* Fallthru.  */
!     case CONST_DECL:
!       if (!DECL_INITIAL (base)
! 	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
!         return error_mark_node;
!       return DECL_INITIAL (base);
! 
!     case ARRAY_REF:
!     case COMPONENT_REF:
!       base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
!       if (max_size == -1 || size != max_size)
! 	return NULL_TREE;
!       *bit_offset +=  bit_offset2;
!       return get_base_constructor (base, bit_offset);
! 
!     case STRING_CST:
!     case CONSTRUCTOR:
!       return base;
! 
!     default:
!       return NULL_TREE;
!     }
! }
! 
! /* CTOR is STRING_CST.  Fold reference of type TYPE and size SIZE
!    to the memory at bit OFFSET.  
! 
!    We do only simple job of folding byte accesses.  */
! 
! static tree
! fold_string_cst_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
! 				unsigned HOST_WIDE_INT size)
! {
!   if (INTEGRAL_TYPE_P (type)
!       && (TYPE_MODE (type)
! 	  == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
!       && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
! 	  == MODE_INT)
!       && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
!       && size == BITS_PER_UNIT
!       && !(offset % BITS_PER_UNIT))
!     {
!       offset /= BITS_PER_UNIT;
!       if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
! 	return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
! 				   [offset]));
!       /* Folding
! 	 const char a[20]="hello";
! 	 return a[10];
! 
! 	 might lead to offset greater than string length.  In this case we
! 	 know value is either initialized to 0 or out of bounds.  Return 0
! 	 in both cases.  */
!       return build_zero_cst (type);
!     }
!   return NULL_TREE;
! }
! 
! /* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
!    SIZE to the memory at bit OFFSET.  */
! 
! static tree
! fold_array_ctor_reference (tree type, tree ctor,
! 			   unsigned HOST_WIDE_INT offset,
! 			   unsigned HOST_WIDE_INT size)
! {
!   unsigned HOST_WIDE_INT cnt;
!   tree cfield, cval;
!   double_int low_bound, elt_size;
!   double_int index, max_index;
!   double_int access_index;
!   tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
!   HOST_WIDE_INT inner_offset;
! 
!   /* Compute low bound and elt size.  */
!   if (domain_type && TYPE_MIN_VALUE (domain_type))
!     {
!       /* Static constructors for variably sized objects makes no sense.  */
!       gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
!       low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
!     }
!   else
!     low_bound = double_int_zero;
!   /* Static constructors for variably sized objects makes no sense.  */
!   gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
! 	      == INTEGER_CST);
!   elt_size =
!     tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
! 
! 
!   /* We can handle only constantly sized accesses that are known to not
!      be larger than size of array element.  */
!   if (!TYPE_SIZE_UNIT (type)
!       || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
!       || double_int_cmp (elt_size,
! 			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
!     return NULL_TREE;
! 
!   /* Compute the array index we look for.  */
!   access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
! 				  elt_size, TRUNC_DIV_EXPR);
!   access_index = double_int_add (access_index, low_bound);
! 
!   /* And offset within the access.  */
!   inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
! 
!   /* See if the array field is large enough to span the whole access.
!      We do not bother folding accesses spanning multiple array indices.  */
!   if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
!     return NULL_TREE;
! 
!   index = double_int_sub (low_bound, double_int_one);
!   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
!     {
!       /* An array constructor may explicitly set the index, specify a
! 	 range, or leave the index NULL, meaning that it is the next
! 	 index after the previous one.  */
!       if (cfield)
! 	{
! 	  if (TREE_CODE (cfield) == INTEGER_CST)
! 	    max_index = index = tree_to_double_int (cfield);
! 	  else
! 	    {
! 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
! 	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
! 	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
! 	    }
! 	}
!       else
! 	max_index = index = double_int_add (index, double_int_one);
! 
!       /* Do we have a match?  */
!       if (double_int_cmp (access_index, index, 1) >= 0
! 	  && double_int_cmp (access_index, max_index, 1) <= 0)
! 	return fold_ctor_reference (type, cval, inner_offset, size);
!     }
!   /* When memory is not explicitly mentioned in the constructor,
!      it is 0 (or out of range).  */
!   return build_zero_cst (type);
! }
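
The three index forms handled by the loop above map onto the following
initializer styles; a sketch using GNU C designated initializers
(identifiers made up):

  /* Explicit index, a range (RANGE_EXPR) and the implicit
     next-index-after-previous case.  */
  static const int v[8] = { [0] = 1, [2 ... 4] = 7, 9 /* index 5 */ };

  int
  read_v (void)
  {
    return v[3] + v[6];	/* v[3] folds to 7, v[6] to 0 (unmentioned) */
  }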
! 
! /* CTOR is a CONSTRUCTOR of an aggregate or vector.  Fold a reference
!    of type TYPE and size SIZE to the memory at bit OFFSET.  */
! 
! static tree
! fold_nonarray_ctor_reference (tree type, tree ctor,
! 			      unsigned HOST_WIDE_INT offset,
! 			      unsigned HOST_WIDE_INT size)
! {
!   unsigned HOST_WIDE_INT cnt;
!   tree cfield, cval;
! 
!   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
!     {
!       tree byte_offset = DECL_FIELD_OFFSET (cfield);
!       tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
!       tree field_size = DECL_SIZE (cfield);
!       double_int bitoffset;
!       double_int byte_offset_cst = tree_to_double_int (byte_offset);
!       double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
!       double_int bitoffset_end;
! 
!       /* Variable sized objects in static constructors make no sense,
! 	 but field_size can be NULL for flexible array members.  */
!       gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
! 		  && TREE_CODE (byte_offset) == INTEGER_CST
! 		  && (field_size != NULL_TREE
! 		      ? TREE_CODE (field_size) == INTEGER_CST
! 		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
! 
!       /* Compute bit offset of the field.  */
!       bitoffset = double_int_add (tree_to_double_int (field_offset),
! 				  double_int_mul (byte_offset_cst,
! 						  bits_per_unit_cst));
!       /* Compute bit offset where the field ends.  */
!       if (field_size != NULL_TREE)
! 	bitoffset_end = double_int_add (bitoffset,
! 					tree_to_double_int (field_size));
!       else
! 	bitoffset_end = double_int_zero;
! 
!       /* Is OFFSET in the range [BITOFFSET, BITOFFSET_END)?  */
!       if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) >= 0
! 	  && (field_size == NULL_TREE
! 	      || double_int_cmp (uhwi_to_double_int (offset),
! 				 bitoffset_end, 0) < 0))
! 	{
! 	  double_int access_end = double_int_add (uhwi_to_double_int (offset),
! 						  uhwi_to_double_int (size));
! 	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
! 						    bitoffset);
! 	  /* We do have overlap.  Now see if field is large enough to
! 	     cover the access.  Give up for accesses spanning multiple
! 	     fields.  */
! 	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
! 	    return NULL_TREE;
! 	  return fold_ctor_reference (type, cval,
! 				      double_int_to_uhwi (inner_offset), size);
! 	}
!     }
!   /* When memory is not explicitly mentioned in the constructor, it is 0.  */
!   return build_zero_cst (type);
! }
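
For record types, a minimal illustration (hypothetical source): fields
present in the CONSTRUCTOR fold to their values; fields the constructor
does not mention fold to 0 through the final return above.

  static const struct { int x; int y; int z; } p = { .x = 1, .z = 3 };

  int
  read_p (void)
  {
    return p.y + p.z;	/* p.y folds to 0 (unmentioned), p.z to 3 */
  }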
! 
! /* CTOR is the value initializing the memory.  Fold a reference of
!    type TYPE and size SIZE to the memory at bit OFFSET.  */
! 
! static tree
! fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
! 		     unsigned HOST_WIDE_INT size)
! {
!   tree ret;
! 
!   /* We found the field with an exact match.  */
!   if (useless_type_conversion_p (type, TREE_TYPE (ctor))
!       && !offset)
!     return canonicalize_constructor_val (ctor);
! 
!   /* We are at the end of the walk; see if we can view-convert the
!      result.  */
!   if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
!       /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
!       && operand_equal_p (TYPE_SIZE (type),
! 			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
!     {
!       ret = canonicalize_constructor_val (ctor);
!       ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
!       if (ret)
! 	STRIP_NOPS (ret);
!       return ret;
!     }
!   if (TREE_CODE (ctor) == STRING_CST)
!     return fold_string_cst_ctor_reference (type, ctor, offset, size);
!   if (TREE_CODE (ctor) == CONSTRUCTOR)
!     {
!       if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
! 	return fold_array_ctor_reference (type, ctor, offset, size);
!       else
! 	return fold_nonarray_ctor_reference (type, ctor, offset, size);
!     }
! 
!   return NULL_TREE;
! }
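
The view-convert path above handles same-size reinterpretations of
scalar initializers; a sketch assuming 32-bit int and IEEE single
precision (hypothetical source):

  static const float one = 1.0f;

  int
  bits_of_one (void)
  {
    /* A same-size access with a different type is folded through a
       VIEW_CONVERT_EXPR to the bit pattern 0x3f800000.  */
    return *(const int *) &one;
  }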
! 
! /* Return the tree representing the element referenced by T if T is an
!    ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
!    NULL_TREE otherwise.  */
! 
! tree
! fold_const_aggregate_ref (tree t)
! {
!   tree ctor, idx, base;
!   HOST_WIDE_INT offset, size, max_size;
!   tree tem;
! 
!   if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
!     return get_symbol_constant_value (t);
! 
!   tem = fold_read_from_constant_string (t);
!   if (tem)
!     return tem;
! 
!   switch (TREE_CODE (t))
!     {
!     case ARRAY_REF:
!     case ARRAY_RANGE_REF:
!       /* Constant indices are handled well by get_base_constructor.
! 	 Only special-case variable offsets here.
! 	 FIXME: This code cannot handle nested references with variable
! 	 indices (they will be handled only by iterating CCP).  Perhaps
! 	 we can bring get_ref_base_and_extent here and make it use
! 	 get_constant_value.  */
!       if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
! 	  && (idx = get_constant_value (TREE_OPERAND (t, 1)))
! 	  && host_integerp (idx, 0))
! 	{
! 	  tree low_bound, unit_size;
! 
! 	  /* If the resulting bit-offset is constant, track it.  */
! 	  if ((low_bound = array_ref_low_bound (t),
! 	       host_integerp (low_bound, 0))
! 	      && (unit_size = array_ref_element_size (t),
! 		  host_integerp (unit_size, 1)))
! 	    {
! 	      offset = TREE_INT_CST_LOW (idx);
! 	      offset -= TREE_INT_CST_LOW (low_bound);
! 	      offset *= TREE_INT_CST_LOW (unit_size);
! 	      offset *= BITS_PER_UNIT;
! 
! 	      base = TREE_OPERAND (t, 0);
! 	      ctor = get_base_constructor (base, &offset);
! 	      /* Empty constructor.  Always fold to 0.  */
! 	      if (ctor == error_mark_node)
! 		return build_zero_cst (TREE_TYPE (t));
! 	      /* Out-of-bounds array access.  The value is undefined,
! 		 but don't fold.  */
! 	      if (offset < 0)
! 		return NULL_TREE;
! 	      /* We cannot determine the ctor.  */
! 	      if (!ctor)
! 		return NULL_TREE;
! 	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
! 					  TREE_INT_CST_LOW (unit_size)
! 					  * BITS_PER_UNIT);
! 	    }
! 	}
!       /* Fallthru.  */
!     case COMPONENT_REF:
!     case BIT_FIELD_REF:
!     case TARGET_MEM_REF:
!     case MEM_REF:
!       base = get_ref_base_and_extent (t, &offset, &size, &max_size);
!       ctor = get_base_constructor (base, &offset);
! 
!       /* Empty constructor.  Always fold to 0.  */
!       if (ctor == error_mark_node)
! 	return build_zero_cst (TREE_TYPE (t));
!       /* We do not know the precise address.  */
!       if (max_size == -1 || max_size != size)
! 	return NULL_TREE;
!       /* We cannot determine the ctor.  */
!       if (!ctor)
! 	return NULL_TREE;
! 
!       /* Out-of-bounds array access.  The value is undefined,
! 	 but don't fold.  */
!       if (offset < 0)
! 	return NULL_TREE;
! 
!       return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
! 
!     case REALPART_EXPR:
!     case IMAGPART_EXPR:
!       {
! 	tree c = fold_const_aggregate_ref (TREE_OPERAND (t, 0));
! 	if (c && TREE_CODE (c) == COMPLEX_CST)
! 	  return fold_build1_loc (EXPR_LOCATION (t),
! 				  TREE_CODE (t), TREE_TYPE (t), c);
! 	break;
!       }
  
      default:
!       break;
      }
- 
-   return NULL_TREE;
  }
  
  /* Apply the operation CODE in type TYPE to the value, mask pair
--- 1111,1123 ----
          return valueize_op (gimple_switch_index (stmt));
        }
  
!     case GIMPLE_ASSIGN:
!     case GIMPLE_CALL:
!       return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);
  
      default:
!       gcc_unreachable ();
      }
  }
  
  /* Apply the operation CODE in type TYPE to the value, mask pair
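
On the new side, ccp_fold is reduced to the dispatch above; the heavy
lifting now sits behind the gimple-fold.h entry points.  A pass with no
lattice of its own can use the wrapper with a trivial valueization
hook; a sketch against the declarations in gimple-fold.h (identifiers
made up, STMT assumed in scope):

  /* Valueize every operand to itself, i.e. use no pass-specific
     value information at all.  */
  static tree
  identity_valueize (tree op)
  {
    return op;
  }

  /* ... in a pass, for some gimple statement STMT ... */
  tree val = gimple_fold_stmt_to_constant (stmt, identity_valueize);
  if (val && is_gimple_min_invariant (val))
    {
      /* VAL is the constant the statement computes.  */
    }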
Index: gcc/tree-flow.h
===================================================================
*** gcc/tree-flow.h.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree-flow.h	2011-03-22 13:06:43.000000000 +0100
*************** extern void ssanames_print_statistics (v
*** 594,599 ****
--- 594,600 ----
  
  /* In tree-ssa-ccp.c  */
  tree fold_const_aggregate_ref (tree);
+ tree gimple_fold_stmt_to_constant (gimple, tree (*)(tree));
  
  /* In tree-ssa-dom.c  */
  extern void dump_dominator_optimization_stats (FILE *);
Index: gcc/testsuite/c-c++-common/pr46562-2.c
===================================================================
*** /dev/null	1970-01-01 00:00:00.000000000 +0000
--- gcc/testsuite/c-c++-common/pr46562-2.c	2011-03-22 13:06:43.000000000 +0100
***************
*** 0 ****
--- 1,13 ----
+ /* { dg-do compile } */
+ /* { dg-options "-O -fno-tree-ccp -fno-tree-forwprop -fdump-tree-fre" } */
+ 
+ static const int a[4] = {};
+ int foo(void)
+ {
+   int i = 1;
+   const int *p = &a[i];
+   return *p;
+ }
+ 
+ /* { dg-final { scan-tree-dump "= 0;" "fre" } } */
+ /* { dg-final { cleanup-tree-dump "fre" } } */
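
This FRE variant exercises the tree-ssa-sccvn.c part of the patch:
with CCP and forwprop disabled, it is value numbering that must
simplify the invariant POINTER_PLUS_EXPR into an invariant address
before the load can fold.  Roughly (a sketch of the intermediate
GIMPLE, not actual dump output):

  p_1 = &a + 4;	/* invariant POINTER_PLUS_EXPR */
  /* ... is simplified to the invariant address ... */
  p_1 = &a[1];
  /* ... after which *p_1 folds to 0 from a's empty initializer.  */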
Index: gcc/testsuite/c-c++-common/pr46562.c
===================================================================
*** /dev/null	1970-01-01 00:00:00.000000000 +0000
--- gcc/testsuite/c-c++-common/pr46562.c	2011-03-22 13:06:43.000000000 +0100
***************
*** 0 ****
--- 1,13 ----
+ /* { dg-do compile } */
+ /* { dg-options "-O -fdump-tree-ccp1" } */
+ 
+ static const int a[4] = {};
+ int foo(void)
+ {
+   int i = 1;
+   const int *p = &a[i];
+   return *p;
+ }
+ 
+ /* { dg-final { scan-tree-dump "return 0;" "ccp1" } } */
+ /* { dg-final { cleanup-tree-dump "ccp1" } } */
Index: gcc/tree-vrp.c
===================================================================
*** gcc/tree-vrp.c.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree-vrp.c	2011-03-22 13:09:57.000000000 +0100
*************** along with GCC; see the file COPYING3.
*** 39,44 ****
--- 39,45 ----
  #include "tree-scalar-evolution.h"
  #include "tree-ssa-propagate.h"
  #include "tree-chrec.h"
+ #include "gimple-fold.h"
  
  
  /* Type of value ranges.  See value_range_d for a description of these
*************** vrp_initialize (void)
*** 5614,5619 ****
--- 5615,5635 ----
      }
  }
  
+ /* Return the singleton value-range for NAME, or NAME itself if the
+    range is not a singleton.  */
+ 
+ static inline tree
+ vrp_valueize (tree name)
+ {
+   if (TREE_CODE (name) == SSA_NAME)
+     {
+       value_range_t *vr = get_value_range (name);
+       if (vr->type == VR_RANGE
+ 	  && (vr->min == vr->max
+ 	      || operand_equal_p (vr->min, vr->max, 0)))
+ 	return vr->min;
+     }
+   return name;
+ }
  
  /* Visit assignment STMT.  If it produces an interesting range, record
     the SSA name in *OUTPUT_P.  */
*************** vrp_visit_assignment_or_call (gimple stm
*** 5637,5643 ****
      {
        value_range_t new_vr = { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL };
  
!       if (code == GIMPLE_CALL)
  	extract_range_basic (&new_vr, stmt);
        else
  	extract_range_from_assignment (&new_vr, stmt);
--- 5653,5664 ----
      {
        value_range_t new_vr = { VR_UNDEFINED, NULL_TREE, NULL_TREE, NULL };
  
!       /* Try folding the statement to a constant first.  */
!       tree tem = gimple_fold_stmt_to_constant (stmt, vrp_valueize);
!       if (tem && !is_overflow_infinity (tem))
! 	set_value_range (&new_vr, VR_RANGE, tem, tem, NULL);
!       /* Then dispatch to value-range extracting functions.  */
!       else if (code == GIMPLE_CALL)
  	extract_range_basic (&new_vr, stmt);
        else
  	extract_range_from_assignment (&new_vr, stmt);
*************** vrp_visit_stmt (gimple stmt, edge *taken
*** 6366,6372 ****
        /* In general, assignments with virtual operands are not useful
  	 for deriving ranges, with the obvious exception of calls to
  	 builtin functions.  */
- 
        if ((is_gimple_call (stmt)
  	   && gimple_call_fndecl (stmt) != NULL_TREE
  	   && DECL_IS_BUILTIN (gimple_call_fndecl (stmt)))
--- 6387,6392 ----
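
Taken together, vrp_valueize and the new call in
vrp_visit_assignment_or_call let VRP fold statements whose operands
have singleton ranges; for example (hypothetical source):

  int
  f (int i)
  {
    if (i == 3)
      /* On this path VRP has the singleton range [3, 3] for i, so
	 vrp_valueize maps i to 3 and gimple_fold_stmt_to_constant
	 folds the multiplication to the constant 6.  */
      return i * 2;
    return 0;
  }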
Index: gcc/tree-ssa-pre.c
===================================================================
*** gcc/tree-ssa-pre.c.orig	2011-03-22 13:06:05.000000000 +0100
--- gcc/tree-ssa-pre.c	2011-03-22 13:06:43.000000000 +0100
*************** eliminate (void)
*** 4380,4388 ****
  	  if (is_gimple_call (stmt)
  	      && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
  	    {
! 	      tree fn = VN_INFO (gimple_call_fn (stmt))->valnum;
  	      if (TREE_CODE (fn) == ADDR_EXPR
! 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
  		{
  		  bool can_make_abnormal_goto
  		    = stmt_can_make_abnormal_goto (stmt);
--- 4380,4391 ----
  	  if (is_gimple_call (stmt)
  	      && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME)
  	    {
! 	      tree orig_fn = gimple_call_fn (stmt);
! 	      tree fn = VN_INFO (orig_fn)->valnum;
  	      if (TREE_CODE (fn) == ADDR_EXPR
! 		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
! 		  && useless_type_conversion_p (TREE_TYPE (orig_fn),
! 						TREE_TYPE (fn)))
  		{
  		  bool can_make_abnormal_goto
  		    = stmt_can_make_abnormal_goto (stmt);
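
The added useless_type_conversion_p check matters when the value number
of the call's function operand is the address of a FUNCTION_DECL whose
type does not match the call expression; a hedged sketch of such a
situation:

  void g (int);

  void
  h (void)
  {
    /* The cast makes fp's type disagree with g's, so with the new
       check PRE no longer rewrites the indirect call into a direct
       call of the mismatched declaration.  */
    void (*fp) (void) = (void (*) (void)) g;
    fp ();
  }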


