
List:       gcc-patches
Subject:    No a*x+b*x factorization for signed vectors
From:       Marc Glisse <marc.glisse@inria.fr>
Date:       2018-09-29 11:05:55
Message-ID: alpine.DEB.2.02.1809291249060.6690@stedding.saclay.inria.fr

Hello,

This is a simple patch to remove the wrong-code part of PR 87319. I didn't 
spend much time polishing the code, since it is meant to disappear 
anyway.
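
As a self-contained illustration (my own sketch, not the reproducer from 
the PR), the kind of expression affected, using GNU C vector extensions: 
with signed elements, factoring a*x + b*x into (a + b)*x in the original 
signed vector type can introduce an overflow in a + b that the original 
expression did not have, so the factored form has to be computed in the 
corresponding unsigned type, which is why signed_or_unsigned_type_for 
must handle these types.

/* Sketch only, not the testcase from PR 87319.  */
typedef int v4si __attribute__ ((vector_size (16)));

v4si
f (v4si a, v4si b, v4si x)
{
  /* Folding this to (a + b) * x directly in the signed vector type may
     introduce undefined overflow in a + b; with ANY_INTEGRAL_TYPE_P the
     factored form is instead computed in the unsigned variant.  */
  return a * x + b * x;
}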

We could probably remove the inner == inner2 test in 
signed_or_unsigned_type_for; I hadn't noticed it when copy-pasting the code.

Bootstrapped and regtested on powerpc64le-unknown-linux-gnu.

2018-09-30  Marc Glisse  <marc.glisse@inria.fr>

 	PR middle-end/87319
 	* fold-const.c (fold_plusminus_mult_expr): Handle complex and vectors.
 	* tree.c (signed_or_unsigned_type_for): Handle complex.

-- 
Marc Glisse
["uns.patch" (TEXT/x-diff)]

Index: gcc/fold-const.c
===================================================================
--- gcc/fold-const.c	(revision 264371)
+++ gcc/fold-const.c	(working copy)
@@ -7136,21 +7136,21 @@ fold_plusminus_mult_expr (location_t loc
 	  alt1 = arg10;
 	  same = maybe_same;
 	  if (swap)
 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
 	}
     }
 
   if (!same)
     return NULL_TREE;
 
-  if (! INTEGRAL_TYPE_P (type)
+  if (! ANY_INTEGRAL_TYPE_P (type)
       || TYPE_OVERFLOW_WRAPS (type)
       /* We are neither factoring zero nor minus one.  */
       || TREE_CODE (same) == INTEGER_CST)
     return fold_build2_loc (loc, MULT_EXPR, type,
 			fold_build2_loc (loc, code, type,
 				     fold_convert_loc (loc, type, alt0),
 				     fold_convert_loc (loc, type, alt1)),
 			fold_convert_loc (loc, type, same));
 
   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
Index: gcc/tree.c
===================================================================
--- gcc/tree.c	(revision 264371)
+++ gcc/tree.c	(working copy)
@@ -11202,34 +11202,45 @@ int_cst_value (const_tree x)
   return val;
 }
 
 /* If TYPE is an integral or pointer type, return an integer type with
    the same precision which is unsigned iff UNSIGNEDP is true, or itself
    if TYPE is already an integer type of signedness UNSIGNEDP.  */
 
 tree
 signed_or_unsigned_type_for (int unsignedp, tree type)
 {
-  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp)
+  if (ANY_INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) == unsignedp)
     return type;
 
   if (TREE_CODE (type) == VECTOR_TYPE)
     {
       tree inner = TREE_TYPE (type);
       tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
       if (!inner2)
 	return NULL_TREE;
       if (inner == inner2)
 	return type;
       return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type));
     }
 
+  if (TREE_CODE (type) == COMPLEX_TYPE)
+    {
+      tree inner = TREE_TYPE (type);
+      tree inner2 = signed_or_unsigned_type_for (unsignedp, inner);
+      if (!inner2)
+	return NULL_TREE;
+      if (inner == inner2)
+	return type;
+      return build_complex_type (inner2);
+    }
+
   if (!INTEGRAL_TYPE_P (type)
       && !POINTER_TYPE_P (type)
       && TREE_CODE (type) != OFFSET_TYPE)
     return NULL_TREE;
 
   return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp);
 }
 
 /* If TYPE is an integral or pointer type, return an integer type with
    the same precision which is unsigned, or itself if TYPE is already an

