fold-const.c (fold_binary_loc): Move ~x & ~y -> ~(x | y) and ~x | ~y -> ~(x & y)...

2015-06-30  Richard Biener  <rguenther@suse.de>

	* fold-const.c (fold_binary_loc): Move ~x & ~y -> ~(x | y) and
	~x | ~y -> ~(x & y), (x & CST) ^ (x & CST2) -> (x & CST) | (x & CST2),
	(X | Y) ^ X -> Y & ~ X, ~X ^ ~Y to X ^ Y and ~X ^ C to X ^ ~C ...
	* match.pd: ... to patterns here.

From-SVN: r225184
Authored and committed by Richard Biener, 2015-06-30 12:54:23 +00:00
parent d62808554d
commit 5b00d92116
3 changed files with 48 additions and 119 deletions
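
For orientation, a minimal sketch of the source forms these folds target (hypothetical functions of mine, not part of the commit); after this change the rewrites are expected to be performed by the GIMPLE match-and-simplify patterns generated from match.pd rather than by fold_binary_loc on GENERIC:

int
f1 (int x, int y)
{
  return ~x & ~y;   /* expected to be simplified to ~(x | y) */
}

int
f2 (int x, int y)
{
  return ~x | ~y;   /* expected to be simplified to ~(x & y) */
}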

--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,10 @@
+2015-06-30  Richard Biener  <rguenther@suse.de>
+
+	* fold-const.c (fold_binary_loc): Move ~x & ~y -> ~(x | y) and
+	~x | ~y -> ~(x & y), (x & CST) ^ (x & CST2) -> (x & CST) | (x & CST2),
+	(X | Y) ^ X -> Y & ~ X, ~X ^ ~Y to X ^ Y and ~X ^ C to X ^ ~C ...
+	* match.pd: ... to patterns here.
+
 2015-06-30  Richard Biener  <rguenther@suse.de>
 
 	PR tree-optimization/66704

--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -10974,24 +10974,6 @@ fold_binary_loc (location_t loc,
       if (t1 != NULL_TREE)
         return t1;
 
-      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
-
-         This results in more efficient code for machines without a NAND
-         instruction.  Combine will canonicalize to the first form
-         which will allow use of NAND instructions provided by the
-         backend if they exist.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-          && TREE_CODE (arg1) == BIT_NOT_EXPR)
-        {
-          return
-            fold_build1_loc (loc, BIT_NOT_EXPR, type,
-                             build2 (BIT_AND_EXPR, type,
-                                     fold_convert_loc (loc, type,
-                                                       TREE_OPERAND (arg0, 0)),
-                                     fold_convert_loc (loc, type,
-                                                       TREE_OPERAND (arg1, 0))));
-        }
-
       /* See if this can be simplified into a rotate first.  If that
          is unsuccessful continue in the association code.  */
       goto bit_rotate;
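
The block removed above (and its BIT_AND_EXPR dual removed further down) implements De Morgan's laws; a throwaway exhaustive self-check of the underlying identities, written for this note and not part of the commit:

#include <assert.h>

int
main (void)
{
  /* ~x & ~y == ~(x | y) and ~x | ~y == ~(x & y) hold bit-for-bit.  */
  for (unsigned x = 0; x < 256; x++)
    for (unsigned y = 0; y < 256; y++)
      {
        assert ((~x & ~y) == ~(x | y));
        assert ((~x | ~y) == ~(x & y));
      }
  return 0;
}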
@@ -11015,90 +10997,6 @@ fold_binary_loc (location_t loc,
           return omit_one_operand_loc (loc, type, t1, arg0);
         }
 
-      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
-         with a constant, and the two constants have no bits in common,
-         we should treat this as a BIT_IOR_EXPR since this may produce more
-         simplifications.  */
-      if (TREE_CODE (arg0) == BIT_AND_EXPR
-          && TREE_CODE (arg1) == BIT_AND_EXPR
-          && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
-          && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
-          && wi::bit_and (TREE_OPERAND (arg0, 1),
-                          TREE_OPERAND (arg1, 1)) == 0)
-        {
-          code = BIT_IOR_EXPR;
-          goto bit_ior;
-        }
-
-      /* (X | Y) ^ X -> Y & ~ X*/
-      if (TREE_CODE (arg0) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
-        {
-          tree t2 = TREE_OPERAND (arg0, 1);
-          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
-                                arg1);
-          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
-                                fold_convert_loc (loc, type, t2),
-                                fold_convert_loc (loc, type, t1));
-          return t1;
-        }
-
-      /* (Y | X) ^ X -> Y & ~ X*/
-      if (TREE_CODE (arg0) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
-        {
-          tree t2 = TREE_OPERAND (arg0, 0);
-          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
-                                arg1);
-          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
-                                fold_convert_loc (loc, type, t2),
-                                fold_convert_loc (loc, type, t1));
-          return t1;
-        }
-
-      /* X ^ (X | Y) -> Y & ~ X*/
-      if (TREE_CODE (arg1) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
-        {
-          tree t2 = TREE_OPERAND (arg1, 1);
-          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
-                                arg0);
-          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
-                                fold_convert_loc (loc, type, t2),
-                                fold_convert_loc (loc, type, t1));
-          return t1;
-        }
-
-      /* X ^ (Y | X) -> Y & ~ X*/
-      if (TREE_CODE (arg1) == BIT_IOR_EXPR
-          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
-        {
-          tree t2 = TREE_OPERAND (arg1, 0);
-          t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
-                                arg0);
-          t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
-                                fold_convert_loc (loc, type, t2),
-                                fold_convert_loc (loc, type, t1));
-          return t1;
-        }
-
-      /* Convert ~X ^ ~Y to X ^ Y.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-          && TREE_CODE (arg1) == BIT_NOT_EXPR)
-        return fold_build2_loc (loc, code, type,
-                                fold_convert_loc (loc, type,
-                                                  TREE_OPERAND (arg0, 0)),
-                                fold_convert_loc (loc, type,
-                                                  TREE_OPERAND (arg1, 0)));
-
-      /* Convert ~X ^ C to X ^ ~C.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-          && TREE_CODE (arg1) == INTEGER_CST)
-        return fold_build2_loc (loc, code, type,
-                                fold_convert_loc (loc, type,
-                                                  TREE_OPERAND (arg0, 0)),
-                                fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
-
       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
       if (TREE_CODE (arg0) == BIT_AND_EXPR
           && INTEGRAL_TYPE_P (type)
@@ -11410,23 +11308,6 @@ fold_binary_loc (location_t loc,
           fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
         }
 
-      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
-
-         This results in more efficient code for machines without a NOR
-         instruction.  Combine will canonicalize to the first form
-         which will allow use of NOR instructions provided by the
-         backend if they exist.  */
-      if (TREE_CODE (arg0) == BIT_NOT_EXPR
-          && TREE_CODE (arg1) == BIT_NOT_EXPR)
-        {
-          return fold_build1_loc (loc, BIT_NOT_EXPR, type,
-                                  build2 (BIT_IOR_EXPR, type,
-                                          fold_convert_loc (loc, type,
-                                                            TREE_OPERAND (arg0, 0)),
-                                          fold_convert_loc (loc, type,
-                                                            TREE_OPERAND (arg1, 0))));
-        }
-
       /* If arg0 is derived from the address of an object or function, we may
          be able to fold this expression using the object or function's
          alignment.  */
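
The XOR folds removed in the middle hunk are easiest to see with concrete operands. The sketch below (hypothetical functions h1-h4, with my expectations noted in comments; not a testcase from the commit) walks the disjoint-mask case through the bits and lists the remaining identities:

/* With disjoint masks the two AND results share no set bits, so XOR and
   IOR agree.  For x = 0xab:
     x & 0x0f = 0000 1011
     x & 0xf0 = 1010 0000
     xor = ior = 1010 1011  */
unsigned
h1 (unsigned x)
{
  return (x & 0x0f) ^ (x & 0xf0);   /* expected: (x & 0x0f) | (x & 0xf0),
                                       which may simplify further */
}

unsigned
h2 (unsigned x, unsigned y)
{
  return (x | y) ^ x;               /* expected: y & ~x */
}

unsigned
h3 (unsigned x, unsigned y)
{
  return ~x ^ ~y;                   /* expected: x ^ y */
}

unsigned
h4 (unsigned x)
{
  return ~x ^ 0x5au;                /* ~X ^ C: expected x ^ ~0x5au */
}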

--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -389,6 +389,47 @@ along with GCC; see the file COPYING3.  If not see
  (bit_and:c (bit_ior:c @0 @1) (bit_xor:c @1 (bit_not @0)))
  (bit_and @0 @1))
 
+/* ~x & ~y -> ~(x | y)
+   ~x | ~y -> ~(x & y) */
+(for op (bit_and bit_ior)
+     rop (bit_ior bit_and)
+ (simplify
+  (op (convert1? (bit_not @0)) (convert2? (bit_not @1)))
+  (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
+       && tree_nop_conversion_p (type, TREE_TYPE (@1)))
+   (bit_not (rop (convert @0) (convert @1))))))
+
+/* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
+   with a constant, and the two constants have no bits in common,
+   we should treat this as a BIT_IOR_EXPR since this may produce more
+   simplifications.  */
+(simplify
+ (bit_xor (convert1? (bit_and@4 @0 INTEGER_CST@1))
+          (convert2? (bit_and@5 @2 INTEGER_CST@3)))
+ (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
+      && tree_nop_conversion_p (type, TREE_TYPE (@2))
+      && wi::bit_and (@1, @3) == 0)
+  (bit_ior (convert @4) (convert @5))))
+
+/* (X | Y) ^ X -> Y & ~ X*/
+(simplify
+ (bit_xor:c (convert? (bit_ior:c @0 @1)) (convert? @0))
+ (if (tree_nop_conversion_p (type, TREE_TYPE (@0)))
+  (convert (bit_and @1 (bit_not @0)))))
+
+/* Convert ~X ^ ~Y to X ^ Y.  */
+(simplify
+ (bit_xor (convert1? (bit_not @0)) (convert2? (bit_not @1)))
+ (if (tree_nop_conversion_p (type, TREE_TYPE (@0))
+      && tree_nop_conversion_p (type, TREE_TYPE (@1)))
+  (bit_xor (convert @0) (convert @1))))
+
+/* Convert ~X ^ C to X ^ ~C.  */
+(simplify
+ (bit_xor (convert? (bit_not @0)) INTEGER_CST@1)
+ (bit_xor (convert @0) (bit_not @1)))
+
 (simplify
  (abs (abs@1 @0))
  @1)
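
One detail of the new match.pd patterns worth calling out is the convert?/convert1?/convert2? captures guarded by tree_nop_conversion_p: they allow the rewrite to look through width-preserving casts. A hedged illustration (hypothetical function g; my reading of the pattern, not a testcase from the commit):

int
g (unsigned x, unsigned y)
{
  /* The bit_nots are done in unsigned and the bit_and in int; because the
     casts preserve the precision, the De Morgan pattern is still expected
     to apply and produce the ~(x | y) form.  */
  return (int) ~x & (int) ~y;
}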