Modify gcc/*.[hc] double_int call sites to use the new interface.

This change entailed adding a few new methods to double_int.

The change results in a 0.163% time improvement, with 70% confidence.

Tested on x86_64.

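To make the shape of the rewrite concrete, the sketch below contrasts the two call styles on a toy two-word integer. The `di` type is a stand-in invented for this illustration, not GCC's double_int; it only mirrors the named constructor and the operator overloads that call sites are switched to.

    #include <cassert>

    /* Minimal stand-in for double_int: a two-word integer with the
       operators this patch starts using at call sites.  Illustrative only.  */
    struct di
    {
      unsigned long long low;
      long long high;

      static di from_shwi (long long cst)
      {
        di r;
        r.low = (unsigned long long) cst;
        r.high = cst < 0 ? -1 : 0;   /* sign-fill the high word */
        return r;
      }

      di operator + (di b) const
      {
        di r;
        r.low = low + b.low;
        r.high = high + b.high + (r.low < low);   /* propagate the carry */
        return r;
      }
      bool operator == (di b) const { return low == b.low && high == b.high; }
    };

    int main ()
    {
      /* Old style:  off = double_int_add (a, shwi_to_double_int (8));
         New style:  off = a + double_int::from_shwi (8);  */
      di a = di::from_shwi (34);
      di off = a + di::from_shwi (8);
      assert (off == di::from_shwi (42));
      return 0;
    }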

Index: gcc/ChangeLog

2012-09-06  Lawrence Crowl  <crowl@google.com>

	* double-int.h (double_int::operator &=): New.
	(double_int::operator ^=): New.
	(double_int::operator |=): New.
	(double_int::mul_with_sign): Modify overflow parameter to bool*.
	(double_int::add_with_sign): New.
	(double_int::ule): New.
	(double_int::sle): New.
	(binary double_int::operator *): Remove parameter name.
	(binary double_int::operator +): Likewise.
	(binary double_int::operator -): Likewise.
	(binary double_int::operator &): Likewise.
	(double_int::operator |): Likewise.
	(double_int::operator ^): Likewise.
	(double_int::and_not): Likewise.
	(double_int::from_shwi): Tidy formatting.
	(double_int::from_uhwi): Likewise.
	* double-int.c (double_int::mul_with_sign): Modify overflow parameter
	to bool*.
	(double_int::add_with_sign): New.
	(double_int::ule): New.
	(double_int::sle): New.
	* builtins.c: Modify to use the new double_int interface.
	* cgraph.c: Likewise.
	* combine.c: Likewise.
	* dojump.c: Likewise.
	* dwarf2out.c: Likewise.
	* emit-rtl.c: Likewise.
	* expmed.c: Likewise.
	* expr.c: Likewise.
	* fixed-value.c: Likewise.
	* fold-const.c: Likewise.
	* gimple-fold.c: Likewise.
	* gimple-ssa-strength-reduction.c: Likewise.
	* simplify-rtx.c: Likewise.
	* ipa-prop.c: Likewise.
	* loop-iv.c: Likewise.
	* optabs.c: Likewise.
	* stor-layout.c: Likewise.
	* tree-affine.c: Likewise.
	* tree-cfg.c: Likewise.
	* tree-dfa.c: Likewise.
	* tree-flow-inline.h: Likewise.
	* tree-object-size.c: Likewise.
	* tree-predcom.c: Likewise.
	* tree-pretty-print.c: Likewise.
	* tree-sra.c: Likewise.
	* tree-ssa-address.c: Likewise.
	* tree-ssa-alias.c: Likewise.
	* tree-ssa-ccp.c: Likewise.
	* tree-ssa-forwprop.c: Likewise.
	* tree-ssa-loop-ivopts.c: Likewise.
	* tree-ssa-loop-niter.c: Likewise.
	* tree-ssa-phiopt.c: Likewise.
	* tree-ssa-pre.c: Likewise.
	* tree-ssa-sccvn.c: Likewise.
	* tree-ssa-structalias.c: Likewise.
	* tree-ssa.c: Likewise.
	* tree-switch-conversion.c: Likewise.
	* tree-vect-loop-manip.c: Likewise.
	* tree-vrp.c: Likewise.
	* tree.h: Likewise.
	* tree.c: Likewise.
	* varasm.c: Likewise.

From-SVN: r191047
45 files changed, 891 insertions(+), 924 deletions(-)

--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -4990,7 +4990,7 @@ expand_builtin_signbit (tree exp, rtx target)
   if (bitpos < GET_MODE_BITSIZE (rmode))
     {
-      double_int mask = double_int_setbit (double_int_zero, bitpos);
+      double_int mask = double_int_zero.set_bit (bitpos);

       if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
 	temp = gen_lowpart (rmode, temp);
@@ -8775,14 +8775,14 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
 	  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
 				 TREE_OPERAND (dest_base, 0), 0))
 	    return NULL_TREE;
-	  off = double_int_add (mem_ref_offset (src_base),
-				shwi_to_double_int (src_offset));
-	  if (!double_int_fits_in_shwi_p (off))
+	  off = mem_ref_offset (src_base) +
+		double_int::from_shwi (src_offset);
+	  if (!off.fits_shwi ())
 	    return NULL_TREE;
 	  src_offset = off.low;
-	  off = double_int_add (mem_ref_offset (dest_base),
-				shwi_to_double_int (dest_offset));
-	  if (!double_int_fits_in_shwi_p (off))
+	  off = mem_ref_offset (dest_base) +
+		double_int::from_shwi (dest_offset);
+	  if (!off.fits_shwi ())
 	    return NULL_TREE;
 	  dest_offset = off.low;
 	  if (ranges_overlap_p (src_offset, maxsize,
@@ -12696,7 +12696,7 @@ fold_builtin_object_size (tree ptr, tree ost)
     {
       bytes = compute_builtin_object_size (ptr, object_size_type);
       if (double_int_fits_to_tree_p (size_type_node,
-				     uhwi_to_double_int (bytes)))
+				     double_int::from_uhwi (bytes)))
 	return build_int_cstu (size_type_node, bytes);
     }
   else if (TREE_CODE (ptr) == SSA_NAME)
@@ -12707,7 +12707,7 @@ fold_builtin_object_size (tree ptr, tree ost)
       bytes = compute_builtin_object_size (ptr, object_size_type);
       if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
 	  && double_int_fits_to_tree_p (size_type_node,
-					uhwi_to_double_int (bytes)))
+					double_int::from_uhwi (bytes)))
 	return build_int_cstu (size_type_node, bytes);
     }

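The fold_builtin_memory_op hunk above follows a guard pattern repeated throughout the patch: do the offset addition in the wide type and fold only if the sum still fits a signed HOST_WIDE_INT. A standalone sketch of that guard, with GCC's `__int128` extension standing in for double_int (an assumption made for illustration; the real type is two HOST_WIDE_INT words):

    #include <cassert>
    #include <climits>

    /* Widen, add, and only narrow back when the result fits a signed
       host-wide integer -- the shape of the fits_shwi () check.  */
    static bool
    fits_shwi (__int128 v)
    {
      return v >= LLONG_MIN && v <= LLONG_MAX;
    }

    int main ()
    {
      __int128 off = (__int128) LLONG_MAX + 16;   /* would wrap in 64 bits */
      assert (!fits_shwi (off));                  /* so the fold is abandoned */

      off = (__int128) 1000 + 16;
      assert (fits_shwi (off));                   /* safe to narrow and fold */
      long long narrowed = (long long) off;
      assert (narrowed == 1016);
      return 0;
    }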
--- a/gcc/cgraph.c
+++ b/gcc/cgraph.c
@@ -484,9 +484,8 @@ cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
   node = cgraph_create_node (alias);
   gcc_checking_assert (!virtual_offset
-		       || double_int_equal_p
-			    (tree_to_double_int (virtual_offset),
-			     shwi_to_double_int (virtual_value)));
+		       || tree_to_double_int (virtual_offset) ==
+			  double_int::from_shwi (virtual_value));
   node->thunk.fixed_offset = fixed_offset;
   node->thunk.this_adjusting = this_adjusting;
   node->thunk.virtual_value = virtual_value;

--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -2673,11 +2673,11 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
 	  o = rtx_to_double_int (outer);
 	  i = rtx_to_double_int (inner);
-	  m = double_int_mask (width);
-	  i = double_int_and (i, m);
-	  m = double_int_lshift (m, offset, HOST_BITS_PER_DOUBLE_INT, false);
-	  i = double_int_lshift (i, offset, HOST_BITS_PER_DOUBLE_INT, false);
-	  o = double_int_ior (double_int_and_not (o, m), i);
+	  m = double_int::mask (width);
+	  i &= m;
+	  m = m.llshift (offset, HOST_BITS_PER_DOUBLE_INT);
+	  i = i.llshift (offset, HOST_BITS_PER_DOUBLE_INT);
+	  o = o.and_not (m) | i;

 	  combine_merges++;
 	  subst_insn = i3;

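The try_combine hunk splices an inner constant into a width-bit field of an outer constant at a given offset. Here is the same mask/shift/splice sequence on a single 64-bit word; double_int::mask and llshift do this across two words, but the logic is identical.

    #include <cassert>
    #include <cstdint>

    int main ()
    {
      uint64_t o = 0xffffffffffffffffULL;   /* outer value */
      uint64_t i = 0x2a;                    /* inner value to insert */
      int width = 8, offset = 16;

      /* mask (width): low WIDTH bits set; the ternary avoids shifting by 64.  */
      uint64_t m = (width < 64 ? (uint64_t) 1 << width : 0) - 1;
      i &= m;                               /* i &= m                          */
      m <<= offset;                         /* m = m.llshift (offset, ...)     */
      i <<= offset;                         /* i = i.llshift (offset, ...)     */
      o = (o & ~m) | i;                     /* o = o.and_not (m) | i           */

      assert (o == 0xffffffffff2affffULL);
      return 0;
    }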
--- a/gcc/dojump.c
+++ b/gcc/dojump.c
@@ -165,8 +165,7 @@ prefer_and_bit_test (enum machine_mode mode, int bitnum)
   /* Fill in the integers.  */
   XEXP (and_test, 1)
-    = immed_double_int_const (double_int_setbit (double_int_zero, bitnum),
-			      mode);
+    = immed_double_int_const (double_int_zero.set_bit (bitnum), mode);
   XEXP (XEXP (shift_test, 0), 1) = GEN_INT (bitnum);

   speed_p = optimize_insn_for_speed_p ();

--- a/gcc/double-int.c
+++ b/gcc/double-int.c
@@ -606,7 +606,6 @@ div_and_round_double (unsigned code, int uns,
   return overflow;
 }

-/* Returns mask for PREC bits.  */

 double_int
@@ -754,7 +753,7 @@ double_int::operator * (double_int b) const
    *OVERFLOW is set to nonzero.  */

 double_int
-double_int::mul_with_sign (double_int b, bool unsigned_p, int *overflow) const
+double_int::mul_with_sign (double_int b, bool unsigned_p, bool *overflow) const
 {
   const double_int &a = *this;
   double_int ret;
@@ -774,6 +773,19 @@ double_int::operator + (double_int b) const
   return ret;
 }

+/* Returns A + B.  If the operation overflows according to UNSIGNED_P,
+   *OVERFLOW is set to nonzero.  */
+
+double_int
+double_int::add_with_sign (double_int b, bool unsigned_p, bool *overflow) const
+{
+  const double_int &a = *this;
+  double_int ret;
+  *overflow = add_double_with_sign (a.low, a.high, b.low, b.high,
+				    &ret.low, &ret.high, unsigned_p);
+  return ret;
+}
+
 /* Returns A - B.  */

 double_int
@@ -1104,6 +1116,20 @@ double_int::ult (double_int b) const
   return false;
 }

+/* Compares two unsigned values A and B for less-than or equal-to.  */
+
+bool
+double_int::ule (double_int b) const
+{
+  if ((unsigned HOST_WIDE_INT) high < (unsigned HOST_WIDE_INT) b.high)
+    return true;
+  if ((unsigned HOST_WIDE_INT) high > (unsigned HOST_WIDE_INT) b.high)
+    return false;
+  if (low <= b.low)
+    return true;
+  return false;
+}
+
 /* Compares two unsigned values A and B for greater-than.  */

 bool
@@ -1132,6 +1158,20 @@ double_int::slt (double_int b) const
   return false;
 }

+/* Compares two signed values A and B for less-than or equal-to.  */
+
+bool
+double_int::sle (double_int b) const
+{
+  if (high < b.high)
+    return true;
+  if (high > b.high)
+    return false;
+  if (low <= b.low)
+    return true;
+  return false;
+}
+
 /* Compares two signed values A and B for greater-than.  */

 bool

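The new ule and sle compare the high words first (unsigned or signed respectively) and fall back to the low words on a tie. A self-contained model, checked against native __int128 comparison; the 64-bit word width is an assumption made for the sketch:

    #include <cassert>
    #include <cstdint>

    struct di { uint64_t low; int64_t high; };

    static bool ule (di a, di b)     /* unsigned: high word compared unsigned */
    {
      if ((uint64_t) a.high < (uint64_t) b.high) return true;
      if ((uint64_t) a.high > (uint64_t) b.high) return false;
      return a.low <= b.low;
    }

    static bool sle (di a, di b)     /* signed: high word compared signed */
    {
      if (a.high < b.high) return true;
      if (a.high > b.high) return false;
      return a.low <= b.low;
    }

    static __int128 join (di a)      /* reassemble the 128-bit value */
    { return ((__int128) a.high << 64) | a.low; }

    int main ()
    {
      di minus_one = { ~0ULL, -1 };  /* -1, or 2^128 - 1 read unsigned */
      di one = { 1, 0 };
      assert (sle (minus_one, one) && !ule (minus_one, one));
      assert (join (minus_one) <= join (one));              /* signed view */
      assert ((unsigned __int128) join (minus_one)
              > (unsigned __int128) join (one));            /* unsigned view */
      return 0;
    }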
--- a/gcc/double-int.h
+++ b/gcc/double-int.h
@@ -50,9 +50,8 @@ along with GCC; see the file COPYING3.  If not see
    numbers with precision higher than HOST_WIDE_INT).  It might be less
    confusing to have them both signed or both unsigned.  */

-typedef struct double_int
+struct double_int
 {
-public:
   /* Normally, we would define constructors to create instances.
      Two things prevent us from doing so.
      First, defining a constructor makes the class non-POD in C++03,
@@ -78,6 +77,9 @@
   double_int &operator *= (double_int);
   double_int &operator += (double_int);
   double_int &operator -= (double_int);
+  double_int &operator &= (double_int);
+  double_int &operator ^= (double_int);
+  double_int &operator |= (double_int);

   /* The following functions are non-mutating operations.  */
@@ -104,17 +106,18 @@
   /* Arithmetic operation functions.  */

   double_int set_bit (unsigned) const;
-  double_int mul_with_sign (double_int, bool, int *) const;
+  double_int mul_with_sign (double_int, bool unsigned_p, bool *overflow) const;
+  double_int add_with_sign (double_int, bool unsigned_p, bool *overflow) const;

-  double_int operator * (double_int b) const;
-  double_int operator + (double_int b) const;
-  double_int operator - (double_int b) const;
+  double_int operator * (double_int) const;
+  double_int operator + (double_int) const;
+  double_int operator - (double_int) const;
   double_int operator - () const;
   double_int operator ~ () const;
-  double_int operator & (double_int b) const;
-  double_int operator | (double_int b) const;
-  double_int operator ^ (double_int b) const;
-  double_int and_not (double_int b) const;
+  double_int operator & (double_int) const;
+  double_int operator | (double_int) const;
+  double_int operator ^ (double_int) const;
+  double_int and_not (double_int) const;

   double_int lshift (HOST_WIDE_INT count, unsigned int prec, bool arith) const;
   double_int rshift (HOST_WIDE_INT count, unsigned int prec, bool arith) const;
@@ -156,8 +159,10 @@
   int scmp (double_int b) const;

   bool ult (double_int b) const;
+  bool ule (double_int b) const;
   bool ugt (double_int b) const;
   bool slt (double_int b) const;
+  bool sle (double_int b) const;
   bool sgt (double_int b) const;

   double_int max (double_int b, bool uns);
@@ -176,7 +181,7 @@

   unsigned HOST_WIDE_INT low;
   HOST_WIDE_INT high;
-} double_int;
+};

 #define HOST_BITS_PER_DOUBLE_INT (2 * HOST_BITS_PER_WIDE_INT)
@@ -185,8 +190,8 @@
 /* Constructs double_int from integer CST.  The bits over the precision of
    HOST_WIDE_INT are filled with the sign bit.  */

-inline
-double_int double_int::from_shwi (HOST_WIDE_INT cst)
+inline double_int
+double_int::from_shwi (HOST_WIDE_INT cst)
 {
   double_int r;
   r.low = (unsigned HOST_WIDE_INT) cst;
@@ -215,8 +220,8 @@ shwi_to_double_int (HOST_WIDE_INT cst)
 /* Constructs double_int from unsigned integer CST.  The bits over the
    precision of HOST_WIDE_INT are filled with zeros.  */

-inline
-double_int double_int::from_uhwi (unsigned HOST_WIDE_INT cst)
+inline double_int
+double_int::from_uhwi (unsigned HOST_WIDE_INT cst)
 {
   double_int r;
   r.low = cst;
@@ -266,6 +271,27 @@ double_int::operator -= (double_int b)
   return *this;
 }

+inline double_int &
+double_int::operator &= (double_int b)
+{
+  *this = *this & b;
+  return *this;
+}
+
+inline double_int &
+double_int::operator ^= (double_int b)
+{
+  *this = *this ^ b;
+  return *this;
+}
+
+inline double_int &
+double_int::operator |= (double_int b)
+{
+  *this = *this | b;
+  return *this;
+}
+
 /* Returns value of CST as a signed number.  CST must satisfy
    double_int::fits_signed.  */
@@ -346,7 +372,10 @@ inline double_int
 double_int_mul_with_sign (double_int a, double_int b,
 			  bool unsigned_p, int *overflow)
 {
-  return a.mul_with_sign (b, unsigned_p, overflow);
+  bool ovf;
+  double_int ret = a.mul_with_sign (b, unsigned_p, &ovf);
+  *overflow = ovf;
+  return ret;
 }

 /* FIXME(crowl): Remove after converting callers.  */

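The add_with_sign contract — return the wrapped sum, report overflow through a bool* — can be modeled on one word: unsigned overflow is a carry out, and signed overflow is two like-signed addends producing an oppositely-signed sum. A sketch on a single 64-bit word rather than double_int's two; illustrative only:

    #include <cassert>
    #include <cstdint>

    static uint64_t
    add_with_sign (uint64_t a, uint64_t b, bool unsigned_p, bool *overflow)
    {
      uint64_t ret = a + b;                 /* wraps modulo 2^64 */
      if (unsigned_p)
        *overflow = ret < a;                /* carry out of the top bit */
      else
        /* Signed overflow: addends share a sign bit that the sum lacks.  */
        *overflow = (~(a ^ b) & (a ^ ret)) >> 63;
      return ret;
    }

    int main ()
    {
      bool ovf;
      add_with_sign (~0ULL, 1, true, &ovf);    /* UINT64_MAX + 1 */
      assert (ovf);
      add_with_sign (~0ULL, 1, false, &ovf);   /* -1 + 1 as signed */
      assert (!ovf);
      uint64_t r = add_with_sign (40, 2, true, &ovf);
      assert (r == 42 && !ovf);
      return 0;
    }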
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -9332,13 +9332,13 @@ static inline double_int
 double_int_type_size_in_bits (const_tree type)
 {
   if (TREE_CODE (type) == ERROR_MARK)
-    return uhwi_to_double_int (BITS_PER_WORD);
+    return double_int::from_uhwi (BITS_PER_WORD);
   else if (TYPE_SIZE (type) == NULL_TREE)
     return double_int_zero;
   else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
     return tree_to_double_int (TYPE_SIZE (type));
   else
-    return uhwi_to_double_int (TYPE_ALIGN (type));
+    return double_int::from_uhwi (TYPE_ALIGN (type));
 }

 /* Given a pointer to a tree node for a subrange type, return a pointer
@@ -11758,7 +11758,7 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
 	      mem_loc_result->dw_loc_oprnd2.val_class
 		= dw_val_class_const_double;
 	      mem_loc_result->dw_loc_oprnd2.v.val_double
-		= shwi_to_double_int (INTVAL (rtl));
+		= double_int::from_shwi (INTVAL (rtl));
 	    }
 	}
       break;
@@ -12317,7 +12317,7 @@ loc_descriptor (rtx rtl, enum machine_mode mode,
 	    double_int val = rtx_to_double_int (elt);

 	    if (elt_size <= sizeof (HOST_WIDE_INT))
-	      insert_int (double_int_to_shwi (val), elt_size, p);
+	      insert_int (val.to_shwi (), elt_size, p);
 	    else
 	      {
 		gcc_assert (elt_size == 2 * sizeof (HOST_WIDE_INT));
@@ -13646,11 +13646,11 @@ simple_decl_align_in_bits (const_tree decl)
 static inline double_int
 round_up_to_align (double_int t, unsigned int align)
 {
-  double_int alignd = uhwi_to_double_int (align);
-  t = double_int_add (t, alignd);
-  t = double_int_add (t, double_int_minus_one);
-  t = double_int_div (t, alignd, true, TRUNC_DIV_EXPR);
-  t = double_int_mul (t, alignd);
+  double_int alignd = double_int::from_uhwi (align);
+  t += alignd;
+  t += double_int_minus_one;
+  t = t.div (alignd, true, TRUNC_DIV_EXPR);
+  t *= alignd;
   return t;
 }
@@ -13757,23 +13757,21 @@ field_byte_offset (const_tree decl)
   /* Figure out the bit-distance from the start of the structure to
      the "deepest" bit of the bit-field.  */
-  deepest_bitpos = double_int_add (bitpos_int, field_size_in_bits);
+  deepest_bitpos = bitpos_int + field_size_in_bits;

   /* This is the tricky part.  Use some fancy footwork to deduce
      where the lowest addressed bit of the containing object must
      be.  */
-  object_offset_in_bits
-    = double_int_sub (deepest_bitpos, type_size_in_bits);
+  object_offset_in_bits = deepest_bitpos - type_size_in_bits;

   /* Round up to type_align by default.  This works best for
      bitfields.  */
   object_offset_in_bits
     = round_up_to_align (object_offset_in_bits, type_align_in_bits);

-  if (double_int_ucmp (object_offset_in_bits, bitpos_int) > 0)
+  if (object_offset_in_bits.ugt (bitpos_int))
     {
-      object_offset_in_bits
-	= double_int_sub (deepest_bitpos, type_size_in_bits);
+      object_offset_in_bits = deepest_bitpos - type_size_in_bits;

       /* Round up to decl_align instead.  */
       object_offset_in_bits
@@ -13785,10 +13783,9 @@ field_byte_offset (const_tree decl)
     object_offset_in_bits = bitpos_int;

   object_offset_in_bytes
-    = double_int_div (object_offset_in_bits,
-		      uhwi_to_double_int (BITS_PER_UNIT), true,
-		      TRUNC_DIV_EXPR);
-  return double_int_to_shwi (object_offset_in_bytes);
+    = object_offset_in_bits.div (double_int::from_uhwi (BITS_PER_UNIT),
+				 true, TRUNC_DIV_EXPR);
+  return object_offset_in_bytes.to_shwi ();
 }

 /* The following routines define various Dwarf attributes and any data
@@ -14064,7 +14061,7 @@ add_const_value_attribute (dw_die_ref die, rtx rtl)
 	    double_int val = rtx_to_double_int (elt);

 	    if (elt_size <= sizeof (HOST_WIDE_INT))
-	      insert_int (double_int_to_shwi (val), elt_size, p);
+	      insert_int (val.to_shwi (), elt_size, p);
 	    else
 	      {
 		gcc_assert (elt_size == 2 * sizeof (HOST_WIDE_INT));

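Once spelled with operators, the rewritten round_up_to_align reads almost like the textbook idiom ((t + align - 1) / align) * align. The same arithmetic with plain integers:

    #include <cassert>

    /* Plain-integer version of dwarf2out.c's round_up_to_align after the
       rewrite: t += align; t += -1; t /= align; t *= align.  */
    static unsigned long
    round_up_to_align (unsigned long t, unsigned long align)
    {
      t += align;
      t += -1UL;          /* double_int_minus_one in the original */
      t /= align;         /* t.div (alignd, true, TRUNC_DIV_EXPR) */
      t *= align;
      return t;
    }

    int main ()
    {
      assert (round_up_to_align (17, 8) == 24);
      assert (round_up_to_align (24, 8) == 24);   /* already aligned */
      assert (round_up_to_align (1, 32) == 32);
      return 0;
    }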
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -490,7 +490,7 @@ rtx_to_double_int (const_rtx cst)
   double_int r;

   if (CONST_INT_P (cst))
-    r = shwi_to_double_int (INTVAL (cst));
+    r = double_int::from_shwi (INTVAL (cst));
   else if (CONST_DOUBLE_AS_INT_P (cst))
     {
       r.low = CONST_DOUBLE_LOW (cst);

--- a/gcc/expmed.c
+++ b/gcc/expmed.c
@@ -1985,11 +1985,11 @@ mask_rtx (enum machine_mode mode, int bitpos, int bitsize, int complement)
 {
   double_int mask;

-  mask = double_int_mask (bitsize);
-  mask = double_int_lshift (mask, bitpos, HOST_BITS_PER_DOUBLE_INT, false);
+  mask = double_int::mask (bitsize);
+  mask = mask.llshift (bitpos, HOST_BITS_PER_DOUBLE_INT);

   if (complement)
-    mask = double_int_not (mask);
+    mask = ~mask;

   return immed_double_int_const (mask, mode);
 }
@@ -2002,8 +2002,8 @@ lshift_value (enum machine_mode mode, rtx value, int bitpos, int bitsize)
 {
   double_int val;

-  val = double_int_zext (uhwi_to_double_int (INTVAL (value)), bitsize);
-  val = double_int_lshift (val, bitpos, HOST_BITS_PER_DOUBLE_INT, false);
+  val = double_int::from_uhwi (INTVAL (value)).zext (bitsize);
+  val = val.llshift (bitpos, HOST_BITS_PER_DOUBLE_INT);

   return immed_double_int_const (val, mode);
 }

--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -727,11 +727,11 @@ convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int uns
       && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
       && CONST_INT_P (x) && INTVAL (x) < 0)
     {
-      double_int val = uhwi_to_double_int (INTVAL (x));
+      double_int val = double_int::from_uhwi (INTVAL (x));

       /* We need to zero extend VAL.  */
       if (oldmode != VOIDmode)
-	val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
+	val = val.zext (GET_MODE_BITSIZE (oldmode));

       return immed_double_int_const (val, mode);
     }
@@ -6557,9 +6557,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
       switch (TREE_CODE (exp))
 	{
 	case BIT_FIELD_REF:
-	  bit_offset
-	    = double_int_add (bit_offset,
-			      tree_to_double_int (TREE_OPERAND (exp, 2)));
+	  bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
 	  break;

 	case COMPONENT_REF:
@@ -6574,9 +6572,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
 	      break;

 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
-	    bit_offset = double_int_add (bit_offset,
-					 tree_to_double_int
-					   (DECL_FIELD_BIT_OFFSET (field)));
+	    bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));

 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
 	  }
@@ -6608,8 +6604,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
 	  break;

 	case IMAGPART_EXPR:
-	  bit_offset = double_int_add (bit_offset,
-				       uhwi_to_double_int (*pbitsize));
+	  bit_offset += double_int::from_uhwi (*pbitsize);
 	  break;

 	case VIEW_CONVERT_EXPR:
@@ -6631,11 +6626,10 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
 	      if (!integer_zerop (off))
 		{
 		  double_int boff, coff = mem_ref_offset (exp);
-		  boff = double_int_lshift (coff,
-					    BITS_PER_UNIT == 8
-					    ? 3 : exact_log2 (BITS_PER_UNIT),
-					    HOST_BITS_PER_DOUBLE_INT, true);
-		  bit_offset = double_int_add (bit_offset, boff);
+		  boff = coff.alshift (BITS_PER_UNIT == 8
+				       ? 3 : exact_log2 (BITS_PER_UNIT),
+				       HOST_BITS_PER_DOUBLE_INT);
+		  bit_offset += boff;
 		}
 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
 	    }
@@ -6659,15 +6653,13 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
       if (TREE_CODE (offset) == INTEGER_CST)
 	{
 	  double_int tem = tree_to_double_int (offset);
-	  tem = double_int_sext (tem, TYPE_PRECISION (sizetype));
-	  tem = double_int_lshift (tem,
-				   BITS_PER_UNIT == 8
-				   ? 3 : exact_log2 (BITS_PER_UNIT),
-				   HOST_BITS_PER_DOUBLE_INT, true);
-	  tem = double_int_add (tem, bit_offset);
-	  if (double_int_fits_in_shwi_p (tem))
+	  tem = tem.sext (TYPE_PRECISION (sizetype));
+	  tem = tem.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT),
+			     HOST_BITS_PER_DOUBLE_INT);
+	  tem += bit_offset;
+	  if (tem.fits_shwi ())
 	    {
-	      *pbitpos = double_int_to_shwi (tem);
+	      *pbitpos = tem.to_shwi ();
 	      *poffset = offset = NULL_TREE;
 	    }
 	}
@@ -6676,24 +6668,23 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
   if (offset)
     {
       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
-      if (double_int_negative_p (bit_offset))
+      if (bit_offset.is_negative ())
 	{
 	  double_int mask
-	    = double_int_mask (BITS_PER_UNIT == 8
-			       ? 3 : exact_log2 (BITS_PER_UNIT));
-	  double_int tem = double_int_and_not (bit_offset, mask);
+	    = double_int::mask (BITS_PER_UNIT == 8
+				? 3 : exact_log2 (BITS_PER_UNIT));
+	  double_int tem = bit_offset.and_not (mask);
 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
 	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
-	  bit_offset = double_int_sub (bit_offset, tem);
-	  tem = double_int_rshift (tem,
-				   BITS_PER_UNIT == 8
-				   ? 3 : exact_log2 (BITS_PER_UNIT),
-				   HOST_BITS_PER_DOUBLE_INT, true);
+	  bit_offset -= tem;
+	  tem = tem.arshift (BITS_PER_UNIT == 8
+			     ? 3 : exact_log2 (BITS_PER_UNIT),
+			     HOST_BITS_PER_DOUBLE_INT);
 	  offset = size_binop (PLUS_EXPR, offset,
 			       double_int_to_tree (sizetype, tem));
 	}

-      *pbitpos = double_int_to_shwi (bit_offset);
+      *pbitpos = bit_offset.to_shwi ();
       *poffset = offset;
     }
@@ -8720,7 +8711,7 @@ expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
       if (reduce_bit_field && TYPE_UNSIGNED (type))
 	temp = expand_binop (mode, xor_optab, op0,
 			     immed_double_int_const
-			       (double_int_mask (TYPE_PRECISION (type)), mode),
+			       (double_int::mask (TYPE_PRECISION (type)), mode),
 			     target, 1, OPTAB_LIB_WIDEN);
       else
 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
@@ -10407,7 +10398,7 @@ reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
     }
   else if (TYPE_UNSIGNED (type))
     {
-      rtx mask = immed_double_int_const (double_int_mask (prec),
+      rtx mask = immed_double_int_const (double_int::mask (prec),
 					 GET_MODE (exp));
       return expand_and (GET_MODE (exp), exp, mask, target);
     }

--- a/gcc/fixed-value.c
+++ b/gcc/fixed-value.c
@@ -376,9 +376,8 @@ do_fixed_multiply (FIXED_VALUE_TYPE *f, const FIXED_VALUE_TYPE *a,
   if (GET_MODE_PRECISION (f->mode) <= HOST_BITS_PER_WIDE_INT)
     {
       f->data = a->data * b->data;
-      f->data = f->data.lshift ((-GET_MODE_FBIT (f->mode)),
-				HOST_BITS_PER_DOUBLE_INT,
-				!unsigned_p);
+      f->data = f->data.lshift (-GET_MODE_FBIT (f->mode),
+				HOST_BITS_PER_DOUBLE_INT, !unsigned_p);
       overflow_p = fixed_saturate1 (f->mode, f->data, &f->data, sat_p);
     }
   else
@@ -466,9 +465,8 @@ do_fixed_multiply (FIXED_VALUE_TYPE *f, const FIXED_VALUE_TYPE *a,
 	  f->data.high = f->data.high | s.high;
 	  s.low = f->data.low;
 	  s.high = f->data.high;
-	  r = r.lshift ((-GET_MODE_FBIT (f->mode)),
-			HOST_BITS_PER_DOUBLE_INT,
-			!unsigned_p);
+	  r = r.lshift (-GET_MODE_FBIT (f->mode),
+			HOST_BITS_PER_DOUBLE_INT, !unsigned_p);
 	}

       overflow_p = fixed_saturate2 (f->mode, r, s, &f->data, sat_p);
@@ -493,8 +491,7 @@ do_fixed_divide (FIXED_VALUE_TYPE *f, const FIXED_VALUE_TYPE *a,
   if (GET_MODE_PRECISION (f->mode) <= HOST_BITS_PER_WIDE_INT)
     {
       f->data = a->data.lshift (GET_MODE_FBIT (f->mode),
-				HOST_BITS_PER_DOUBLE_INT,
-				!unsigned_p);
+				HOST_BITS_PER_DOUBLE_INT, !unsigned_p);
       f->data = f->data.div (b->data, unsigned_p, TRUNC_DIV_EXPR);
       overflow_p = fixed_saturate1 (f->mode, f->data, &f->data, sat_p);
     }
@@ -612,9 +609,8 @@ do_fixed_shift (FIXED_VALUE_TYPE *f, const FIXED_VALUE_TYPE *a,
   if (GET_MODE_PRECISION (f->mode) <= HOST_BITS_PER_WIDE_INT || (!left_p))
     {
-      f->data = a->data.lshift (left_p ? b->data.low : (-b->data.low),
-				HOST_BITS_PER_DOUBLE_INT,
-				!unsigned_p);
+      f->data = a->data.lshift (left_p ? b->data.low : -b->data.low,
+				HOST_BITS_PER_DOUBLE_INT, !unsigned_p);
       if (left_p) /* Only left shift saturates.  */
 	overflow_p = fixed_saturate1 (f->mode, f->data, &f->data, sat_p);
     }
@@ -630,8 +626,7 @@ do_fixed_shift (FIXED_VALUE_TYPE *f, const FIXED_VALUE_TYPE *a,
   else
     {
       temp_low = a->data.lshift (b->data.low,
-				 HOST_BITS_PER_DOUBLE_INT,
-				 !unsigned_p);
+				 HOST_BITS_PER_DOUBLE_INT, !unsigned_p);
       /* Logical shift right to temp_high.  */
       temp_high = a->data.llshift (b->data.low - HOST_BITS_PER_DOUBLE_INT,
 				   HOST_BITS_PER_DOUBLE_INT);

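These hunks keep passing negative counts to lshift: double_int::lshift documents that a negative COUNT shifts right, arithmetically when ARITH is set, which is how fixed-value.c scales fixed-point products back down by FBIT. A one-word model of that convention; the right shift of a negative signed value below assumes GCC's arithmetic-shift behavior:

    #include <cassert>
    #include <cstdint>

    /* Negative COUNT means shift right; ARITH selects sign-propagating
       versus zero-filling right shifts.  */
    static int64_t
    lshift (int64_t a, int count, bool arith)
    {
      if (count >= 0)
        return (int64_t) ((uint64_t) a << count);
      if (arith)
        return a >> -count;                          /* sign-propagating */
      return (int64_t) ((uint64_t) a >> -count);     /* zero-filling */
    }

    int main ()
    {
      assert (lshift (3, 4, true) == 48);
      assert (lshift (-64, -4, true) == -4);         /* arithmetic right shift */
      assert (lshift (-64, -4, false)
              == (int64_t) (0xffffffffffffffc0ULL >> 4));
      return 0;
    }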
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -192,11 +192,10 @@ div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
      a signed division.  */
   uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

-  quo = double_int_divmod (tree_to_double_int (arg1),
-			   tree_to_double_int (arg2),
-			   uns, code, &rem);
+  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
+					  uns, code, &rem);

-  if (double_int_zero_p (rem))
+  if (rem.is_zero ())
     return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

   return NULL_TREE;
@@ -948,55 +947,52 @@ int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
   switch (code)
     {
     case BIT_IOR_EXPR:
-      res = double_int_ior (op1, op2);
+      res = op1 | op2;
       break;

     case BIT_XOR_EXPR:
-      res = double_int_xor (op1, op2);
+      res = op1 ^ op2;
       break;

     case BIT_AND_EXPR:
-      res = double_int_and (op1, op2);
+      res = op1 & op2;
       break;

     case RSHIFT_EXPR:
-      res = double_int_rshift (op1, double_int_to_shwi (op2),
-			       TYPE_PRECISION (type), !uns);
+      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
       break;

     case LSHIFT_EXPR:
       /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
-      res = double_int_lshift (op1, double_int_to_shwi (op2),
-			       TYPE_PRECISION (type), !uns);
+      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
       break;

     case RROTATE_EXPR:
-      res = double_int_rrotate (op1, double_int_to_shwi (op2),
-				TYPE_PRECISION (type));
+      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
       break;

     case LROTATE_EXPR:
-      res = double_int_lrotate (op1, double_int_to_shwi (op2),
-				TYPE_PRECISION (type));
+      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
       break;

     case PLUS_EXPR:
-      overflow = add_double (op1.low, op1.high, op2.low, op2.high,
-			     &res.low, &res.high);
+      res = op1.add_with_sign (op2, false, &overflow);
       break;

     case MINUS_EXPR:
+      /* FIXME(crowl) Remove this code if the replacement works.
       neg_double (op2.low, op2.high, &res.low, &res.high);
       add_double (op1.low, op1.high, res.low, res.high,
		   &res.low, &res.high);
       overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high);
+      */
+      res = op1.add_with_sign (-op2, false, &overflow);
       break;

     case MULT_EXPR:
-      overflow = mul_double (op1.low, op1.high, op2.low, op2.high,
-			     &res.low, &res.high);
+      res = op1.mul_with_sign (op2, false, &overflow);
       break;

     case MULT_HIGHPART_EXPR:
@@ -1004,9 +1000,8 @@ int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
	 to the multiply primitive, to handle very large highparts.  */
       if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
	return NULL_TREE;
-      tmp = double_int_mul (op1, op2);
-      res = double_int_rshift (tmp, TYPE_PRECISION (type),
-			       TYPE_PRECISION (type), !uns);
+      tmp = op1 * op2;
+      res = tmp.rshift (TYPE_PRECISION (type), TYPE_PRECISION (type), !uns);
       break;

     case TRUNC_DIV_EXPR:
@@ -1028,15 +1023,14 @@ int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
       /* ... fall through ...  */

     case ROUND_DIV_EXPR:
-      if (double_int_zero_p (op2))
+      if (op2.is_zero ())
	return NULL_TREE;
-      if (double_int_one_p (op2))
+      if (op2.is_one ())
	{
	  res = op1;
	  break;
	}
-      if (double_int_equal_p (op1, op2)
-	  && ! double_int_zero_p (op1))
+      if (op1 == op2 && !op1.is_zero ())
	{
	  res = double_int_one;
	  break;
@@ -1064,7 +1058,7 @@ int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
       /* ... fall through ...  */

     case ROUND_MOD_EXPR:
-      if (double_int_zero_p (op2))
+      if (op2.is_zero ())
	return NULL_TREE;
       overflow = div_and_round_double (code, uns,
				       op1.low, op1.high, op2.low, op2.high,
@@ -1073,11 +1067,11 @@ int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
       break;

     case MIN_EXPR:
-      res = double_int_min (op1, op2, uns);
+      res = op1.min (op2, uns);
       break;

     case MAX_EXPR:
-      res = double_int_max (op1, op2, uns);
+      res = op1.max (op2, uns);
       break;

     default:
@@ -1602,12 +1596,12 @@ fold_convert_const_int_from_fixed (tree type, const_tree arg1)
   mode = TREE_FIXED_CST (arg1).mode;
   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
     {
-      temp = double_int_rshift (temp, GET_MODE_FBIT (mode),
-				HOST_BITS_PER_DOUBLE_INT,
-				SIGNED_FIXED_POINT_MODE_P (mode));
+      temp = temp.rshift (GET_MODE_FBIT (mode),
+			  HOST_BITS_PER_DOUBLE_INT,
+			  SIGNED_FIXED_POINT_MODE_P (mode));

       /* Left shift temp to temp_trunc by fbit.  */
-      temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode),
-				      HOST_BITS_PER_DOUBLE_INT,
-				      SIGNED_FIXED_POINT_MODE_P (mode));
+      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
+				HOST_BITS_PER_DOUBLE_INT,
+				SIGNED_FIXED_POINT_MODE_P (mode));
     }
@@ -1620,14 +1614,14 @@ fold_convert_const_int_from_fixed (tree type, const_tree arg1)
   /* If FIXED_CST is negative, we need to round the value toward 0.
      By checking if the fractional bits are not zero to add 1 to temp.  */
   if (SIGNED_FIXED_POINT_MODE_P (mode)
-      && double_int_negative_p (temp_trunc)
-      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
-    temp = double_int_add (temp, double_int_one);
+      && temp_trunc.is_negative ()
+      && TREE_FIXED_CST (arg1).data != temp_trunc)
+    temp += double_int_one;

   /* Given a fixed-point constant, make new constant with new type,
      appropriately sign-extended or truncated.  */
   t = force_fit_type_double (type, temp, -1,
-			     (double_int_negative_p (temp)
+			     (temp.is_negative ()
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1));
@@ -5890,20 +5884,16 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
       if (tcode == code)
	{
	  double_int mul;
-	  int overflow_p;
-	  mul = double_int_mul_with_sign
-		  (double_int_ext
-		     (tree_to_double_int (op1),
-		      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
-		   double_int_ext
-		     (tree_to_double_int (c),
-		      TYPE_PRECISION (ctype), TYPE_UNSIGNED (ctype)),
-		   false, &overflow_p);
-	  overflow_p = ((!TYPE_UNSIGNED (ctype) && overflow_p)
+	  bool overflow_p;
+	  unsigned prec = TYPE_PRECISION (ctype);
+	  bool uns = TYPE_UNSIGNED (ctype);
+	  double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
+	  double_int dic = tree_to_double_int (c).ext (prec, uns);
+	  mul = diop1.mul_with_sign (dic, false, &overflow_p);
+	  overflow_p = ((!uns && overflow_p)
			| TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
	  if (!double_int_fits_to_tree_p (ctype, mul)
-	      && ((TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR)
-		  || !TYPE_UNSIGNED (ctype)))
+	      && ((uns && tcode != MULT_EXPR) || !uns))
	    overflow_p = 1;
	  if (!overflow_p)
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
@@ -11044,24 +11034,23 @@ fold_binary_loc (location_t loc,
	  c2 = tree_to_double_int (arg1);

	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
-	  if (double_int_equal_p (double_int_and (c1, c2), c1))
+	  if ((c1 & c2) == c1)
	    return omit_one_operand_loc (loc, type, arg1,
					 TREE_OPERAND (arg0, 0));

-	  msk = double_int_mask (width);
+	  msk = double_int::mask (width);

	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
-	  if (double_int_zero_p (double_int_and_not (msk,
-						     double_int_ior (c1, c2))))
+	  if (msk.and_not (c1 | c2).is_zero ())
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    TREE_OPERAND (arg0, 0), arg1);

	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
	     mode which allows further optimizations.  */
-	  c1 = double_int_and (c1, msk);
-	  c2 = double_int_and (c2, msk);
-	  c3 = double_int_and_not (c1, c2);
+	  c1 &= msk;
+	  c2 &= msk;
+	  c3 = c1.and_not (c2);
	  for (w = BITS_PER_UNIT;
	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
	       w <<= 1)
@@ -11071,11 +11060,11 @@ fold_binary_loc (location_t loc,
	      if (((c1.low | c2.low) & mask) == mask
		  && (c1.low & ~mask) == 0 && c1.high == 0)
		{
-		  c3 = uhwi_to_double_int (mask);
+		  c3 = double_int::from_uhwi (mask);
		  break;
		}
	    }

-	  if (!double_int_equal_p (c3, c1))
+	  if (c3 != c1)
	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
				    fold_build2_loc (loc, BIT_AND_EXPR, type,
						     TREE_OPERAND (arg0, 0),
@@ -11451,10 +11440,9 @@ fold_binary_loc (location_t loc,
       if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  double_int cst1 = tree_to_double_int (arg1);
-	  double_int ncst1 = double_int_ext (double_int_neg (cst1),
-					     TYPE_PRECISION (TREE_TYPE (arg1)),
-					     TYPE_UNSIGNED (TREE_TYPE (arg1)));
+	  double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
+					  TYPE_UNSIGNED (TREE_TYPE (arg1)));
-	  if (double_int_equal_p (double_int_and (cst1, ncst1), ncst1)
+	  if ((cst1 & ncst1) == ncst1
	      && multiple_of_p (type, arg0,
				double_int_to_tree (TREE_TYPE (arg1), ncst1)))
	    return fold_convert_loc (loc, type, arg0);
@@ -11467,18 +11455,18 @@ fold_binary_loc (location_t loc,
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  int arg1tz
-	    = double_int_ctz (tree_to_double_int (TREE_OPERAND (arg0, 1)));
+	    = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
	  if (arg1tz > 0)
	    {
	      double_int arg1mask, masked;
-	      arg1mask = double_int_not (double_int_mask (arg1tz));
-	      arg1mask = double_int_ext (arg1mask, TYPE_PRECISION (type),
-					 TYPE_UNSIGNED (type));
-	      masked = double_int_and (arg1mask, tree_to_double_int (arg1));
-	      if (double_int_zero_p (masked))
+	      arg1mask = ~double_int::mask (arg1tz);
+	      arg1mask = arg1mask.ext (TYPE_PRECISION (type),
+				       TYPE_UNSIGNED (type));
+	      masked = arg1mask & tree_to_double_int (arg1);
+	      if (masked.is_zero ())
		return omit_two_operands_loc (loc, type, build_zero_cst (type),
					      arg0, arg1);
-	      else if (!double_int_equal_p (masked, tree_to_double_int (arg1)))
+	      else if (masked != tree_to_double_int (arg1))
		return fold_build2_loc (loc, code, type, op0,
					double_int_to_tree (type, masked));
	    }
@@ -16002,7 +15990,7 @@ fold_abs_const (tree arg0, tree type)
       /* If the value is unsigned or non-negative, then the absolute value
	 is the same as the ordinary value.  */
       if (TYPE_UNSIGNED (type)
-	  || !double_int_negative_p (val))
+	  || !val.is_negative ())
	t = arg0;

       /* If the value is negative, then the absolute value is
@@ -16042,7 +16030,7 @@ fold_not_const (const_tree arg0, tree type)

   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

-  val = double_int_not (tree_to_double_int (arg0));
+  val = ~tree_to_double_int (arg0);
   return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
 }

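The fold_binary_loc hunk tests two classic identities with the new operators: (X & C1) | C2 folds to C2 when C1's bits are a subset of C2's, and to X | C2 when C1 | C2 covers every bit of the mode (msk.and_not (c1 | c2) is zero). Spot-checked exhaustively over a small range of 32-bit values:

    #include <cassert>
    #include <cstdint>

    int main ()
    {
      const uint32_t c1 = 0x00f0, c2 = 0xfff0;            /* c1 subset of c2 */
      assert ((c1 & c2) == c1);
      const uint32_t d1 = 0xffff00ffu, d2 = 0x0000ff00u;  /* d1 | d2 == ~0 */
      assert ((d1 | d2) == 0xffffffffu);

      for (uint32_t x = 0; x < 4096; x++)
        {
          assert (((x & c1) | c2) == c2);        /* (X & C1) | C2 -> C2     */
          assert (((x & d1) | d2) == (x | d2));  /* (X & C1) | C2 -> X | C2 */
        }
      return 0;
    }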
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -2807,32 +2807,28 @@ fold_array_ctor_reference (tree type, tree ctor,
      be larger than size of array element.  */
   if (!TYPE_SIZE_UNIT (type)
       || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
-      || double_int_cmp (elt_size,
-			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
+      || elt_size.slt (tree_to_double_int (TYPE_SIZE_UNIT (type))))
     return NULL_TREE;

   /* Compute the array index we look for.  */
-  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
-				  elt_size, TRUNC_DIV_EXPR);
-  access_index = double_int_add (access_index, low_bound);
+  access_index = double_int::from_uhwi (offset / BITS_PER_UNIT)
+		 .udiv (elt_size, TRUNC_DIV_EXPR);
+  access_index += low_bound;
   if (index_type)
-    access_index = double_int_ext (access_index,
-				   TYPE_PRECISION (index_type),
-				   TYPE_UNSIGNED (index_type));
+    access_index = access_index.ext (TYPE_PRECISION (index_type),
+				     TYPE_UNSIGNED (index_type));

   /* And offset within the access.  */
-  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
+  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

   /* See if the array field is large enough to span whole access.  We do not
      care to fold accesses spanning multiple array indexes.  */
-  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
+  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
     return NULL_TREE;

-  index = double_int_sub (low_bound, double_int_one);
+  index = low_bound - double_int_one;
   if (index_type)
-    index = double_int_ext (index,
-			    TYPE_PRECISION (index_type),
-			    TYPE_UNSIGNED (index_type));
+    index = index.ext (TYPE_PRECISION (index_type), TYPE_UNSIGNED (index_type));

   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
     {
@@ -2852,17 +2848,16 @@ fold_array_ctor_reference (tree type, tree ctor,
	}
       else
	{
-	  index = double_int_add (index, double_int_one);
+	  index += double_int_one;
	  if (index_type)
-	    index = double_int_ext (index,
-				    TYPE_PRECISION (index_type),
-				    TYPE_UNSIGNED (index_type));
+	    index = index.ext (TYPE_PRECISION (index_type),
+			       TYPE_UNSIGNED (index_type));
	  max_index = index;
	}

       /* Do we have match?  */
-      if (double_int_cmp (access_index, index, 1) >= 0
-	  && double_int_cmp (access_index, max_index, 1) <= 0)
+      if (access_index.cmp (index, 1) >= 0
+	  && access_index.cmp (max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
     }
@@ -2891,7 +2886,7 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
       tree field_size = DECL_SIZE (cfield);
       double_int bitoffset;
       double_int byte_offset_cst = tree_to_double_int (byte_offset);
-      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
+      double_int bits_per_unit_cst = double_int::from_uhwi (BITS_PER_UNIT);
       double_int bitoffset_end, access_end;

       /* Variable sized objects in static constructors makes no sense,
@@ -2903,37 +2898,33 @@ fold_nonarray_ctor_reference (tree type, tree ctor,
		  : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

       /* Compute bit offset of the field.  */
-      bitoffset = double_int_add (tree_to_double_int (field_offset),
-				  double_int_mul (byte_offset_cst,
-						  bits_per_unit_cst));
+      bitoffset = tree_to_double_int (field_offset)
+		  + byte_offset_cst * bits_per_unit_cst;
       /* Compute bit offset where the field ends.  */
       if (field_size != NULL_TREE)
-	bitoffset_end = double_int_add (bitoffset,
-					tree_to_double_int (field_size));
+	bitoffset_end = bitoffset + tree_to_double_int (field_size);
       else
	bitoffset_end = double_int_zero;

-      access_end = double_int_add (uhwi_to_double_int (offset),
-				   uhwi_to_double_int (size));
+      access_end = double_int::from_uhwi (offset)
+		   + double_int::from_uhwi (size);

       /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
-      if (double_int_cmp (access_end, bitoffset, 0) > 0
+      if (access_end.cmp (bitoffset, 0) > 0
	  && (field_size == NULL_TREE
-	      || double_int_cmp (uhwi_to_double_int (offset),
-				 bitoffset_end, 0) < 0))
+	      || double_int::from_uhwi (offset).slt (bitoffset_end)))
	{
-	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
-						    bitoffset);
+	  double_int inner_offset = double_int::from_uhwi (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
-	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
+	  if (access_end.cmp (bitoffset_end, 0) > 0)
	    return NULL_TREE;
-	  if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
+	  if (double_int::from_uhwi (offset).slt (bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
-				      double_int_to_uhwi (inner_offset), size,
+				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
     }
@@ -3028,13 +3019,11 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
	      TREE_CODE (low_bound) == INTEGER_CST)
	  && (unit_size = array_ref_element_size (t),
	      host_integerp (unit_size, 1))
-	  && (doffset = double_int_sext
-			  (double_int_sub (TREE_INT_CST (idx),
-					   TREE_INT_CST (low_bound)),
-			   TYPE_PRECISION (TREE_TYPE (idx))),
-	      double_int_fits_in_shwi_p (doffset)))
+	  && (doffset = (TREE_INT_CST (idx) - TREE_INT_CST (low_bound))
+			.sext (TYPE_PRECISION (TREE_TYPE (idx))),
+	      doffset.fits_shwi ()))
	{
-	  offset = double_int_to_shwi (doffset);
+	  offset = doffset.to_shwi ();
	  offset *= TREE_INT_CST_LOW (unit_size);
	  offset *= BITS_PER_UNIT;

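fold_array_ctor_reference locates the constructor element a bit offset falls in: divide the byte offset by the element size, bias by the array's low bound, and give up if the access spans elements. The same arithmetic with plain integers; the sample sizes are made up for the sketch:

    #include <cassert>

    int main ()
    {
      const unsigned bits_per_unit = 8;
      unsigned long elt_size = 4;       /* bytes per array element */
      unsigned long low_bound = 1;      /* index of the first element */
      unsigned long offset = 150;       /* access position, in bits */
      unsigned long size = 16;          /* access size, in bits */

      unsigned long access_index = offset / bits_per_unit / elt_size + low_bound;
      unsigned long inner_offset = offset % (elt_size * bits_per_unit);

      assert (access_index == 5);       /* bit 150 = byte 18 = elt 4, + base 1 */
      assert (inner_offset == 22);      /* 150 - 4 * 32 */
      /* The fold is abandoned when the access spans elements: */
      assert (!(inner_offset + size <= elt_size * bits_per_unit));
      return 0;
    }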
View File

@ -539,7 +539,7 @@ restructure_reference (tree *pbase, tree *poffset, double_int *pindex,
{ {
tree base = *pbase, offset = *poffset; tree base = *pbase, offset = *poffset;
double_int index = *pindex; double_int index = *pindex;
double_int bpu = uhwi_to_double_int (BITS_PER_UNIT); double_int bpu = double_int::from_uhwi (BITS_PER_UNIT);
tree mult_op0, mult_op1, t1, t2, type; tree mult_op0, mult_op1, t1, t2, type;
double_int c1, c2, c3, c4; double_int c1, c2, c3, c4;
@ -548,7 +548,7 @@ restructure_reference (tree *pbase, tree *poffset, double_int *pindex,
|| TREE_CODE (base) != MEM_REF || TREE_CODE (base) != MEM_REF
|| TREE_CODE (offset) != MULT_EXPR || TREE_CODE (offset) != MULT_EXPR
|| TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST || TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
|| !double_int_zero_p (double_int_umod (index, bpu, FLOOR_MOD_EXPR))) || !index.umod (bpu, FLOOR_MOD_EXPR).is_zero ())
return false; return false;
t1 = TREE_OPERAND (base, 0); t1 = TREE_OPERAND (base, 0);
@ -575,7 +575,7 @@ restructure_reference (tree *pbase, tree *poffset, double_int *pindex,
if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST) if (TREE_CODE (TREE_OPERAND (mult_op0, 1)) == INTEGER_CST)
{ {
t2 = TREE_OPERAND (mult_op0, 0); t2 = TREE_OPERAND (mult_op0, 0);
c2 = double_int_neg (tree_to_double_int (TREE_OPERAND (mult_op0, 1))); c2 = -tree_to_double_int (TREE_OPERAND (mult_op0, 1));
} }
else else
return false; return false;
@ -586,12 +586,12 @@ restructure_reference (tree *pbase, tree *poffset, double_int *pindex,
c2 = double_int_zero; c2 = double_int_zero;
} }
c4 = double_int_udiv (index, bpu, FLOOR_DIV_EXPR); c4 = index.udiv (bpu, FLOOR_DIV_EXPR);
*pbase = t1; *pbase = t1;
*poffset = fold_build2 (MULT_EXPR, sizetype, t2, *poffset = fold_build2 (MULT_EXPR, sizetype, t2,
double_int_to_tree (sizetype, c3)); double_int_to_tree (sizetype, c3));
*pindex = double_int_add (double_int_add (c1, double_int_mul (c2, c3)), c4); *pindex = c1 + c2 * c3 + c4;
*ptype = type; *ptype = type;
return true; return true;
@ -623,7 +623,7 @@ slsr_process_ref (gimple gs)
base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode, base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
&unsignedp, &volatilep, false); &unsignedp, &volatilep, false);
index = uhwi_to_double_int (bitpos); index = double_int::from_uhwi (bitpos);
if (!restructure_reference (&base, &offset, &index, &type)) if (!restructure_reference (&base, &offset, &index, &type))
return; return;
@ -677,8 +677,7 @@ create_mul_ssa_cand (gimple gs, tree base_in, tree stride_in, bool speed)
============================ ============================
X = B + ((i' * S) * Z) */ X = B + ((i' * S) * Z) */
base = base_cand->base_expr; base = base_cand->base_expr;
index = double_int_mul (base_cand->index, index = base_cand->index * tree_to_double_int (base_cand->stride);
tree_to_double_int (base_cand->stride));
stride = stride_in; stride = stride_in;
ctype = base_cand->cand_type; ctype = base_cand->cand_type;
if (has_single_use (base_in)) if (has_single_use (base_in))
@ -734,8 +733,8 @@ create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
X = (B + i') * (S * c) */ X = (B + i') * (S * c) */
base = base_cand->base_expr; base = base_cand->base_expr;
index = base_cand->index; index = base_cand->index;
temp = double_int_mul (tree_to_double_int (base_cand->stride), temp = tree_to_double_int (base_cand->stride)
tree_to_double_int (stride_in)); * tree_to_double_int (stride_in);
stride = double_int_to_tree (TREE_TYPE (stride_in), temp); stride = double_int_to_tree (TREE_TYPE (stride_in), temp);
ctype = base_cand->cand_type; ctype = base_cand->cand_type;
if (has_single_use (base_in)) if (has_single_use (base_in))
@ -758,7 +757,7 @@ create_mul_imm_cand (gimple gs, tree base_in, tree stride_in, bool speed)
+ stmt_cost (base_cand->cand_stmt, speed)); + stmt_cost (base_cand->cand_stmt, speed));
} }
else if (base_cand->kind == CAND_ADD else if (base_cand->kind == CAND_ADD
&& double_int_one_p (base_cand->index) && base_cand->index.is_one ()
&& TREE_CODE (base_cand->stride) == INTEGER_CST) && TREE_CODE (base_cand->stride) == INTEGER_CST)
{ {
/* Y = B + (1 * S), S constant /* Y = B + (1 * S), S constant
@ -859,7 +858,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
while (addend_cand && !base) while (addend_cand && !base)
{ {
if (addend_cand->kind == CAND_MULT if (addend_cand->kind == CAND_MULT
&& double_int_zero_p (addend_cand->index) && addend_cand->index.is_zero ()
&& TREE_CODE (addend_cand->stride) == INTEGER_CST) && TREE_CODE (addend_cand->stride) == INTEGER_CST)
{ {
/* Z = (B + 0) * S, S constant /* Z = (B + 0) * S, S constant
@ -869,7 +868,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
base = base_in; base = base_in;
index = tree_to_double_int (addend_cand->stride); index = tree_to_double_int (addend_cand->stride);
if (subtract_p) if (subtract_p)
index = double_int_neg (index); index = -index;
stride = addend_cand->base_expr; stride = addend_cand->base_expr;
ctype = TREE_TYPE (base_in); ctype = TREE_TYPE (base_in);
if (has_single_use (addend_in)) if (has_single_use (addend_in))
@ -886,7 +885,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
while (base_cand && !base) while (base_cand && !base)
{ {
if (base_cand->kind == CAND_ADD if (base_cand->kind == CAND_ADD
&& (double_int_zero_p (base_cand->index) && (base_cand->index.is_zero ()
|| operand_equal_p (base_cand->stride, || operand_equal_p (base_cand->stride,
integer_zero_node, 0))) integer_zero_node, 0)))
{ {
@ -909,7 +908,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
while (subtrahend_cand && !base) while (subtrahend_cand && !base)
{ {
if (subtrahend_cand->kind == CAND_MULT if (subtrahend_cand->kind == CAND_MULT
&& double_int_zero_p (subtrahend_cand->index) && subtrahend_cand->index.is_zero ()
&& TREE_CODE (subtrahend_cand->stride) == INTEGER_CST) && TREE_CODE (subtrahend_cand->stride) == INTEGER_CST)
{ {
/* Z = (B + 0) * S, S constant /* Z = (B + 0) * S, S constant
@ -918,7 +917,7 @@ create_add_ssa_cand (gimple gs, tree base_in, tree addend_in,
Value: X = Y + ((-1 * S) * B) */ Value: X = Y + ((-1 * S) * B) */
base = base_in; base = base_in;
index = tree_to_double_int (subtrahend_cand->stride); index = tree_to_double_int (subtrahend_cand->stride);
index = double_int_neg (index); index = -index;
stride = subtrahend_cand->base_expr; stride = subtrahend_cand->base_expr;
ctype = TREE_TYPE (base_in); ctype = TREE_TYPE (base_in);
if (has_single_use (addend_in)) if (has_single_use (addend_in))
@ -973,10 +972,8 @@ create_add_imm_cand (gimple gs, tree base_in, double_int index_in, bool speed)
bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (base_cand->stride)); bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (base_cand->stride));
if (TREE_CODE (base_cand->stride) == INTEGER_CST if (TREE_CODE (base_cand->stride) == INTEGER_CST
&& double_int_multiple_of (index_in, tree_to_double_int (base_cand->stride), unsigned_p, &multiple)) && index_in.multiple_of (tree_to_double_int (base_cand->stride), unsigned_p, &multiple))
{ {
/* Y = (B + i') * S, S constant, c = kS for some integer k /* Y = (B + i') * S, S constant, c = kS for some integer k
X = Y + c X = Y + c
@ -989,7 +986,7 @@ create_add_imm_cand (gimple gs, tree base_in, double_int index_in, bool speed)
X = (B + (i'+ k)) * S */ X = (B + (i'+ k)) * S */
kind = base_cand->kind; kind = base_cand->kind;
base = base_cand->base_expr; base = base_cand->base_expr;
index = double_int_add (base_cand->index, multiple); index = base_cand->index + multiple;
stride = base_cand->stride; stride = base_cand->stride;
ctype = base_cand->cand_type; ctype = base_cand->cand_type;
if (has_single_use (base_in)) if (has_single_use (base_in))
@ -1066,7 +1063,7 @@ slsr_process_add (gimple gs, tree rhs1, tree rhs2, bool speed)
/* Record an interpretation for the add-immediate. */ /* Record an interpretation for the add-immediate. */
index = tree_to_double_int (rhs2); index = tree_to_double_int (rhs2);
if (subtract_p) if (subtract_p)
index = double_int_neg (index); index = -index;
c = create_add_imm_cand (gs, rhs1, index, speed); c = create_add_imm_cand (gs, rhs1, index, speed);
@ -1581,7 +1578,7 @@ cand_increment (slsr_cand_t c)
basis = lookup_cand (c->basis); basis = lookup_cand (c->basis);
gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0)); gcc_assert (operand_equal_p (c->base_expr, basis->base_expr, 0));
return double_int_sub (c->index, basis->index); return c->index - basis->index;
} }
/* Calculate the increment required for candidate C relative to /* Calculate the increment required for candidate C relative to
@ -1594,8 +1591,8 @@ cand_abs_increment (slsr_cand_t c)
{ {
double_int increment = cand_increment (c); double_int increment = cand_increment (c);
if (!address_arithmetic_p && double_int_negative_p (increment)) if (!address_arithmetic_p && increment.is_negative ())
increment = double_int_neg (increment); increment = -increment;
return increment; return increment;
} }
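
cand_abs_increment now relies on the overloaded unary minus and the is_negative () method. The absolute-value idiom in isolation, under the same in-tree assumptions (illustrative helper name):

  /* Return |X| for a signed double_int X.  */
  static double_int
  abs_value (double_int x)
  {
    if (x.is_negative ())   /* was double_int_negative_p (x) */
      x = -x;               /* was double_int_neg (x) */
    return x;
  }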
@ -1626,7 +1623,7 @@ static void
replace_dependent (slsr_cand_t c, enum tree_code cand_code) replace_dependent (slsr_cand_t c, enum tree_code cand_code)
{ {
double_int stride = tree_to_double_int (c->stride); double_int stride = tree_to_double_int (c->stride);
double_int bump = double_int_mul (cand_increment (c), stride); double_int bump = cand_increment (c) * stride;
gimple stmt_to_print = NULL; gimple stmt_to_print = NULL;
slsr_cand_t basis; slsr_cand_t basis;
tree basis_name, incr_type, bump_tree; tree basis_name, incr_type, bump_tree;
@ -1637,7 +1634,7 @@ replace_dependent (slsr_cand_t c, enum tree_code cand_code)
in this case. Restriction to signed HWI is conservative in this case. Restriction to signed HWI is conservative
for unsigned types but allows for safe negation without for unsigned types but allows for safe negation without
twisted logic. */ twisted logic. */
if (!double_int_fits_in_shwi_p (bump)) if (!bump.fits_shwi ())
return; return;
basis = lookup_cand (c->basis); basis = lookup_cand (c->basis);
@ -1645,10 +1642,10 @@ replace_dependent (slsr_cand_t c, enum tree_code cand_code)
incr_type = TREE_TYPE (gimple_assign_rhs1 (c->cand_stmt)); incr_type = TREE_TYPE (gimple_assign_rhs1 (c->cand_stmt));
code = PLUS_EXPR; code = PLUS_EXPR;
if (double_int_negative_p (bump)) if (bump.is_negative ())
{ {
code = MINUS_EXPR; code = MINUS_EXPR;
bump = double_int_neg (bump); bump = -bump;
} }
bump_tree = double_int_to_tree (incr_type, bump); bump_tree = double_int_to_tree (incr_type, bump);
@ -1659,7 +1656,7 @@ replace_dependent (slsr_cand_t c, enum tree_code cand_code)
print_gimple_stmt (dump_file, c->cand_stmt, 0, 0); print_gimple_stmt (dump_file, c->cand_stmt, 0, 0);
} }
if (double_int_zero_p (bump)) if (bump.is_zero ())
{ {
tree lhs = gimple_assign_lhs (c->cand_stmt); tree lhs = gimple_assign_lhs (c->cand_stmt);
gimple copy_stmt = gimple_build_assign (lhs, basis_name); gimple copy_stmt = gimple_build_assign (lhs, basis_name);
@ -1739,9 +1736,7 @@ incr_vec_index (double_int increment)
{ {
unsigned i; unsigned i;
for (i = 0; i < incr_vec_len && !double_int_equal_p (increment, incr_vec[i].incr); i++) for (i = 0; i < incr_vec_len && increment != incr_vec[i].incr; i++)
; ;
gcc_assert (i < incr_vec_len); gcc_assert (i < incr_vec_len);
@ -1778,12 +1773,12 @@ record_increment (slsr_cand_t c, double_int increment)
/* Treat increments that differ only in sign as identical so as to /* Treat increments that differ only in sign as identical so as to
share initializers, unless we are generating pointer arithmetic. */ share initializers, unless we are generating pointer arithmetic. */
if (!address_arithmetic_p && double_int_negative_p (increment)) if (!address_arithmetic_p && increment.is_negative ())
increment = double_int_neg (increment); increment = -increment;
for (i = 0; i < incr_vec_len; i++) for (i = 0; i < incr_vec_len; i++)
{ {
if (double_int_equal_p (incr_vec[i].incr, increment)) if (incr_vec[i].incr == increment)
{ {
incr_vec[i].count++; incr_vec[i].count++;
found = true; found = true;
@ -1819,9 +1814,9 @@ record_increment (slsr_cand_t c, double_int increment)
opinion later if it doesn't dominate all other occurrences. opinion later if it doesn't dominate all other occurrences.
Exception: increments of -1, 0, 1 never need initializers. */ Exception: increments of -1, 0, 1 never need initializers. */
if (c->kind == CAND_ADD if (c->kind == CAND_ADD
&& double_int_equal_p (c->index, increment) && c->index == increment
&& (double_int_scmp (increment, double_int_one) > 0 && (increment.sgt (double_int_one)
|| double_int_scmp (increment, double_int_minus_one) < 0)) || increment.slt (double_int_minus_one)))
{ {
tree t0; tree t0;
tree rhs1 = gimple_assign_rhs1 (c->cand_stmt); tree rhs1 = gimple_assign_rhs1 (c->cand_stmt);
@ -1923,7 +1918,7 @@ lowest_cost_path (int cost_in, int repl_savings, slsr_cand_t c, double_int incr)
if (cand_already_replaced (c)) if (cand_already_replaced (c))
local_cost = cost_in; local_cost = cost_in;
else if (double_int_equal_p (incr, cand_incr)) else if (incr == cand_incr)
local_cost = cost_in - repl_savings - c->dead_savings; local_cost = cost_in - repl_savings - c->dead_savings;
else else
local_cost = cost_in - c->dead_savings; local_cost = cost_in - c->dead_savings;
@ -1954,8 +1949,7 @@ total_savings (int repl_savings, slsr_cand_t c, double_int incr)
int savings = 0; int savings = 0;
double_int cand_incr = cand_abs_increment (c); double_int cand_incr = cand_abs_increment (c);
if (double_int_equal_p (incr, cand_incr) && !cand_already_replaced (c)) if (incr == cand_incr && !cand_already_replaced (c))
savings += repl_savings + c->dead_savings; savings += repl_savings + c->dead_savings;
if (c->dependent) if (c->dependent)
@ -1984,13 +1978,12 @@ analyze_increments (slsr_cand_t first_dep, enum machine_mode mode, bool speed)
for (i = 0; i < incr_vec_len; i++) for (i = 0; i < incr_vec_len; i++)
{ {
HOST_WIDE_INT incr = double_int_to_shwi (incr_vec[i].incr); HOST_WIDE_INT incr = incr_vec[i].incr.to_shwi ();
/* If somehow this increment is bigger than a HWI, we won't /* If somehow this increment is bigger than a HWI, we won't
be optimizing candidates that use it. And if the increment be optimizing candidates that use it. And if the increment
has a count of zero, nothing will be done with it. */ has a count of zero, nothing will be done with it. */
if (!double_int_fits_in_shwi_p (incr_vec[i].incr) || !incr_vec[i].count) if (!incr_vec[i].incr.fits_shwi () || !incr_vec[i].count)
incr_vec[i].cost = COST_INFINITE; incr_vec[i].cost = COST_INFINITE;
/* Increments of 0, 1, and -1 are always profitable to replace, /* Increments of 0, 1, and -1 are always profitable to replace,
@ -2168,7 +2161,7 @@ nearest_common_dominator_for_cands (slsr_cand_t c, double_int incr,
in, then the result depends only on siblings and dependents. */ in, then the result depends only on siblings and dependents. */
cand_incr = cand_abs_increment (c); cand_incr = cand_abs_increment (c);
if (!double_int_equal_p (cand_incr, incr) || cand_already_replaced (c)) if (cand_incr != incr || cand_already_replaced (c))
{ {
*where = new_where; *where = new_where;
return ncd; return ncd;
@ -2213,10 +2206,10 @@ insert_initializers (slsr_cand_t c)
double_int incr = incr_vec[i].incr; double_int incr = incr_vec[i].incr;
if (!profitable_increment_p (i) if (!profitable_increment_p (i)
|| double_int_one_p (incr) || incr.is_one ()
|| (double_int_minus_one_p (incr) || (incr.is_minus_one ()
&& gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR) && gimple_assign_rhs_code (c->cand_stmt) != POINTER_PLUS_EXPR)
|| double_int_zero_p (incr)) || incr.is_zero ())
continue; continue;
/* We may have already identified an existing initializer that /* We may have already identified an existing initializer that
@ -2384,7 +2377,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree *new_var,
incr_vec[i].initializer, incr_vec[i].initializer,
new_var); new_var);
if (!double_int_equal_p (incr_vec[i].incr, cand_incr)) if (incr_vec[i].incr != cand_incr)
{ {
gcc_assert (repl_code == PLUS_EXPR); gcc_assert (repl_code == PLUS_EXPR);
repl_code = MINUS_EXPR; repl_code = MINUS_EXPR;
@ -2400,7 +2393,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree *new_var,
from the basis name, or an add of the stride to the basis from the basis name, or an add of the stride to the basis
name, respectively. It may be necessary to introduce a name, respectively. It may be necessary to introduce a
cast (or reuse an existing cast). */ cast (or reuse an existing cast). */
else if (double_int_one_p (cand_incr)) else if (cand_incr.is_one ())
{ {
tree stride_type = TREE_TYPE (c->stride); tree stride_type = TREE_TYPE (c->stride);
tree orig_type = TREE_TYPE (orig_rhs2); tree orig_type = TREE_TYPE (orig_rhs2);
@ -2415,7 +2408,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree *new_var,
c); c);
} }
else if (double_int_minus_one_p (cand_incr)) else if (cand_incr.is_minus_one ())
{ {
tree stride_type = TREE_TYPE (c->stride); tree stride_type = TREE_TYPE (c->stride);
tree orig_type = TREE_TYPE (orig_rhs2); tree orig_type = TREE_TYPE (orig_rhs2);
@ -2441,7 +2434,7 @@ replace_one_candidate (slsr_cand_t c, unsigned i, tree *new_var,
fputs (" (duplicate, not actually replacing)\n", dump_file); fputs (" (duplicate, not actually replacing)\n", dump_file);
} }
else if (double_int_zero_p (cand_incr)) else if (cand_incr.is_zero ())
{ {
tree lhs = gimple_assign_lhs (c->cand_stmt); tree lhs = gimple_assign_lhs (c->cand_stmt);
tree lhs_type = TREE_TYPE (lhs); tree lhs_type = TREE_TYPE (lhs);
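
The replacement logic above branches on the is_one (), is_minus_one () and is_zero () predicates that replace the double_int_one_p/double_int_minus_one_p/double_int_zero_p free functions. A small sketch of the mapping, same assumptions (hypothetical helper):

  /* Classify the increments that replace_one_candidate special-cases.  */
  static int
  classify_incr (double_int incr)
  {
    if (incr.is_zero ())        /* was double_int_zero_p */
      return 0;
    if (incr.is_one ())         /* was double_int_one_p */
      return 1;
    if (incr.is_minus_one ())   /* was double_int_minus_one_p */
      return -1;
    return 2;                   /* any other increment */
  }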
View File
@ -2887,8 +2887,8 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
unsigned HOST_WIDE_INT misalign; unsigned HOST_WIDE_INT misalign;
get_pointer_alignment_1 (base, &align, &misalign); get_pointer_alignment_1 (base, &align, &misalign);
misalign += (double_int_sext (tree_to_double_int (off), TYPE_PRECISION (TREE_TYPE (off))).low * BITS_PER_UNIT); misalign += (tree_to_double_int (off).sext (TYPE_PRECISION (TREE_TYPE (off))).low * BITS_PER_UNIT);
misalign = misalign & (align - 1); misalign = misalign & (align - 1);
if (misalign != 0) if (misalign != 0)
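
The misalignment update depends on sext () sign-extending the offset from the precision of its type before the low word is scaled to bits. The same computation as a standalone helper, under the in-tree assumptions above (hypothetical name):

  /* Bit offset contributed by the constant byte offset OFF.  */
  static unsigned HOST_WIDE_INT
  offset_in_bits (tree off)
  {
    double_int v = tree_to_double_int (off);
    v = v.sext (TYPE_PRECISION (TREE_TYPE (off)));  /* was double_int_sext */
    return v.low * BITS_PER_UNIT;
  }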
View File
@ -2295,7 +2295,7 @@ iv_number_of_iterations (struct loop *loop, rtx insn, rtx condition,
desc->niter_expr = NULL_RTX; desc->niter_expr = NULL_RTX;
desc->niter_max = 0; desc->niter_max = 0;
if (loop->any_upper_bound if (loop->any_upper_bound
&& double_int_fits_in_uhwi_p (loop->nb_iterations_upper_bound)) && loop->nb_iterations_upper_bound.fits_uhwi ())
desc->niter_max = loop->nb_iterations_upper_bound.low; desc->niter_max = loop->nb_iterations_upper_bound.low;
cond = GET_CODE (condition); cond = GET_CODE (condition);
View File
@ -2908,9 +2908,9 @@ expand_absneg_bit (enum rtx_code code, enum machine_mode mode,
nwords = (GET_MODE_BITSIZE (mode) + BITS_PER_WORD - 1) / BITS_PER_WORD; nwords = (GET_MODE_BITSIZE (mode) + BITS_PER_WORD - 1) / BITS_PER_WORD;
} }
mask = double_int_setbit (double_int_zero, bitpos); mask = double_int_zero.set_bit (bitpos);
if (code == ABS) if (code == ABS)
mask = double_int_not (mask); mask = ~mask;
if (target == 0 if (target == 0
|| target == op0 || target == op0
@ -3569,7 +3569,7 @@ expand_copysign_absneg (enum machine_mode mode, rtx op0, rtx op1, rtx target,
op1 = operand_subword_force (op1, word, mode); op1 = operand_subword_force (op1, word, mode);
} }
mask = double_int_setbit (double_int_zero, bitpos); mask = double_int_zero.set_bit (bitpos);
sign = expand_binop (imode, and_optab, op1, sign = expand_binop (imode, and_optab, op1,
immed_double_int_const (mask, imode), immed_double_int_const (mask, imode),
@ -3640,7 +3640,7 @@ expand_copysign_bit (enum machine_mode mode, rtx op0, rtx op1, rtx target,
nwords = (GET_MODE_BITSIZE (mode) + BITS_PER_WORD - 1) / BITS_PER_WORD; nwords = (GET_MODE_BITSIZE (mode) + BITS_PER_WORD - 1) / BITS_PER_WORD;
} }
mask = double_int_setbit (double_int_zero, bitpos); mask = double_int_zero.set_bit (bitpos);
if (target == 0 if (target == 0
|| target == op0 || target == op0
@ -3662,8 +3662,7 @@ expand_copysign_bit (enum machine_mode mode, rtx op0, rtx op1, rtx target,
if (!op0_is_abs) if (!op0_is_abs)
op0_piece op0_piece
= expand_binop (imode, and_optab, op0_piece, = expand_binop (imode, and_optab, op0_piece,
immed_double_int_const (double_int_not (mask), imode), immed_double_int_const (~mask, imode),
NULL_RTX, 1, OPTAB_LIB_WIDEN); NULL_RTX, 1, OPTAB_LIB_WIDEN);
op1 = expand_binop (imode, and_optab, op1 = expand_binop (imode, and_optab,
@ -3694,8 +3693,7 @@ expand_copysign_bit (enum machine_mode mode, rtx op0, rtx op1, rtx target,
op0 = gen_lowpart (imode, op0); op0 = gen_lowpart (imode, op0);
if (!op0_is_abs) if (!op0_is_abs)
op0 = expand_binop (imode, and_optab, op0, op0 = expand_binop (imode, and_optab, op0,
immed_double_int_const (double_int_not (mask), imode), immed_double_int_const (~mask, imode),
NULL_RTX, 1, OPTAB_LIB_WIDEN); NULL_RTX, 1, OPTAB_LIB_WIDEN);
temp = expand_binop (imode, ior_optab, op0, op1, temp = expand_binop (imode, ior_optab, op0, op1,
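
Both expanders build their masks the same way: set_bit () plants the sign bit, and operator~ (the old double_int_not) yields the complementary absolute-value mask. A sketch, same assumptions (hypothetical helper):

  /* Compute the sign-bit mask and its complement for bit position BITPOS.  */
  static void
  sign_masks (unsigned int bitpos, double_int *sign_mask, double_int *abs_mask)
  {
    *sign_mask = double_int_zero.set_bit (bitpos);  /* only the sign bit set */
    *abs_mask = ~*sign_mask;                        /* every other bit set */
  }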
View File
@ -1986,7 +1986,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
else if (GET_CODE (lhs) == MULT else if (GET_CODE (lhs) == MULT
&& CONST_INT_P (XEXP (lhs, 1))) && CONST_INT_P (XEXP (lhs, 1)))
{ {
coeff0 = shwi_to_double_int (INTVAL (XEXP (lhs, 1))); coeff0 = double_int::from_shwi (INTVAL (XEXP (lhs, 1)));
lhs = XEXP (lhs, 0); lhs = XEXP (lhs, 0);
} }
else if (GET_CODE (lhs) == ASHIFT else if (GET_CODE (lhs) == ASHIFT
@ -1994,8 +1994,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
&& INTVAL (XEXP (lhs, 1)) >= 0 && INTVAL (XEXP (lhs, 1)) >= 0
&& INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT) && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
{ {
coeff0 = double_int_setbit (double_int_zero, INTVAL (XEXP (lhs, 1))); coeff0 = double_int_zero.set_bit (INTVAL (XEXP (lhs, 1)));
lhs = XEXP (lhs, 0); lhs = XEXP (lhs, 0);
} }
@ -2007,7 +2006,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
else if (GET_CODE (rhs) == MULT else if (GET_CODE (rhs) == MULT
&& CONST_INT_P (XEXP (rhs, 1))) && CONST_INT_P (XEXP (rhs, 1)))
{ {
coeff1 = shwi_to_double_int (INTVAL (XEXP (rhs, 1))); coeff1 = double_int::from_shwi (INTVAL (XEXP (rhs, 1)));
rhs = XEXP (rhs, 0); rhs = XEXP (rhs, 0);
} }
else if (GET_CODE (rhs) == ASHIFT else if (GET_CODE (rhs) == ASHIFT
@ -2015,8 +2014,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
&& INTVAL (XEXP (rhs, 1)) >= 0 && INTVAL (XEXP (rhs, 1)) >= 0
&& INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT) && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
{ {
coeff1 = double_int_setbit (double_int_zero, INTVAL (XEXP (rhs, 1))); coeff1 = double_int_zero.set_bit (INTVAL (XEXP (rhs, 1)));
rhs = XEXP (rhs, 0); rhs = XEXP (rhs, 0);
} }
@ -2027,7 +2025,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
double_int val; double_int val;
bool speed = optimize_function_for_speed_p (cfun); bool speed = optimize_function_for_speed_p (cfun);
val = double_int_add (coeff0, coeff1); val = coeff0 + coeff1;
coeff = immed_double_int_const (val, mode); coeff = immed_double_int_const (val, mode);
tem = simplify_gen_binary (MULT, mode, lhs, coeff); tem = simplify_gen_binary (MULT, mode, lhs, coeff);
@ -2165,7 +2163,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
else if (GET_CODE (lhs) == MULT else if (GET_CODE (lhs) == MULT
&& CONST_INT_P (XEXP (lhs, 1))) && CONST_INT_P (XEXP (lhs, 1)))
{ {
coeff0 = shwi_to_double_int (INTVAL (XEXP (lhs, 1))); coeff0 = double_int::from_shwi (INTVAL (XEXP (lhs, 1)));
lhs = XEXP (lhs, 0); lhs = XEXP (lhs, 0);
} }
else if (GET_CODE (lhs) == ASHIFT else if (GET_CODE (lhs) == ASHIFT
@ -2173,8 +2171,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
&& INTVAL (XEXP (lhs, 1)) >= 0 && INTVAL (XEXP (lhs, 1)) >= 0
&& INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT) && INTVAL (XEXP (lhs, 1)) < HOST_BITS_PER_WIDE_INT)
{ {
coeff0 = double_int_setbit (double_int_zero, INTVAL (XEXP (lhs, 1))); coeff0 = double_int_zero.set_bit (INTVAL (XEXP (lhs, 1)));
lhs = XEXP (lhs, 0); lhs = XEXP (lhs, 0);
} }
@ -2186,7 +2183,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
else if (GET_CODE (rhs) == MULT else if (GET_CODE (rhs) == MULT
&& CONST_INT_P (XEXP (rhs, 1))) && CONST_INT_P (XEXP (rhs, 1)))
{ {
negcoeff1 = shwi_to_double_int (-INTVAL (XEXP (rhs, 1))); negcoeff1 = double_int::from_shwi (-INTVAL (XEXP (rhs, 1)));
rhs = XEXP (rhs, 0); rhs = XEXP (rhs, 0);
} }
else if (GET_CODE (rhs) == ASHIFT else if (GET_CODE (rhs) == ASHIFT
@ -2194,9 +2191,8 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
&& INTVAL (XEXP (rhs, 1)) >= 0 && INTVAL (XEXP (rhs, 1)) >= 0
&& INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT) && INTVAL (XEXP (rhs, 1)) < HOST_BITS_PER_WIDE_INT)
{ {
negcoeff1 = double_int_setbit (double_int_zero, INTVAL (XEXP (rhs, 1))); negcoeff1 = double_int_zero.set_bit (INTVAL (XEXP (rhs, 1)));
negcoeff1 = double_int_neg (negcoeff1); negcoeff1 = -negcoeff1;
rhs = XEXP (rhs, 0); rhs = XEXP (rhs, 0);
} }
@ -2207,7 +2203,7 @@ simplify_binary_operation_1 (enum rtx_code code, enum machine_mode mode,
double_int val; double_int val;
bool speed = optimize_function_for_speed_p (cfun); bool speed = optimize_function_for_speed_p (cfun);
val = double_int_add (coeff0, negcoeff1); val = coeff0 + negcoeff1;
coeff = immed_double_int_const (val, mode); coeff = immed_double_int_const (val, mode);
tem = simplify_gen_binary (MULT, mode, lhs, coeff); tem = simplify_gen_binary (MULT, mode, lhs, coeff);
@ -3590,16 +3586,16 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
{ {
case MINUS: case MINUS:
/* A - B == A + (-B). */ /* A - B == A + (-B). */
o1 = double_int_neg (o1); o1 = -o1;
/* Fall through.... */ /* Fall through.... */
case PLUS: case PLUS:
res = double_int_add (o0, o1); res = o0 + o1;
break; break;
case MULT: case MULT:
res = double_int_mul (o0, o1); res = o0 * o1;
break; break;
case DIV: case DIV:
@ -3635,31 +3631,31 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
break; break;
case AND: case AND:
res = double_int_and (o0, o1); res = o0 & o1;
break; break;
case IOR: case IOR:
res = double_int_ior (o0, o1); res = o0 | o1;
break; break;
case XOR: case XOR:
res = double_int_xor (o0, o1); res = o0 ^ o1;
break; break;
case SMIN: case SMIN:
res = double_int_smin (o0, o1); res = o0.smin (o1);
break; break;
case SMAX: case SMAX:
res = double_int_smax (o0, o1); res = o0.smax (o1);
break; break;
case UMIN: case UMIN:
res = double_int_umin (o0, o1); res = o0.umin (o1);
break; break;
case UMAX: case UMAX:
res = double_int_umax (o0, o1); res = o0.umax (o1);
break; break;
case LSHIFTRT: case ASHIFTRT: case LSHIFTRT: case ASHIFTRT:
@ -3674,22 +3670,21 @@ simplify_const_binary_operation (enum rtx_code code, enum machine_mode mode,
o1.low &= GET_MODE_PRECISION (mode) - 1; o1.low &= GET_MODE_PRECISION (mode) - 1;
} }
if (!double_int_fits_in_uhwi_p (o1) if (!o1.fits_uhwi ()
|| double_int_to_uhwi (o1) >= GET_MODE_PRECISION (mode)) || o1.to_uhwi () >= GET_MODE_PRECISION (mode))
return 0; return 0;
cnt = double_int_to_uhwi (o1); cnt = o1.to_uhwi ();
unsigned short prec = GET_MODE_PRECISION (mode);
if (code == LSHIFTRT || code == ASHIFTRT) if (code == LSHIFTRT || code == ASHIFTRT)
res = double_int_rshift (o0, cnt, GET_MODE_PRECISION (mode), code == ASHIFTRT); res = o0.rshift (cnt, prec, code == ASHIFTRT);
else if (code == ASHIFT) else if (code == ASHIFT)
res = double_int_lshift (o0, cnt, GET_MODE_PRECISION (mode), true); res = o0.alshift (cnt, prec);
else if (code == ROTATE) else if (code == ROTATE)
res = double_int_lrotate (o0, cnt, GET_MODE_PRECISION (mode)); res = o0.lrotate (cnt, prec);
else /* code == ROTATERT */ else /* code == ROTATERT */
res = double_int_rrotate (o0, cnt, GET_MODE_PRECISION (mode)); res = o0.rrotate (cnt, prec);
} }
break; break;
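
The shift and rotate cases now dispatch onto members whose names encode the variant: alshift is an arithmetic left shift, rshift takes a flag selecting arithmetic versus logical, and lrotate/rrotate rotate within PREC bits. The mapping in isolation, same assumptions (hypothetical helper):

  /* Apply the shift/rotate CODE to X by CNT bits at PREC-bit precision.  */
  static double_int
  shift_like (enum rtx_code code, double_int x, unsigned int cnt,
	      unsigned int prec)
  {
    switch (code)
      {
      case ASHIFT:
	return x.alshift (cnt, prec);        /* was double_int_lshift (..., true) */
      case LSHIFTRT:
	return x.rshift (cnt, prec, false);  /* logical right shift */
      case ASHIFTRT:
	return x.rshift (cnt, prec, true);   /* arithmetic right shift */
      case ROTATE:
	return x.lrotate (cnt, prec);
      default: /* ROTATERT */
	return x.rrotate (cnt, prec);
      }
  }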
View File
@ -2218,14 +2218,13 @@ layout_type (tree type)
&& TYPE_UNSIGNED (TREE_TYPE (lb)) && TYPE_UNSIGNED (TREE_TYPE (lb))
&& tree_int_cst_lt (ub, lb)) && tree_int_cst_lt (ub, lb))
{ {
unsigned prec = TYPE_PRECISION (TREE_TYPE (lb));
lb = double_int_to_tree (ssizetype, double_int_sext (tree_to_double_int (lb), TYPE_PRECISION (TREE_TYPE (lb)))); lb = double_int_to_tree (ssizetype, tree_to_double_int (lb).sext (prec));
ub = double_int_to_tree (ssizetype, double_int_sext (tree_to_double_int (ub), TYPE_PRECISION (TREE_TYPE (ub)))); ub = double_int_to_tree (ssizetype, tree_to_double_int (ub).sext (prec));
} }
length length
= fold_convert (sizetype, = fold_convert (sizetype,
View File
@ -33,7 +33,7 @@ along with GCC; see the file COPYING3. If not see
double_int double_int
double_int_ext_for_comb (double_int cst, aff_tree *comb) double_int_ext_for_comb (double_int cst, aff_tree *comb)
{ {
return double_int_sext (cst, TYPE_PRECISION (comb->type)); return cst.sext (TYPE_PRECISION (comb->type));
} }
/* Initializes affine combination COMB so that its value is zero in TYPE. */ /* Initializes affine combination COMB so that its value is zero in TYPE. */
@ -76,27 +76,26 @@ aff_combination_scale (aff_tree *comb, double_int scale)
unsigned i, j; unsigned i, j;
scale = double_int_ext_for_comb (scale, comb); scale = double_int_ext_for_comb (scale, comb);
if (double_int_one_p (scale)) if (scale.is_one ())
return; return;
if (double_int_zero_p (scale)) if (scale.is_zero ())
{ {
aff_combination_zero (comb, comb->type); aff_combination_zero (comb, comb->type);
return; return;
} }
comb->offset comb->offset
= double_int_ext_for_comb (double_int_mul (scale, comb->offset), comb); = double_int_ext_for_comb (scale * comb->offset, comb);
for (i = 0, j = 0; i < comb->n; i++) for (i = 0, j = 0; i < comb->n; i++)
{ {
double_int new_coef; double_int new_coef;
new_coef new_coef
= double_int_ext_for_comb (double_int_mul (scale, comb->elts[i].coef), comb); = double_int_ext_for_comb (scale * comb->elts[i].coef, comb);
/* A coefficient may become zero due to overflow. Remove the zero /* A coefficient may become zero due to overflow. Remove the zero
elements. */ elements. */
if (double_int_zero_p (new_coef)) if (new_coef.is_zero ())
continue; continue;
comb->elts[j].coef = new_coef; comb->elts[j].coef = new_coef;
comb->elts[j].val = comb->elts[i].val; comb->elts[j].val = comb->elts[i].val;
@ -131,7 +130,7 @@ aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
tree type; tree type;
scale = double_int_ext_for_comb (scale, comb); scale = double_int_ext_for_comb (scale, comb);
if (double_int_zero_p (scale)) if (scale.is_zero ())
return; return;
for (i = 0; i < comb->n; i++) for (i = 0; i < comb->n; i++)
@ -139,9 +138,9 @@ aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
{ {
double_int new_coef; double_int new_coef;
new_coef = double_int_add (comb->elts[i].coef, scale); new_coef = comb->elts[i].coef + scale;
new_coef = double_int_ext_for_comb (new_coef, comb); new_coef = double_int_ext_for_comb (new_coef, comb);
if (!double_int_zero_p (new_coef)) if (!new_coef.is_zero ())
{ {
comb->elts[i].coef = new_coef; comb->elts[i].coef = new_coef;
return; return;
@ -172,7 +171,7 @@ aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
if (POINTER_TYPE_P (type)) if (POINTER_TYPE_P (type))
type = sizetype; type = sizetype;
if (double_int_one_p (scale)) if (scale.is_one ())
elt = fold_convert (type, elt); elt = fold_convert (type, elt);
else else
elt = fold_build2 (MULT_EXPR, type, elt = fold_build2 (MULT_EXPR, type,
@ -191,7 +190,7 @@ aff_combination_add_elt (aff_tree *comb, tree elt, double_int scale)
static void static void
aff_combination_add_cst (aff_tree *c, double_int cst) aff_combination_add_cst (aff_tree *c, double_int cst)
{ {
c->offset = double_int_ext_for_comb (double_int_add (c->offset, cst), c); c->offset = double_int_ext_for_comb (c->offset + cst, c);
} }
/* Adds COMB2 to COMB1. */ /* Adds COMB2 to COMB1. */
@ -234,7 +233,7 @@ aff_combination_convert (aff_tree *comb, tree type)
for (i = j = 0; i < comb->n; i++) for (i = j = 0; i < comb->n; i++)
{ {
double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb); double_int new_coef = double_int_ext_for_comb (comb->elts[i].coef, comb);
if (double_int_zero_p (new_coef)) if (new_coef.is_zero ())
continue; continue;
comb->elts[j].coef = new_coef; comb->elts[j].coef = new_coef;
comb->elts[j].val = fold_convert (type, comb->elts[i].val); comb->elts[j].val = fold_convert (type, comb->elts[i].val);
@ -323,7 +322,7 @@ tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
if (bitpos % BITS_PER_UNIT != 0) if (bitpos % BITS_PER_UNIT != 0)
break; break;
aff_combination_const (comb, type, aff_combination_const (comb, type,
uhwi_to_double_int (bitpos / BITS_PER_UNIT)); double_int::from_uhwi (bitpos / BITS_PER_UNIT));
core = build_fold_addr_expr (core); core = build_fold_addr_expr (core);
if (TREE_CODE (core) == ADDR_EXPR) if (TREE_CODE (core) == ADDR_EXPR)
aff_combination_add_elt (comb, core, double_int_one); aff_combination_add_elt (comb, core, double_int_one);
@ -380,7 +379,7 @@ add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
scale = double_int_ext_for_comb (scale, comb); scale = double_int_ext_for_comb (scale, comb);
elt = fold_convert (type1, elt); elt = fold_convert (type1, elt);
if (double_int_one_p (scale)) if (scale.is_one ())
{ {
if (!expr) if (!expr)
return fold_convert (type, elt); return fold_convert (type, elt);
@ -390,7 +389,7 @@ add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
return fold_build2 (PLUS_EXPR, type, expr, elt); return fold_build2 (PLUS_EXPR, type, expr, elt);
} }
if (double_int_minus_one_p (scale)) if (scale.is_minus_one ())
{ {
if (!expr) if (!expr)
return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt)); return fold_convert (type, fold_build1 (NEGATE_EXPR, type1, elt));
@ -408,10 +407,10 @@ add_elt_to_tree (tree expr, tree type, tree elt, double_int scale,
fold_build2 (MULT_EXPR, type1, elt, fold_build2 (MULT_EXPR, type1, elt,
double_int_to_tree (type1, scale))); double_int_to_tree (type1, scale)));
if (double_int_negative_p (scale)) if (scale.is_negative ())
{ {
code = MINUS_EXPR; code = MINUS_EXPR;
scale = double_int_neg (scale); scale = -scale;
} }
else else
code = PLUS_EXPR; code = PLUS_EXPR;
@ -451,9 +450,9 @@ aff_combination_to_tree (aff_tree *comb)
/* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
unsigned. */ unsigned. */
if (double_int_negative_p (comb->offset)) if (comb->offset.is_negative ())
{ {
off = double_int_neg (comb->offset); off = -comb->offset;
sgn = double_int_minus_one; sgn = double_int_minus_one;
} }
else else
@ -516,8 +515,7 @@ aff_combination_add_product (aff_tree *c, double_int coef, tree val,
fold_convert (type, val)); fold_convert (type, val));
} }
aff_combination_add_elt (r, aval, double_int_mul (coef, c->elts[i].coef)); aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
} }
if (c->rest) if (c->rest)
@ -534,10 +532,9 @@ aff_combination_add_product (aff_tree *c, double_int coef, tree val,
} }
if (val) if (val)
aff_combination_add_elt (r, val, double_int_mul (coef, c->offset)); aff_combination_add_elt (r, val, coef * c->offset);
else else
aff_combination_add_cst (r, double_int_mul (coef, c->offset)); aff_combination_add_cst (r, coef * c->offset);
} }
/* Multiplies C1 by C2, storing the result to R */ /* Multiplies C1 by C2, storing the result to R */
@ -685,7 +682,7 @@ aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
it from COMB. */ it from COMB. */
scale = comb->elts[i].coef; scale = comb->elts[i].coef;
aff_combination_zero (&curre, comb->type); aff_combination_zero (&curre, comb->type);
aff_combination_add_elt (&curre, e, double_int_neg (scale)); aff_combination_add_elt (&curre, e, -scale);
aff_combination_scale (&current, scale); aff_combination_scale (&current, scale);
aff_combination_add (&to_add, &current); aff_combination_add (&to_add, &current);
aff_combination_add (&to_add, &curre); aff_combination_add (&to_add, &curre);
@ -751,17 +748,17 @@ double_int_constant_multiple_p (double_int val, double_int div,
{ {
double_int rem, cst; double_int rem, cst;
if (double_int_zero_p (val)) if (val.is_zero ())
return true; return true;
if (double_int_zero_p (div)) if (div.is_zero ())
return false; return false;
cst = double_int_sdivmod (val, div, FLOOR_DIV_EXPR, &rem); cst = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);
if (!double_int_zero_p (rem)) if (!rem.is_zero ())
return false; return false;
if (*mult_set && !double_int_equal_p (*mult, cst)) if (*mult_set && *mult != cst)
return false; return false;
*mult_set = true; *mult_set = true;
@ -779,7 +776,7 @@ aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
bool mult_set = false; bool mult_set = false;
unsigned i; unsigned i;
if (val->n == 0 && double_int_zero_p (val->offset)) if (val->n == 0 && val->offset.is_zero ())
{ {
*mult = double_int_zero; *mult = double_int_zero;
return true; return true;
@ -880,10 +877,10 @@ get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
} }
aff_combination_const (&tmp, sizetype, aff_combination_const (&tmp, sizetype,
shwi_to_double_int (bitpos / BITS_PER_UNIT)); double_int::from_shwi (bitpos / BITS_PER_UNIT));
aff_combination_add (addr, &tmp); aff_combination_add (addr, &tmp);
*size = shwi_to_double_int ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT); *size = double_int::from_shwi ((bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
} }
/* Returns true if a region of size SIZE1 at position 0 and a region of /* Returns true if a region of size SIZE1 at position 0 and a region of
@ -899,17 +896,17 @@ aff_comb_cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
return false; return false;
d = diff->offset; d = diff->offset;
if (double_int_negative_p (d)) if (d.is_negative ())
{ {
/* The second object is before the first one, we succeed if the last /* The second object is before the first one, we succeed if the last
element of the second object is before the start of the first one. */ element of the second object is before the start of the first one. */
bound = double_int_add (d, double_int_add (size2, double_int_minus_one)); bound = d + size2 + double_int_minus_one;
return double_int_negative_p (bound); return bound.is_negative ();
} }
else else
{ {
/* We succeed if the second object starts after the first one ends. */ /* We succeed if the second object starts after the first one ends. */
return double_int_scmp (size1, d) <= 0; return size1.sle (d);
} }
} }
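
double_int_constant_multiple_p above now uses the sdivmod member, which returns the quotient and stores the remainder through a pointer. The exact-multiple test restated on its own, same assumptions (hypothetical helper):

  /* Return true if VAL is an exact multiple of DIV; store VAL / DIV
     in *MULT on success.  */
  static bool
  exact_multiple_p (double_int val, double_int div, double_int *mult)
  {
    double_int rem;
    if (div.is_zero ())
      return false;
    *mult = val.sdivmod (div, FLOOR_DIV_EXPR, &rem);  /* was double_int_sdivmod */
    return rem.is_zero ();
  }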
View File
@ -1371,14 +1371,12 @@ group_case_labels_stmt (gimple stmt)
{ {
tree merge_case = gimple_switch_label (stmt, i); tree merge_case = gimple_switch_label (stmt, i);
basic_block merge_bb = label_to_block (CASE_LABEL (merge_case)); basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
double_int bhp1 = double_int_add (tree_to_double_int (base_high), double_int_one); double_int bhp1 = tree_to_double_int (base_high) + double_int_one;
/* Merge the cases if they jump to the same place, /* Merge the cases if they jump to the same place,
and their ranges are consecutive. */ and their ranges are consecutive. */
if (merge_bb == base_bb if (merge_bb == base_bb
&& double_int_equal_p (tree_to_double_int (CASE_LOW (merge_case)), bhp1)) && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
{ {
base_high = CASE_HIGH (merge_case) ? base_high = CASE_HIGH (merge_case) ?
CASE_HIGH (merge_case) : CASE_LOW (merge_case); CASE_HIGH (merge_case) : CASE_LOW (merge_case);
View File
@ -423,9 +423,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
switch (TREE_CODE (exp)) switch (TREE_CODE (exp))
{ {
case BIT_FIELD_REF: case BIT_FIELD_REF:
bit_offset = double_int_add (bit_offset, tree_to_double_int (TREE_OPERAND (exp, 2))); bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
break; break;
case COMPONENT_REF: case COMPONENT_REF:
@ -436,14 +434,11 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
if (this_offset && TREE_CODE (this_offset) == INTEGER_CST) if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
{ {
double_int doffset = tree_to_double_int (this_offset); double_int doffset = tree_to_double_int (this_offset);
doffset = double_int_lshift (doffset, BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT, true); doffset = doffset.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT);
doffset = double_int_add (doffset, tree_to_double_int (DECL_FIELD_BIT_OFFSET (field))); doffset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
bit_offset = double_int_add (bit_offset, doffset); bit_offset = bit_offset + doffset;
/* If we had seen a variable array ref already and we just /* If we had seen a variable array ref already and we just
referenced the last field of a struct or a union member referenced the last field of a struct or a union member
@ -462,11 +457,11 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
tree ssize = TYPE_SIZE_UNIT (stype); tree ssize = TYPE_SIZE_UNIT (stype);
if (host_integerp (fsize, 0) if (host_integerp (fsize, 0)
&& host_integerp (ssize, 0) && host_integerp (ssize, 0)
&& double_int_fits_in_shwi_p (doffset)) && doffset.fits_shwi ())
maxsize += ((TREE_INT_CST_LOW (ssize) maxsize += ((TREE_INT_CST_LOW (ssize)
- TREE_INT_CST_LOW (fsize)) - TREE_INT_CST_LOW (fsize))
* BITS_PER_UNIT * BITS_PER_UNIT
- double_int_to_shwi (doffset)); - doffset.to_shwi ());
else else
maxsize = -1; maxsize = -1;
} }
@ -481,9 +476,9 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
if (maxsize != -1 if (maxsize != -1
&& csize && csize
&& host_integerp (csize, 1) && host_integerp (csize, 1)
&& double_int_fits_in_shwi_p (bit_offset)) && bit_offset.fits_shwi ())
maxsize = TREE_INT_CST_LOW (csize) maxsize = TREE_INT_CST_LOW (csize)
- double_int_to_shwi (bit_offset); - bit_offset.to_shwi ();
else else
maxsize = -1; maxsize = -1;
} }
@ -504,17 +499,13 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
TREE_CODE (unit_size) == INTEGER_CST)) TREE_CODE (unit_size) == INTEGER_CST))
{ {
double_int doffset = double_int_sext (double_int_sub (TREE_INT_CST (index), TREE_INT_CST (low_bound)), TYPE_PRECISION (TREE_TYPE (index))); double_int doffset = (TREE_INT_CST (index) - TREE_INT_CST (low_bound)).sext (TYPE_PRECISION (TREE_TYPE (index)));
doffset = double_int_mul (doffset, tree_to_double_int (unit_size)); doffset *= tree_to_double_int (unit_size);
doffset = double_int_lshift (doffset, BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT, true); doffset = doffset.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT);
bit_offset = double_int_add (bit_offset, doffset); bit_offset = bit_offset + doffset;
/* An array ref with a constant index up in the structure /* An array ref with a constant index up in the structure
hierarchy will constrain the size of any variable array ref hierarchy will constrain the size of any variable array ref
@ -530,9 +521,9 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
if (maxsize != -1 if (maxsize != -1
&& asize && asize
&& host_integerp (asize, 1) && host_integerp (asize, 1)
&& double_int_fits_in_shwi_p (bit_offset)) && bit_offset.fits_shwi ())
maxsize = TREE_INT_CST_LOW (asize) maxsize = TREE_INT_CST_LOW (asize)
- double_int_to_shwi (bit_offset); - bit_offset.to_shwi ();
else else
maxsize = -1; maxsize = -1;
@ -547,8 +538,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
break; break;
case IMAGPART_EXPR: case IMAGPART_EXPR:
bit_offset = double_int_add (bit_offset, uhwi_to_double_int (bitsize)); bit_offset += double_int::from_uhwi (bitsize);
break; break;
case VIEW_CONVERT_EXPR: case VIEW_CONVERT_EXPR:
@ -563,12 +553,11 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
else else
{ {
double_int off = mem_ref_offset (exp); double_int off = mem_ref_offset (exp);
off = double_int_lshift (off, BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT, true); off = off.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT);
off = double_int_add (off, bit_offset); off = off + bit_offset;
if (double_int_fits_in_shwi_p (off)) if (off.fits_shwi ())
{ {
bit_offset = off; bit_offset = off;
exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0); exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
@ -595,12 +584,11 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
else else
{ {
double_int off = mem_ref_offset (exp); double_int off = mem_ref_offset (exp);
off = double_int_lshift (off, BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT, true); off = off.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT);
off = double_int_add (off, bit_offset); off += bit_offset;
if (double_int_fits_in_shwi_p (off)) if (off.fits_shwi ())
{ {
bit_offset = off; bit_offset = off;
exp = TREE_OPERAND (TMR_BASE (exp), 0); exp = TREE_OPERAND (TMR_BASE (exp), 0);
@ -617,7 +605,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
} }
done: done:
if (!double_int_fits_in_shwi_p (bit_offset)) if (!bit_offset.fits_shwi ())
{ {
*poffset = 0; *poffset = 0;
*psize = bitsize; *psize = bitsize;
@ -626,7 +614,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
return exp; return exp;
} }
hbit_offset = double_int_to_shwi (bit_offset); hbit_offset = bit_offset.to_shwi ();
/* We need to deal with variable arrays ending structures such as /* We need to deal with variable arrays ending structures such as
struct { int length; int a[1]; } x; x.a[d] struct { int length; int a[1]; } x; x.a[d]
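
Every offset in this function is scaled from bytes to bits with the same alshift idiom: an arithmetic left shift by log2 (BITS_PER_UNIT) over the full double_int width, where the BITS_PER_UNIT == 8 test is just a fast path for the common case. The recurring conversion, same assumptions (hypothetical helper):

  /* Convert a byte offset to a bit offset.  */
  static double_int
  bytes_to_bits (double_int off)
  {
    return off.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT),
			HOST_BITS_PER_DOUBLE_INT);
  }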
View File
@ -1271,7 +1271,7 @@ get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
{ {
double_int off = mem_ref_offset (exp); double_int off = mem_ref_offset (exp);
gcc_assert (off.high == -1 || off.high == 0); gcc_assert (off.high == -1 || off.high == 0);
byte_offset += double_int_to_shwi (off); byte_offset += off.to_shwi ();
} }
exp = TREE_OPERAND (base, 0); exp = TREE_OPERAND (base, 0);
} }
@ -1294,7 +1294,7 @@ get_addr_base_and_unit_offset_1 (tree exp, HOST_WIDE_INT *poffset,
{ {
double_int off = mem_ref_offset (exp); double_int off = mem_ref_offset (exp);
gcc_assert (off.high == -1 || off.high == 0); gcc_assert (off.high == -1 || off.high == 0);
byte_offset += double_int_to_shwi (off); byte_offset += off.to_shwi ();
} }
exp = TREE_OPERAND (base, 0); exp = TREE_OPERAND (base, 0);
} }
View File
@ -192,12 +192,11 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
} }
if (sz != unknown[object_size_type]) if (sz != unknown[object_size_type])
{ {
double_int dsz = double_int_sub (uhwi_to_double_int (sz), mem_ref_offset (pt_var)); double_int dsz = double_int::from_uhwi (sz) - mem_ref_offset (pt_var);
if (double_int_negative_p (dsz)) if (dsz.is_negative ())
sz = 0; sz = 0;
else if (double_int_fits_in_uhwi_p (dsz)) else if (dsz.fits_uhwi ())
sz = double_int_to_uhwi (dsz); sz = dsz.to_uhwi ();
else else
sz = unknown[object_size_type]; sz = unknown[object_size_type];
} }
View File
@ -901,7 +901,7 @@ order_drefs (const void *a, const void *b)
{ {
const dref *const da = (const dref *) a; const dref *const da = (const dref *) a;
const dref *const db = (const dref *) b; const dref *const db = (const dref *) b;
int offcmp = double_int_scmp ((*da)->offset, (*db)->offset); int offcmp = (*da)->offset.scmp ((*db)->offset);
if (offcmp != 0) if (offcmp != 0)
return offcmp; return offcmp;
@ -925,18 +925,18 @@ add_ref_to_chain (chain_p chain, dref ref)
dref root = get_chain_root (chain); dref root = get_chain_root (chain);
double_int dist; double_int dist;
gcc_assert (double_int_scmp (root->offset, ref->offset) <= 0); gcc_assert (root->offset.sle (ref->offset));
dist = double_int_sub (ref->offset, root->offset); dist = ref->offset - root->offset;
if (double_int_ucmp (uhwi_to_double_int (MAX_DISTANCE), dist) <= 0) if (double_int::from_uhwi (MAX_DISTANCE).ule (dist))
{ {
free (ref); free (ref);
return; return;
} }
gcc_assert (double_int_fits_in_uhwi_p (dist)); gcc_assert (dist.fits_uhwi ());
VEC_safe_push (dref, heap, chain->refs, ref); VEC_safe_push (dref, heap, chain->refs, ref);
ref->distance = double_int_to_uhwi (dist); ref->distance = dist.to_uhwi ();
if (ref->distance >= chain->length) if (ref->distance >= chain->length)
{ {
@ -1055,7 +1055,7 @@ valid_initializer_p (struct data_reference *ref,
if (!aff_combination_constant_multiple_p (&diff, &step, &off)) if (!aff_combination_constant_multiple_p (&diff, &step, &off))
return false; return false;
if (!double_int_equal_p (off, uhwi_to_double_int (distance))) if (off != double_int::from_uhwi (distance))
return false; return false;
return true; return true;
@ -1198,8 +1198,7 @@ determine_roots_comp (struct loop *loop,
FOR_EACH_VEC_ELT (dref, comp->refs, i, a) FOR_EACH_VEC_ELT (dref, comp->refs, i, a)
{ {
if (!chain || DR_IS_WRITE (a->ref) if (!chain || DR_IS_WRITE (a->ref)
|| double_int_ucmp (uhwi_to_double_int (MAX_DISTANCE), double_int_sub (a->offset, last_ofs)) <= 0) || double_int::from_uhwi (MAX_DISTANCE).ule (a->offset - last_ofs))
{ {
if (nontrivial_chain_p (chain)) if (nontrivial_chain_p (chain))
{ {
View File
@ -1330,8 +1330,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
} }
else if (is_array_init else if (is_array_init
&& (TREE_CODE (field) != INTEGER_CST && (TREE_CODE (field) != INTEGER_CST
|| !double_int_equal_p (tree_to_double_int (field), curidx))) || tree_to_double_int (field) != curidx))
{ {
pp_character (buffer, '['); pp_character (buffer, '[');
if (TREE_CODE (field) == RANGE_EXPR) if (TREE_CODE (field) == RANGE_EXPR)
@ -1352,7 +1351,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
} }
} }
if (is_array_init) if (is_array_init)
curidx = double_int_add (curidx, double_int_one); curidx += double_int_one;
if (val && TREE_CODE (val) == ADDR_EXPR) if (val && TREE_CODE (val) == ADDR_EXPR)
if (TREE_CODE (TREE_OPERAND (val, 0)) == FUNCTION_DECL) if (TREE_CODE (TREE_OPERAND (val, 0)) == FUNCTION_DECL)
val = TREE_OPERAND (val, 0); val = TREE_OPERAND (val, 0);
View File
@ -1488,8 +1488,8 @@ build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
|| TREE_CODE (prev_base) == TARGET_MEM_REF) || TREE_CODE (prev_base) == TARGET_MEM_REF)
align = TYPE_ALIGN (TREE_TYPE (prev_base)); align = TYPE_ALIGN (TREE_TYPE (prev_base));
} }
misalign += (double_int_sext (tree_to_double_int (off), TYPE_PRECISION (TREE_TYPE (off))).low * BITS_PER_UNIT); misalign += (tree_to_double_int (off).sext (TYPE_PRECISION (TREE_TYPE (off))).low * BITS_PER_UNIT);
misalign = misalign & (align - 1); misalign = misalign & (align - 1);
if (misalign != 0) if (misalign != 0)
View File
@ -198,8 +198,8 @@ addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
if (addr->offset && !integer_zerop (addr->offset)) if (addr->offset && !integer_zerop (addr->offset))
off = immed_double_int_const (double_int_sext (tree_to_double_int (addr->offset), TYPE_PRECISION (TREE_TYPE (addr->offset))), pointer_mode); off = immed_double_int_const (tree_to_double_int (addr->offset).sext (TYPE_PRECISION (TREE_TYPE (addr->offset))), pointer_mode);
else else
off = NULL_RTX; off = NULL_RTX;
@ -400,7 +400,7 @@ move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
for (i = 0; i < addr->n; i++) for (i = 0; i < addr->n; i++)
{ {
if (!double_int_one_p (addr->elts[i].coef)) if (!addr->elts[i].coef.is_one ())
continue; continue;
val = addr->elts[i].val; val = addr->elts[i].val;
@ -428,7 +428,7 @@ move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
for (i = 0; i < addr->n; i++) for (i = 0; i < addr->n; i++)
{ {
if (!double_int_one_p (addr->elts[i].coef)) if (!addr->elts[i].coef.is_one ())
continue; continue;
val = addr->elts[i].val; val = addr->elts[i].val;
@ -460,7 +460,7 @@ move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
for (i = 0; i < addr->n; i++) for (i = 0; i < addr->n; i++)
{ {
if (!double_int_one_p (addr->elts[i].coef)) if (!addr->elts[i].coef.is_one ())
continue; continue;
val = addr->elts[i].val; val = addr->elts[i].val;
@ -548,10 +548,10 @@ most_expensive_mult_to_index (tree type, struct mem_address *parts,
best_mult = double_int_zero; best_mult = double_int_zero;
for (i = 0; i < addr->n; i++) for (i = 0; i < addr->n; i++)
{ {
if (!double_int_fits_in_shwi_p (addr->elts[i].coef)) if (!addr->elts[i].coef.fits_shwi ())
continue; continue;
coef = double_int_to_shwi (addr->elts[i].coef); coef = addr->elts[i].coef.to_shwi ();
if (coef == 1 if (coef == 1
|| !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as)) || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
continue; continue;
@ -572,11 +572,11 @@ most_expensive_mult_to_index (tree type, struct mem_address *parts,
for (i = j = 0; i < addr->n; i++) for (i = j = 0; i < addr->n; i++)
{ {
amult = addr->elts[i].coef; amult = addr->elts[i].coef;
amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr); amult_neg = double_int_ext_for_comb (-amult, addr);
if (double_int_equal_p (amult, best_mult)) if (amult == best_mult)
op_code = PLUS_EXPR; op_code = PLUS_EXPR;
else if (double_int_equal_p (amult_neg, best_mult)) else if (amult_neg == best_mult)
op_code = MINUS_EXPR; op_code = MINUS_EXPR;
else else
{ {
@ -624,7 +624,7 @@ addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
parts->index = NULL_TREE; parts->index = NULL_TREE;
parts->step = NULL_TREE; parts->step = NULL_TREE;
if (!double_int_zero_p (addr->offset)) if (!addr->offset.is_zero ())
parts->offset = double_int_to_tree (sizetype, addr->offset); parts->offset = double_int_to_tree (sizetype, addr->offset);
else else
parts->offset = NULL_TREE; parts->offset = NULL_TREE;
@ -656,7 +656,7 @@ addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
for (i = 0; i < addr->n; i++) for (i = 0; i < addr->n; i++)
{ {
part = fold_convert (sizetype, addr->elts[i].val); part = fold_convert (sizetype, addr->elts[i].val);
if (!double_int_one_p (addr->elts[i].coef)) if (!addr->elts[i].coef.is_one ())
part = fold_build2 (MULT_EXPR, sizetype, part, part = fold_build2 (MULT_EXPR, sizetype, part,
double_int_to_tree (sizetype, addr->elts[i].coef)); double_int_to_tree (sizetype, addr->elts[i].coef));
add_to_parts (parts, part); add_to_parts (parts, part);
@ -876,8 +876,8 @@ copy_ref_info (tree new_ref, tree old_ref)
&& (TREE_INT_CST_LOW (TMR_STEP (new_ref)) && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
< align))))) < align)))))
{ {
unsigned int inc = double_int_sub (mem_ref_offset (old_ref), mem_ref_offset (new_ref)).low; unsigned int inc = (mem_ref_offset (old_ref) - mem_ref_offset (new_ref)).low;
adjust_ptr_info_misalignment (new_pi, inc); adjust_ptr_info_misalignment (new_pi, inc);
} }
else else
View File
@ -756,12 +756,11 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
/* The offset embedded in MEM_REFs can be negative. Bias them /* The offset embedded in MEM_REFs can be negative. Bias them
so that the resulting offset adjustment is positive. */ so that the resulting offset adjustment is positive. */
moff = mem_ref_offset (base1); moff = mem_ref_offset (base1);
moff = double_int_lshift (moff, BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT, true); moff = moff.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT);
if (double_int_negative_p (moff)) if (moff.is_negative ())
offset2p += double_int_neg (moff).low; offset2p += (-moff).low;
else else
offset1p += moff.low; offset1p += moff.low;
@ -835,12 +834,11 @@ indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
|| TREE_CODE (dbase2) == TARGET_MEM_REF) || TREE_CODE (dbase2) == TARGET_MEM_REF)
{ {
double_int moff = mem_ref_offset (dbase2); double_int moff = mem_ref_offset (dbase2);
moff = double_int_lshift (moff, BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT, true); moff = moff.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT);
if (double_int_negative_p (moff)) if (moff.is_negative ())
doffset1 -= double_int_neg (moff).low; doffset1 -= (-moff).low;
else else
doffset2 -= moff.low; doffset2 -= moff.low;
} }
@ -932,21 +930,19 @@ indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
/* The offset embedded in MEM_REFs can be negative. Bias them /* The offset embedded in MEM_REFs can be negative. Bias them
so that the resulting offset adjustment is positive. */ so that the resulting offset adjustment is positive. */
moff = mem_ref_offset (base1); moff = mem_ref_offset (base1);
moff = double_int_lshift (moff, BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT, true); moff = moff.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT);
if (double_int_negative_p (moff)) if (moff.is_negative ())
offset2 += double_int_neg (moff).low; offset2 += (-moff).low;
else else
offset1 += moff.low; offset1 += moff.low;
moff = mem_ref_offset (base2); moff = mem_ref_offset (base2);
moff = double_int_lshift (moff, BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT, true); moff = moff.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT), HOST_BITS_PER_DOUBLE_INT);
if (double_int_negative_p (moff)) if (moff.is_negative ())
offset1 += double_int_neg (moff).low; offset1 += (-moff).low;
else else
offset2 += moff.low; offset2 += moff.low;
return ranges_overlap_p (offset1, max_size1, offset2, max_size2); return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
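
The biasing above folds a possibly negative MEM_REF offset into whichever side keeps both adjustments non-negative, so the final range test works on unsigned quantities. The idiom in isolation, same assumptions (hypothetical helper):

  /* Fold the signed bit offset MOFF into *OFF1 or *OFF2 so that both
     stay non-negative.  */
  static void
  bias_offsets (double_int moff, HOST_WIDE_INT *off1, HOST_WIDE_INT *off2)
  {
    if (moff.is_negative ())
      *off2 += (-moff).low;   /* was double_int_neg (moff).low */
    else
      *off1 += moff.low;
  }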
View File
@ -186,12 +186,11 @@ dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
case CONSTANT: case CONSTANT:
fprintf (outf, "%sCONSTANT ", prefix); fprintf (outf, "%sCONSTANT ", prefix);
if (TREE_CODE (val.value) != INTEGER_CST if (TREE_CODE (val.value) != INTEGER_CST
|| double_int_zero_p (val.mask)) || val.mask.is_zero ())
print_generic_expr (outf, val.value, dump_flags); print_generic_expr (outf, val.value, dump_flags);
else else
{ {
double_int cval = double_int_and_not (tree_to_double_int (val.value), val.mask); double_int cval = tree_to_double_int (val.value).and_not (val.mask);
fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX, fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
prefix, cval.high, cval.low); prefix, cval.high, cval.low);
fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")", fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
@ -323,7 +322,7 @@ get_constant_value (tree var)
if (val if (val
&& val->lattice_val == CONSTANT && val->lattice_val == CONSTANT
&& (TREE_CODE (val->value) != INTEGER_CST && (TREE_CODE (val->value) != INTEGER_CST
|| double_int_zero_p (val->mask))) || val->mask.is_zero ()))
return val->value; return val->value;
return NULL_TREE; return NULL_TREE;
} }
@ -414,11 +413,8 @@ valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
/* Bit-lattices have to agree in the still valid bits. */ /* Bit-lattices have to agree in the still valid bits. */
if (TREE_CODE (old_val.value) == INTEGER_CST if (TREE_CODE (old_val.value) == INTEGER_CST
&& TREE_CODE (new_val.value) == INTEGER_CST) && TREE_CODE (new_val.value) == INTEGER_CST)
return double_int_equal_p (double_int_and_not (tree_to_double_int (old_val.value), new_val.mask), return tree_to_double_int (old_val.value).and_not (new_val.mask)
double_int_and_not (tree_to_double_int (new_val.value), new_val.mask)); == tree_to_double_int (new_val.value).and_not (new_val.mask);
/* Otherwise constant values have to agree. */ /* Otherwise constant values have to agree. */
return operand_equal_p (old_val.value, new_val.value, 0); return operand_equal_p (old_val.value, new_val.value, 0);
@ -444,10 +440,9 @@ set_lattice_value (tree var, prop_value_t new_val)
&& TREE_CODE (old_val->value) == INTEGER_CST) && TREE_CODE (old_val->value) == INTEGER_CST)
{ {
double_int diff; double_int diff;
diff = double_int_xor (tree_to_double_int (new_val.value), tree_to_double_int (old_val->value)); diff = tree_to_double_int (new_val.value) ^ tree_to_double_int (old_val->value);
new_val.mask = double_int_ior (new_val.mask, double_int_ior (old_val->mask, diff)); new_val.mask = new_val.mask | old_val->mask | diff;
} }
gcc_assert (valid_lattice_transition (*old_val, new_val)); gcc_assert (valid_lattice_transition (*old_val, new_val));
@ -458,7 +453,7 @@ set_lattice_value (tree var, prop_value_t new_val)
|| (new_val.lattice_val == CONSTANT || (new_val.lattice_val == CONSTANT
&& TREE_CODE (new_val.value) == INTEGER_CST && TREE_CODE (new_val.value) == INTEGER_CST
&& (TREE_CODE (old_val->value) != INTEGER_CST && (TREE_CODE (old_val->value) != INTEGER_CST
|| !double_int_equal_p (new_val.mask, old_val->mask)))) || new_val.mask != old_val->mask)))
{ {
/* ??? We would like to delay creation of INTEGER_CSTs from /* ??? We would like to delay creation of INTEGER_CSTs from
partially constants here. */ partially constants here. */
@ -511,15 +506,15 @@ get_value_from_alignment (tree expr)
gcc_assert (TREE_CODE (expr) == ADDR_EXPR); gcc_assert (TREE_CODE (expr) == ADDR_EXPR);
get_pointer_alignment_1 (expr, &align, &bitpos); get_pointer_alignment_1 (expr, &align, &bitpos);
val.mask = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type) ? double_int_mask (TYPE_PRECISION (type)) : double_int_minus_one, uhwi_to_double_int (align / BITS_PER_UNIT - 1)); val.mask = (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type) ? double_int::mask (TYPE_PRECISION (type)) : double_int_minus_one).and_not (double_int::from_uhwi (align / BITS_PER_UNIT - 1));
val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT; val.lattice_val = val.mask.is_minus_one () ? VARYING : CONSTANT;
if (val.lattice_val == CONSTANT) if (val.lattice_val == CONSTANT)
val.value = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT)); val.value = double_int_to_tree (type, double_int::from_uhwi (bitpos / BITS_PER_UNIT));
else else
val.value = NULL_TREE; val.value = NULL_TREE;
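
The get_value_from_alignment hunk above encodes pointer alignment directly in the CCP bit-lattice: a mask bit of 1 means "unknown", so an address aligned to ALIGN bytes has its low log2(ALIGN) bits known (they equal the misalignment) and everything above them varying. A standalone sketch of that translation, using uint64_t in place of double_int; the struct and helper names here are illustrative, not GCC's:

#include <cassert>
#include <cstdint>

struct bit_lattice
{
  uint64_t value;  // known bit values (meaningful only where mask is 0)
  uint64_t mask;   // 1 = bit is unknown/varying
};

// align_bits/misalign_bits mirror the align/bitpos pair that
// get_pointer_alignment_1 returns, both expressed in bits.
static bit_lattice
value_from_alignment (unsigned align_bits, unsigned misalign_bits)
{
  bit_lattice v;
  v.mask = ~(uint64_t) (align_bits / 8 - 1);  // high bits vary
  v.value = misalign_bits / 8;                // low bits are known
  return v;
}

int
main ()
{
  // A 16-byte aligned pointer offset by 4 bytes: low 4 bits known to be 0100.
  bit_lattice v = value_from_alignment (128, 32);
  assert ((v.mask & 0xf) == 0);
  assert ((v.value & 0xf) == 4);
}
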
@@ -880,12 +875,10 @@ ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
         drop to varying.  */
-      val1->mask
-        = double_int_ior (double_int_ior (val1->mask,
-                                          val2->mask),
-                          double_int_xor (tree_to_double_int (val1->value),
-                                          tree_to_double_int (val2->value)));
-      if (double_int_minus_one_p (val1->mask))
+      val1->mask = val1->mask | val2->mask
+                   | (tree_to_double_int (val1->value)
+                      ^ tree_to_double_int (val2->value));
+      if (val1->mask.is_minus_one ())
        {
          val1->lattice_val = VARYING;
          val1->value = NULL_TREE;
@@ -1080,7 +1073,7 @@ bit_value_unop_1 (enum tree_code code, tree type,
     {
     case BIT_NOT_EXPR:
       *mask = rmask;
-      *val = double_int_not (rval);
+      *val = ~rval;
       break;

     case NEGATE_EXPR:
@@ -1100,13 +1093,13 @@ bit_value_unop_1 (enum tree_code code, tree type,
        /* First extend mask and value according to the original type.  */
        uns = TYPE_UNSIGNED (rtype);
-        *mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
-        *val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);
+        *mask = rmask.ext (TYPE_PRECISION (rtype), uns);
+        *val = rval.ext (TYPE_PRECISION (rtype), uns);

        /* Then extend mask and value according to the target type.  */
        uns = TYPE_UNSIGNED (type);
-        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
-        *val = double_int_ext (*val, TYPE_PRECISION (type), uns);
+        *mask = (*mask).ext (TYPE_PRECISION (type), uns);
+        *val = (*val).ext (TYPE_PRECISION (type), uns);
        break;
      }
@@ -1135,37 +1128,33 @@ bit_value_binop_1 (enum tree_code code, tree type,
     case BIT_AND_EXPR:
       /* The mask is constant where there is a known not
          set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
-      *mask = double_int_and (double_int_ior (r1mask, r2mask),
-                              double_int_and (double_int_ior (r1val, r1mask),
-                                              double_int_ior (r2val, r2mask)));
-      *val = double_int_and (r1val, r2val);
+      *mask = (r1mask | r2mask) & (r1val | r1mask) & (r2val | r2mask);
+      *val = r1val & r2val;
       break;

     case BIT_IOR_EXPR:
       /* The mask is constant where there is a known
          set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
-      *mask = double_int_and_not
-                (double_int_ior (r1mask, r2mask),
-                 double_int_ior (double_int_and_not (r1val, r1mask),
-                                 double_int_and_not (r2val, r2mask)));
-      *val = double_int_ior (r1val, r2val);
+      *mask = (r1mask | r2mask)
+              .and_not (r1val.and_not (r1mask) | r2val.and_not (r2mask));
+      *val = r1val | r2val;
       break;

     case BIT_XOR_EXPR:
       /* m1 | m2  */
-      *mask = double_int_ior (r1mask, r2mask);
-      *val = double_int_xor (r1val, r2val);
+      *mask = r1mask | r2mask;
+      *val = r1val ^ r2val;
       break;

     case LROTATE_EXPR:
     case RROTATE_EXPR:
-      if (double_int_zero_p (r2mask))
+      if (r2mask.is_zero ())
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RROTATE_EXPR)
            shift = -shift;
-          *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
-          *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
+          *mask = r1mask.lrotate (shift, TYPE_PRECISION (type));
+          *val = r1val.lrotate (shift, TYPE_PRECISION (type));
        }
       break;
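
The comment above asserts that for BIT_AND_EXPR the unknown result bits are exactly (m1 | m2) & ((v1 | m1) & (v2 | m2)): a bit can still change only if it is unknown somewhere and both operands can supply a 1 there. A brute-force check of that formula over all 4-bit lattice values, assuming the CCP convention that value bits under the mask are canonically zero (an illustrative test, not GCC code):

#include <cassert>
#include <cstdint>

int
main ()
{
  for (uint32_t v1 = 0; v1 < 16; v1++)
    for (uint32_t m1 = 0; m1 < 16; m1++)
      for (uint32_t v2 = 0; v2 < 16; v2++)
        for (uint32_t m2 = 0; m2 < 16; m2++)
          {
            if ((v1 & m1) || (v2 & m2))
              continue;  // value bits under the mask must be zero

            uint32_t mask = (m1 | m2) & (v1 | m1) & (v2 | m2);

            // Enumerate every concrete pair consistent with the lattice
            // values and record which result bits show up as 0 and as 1.
            uint32_t seen0 = 0, seen1 = 0;
            for (uint32_t a = 0; a < 16; a++)
              for (uint32_t b = 0; b < 16; b++)
                if ((a & ~m1) == v1 && (b & ~m2) == v2)
                  {
                    seen1 |= a & b;
                    seen0 |= ~(a & b) & 0xf;
                  }

            assert (mask == (seen0 & seen1));  // unknown == can be both
          }
}
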
@@ -1174,7 +1163,7 @@ bit_value_binop_1 (enum tree_code code, tree type,
       /* ??? We can handle partially known shift counts if we know
          its sign.  That way we can tell that (x << (y | 8)) & 255
          is zero.  */
-      if (double_int_zero_p (r2mask))
+      if (r2mask.is_zero ())
        {
          HOST_WIDE_INT shift = r2val.low;
          if (code == RSHIFT_EXPR)
@@ -1186,18 +1175,14 @@ bit_value_binop_1 (enum tree_code code, tree type,
             the sign bit was varying.  */
          if (shift > 0)
            {
-              *mask = double_int_lshift (r1mask, shift,
-                                         TYPE_PRECISION (type), false);
-              *val = double_int_lshift (r1val, shift,
-                                        TYPE_PRECISION (type), false);
+              *mask = r1mask.llshift (shift, TYPE_PRECISION (type));
+              *val = r1val.llshift (shift, TYPE_PRECISION (type));
            }
          else if (shift < 0)
            {
              shift = -shift;
-              *mask = double_int_rshift (r1mask, shift,
-                                         TYPE_PRECISION (type), !uns);
-              *val = double_int_rshift (r1val, shift,
-                                        TYPE_PRECISION (type), !uns);
+              *mask = r1mask.rshift (shift, TYPE_PRECISION (type), !uns);
+              *val = r1val.rshift (shift, TYPE_PRECISION (type), !uns);
            }
          else
            {
@@ -1213,21 +1198,18 @@ bit_value_binop_1 (enum tree_code code, tree type,
        double_int lo, hi;
        /* Do the addition with unknown bits set to zero, to give carry-ins of
           zero wherever possible.  */
-        lo = double_int_add (double_int_and_not (r1val, r1mask),
-                             double_int_and_not (r2val, r2mask));
-        lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
+        lo = r1val.and_not (r1mask) + r2val.and_not (r2mask);
+        lo = lo.ext (TYPE_PRECISION (type), uns);
        /* Do the addition with unknown bits set to one, to give carry-ins of
           one wherever possible.  */
-        hi = double_int_add (double_int_ior (r1val, r1mask),
-                             double_int_ior (r2val, r2mask));
-        hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
+        hi = (r1val | r1mask) + (r2val | r2mask);
+        hi = hi.ext (TYPE_PRECISION (type), uns);
        /* Each bit in the result is known if (a) the corresponding bits in
           both inputs are known, and (b) the carry-in to that bit position
           is known.  We can check condition (b) by seeing if we got the same
           result with minimised carries as with maximised carries.  */
-        *mask = double_int_ior (double_int_ior (r1mask, r2mask),
-                                double_int_xor (lo, hi));
-        *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
+        *mask = r1mask | r2mask | (lo ^ hi);
+        *mask = (*mask).ext (TYPE_PRECISION (type), uns);
        /* It shouldn't matter whether we choose lo or hi here.  */
        *val = lo;
        break;
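
The PLUS_EXPR case above determines known result bits by adding twice, once with every unknown bit forced to 0 and once with every unknown bit forced to 1; any bit where the two sums disagree had an uncertain carry-in. A worked instance with uint8_t (illustrative, not GCC code):

#include <cassert>
#include <cstdint>

int
main ()
{
  // r1 is 0b010? (bit 0 unknown), r2 is a fully known 0b0011.
  uint8_t r1val = 0x04, r1mask = 0x01;
  uint8_t r2val = 0x03, r2mask = 0x00;

  uint8_t lo = (uint8_t) ((r1val & ~r1mask) + (r2val & ~r2mask)); // carries minimized
  uint8_t hi = (uint8_t) ((r1val | r1mask) + (r2val | r2mask));   // carries maximized
  uint8_t mask = (uint8_t) (r1mask | r2mask | (lo ^ hi));

  // 4 + 3 = 7 but 5 + 3 = 8: the carry ripples, so bits 0-3 all vary.
  assert (lo == 0x07 && hi == 0x08 && mask == 0x0f);
}
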
@@ -1248,8 +1230,8 @@ bit_value_binop_1 (enum tree_code code, tree type,
      {
        /* Just track trailing zeros in both operands and transfer
           them to the other.  */
-        int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
-        int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
+        int r1tz = (r1val | r1mask).trailing_zeros ();
+        int r2tz = (r2val | r2mask).trailing_zeros ();
        if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
          {
            *mask = double_int_zero;
@@ -1257,8 +1239,8 @@ bit_value_binop_1 (enum tree_code code, tree type,
          }
        else if (r1tz + r2tz > 0)
          {
-            *mask = double_int_not (double_int_mask (r1tz + r2tz));
-            *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
+            *mask = ~double_int::mask (r1tz + r2tz);
+            *mask = (*mask).ext (TYPE_PRECISION (type), uns);
            *val = double_int_zero;
          }
        break;
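
For multiplication the code above tracks only trailing zeros: if one operand is divisible by 2^r1tz whatever its unknown bits turn out to be, and the other by 2^r2tz, the product is divisible by 2^(r1tz+r2tz), so that many low result bits are known zero. A standalone check (illustrative, not GCC code):

#include <cassert>
#include <cstdint>

static int
trailing_zeros (uint64_t x)
{
  return x ? __builtin_ctzll (x) : 64;
}

int
main ()
{
  uint64_t r1val = 0x08, r1mask = 0xf0;   // 8, 24, ..., 248: multiples of 8
  uint64_t r2val = 0x04, r2mask = 0x300;  // 4, 260, 516, 772: multiples of 4
  int r1tz = trailing_zeros (r1val | r1mask);
  int r2tz = trailing_zeros (r2val | r2mask);
  assert (r1tz == 3 && r2tz == 2);

  uint64_t known_zero = (UINT64_C (1) << (r1tz + r2tz)) - 1;
  for (uint64_t a = r1val; a <= (r1val | r1mask); a += 0x10)
    for (uint64_t b = r2val; b <= (r2val | r2mask); b += 0x100)
      assert (((a * b) & known_zero) == 0);  // product divisible by 32
}
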
@@ -1267,9 +1249,8 @@ bit_value_binop_1 (enum tree_code code, tree type,
     case EQ_EXPR:
     case NE_EXPR:
       {
-        double_int m = double_int_ior (r1mask, r2mask);
-        if (!double_int_equal_p (double_int_and_not (r1val, m),
-                                 double_int_and_not (r2val, m)))
+        double_int m = r1mask | r2mask;
+        if (r1val.and_not (m) != r2val.and_not (m))
          {
            *mask = double_int_zero;
            *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
@@ -1300,7 +1281,7 @@ bit_value_binop_1 (enum tree_code code, tree type,
      {
        int minmax, maxmin;
        /* If the most significant bits are not known we know nothing.  */
-        if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
+        if (r1mask.is_negative () || r2mask.is_negative ())
          break;

        /* For comparisons the signedness is in the comparison operands.  */
@@ -1309,10 +1290,8 @@ bit_value_binop_1 (enum tree_code code, tree type,
        /* If we know the most significant bits we know the values
           value ranges by means of treating varying bits as zero
           or one.  Do a cross comparison of the max/min pairs.  */
-        maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
-                                 double_int_and_not (r2val, r2mask), uns);
-        minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
-                                 double_int_ior (r2val, r2mask), uns);
+        maxmin = (r1val | r1mask).cmp (r2val.and_not (r2mask), uns);
+        minmax = r1val.and_not (r1mask).cmp (r2val | r2mask, uns);
        if (maxmin < 0)  /* r1 is less than r2.  */
          {
            *mask = double_int_zero;
@@ -1358,10 +1337,10 @@ bit_value_unop (enum tree_code code, tree type, tree rhs)
   gcc_assert ((rval.lattice_val == CONSTANT
               && TREE_CODE (rval.value) == INTEGER_CST)
-              || double_int_minus_one_p (rval.mask));
+              || rval.mask.is_minus_one ());
   bit_value_unop_1 (code, type, &value, &mask,
                     TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
-  if (!double_int_minus_one_p (mask))
+  if (!mask.is_minus_one ())
     {
       val.lattice_val = CONSTANT;
       val.mask = mask;
@@ -1399,14 +1378,14 @@ bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
   gcc_assert ((r1val.lattice_val == CONSTANT
               && TREE_CODE (r1val.value) == INTEGER_CST)
-              || double_int_minus_one_p (r1val.mask));
+              || r1val.mask.is_minus_one ());
   gcc_assert ((r2val.lattice_val == CONSTANT
               && TREE_CODE (r2val.value) == INTEGER_CST)
-              || double_int_minus_one_p (r2val.mask));
+              || r2val.mask.is_minus_one ());

   bit_value_binop_1 (code, type, &value, &mask,
                      TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
                      TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
-  if (!double_int_minus_one_p (mask))
+  if (!mask.is_minus_one ())
     {
       val.lattice_val = CONSTANT;
       val.mask = mask;
@@ -1439,7 +1418,7 @@ bit_value_assume_aligned (gimple stmt)
     return ptrval;
   gcc_assert ((ptrval.lattice_val == CONSTANT
               && TREE_CODE (ptrval.value) == INTEGER_CST)
-              || double_int_minus_one_p (ptrval.mask));
+              || ptrval.mask.is_minus_one ());
   align = gimple_call_arg (stmt, 1);
   if (!host_integerp (align, 1))
     return ptrval;
@@ -1461,7 +1440,7 @@ bit_value_assume_aligned (gimple stmt)
   bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
                      type, value_to_double_int (ptrval), ptrval.mask,
                      type, value_to_double_int (alignval), alignval.mask);
-  if (!double_int_minus_one_p (mask))
+  if (!mask.is_minus_one ())
     {
       val.lattice_val = CONSTANT;
       val.mask = mask;
@@ -1625,7 +1604,7 @@ evaluate_stmt (gimple stmt)
          case BUILT_IN_STRNDUP:
            val.lattice_val = CONSTANT;
            val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
-            val.mask = shwi_to_double_int
+            val.mask = double_int::from_shwi
                          (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
                             / BITS_PER_UNIT - 1));
            break;
@@ -1637,8 +1616,7 @@ evaluate_stmt (gimple stmt)
                       : BIGGEST_ALIGNMENT);
            val.lattice_val = CONSTANT;
            val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
-            val.mask = shwi_to_double_int
-                         (~(((HOST_WIDE_INT) align)
-                            / BITS_PER_UNIT - 1));
+            val.mask = double_int::from_shwi (~(((HOST_WIDE_INT) align)
                                                / BITS_PER_UNIT - 1));
            break;
@@ -1857,7 +1835,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
           fold more conditionals here.  */
        val = evaluate_stmt (stmt);
        if (val.lattice_val != CONSTANT
-            || !double_int_zero_p (val.mask))
+            || !val.mask.is_zero ())
          return false;

        if (dump_file)
@@ -2037,7 +2015,7 @@ visit_cond_stmt (gimple stmt, edge *taken_edge_p)
   block = gimple_bb (stmt);
   val = evaluate_stmt (stmt);
   if (val.lattice_val != CONSTANT
-      || !double_int_zero_p (val.mask))
+      || !val.mask.is_zero ())
     return SSA_PROP_VARYING;

   /* Find which edge out of the conditional block will be taken and add it

--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c

@@ -813,11 +813,10 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
            {
              double_int off = mem_ref_offset (lhs);
              tree new_ptr;
-              off = double_int_add (off,
-                                    shwi_to_double_int (def_rhs_offset));
+              off += double_int::from_shwi (def_rhs_offset);
              if (TREE_CODE (def_rhs_base) == MEM_REF)
                {
-                  off = double_int_add (off, mem_ref_offset (def_rhs_base));
+                  off += mem_ref_offset (def_rhs_base);
                  new_ptr = TREE_OPERAND (def_rhs_base, 0);
                }
              else
@@ -898,11 +897,10 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
        {
          double_int off = mem_ref_offset (rhs);
          tree new_ptr;
-          off = double_int_add (off,
-                                shwi_to_double_int (def_rhs_offset));
+          off += double_int::from_shwi (def_rhs_offset);
          if (TREE_CODE (def_rhs_base) == MEM_REF)
            {
-              off = double_int_add (off, mem_ref_offset (def_rhs_base));
+              off += mem_ref_offset (def_rhs_base);
              new_ptr = TREE_OPERAND (def_rhs_base, 0);
            }
          else
@@ -2373,8 +2371,7 @@ associate_pointerplus (gimple_stmt_iterator *gsi)
   if (gimple_assign_rhs1 (def_stmt) != ptr)
     return false;

-  algn = double_int_to_tree (TREE_TYPE (ptr),
-                             double_int_not (tree_to_double_int (algn)));
+  algn = double_int_to_tree (TREE_TYPE (ptr), ~tree_to_double_int (algn));
   gimple_assign_set_rhs_with_ops (gsi, BIT_AND_EXPR, ptr, algn);
   fold_stmt_inplace (gsi);
   update_stmt (stmt);
@@ -2537,7 +2534,7 @@ combine_conversions (gimple_stmt_iterator *gsi)
              tem = fold_build2 (BIT_AND_EXPR, inside_type,
                                 defop0,
                                 double_int_to_tree
-                                   (inside_type, double_int_mask (inter_prec)));
+                                   (inside_type, double_int::mask (inter_prec)));
              if (!useless_type_conversion_p (type, inside_type))
                {
                  tem = force_gimple_operand_gsi (gsi, tem, true, NULL_TREE, true,

--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c

@@ -1571,8 +1571,7 @@ constant_multiple_of (tree top, tree bot, double_int *mul)
       if (!constant_multiple_of (TREE_OPERAND (top, 0), bot, &res))
        return false;

-      *mul = double_int_sext (double_int_mul (res, tree_to_double_int (mby)),
-                              precision);
+      *mul = (res * tree_to_double_int (mby)).sext (precision);
       return true;

     case PLUS_EXPR:
@@ -1582,21 +1581,20 @@ constant_multiple_of (tree top, tree bot, double_int *mul)
        return false;

       if (code == MINUS_EXPR)
-        p1 = double_int_neg (p1);
-      *mul = double_int_sext (double_int_add (p0, p1), precision);
+        p1 = -p1;
+      *mul = (p0 + p1).sext (precision);
       return true;

     case INTEGER_CST:
       if (TREE_CODE (bot) != INTEGER_CST)
        return false;

-      p0 = double_int_sext (tree_to_double_int (top), precision);
-      p1 = double_int_sext (tree_to_double_int (bot), precision);
-      if (double_int_zero_p (p1))
+      p0 = tree_to_double_int (top).sext (precision);
+      p1 = tree_to_double_int (bot).sext (precision);
+      if (p1.is_zero ())
        return false;
-      *mul = double_int_sext (double_int_sdivmod (p0, p1, FLOOR_DIV_EXPR, &res),
-                              precision);
-      return double_int_zero_p (res);
+      *mul = p0.sdivmod (p1, FLOOR_DIV_EXPR, &res).sext (precision);
+      return res.is_zero ();

     default:
       return false;
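
In the INTEGER_CST arm above, TOP is a constant multiple of BOT exactly when the division leaves a zero remainder, with the quotient then sign-extended to the IV's precision. A standalone sketch with int64_t standing in for double_int (the precision handling is omitted, and lldiv truncates rather than floors, which makes no difference for an exact-multiple test):

#include <cassert>
#include <cstdint>
#include <cstdlib>

static bool
constant_multiple_of (int64_t top, int64_t bot, int64_t *mul)
{
  if (bot == 0)
    return false;
  lldiv_t qr = lldiv (top, bot);
  *mul = qr.quot;
  return qr.rem == 0;  // a multiple only if nothing is left over
}

int
main ()
{
  int64_t mul;
  assert (constant_multiple_of (24, 8, &mul) && mul == 3);
  assert (constant_multiple_of (-24, 8, &mul) && mul == -3);
  assert (!constant_multiple_of (25, 8, &mul));
}
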
@@ -3000,7 +2998,7 @@ get_computation_aff (struct loop *loop,
       aff_combination_add (&cbase_aff, &cstep_aff);
     }

-  aff_combination_scale (&cbase_aff, double_int_neg (rat));
+  aff_combination_scale (&cbase_aff, -rat);
   aff_combination_add (aff, &cbase_aff);
   if (common_type != uutype)
     aff_combination_convert (aff, uutype);
@@ -3777,7 +3775,7 @@ compare_aff_trees (aff_tree *aff1, aff_tree *aff2)
   for (i = 0; i < aff1->n; i++)
     {
-      if (double_int_cmp (aff1->elts[i].coef, aff2->elts[i].coef, 0) != 0)
+      if (aff1->elts[i].coef != aff2->elts[i].coef)
        return false;

       if (!operand_equal_p (aff1->elts[i].val, aff2->elts[i].val, 0))
@@ -3904,7 +3902,7 @@ get_loop_invariant_expr_id (struct ivopts_data *data, tree ubase,
      tree_to_aff_combination (ub, TREE_TYPE (ub), &ubase_aff);
      tree_to_aff_combination (cb, TREE_TYPE (cb), &cbase_aff);
-      aff_combination_scale (&cbase_aff, shwi_to_double_int (-1 * ratio));
+      aff_combination_scale (&cbase_aff, double_int::from_shwi (-1 * ratio));
      aff_combination_add (&ubase_aff, &cbase_aff);
      expr = aff_combination_to_tree (&ubase_aff);
      return get_expr_id (data, expr);
@@ -3990,8 +3988,8 @@ get_computation_cost_at (struct ivopts_data *data,
   if (!constant_multiple_of (ustep, cstep, &rat))
     return infinite_cost;

-  if (double_int_fits_in_shwi_p (rat))
-    ratio = double_int_to_shwi (rat);
+  if (rat.fits_shwi ())
+    ratio = rat.to_shwi ();
   else
     return infinite_cost;
@@ -4504,7 +4502,7 @@ iv_elimination_compare_lt (struct ivopts_data *data,
   aff_combination_scale (&tmpa, double_int_minus_one);
   aff_combination_add (&tmpb, &tmpa);
   aff_combination_add (&tmpb, &nit);
-  if (tmpb.n != 0 || !double_int_equal_p (tmpb.offset, double_int_one))
+  if (tmpb.n != 0 || tmpb.offset != double_int_one)
     return false;

   /* Finally, check that CAND->IV->BASE - CAND->IV->STEP * A does not
@@ -4594,9 +4592,9 @@ may_eliminate_iv (struct ivopts_data *data,
   max_niter = desc->max;
   if (stmt_after_increment (loop, cand, use->stmt))
-    max_niter = double_int_add (max_niter, double_int_one);
+    max_niter += double_int_one;
   period_value = tree_to_double_int (period);
-  if (double_int_ucmp (max_niter, period_value) > 0)
+  if (max_niter.ugt (period_value))
     {
       /* See if we can take advantage of inferred loop bound information.  */
       if (data->loop_single_exit_p)
@@ -4604,7 +4602,7 @@ may_eliminate_iv (struct ivopts_data *data,
          if (!max_loop_iterations (loop, &max_niter))
            return false;
          /* The loop bound is already adjusted by adding 1.  */
-          if (double_int_ucmp (max_niter, period_value) > 0)
+          if (max_niter.ugt (period_value))
            return false;
        }
      else

--- a/gcc/tree-ssa-loop-niter.c
+++ b/gcc/tree-ssa-loop-niter.c

@@ -91,7 +91,7 @@ split_to_var_and_offset (tree expr, tree *var, mpz_t offset)
      *var = op0;
      /* Always sign extend the offset.  */
      off = tree_to_double_int (op1);
-      off = double_int_sext (off, TYPE_PRECISION (type));
+      off = off.sext (TYPE_PRECISION (type));
      mpz_set_double_int (offset, off, false);
      if (negate)
        mpz_neg (offset, offset);
@@ -170,7 +170,7 @@ bound_difference_of_offsetted_base (tree type, mpz_t x, mpz_t y,
    }

  mpz_init (m);
-  mpz_set_double_int (m, double_int_mask (TYPE_PRECISION (type)), true);
+  mpz_set_double_int (m, double_int::mask (TYPE_PRECISION (type)), true);
  mpz_add_ui (m, m, 1);
  mpz_sub (bnds->up, x, y);
  mpz_set (bnds->below, bnds->up);
@@ -457,7 +457,7 @@ bounds_add (bounds *bnds, double_int delta, tree type)
  mpz_set_double_int (mdelta, delta, false);

  mpz_init (max);
-  mpz_set_double_int (max, double_int_mask (TYPE_PRECISION (type)), true);
+  mpz_set_double_int (max, double_int::mask (TYPE_PRECISION (type)), true);

  mpz_add (bnds->up, bnds->up, mdelta);
  mpz_add (bnds->below, bnds->below, mdelta);
@@ -573,7 +573,7 @@ number_of_iterations_ne_max (mpz_t bnd, bool no_overflow, tree c, tree s,
     the whole # of iterations analysis will fail).  */
  if (!no_overflow)
    {
-      max = double_int_mask (TYPE_PRECISION (TREE_TYPE (c))
-                             - tree_low_cst (num_ending_zeros (s), 1));
+      max = double_int::mask (TYPE_PRECISION (TREE_TYPE (c))
+                              - tree_low_cst (num_ending_zeros (s), 1));
      mpz_set_double_int (bnd, max, true);
      return;
@@ -581,7 +581,7 @@ number_of_iterations_ne_max (mpz_t bnd, bool no_overflow, tree c, tree s,
  /* Now we know that the induction variable does not overflow, so the loop
     iterates at most (range of type / S) times.  */
-  mpz_set_double_int (bnd, double_int_mask (TYPE_PRECISION (TREE_TYPE (c))),
+  mpz_set_double_int (bnd, double_int::mask (TYPE_PRECISION (TREE_TYPE (c))),
                      true);

  /* If the induction variable is guaranteed to reach the value of C before
@@ -922,9 +922,8 @@ assert_loop_rolls_lt (tree type, affine_iv *iv0, affine_iv *iv1,
    dstep = tree_to_double_int (iv0->step);
  else
    {
-      dstep = double_int_sext (tree_to_double_int (iv1->step),
-                               TYPE_PRECISION (type));
-      dstep = double_int_neg (dstep);
+      dstep = tree_to_double_int (iv1->step).sext (TYPE_PRECISION (type));
+      dstep = -dstep;
    }

  mpz_init (mstep);
@@ -935,7 +934,7 @@ assert_loop_rolls_lt (tree type, affine_iv *iv0, affine_iv *iv1,
  rolls_p = mpz_cmp (mstep, bnds->below) <= 0;

  mpz_init (max);
-  mpz_set_double_int (max, double_int_mask (TYPE_PRECISION (type)), true);
+  mpz_set_double_int (max, double_int::mask (TYPE_PRECISION (type)), true);
  mpz_add (max, max, mstep);
  no_overflow_p = (mpz_cmp (bnds->up, max) <= 0
                   /* For pointers, only values lying inside a single object
@@ -2394,7 +2393,7 @@ derive_constant_upper_bound_ops (tree type, tree op0,
          /* If the bound does not fit in TYPE, max. value of TYPE could be
             attained.  */
-          if (double_int_ucmp (max, bnd) < 0)
+          if (max.ult (bnd))
            return max;

          return bnd;
@@ -2410,27 +2409,27 @@ derive_constant_upper_bound_ops (tree type, tree op0,
             choose the most logical way how to treat this constant regardless
             of the signedness of the type.  */
          cst = tree_to_double_int (op1);
-          cst = double_int_sext (cst, TYPE_PRECISION (type));
+          cst = cst.sext (TYPE_PRECISION (type));
          if (code != MINUS_EXPR)
-            cst = double_int_neg (cst);
+            cst = -cst;

          bnd = derive_constant_upper_bound (op0);

-          if (double_int_negative_p (cst))
+          if (cst.is_negative ())
            {
-              cst = double_int_neg (cst);
+              cst = -cst;
              /* Avoid CST == 0x80000...  */
-              if (double_int_negative_p (cst))
+              if (cst.is_negative ())
                return max;;

              /* OP0 + CST.  We need to check that
                 BND <= MAX (type) - CST.  */

-              mmax = double_int_sub (max, cst);
-              if (double_int_ucmp (bnd, mmax) > 0)
+              mmax -= cst;
+              if (bnd.ugt (mmax))
                return max;

-              return double_int_add (bnd, cst);
+              return bnd + cst;
            }
          else
            {
@@ -2447,7 +2446,7 @@ derive_constant_upper_bound_ops (tree type, tree op0,
              /* This should only happen if the type is unsigned; however, for
                 buggy programs that use overflowing signed arithmetics even with
                 -fno-wrapv, this condition may also be true for signed values.  */
-              if (double_int_ucmp (bnd, cst) < 0)
+              if (bnd.ult (cst))
                return max;

              if (TYPE_UNSIGNED (type))
@@ -2458,7 +2457,7 @@ derive_constant_upper_bound_ops (tree type, tree op0,
                    return max;
                }

-              bnd = double_int_sub (bnd, cst);
+              bnd -= cst;
            }

          return bnd;
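
The OP0 + CST branch above guards the unsigned addition before performing it: BND + CST is only a usable bound when BND <= MAX - CST, otherwise the type's maximum has to be returned. A minimal rendering with uint64_t (the 16-bit MAX is an illustrative stand-in for the type's maximum value):

#include <cassert>
#include <cstdint>

static uint64_t
upper_bound_plus (uint64_t bnd, uint64_t cst, uint64_t max)
{
  uint64_t mmax = max - cst;
  if (bnd > mmax)          // BND + CST would exceed MAX (type)
    return max;
  return bnd + cst;
}

int
main ()
{
  const uint64_t max16 = 0xffff;
  assert (upper_bound_plus (0xfff0, 0x10, max16) == max16);  // saturates
  assert (upper_bound_plus (0x1000, 0x10, max16) == 0x1010); // exact
}
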
@@ -2470,7 +2469,7 @@ derive_constant_upper_bound_ops (tree type, tree op0,
            return max;

          bnd = derive_constant_upper_bound (op0);
-          return double_int_udiv (bnd, tree_to_double_int (op1), FLOOR_DIV_EXPR);
+          return bnd.udiv (tree_to_double_int (op1), FLOOR_DIV_EXPR);

        case BIT_AND_EXPR:
          if (TREE_CODE (op1) != INTEGER_CST
@@ -2503,14 +2502,14 @@ record_niter_bound (struct loop *loop, double_int i_bound, bool realistic,
     current estimation is smaller.  */
  if (upper
      && (!loop->any_upper_bound
-          || double_int_ucmp (i_bound, loop->nb_iterations_upper_bound) < 0))
+          || i_bound.ult (loop->nb_iterations_upper_bound)))
    {
      loop->any_upper_bound = true;
      loop->nb_iterations_upper_bound = i_bound;
    }
  if (realistic
      && (!loop->any_estimate
-          || double_int_ucmp (i_bound, loop->nb_iterations_estimate) < 0))
+          || i_bound.ult (loop->nb_iterations_estimate)))
    {
      loop->any_estimate = true;
      loop->nb_iterations_estimate = i_bound;
@@ -2520,8 +2519,7 @@ record_niter_bound (struct loop *loop, double_int i_bound, bool realistic,
     number of iterations, use the upper bound instead.  */
  if (loop->any_upper_bound
      && loop->any_estimate
-      && double_int_ucmp (loop->nb_iterations_upper_bound,
-                          loop->nb_iterations_estimate) < 0)
+      && loop->nb_iterations_upper_bound.ult (loop->nb_iterations_estimate))
    loop->nb_iterations_estimate = loop->nb_iterations_upper_bound;
 }
@@ -2583,10 +2581,10 @@ record_estimate (struct loop *loop, tree bound, double_int i_bound,
    delta = double_int_zero;
  else
    delta = double_int_one;
-  i_bound = double_int_add (i_bound, delta);
+  i_bound += delta;

  /* If an overflow occurred, ignore the result.  */
-  if (double_int_ucmp (i_bound, delta) < 0)
+  if (i_bound.ult (delta))
    return;

  record_niter_bound (loop, i_bound, realistic, upper);
@@ -3050,9 +3048,9 @@ estimated_loop_iterations_int (struct loop *loop)
  if (!estimated_loop_iterations (loop, &nit))
    return -1;

-  if (!double_int_fits_in_shwi_p (nit))
+  if (!nit.fits_shwi ())
    return -1;
-  hwi_nit = double_int_to_shwi (nit);
+  hwi_nit = nit.to_shwi ();

  return hwi_nit < 0 ? -1 : hwi_nit;
 }
@@ -3070,9 +3068,9 @@ max_loop_iterations_int (struct loop *loop)
  if (!max_loop_iterations (loop, &nit))
    return -1;

-  if (!double_int_fits_in_shwi_p (nit))
+  if (!nit.fits_shwi ())
    return -1;
-  hwi_nit = double_int_to_shwi (nit);
+  hwi_nit = nit.to_shwi ();

  return hwi_nit < 0 ? -1 : hwi_nit;
 }
@@ -3129,9 +3127,9 @@ max_stmt_executions (struct loop *loop, double_int *nit)
  nit_minus_one = *nit;

-  *nit = double_int_add (*nit, double_int_one);
+  *nit += double_int_one;

-  return double_int_ucmp (*nit, nit_minus_one) > 0;
+  return (*nit).ugt (nit_minus_one);
 }

 /* Sets NIT to the estimated number of executions of the latch of the
@@ -3148,9 +3146,9 @@ estimated_stmt_executions (struct loop *loop, double_int *nit)
  nit_minus_one = *nit;

-  *nit = double_int_add (*nit, double_int_one);
+  *nit += double_int_one;

-  return double_int_ucmp (*nit, nit_minus_one) > 0;
+  return (*nit).ugt (nit_minus_one);
 }

 /* Records estimates on numbers of iterations of loops.  */
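
Both *nit += double_int_one adjustments above rely on wraparound to signal overflow: the incremented count is trusted only if it still compares above the saved predecessor. The same idiom with uint64_t (illustrative):

#include <cassert>
#include <cstdint>

static bool
add_one_checked (uint64_t *nit)
{
  uint64_t nit_minus_one = *nit;
  *nit += 1;
  return *nit > nit_minus_one;  // false exactly when the add wrapped
}

int
main ()
{
  uint64_t a = 41;
  assert (add_one_checked (&a) && a == 42);

  uint64_t b = UINT64_MAX;
  assert (!add_one_checked (&b));  // wrapped to 0: estimate unusable
}
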
@@ -3255,8 +3253,8 @@ n_of_executions_at_most (gimple stmt,
       || (gimple_bb (stmt) != gimple_bb (niter_bound->stmt)
          && !stmt_dominates_stmt_p (niter_bound->stmt, stmt)))
     {
-      bound = double_int_add (bound, double_int_one);
-      if (double_int_zero_p (bound)
+      bound += double_int_one;
+      if (bound.is_zero ()
          || !double_int_fits_to_tree_p (nit_type, bound))
        return false;
     }

--- a/gcc/tree-ssa-phiopt.c
+++ b/gcc/tree-ssa-phiopt.c

@@ -720,9 +720,7 @@ jump_function_from_stmt (tree *arg, gimple stmt)
                                        &offset);
      if (tem
          && TREE_CODE (tem) == MEM_REF
-          && double_int_zero_p
-               (double_int_add (mem_ref_offset (tem),
-                                shwi_to_double_int (offset))))
+          && (mem_ref_offset (tem) + double_int::from_shwi (offset)).is_zero ())
        {
          *arg = TREE_OPERAND (tem, 0);
          return true;

--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c

@@ -1600,11 +1600,9 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
                  && TREE_CODE (op[2]) == INTEGER_CST)
                {
                  double_int off = tree_to_double_int (op[0]);
-                  off = double_int_add (off,
-                                        double_int_neg
-                                          (tree_to_double_int (op[1])));
-                  off = double_int_mul (off, tree_to_double_int (op[2]));
-                  if (double_int_fits_in_shwi_p (off))
+                  off += -tree_to_double_int (op[1]);
+                  off *= tree_to_double_int (op[2]);
+                  if (off.fits_shwi ())
                    newop.off = off.low;
                }
              VEC_replace (vn_reference_op_s, newoperands, j, newop);

--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c

@@ -656,13 +656,12 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
            if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
              {
                double_int off
-                  = double_int_add (tree_to_double_int (this_offset),
-                                    double_int_rshift
-                                      (tree_to_double_int (bit_offset),
-                                       BITS_PER_UNIT == 8
-                                       ? 3 : exact_log2 (BITS_PER_UNIT),
-                                       HOST_BITS_PER_DOUBLE_INT, true));
-                if (double_int_fits_in_shwi_p (off))
+                  = tree_to_double_int (this_offset)
+                    + tree_to_double_int (bit_offset)
+                      .arshift (BITS_PER_UNIT == 8
+                                ? 3 : exact_log2 (BITS_PER_UNIT),
+                                HOST_BITS_PER_DOUBLE_INT);
+                if (off.fits_shwi ())
                  temp.off = off.low;
              }
          }
@@ -680,11 +679,9 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
            && TREE_CODE (temp.op2) == INTEGER_CST)
          {
            double_int off = tree_to_double_int (temp.op0);
-            off = double_int_add (off,
-                                  double_int_neg
-                                    (tree_to_double_int (temp.op1)));
-            off = double_int_mul (off, tree_to_double_int (temp.op2));
-            if (double_int_fits_in_shwi_p (off))
+            off += -tree_to_double_int (temp.op1);
+            off *= tree_to_double_int (temp.op2);
+            if (off.fits_shwi ())
              temp.off = off.low;
          }
        break;
@@ -1018,8 +1015,8 @@ vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
   if (addr_base != op->op0)
     {
       double_int off = tree_to_double_int (mem_op->op0);
-      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
-      off = double_int_add (off, shwi_to_double_int (addr_offset));
+      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
+      off += double_int::from_shwi (addr_offset);
       mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
       op->op0 = build_fold_addr_expr (addr_base);
       if (host_integerp (mem_op->op0, 0))
@@ -1052,7 +1049,7 @@ vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
     return;

   off = tree_to_double_int (mem_op->op0);
-  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
+  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

   /* The only thing we have to do is from &OBJ.foo.bar add the offset
      from .foo.bar to the preceding MEM_REF offset and replace the
@@ -1069,8 +1066,8 @@ vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
          || TREE_CODE (addr_base) != MEM_REF)
        return;

-      off = double_int_add (off, shwi_to_double_int (addr_offset));
-      off = double_int_add (off, mem_ref_offset (addr_base));
+      off += double_int::from_shwi (addr_offset);
+      off += mem_ref_offset (addr_base);
       op->op0 = TREE_OPERAND (addr_base, 0);
     }
   else
@@ -1082,7 +1079,7 @@ vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

-      off = double_int_add (off, tree_to_double_int (ptroff));
+      off += tree_to_double_int (ptroff);
       op->op0 = ptr;
     }
@@ -1242,11 +1239,9 @@ valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
            && TREE_CODE (vro->op2) == INTEGER_CST)
          {
            double_int off = tree_to_double_int (vro->op0);
-            off = double_int_add (off,
-                                  double_int_neg
-                                    (tree_to_double_int (vro->op1)));
-            off = double_int_mul (off, tree_to_double_int (vro->op2));
-            if (double_int_fits_in_shwi_p (off))
+            off += -tree_to_double_int (vro->op1);
+            off *= tree_to_double_int (vro->op2);
+            if (off.fits_shwi ())
              vro->off = off.low;
          }
      }

--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c

@@ -2902,10 +2902,9 @@ get_constraint_for_ptr_offset (tree ptr, tree offset,
   else
     {
       /* Sign-extend the offset.  */
-      double_int soffset
-        = double_int_sext (tree_to_double_int (offset),
-                           TYPE_PRECISION (TREE_TYPE (offset)));
-      if (!double_int_fits_in_shwi_p (soffset))
+      double_int soffset = tree_to_double_int (offset)
+                           .sext (TYPE_PRECISION (TREE_TYPE (offset)));
+      if (!soffset.fits_shwi ())
        rhsoffset = UNKNOWN_OFFSET;
       else
        {

--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c

@@ -1833,10 +1833,9 @@ non_rewritable_mem_ref_base (tree ref)
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
-          && double_int_fits_in_uhwi_p (mem_ref_offset (base))
-          && double_int_ucmp
-               (tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
-                mem_ref_offset (base)) == 1
+          && mem_ref_offset (base).fits_uhwi ()
+          && tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
+             .ugt (mem_ref_offset (base))
          && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (base))))
        return NULL_TREE;

--- a/gcc/tree-switch-conversion.c
+++ b/gcc/tree-switch-conversion.c

@@ -970,17 +970,14 @@ array_value_type (gimple swtch, tree type, int num,
       if (prec > HOST_BITS_PER_WIDE_INT)
        return type;

-      if (sign >= 0
-          && double_int_equal_p (cst, double_int_zext (cst, prec)))
+      if (sign >= 0 && cst == cst.zext (prec))
        {
-          if (sign == 0
-              && double_int_equal_p (cst, double_int_sext (cst, prec)))
+          if (sign == 0 && cst == cst.sext (prec))
            break;
          sign = 1;
          break;
        }
-      if (sign <= 0
-          && double_int_equal_p (cst, double_int_sext (cst, prec)))
+      if (sign <= 0 && cst == cst.sext (prec))
        {
          sign = -1;
          break;
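
The sign/zext tests above decide whether every case constant fits a narrower array element type: a value fits an unsigned type of PREC bits iff zero-extending its low PREC bits reproduces it, and a signed type iff sign-extending does. A standalone version of both probes (illustrative helpers, not GCC's):

#include <cassert>
#include <cstdint>

static uint64_t
zext (uint64_t v, unsigned prec)
{
  return v & ((UINT64_C (1) << prec) - 1);
}

static uint64_t
sext (uint64_t v, unsigned prec)
{
  uint64_t low = zext (v, prec);
  return (low >> (prec - 1)) ? low | ~((UINT64_C (1) << prec) - 1) : low;
}

int
main ()
{
  uint64_t minus1 = (uint64_t) -1;
  assert (sext (minus1, 8) == minus1);  // -1 fits a signed 8-bit type...
  assert (zext (minus1, 8) != minus1);  // ...but not an unsigned one

  assert (zext (200, 8) == 200);        // 200 fits uint8_t...
  assert (sext (200, 8) != 200);        // ...but not int8_t
}
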

--- a/gcc/tree-vect-loop-manip.c
+++ b/gcc/tree-vect-loop-manip.c

@@ -1908,7 +1908,7 @@ vect_do_peeling_for_loop_bound (loop_vec_info loop_vinfo, tree *ratio,
   max_iter = LOOP_VINFO_VECT_FACTOR (loop_vinfo) - 1;
   if (check_profitability)
     max_iter = MAX (max_iter, (int) th);
-  record_niter_bound (new_loop, shwi_to_double_int (max_iter), false, true);
+  record_niter_bound (new_loop, double_int::from_shwi (max_iter), false, true);
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "Setting upper bound of nb iterations for epilogue "
             "loop to %d\n", max_iter);
@@ -2130,7 +2130,7 @@ vect_do_peeling_for_alignment (loop_vec_info loop_vinfo,
   max_iter = LOOP_VINFO_VECT_FACTOR (loop_vinfo) - 1;
   if (check_profitability)
     max_iter = MAX (max_iter, (int) th);
-  record_niter_bound (new_loop, shwi_to_double_int (max_iter), false, true);
+  record_niter_bound (new_loop, double_int::from_shwi (max_iter), false, true);
   if (dump_file && (dump_flags & TDF_DETAILS))
     fprintf (dump_file, "Setting upper bound of nb iterations for prologue "
             "loop to %d\n", max_iter);

--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c

@@ -1961,9 +1961,9 @@ zero_nonzero_bits_from_vr (value_range_t *vr,
     {
       double_int dmin = tree_to_double_int (vr->min);
       double_int dmax = tree_to_double_int (vr->max);
-      double_int xor_mask = double_int_xor (dmin, dmax);
-      *may_be_nonzero = double_int_ior (dmin, dmax);
-      *must_be_nonzero = double_int_and (dmin, dmax);
+      double_int xor_mask = dmin ^ dmax;
+      *may_be_nonzero = dmin | dmax;
+      *must_be_nonzero = dmin & dmax;
       if (xor_mask.high != 0)
        {
          unsigned HOST_WIDE_INT mask
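
The hunk above derives bit information from a range: min | max over-approximates the possibly-set bits and min & max under-approximates the certainly-set bits, and the code following this hunk then smears everything below the highest bit where min and max disagree (the xor_mask). For a range whose endpoints already agree above the varying bits, the raw identities can be checked directly (illustrative):

#include <cassert>
#include <cstdint>

int
main ()
{
  uint64_t dmin = 0x50, dmax = 0x57;      // only the low three bits vary
  uint64_t may_be_nonzero = dmin | dmax;  // 0x57
  uint64_t must_be_nonzero = dmin & dmax; // 0x50

  for (uint64_t v = dmin; v <= dmax; v++)
    {
      assert ((v & ~may_be_nonzero) == 0);               // no extra bits
      assert ((v & must_be_nonzero) == must_be_nonzero); // these always set
    }
}
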
@@ -2014,16 +2014,14 @@ ranges_from_anti_range (value_range_t *ar,
       vr0->min = vrp_val_min (type);
       vr0->max
        = double_int_to_tree (type,
-                              double_int_sub (tree_to_double_int (ar->min),
-                                              double_int_one));
+                              tree_to_double_int (ar->min) - double_int_one);
     }
   if (!vrp_val_is_max (ar->max))
     {
       vr1->type = VR_RANGE;
       vr1->min
        = double_int_to_tree (type,
-                              double_int_add (tree_to_double_int (ar->max),
-                                              double_int_one));
+                              tree_to_double_int (ar->max) + double_int_one);
       vr1->max = vrp_val_max (type);
     }
   if (vr0->type == VR_UNDEFINED)
@@ -2193,9 +2191,9 @@ static int
 quad_int_cmp (double_int l0, double_int h0,
              double_int l1, double_int h1, bool uns)
 {
-  int c = double_int_cmp (h0, h1, uns);
+  int c = h0.cmp (h1, uns);
   if (c != 0) return c;
-  return double_int_ucmp (l0, l1);
+  return l0.ucmp (l1);
 }

 static void
@@ -2389,37 +2387,33 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
          double_int max1 = tree_to_double_int (vr1.max);
          bool uns = TYPE_UNSIGNED (expr_type);
          double_int type_min
-            = double_int_min_value (TYPE_PRECISION (expr_type), uns);
+            = double_int::min_value (TYPE_PRECISION (expr_type), uns);
          double_int type_max
-            = double_int_max_value (TYPE_PRECISION (expr_type), uns);
+            = double_int::max_value (TYPE_PRECISION (expr_type), uns);
          double_int dmin, dmax;
          int min_ovf = 0;
          int max_ovf = 0;

          if (code == PLUS_EXPR)
            {
-              dmin = double_int_add (min0, min1);
-              dmax = double_int_add (max0, max1);
+              dmin = min0 + min1;
+              dmax = max0 + max1;

              /* Check for overflow in double_int.  */
-              if (double_int_cmp (min1, double_int_zero, uns)
-                  != double_int_cmp (dmin, min0, uns))
-                min_ovf = double_int_cmp (min0, dmin, uns);
-              if (double_int_cmp (max1, double_int_zero, uns)
-                  != double_int_cmp (dmax, max0, uns))
-                max_ovf = double_int_cmp (max0, dmax, uns);
+              if (min1.cmp (double_int_zero, uns) != dmin.cmp (min0, uns))
+                min_ovf = min0.cmp (dmin, uns);
+              if (max1.cmp (double_int_zero, uns) != dmax.cmp (max0, uns))
+                max_ovf = max0.cmp (dmax, uns);
            }
          else /* if (code == MINUS_EXPR) */
            {
-              dmin = double_int_sub (min0, max1);
-              dmax = double_int_sub (max0, min1);
+              dmin = min0 - max1;
+              dmax = max0 - min1;

-              if (double_int_cmp (double_int_zero, max1, uns)
-                  != double_int_cmp (dmin, min0, uns))
-                min_ovf = double_int_cmp (min0, max1, uns);
-              if (double_int_cmp (double_int_zero, min1, uns)
-                  != double_int_cmp (dmax, max0, uns))
-                max_ovf = double_int_cmp (max0, min1, uns);
+              if (double_int_zero.cmp (max1, uns) != dmin.cmp (min0, uns))
+                min_ovf = min0.cmp (max1, uns);
+              if (double_int_zero.cmp (min1, uns) != dmax.cmp (max0, uns))
+                max_ovf = max0.cmp (min1, uns);
            }

          /* For non-wrapping arithmetic look at possibly smaller
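
The overflow tests above exploit monotonicity: for dmin = min0 + min1, adding a value that compares above zero must move the sum above min0, so if cmp (min1, 0) and cmp (dmin, min0) disagree the addition wrapped, and comparing the operand against the sum recovers the direction. The unsigned instance of that check with uint64_t (the signed case works the same way through the uns flag):

#include <cassert>
#include <cstdint>

static int
cmp (uint64_t a, uint64_t b)
{
  return a < b ? -1 : a > b ? 1 : 0;
}

// Returns 0 when a + b is exact, 1 when it wrapped past the maximum.
static int
add_overflow (uint64_t a, uint64_t b, uint64_t *sum)
{
  *sum = a + b;
  if (cmp (b, 0) != cmp (*sum, a))  // the sum moved the "wrong" way
    return cmp (a, *sum);
  return 0;
}

int
main ()
{
  uint64_t s;
  assert (add_overflow (5, 7, &s) == 0 && s == 12);
  assert (add_overflow (UINT64_MAX, 2, &s) == 1 && s == 1);
}
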
@@ -2435,16 +2429,16 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
          /* Check for type overflow.  */
          if (min_ovf == 0)
            {
-              if (double_int_cmp (dmin, type_min, uns) == -1)
+              if (dmin.cmp (type_min, uns) == -1)
                min_ovf = -1;
-              else if (double_int_cmp (dmin, type_max, uns) == 1)
+              else if (dmin.cmp (type_max, uns) == 1)
                min_ovf = 1;
            }
          if (max_ovf == 0)
            {
-              if (double_int_cmp (dmax, type_min, uns) == -1)
+              if (dmax.cmp (type_min, uns) == -1)
                max_ovf = -1;
-              else if (double_int_cmp (dmax, type_max, uns) == 1)
+              else if (dmax.cmp (type_max, uns) == 1)
                max_ovf = 1;
            }
@@ -2453,9 +2447,9 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
              /* If overflow wraps, truncate the values and adjust the
                 range kind and bounds appropriately.  */
              double_int tmin
-                = double_int_ext (dmin, TYPE_PRECISION (expr_type), uns);
+                = dmin.ext (TYPE_PRECISION (expr_type), uns);
              double_int tmax
-                = double_int_ext (dmax, TYPE_PRECISION (expr_type), uns);
+                = dmax.ext (TYPE_PRECISION (expr_type), uns);
              if (min_ovf == max_ovf)
                {
                  /* No overflow or both overflow or underflow.  The
@@ -2479,16 +2473,16 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
                  gcc_assert ((min_ovf == -1 && max_ovf == 0)
                              || (max_ovf == 1 && min_ovf == 0));
                  type = VR_ANTI_RANGE;
-                  tmin = double_int_add (tmax, double_int_one);
-                  if (double_int_cmp (tmin, tmax, uns) < 0)
+                  tmin = tmax + double_int_one;
+                  if (tmin.cmp (tmax, uns) < 0)
                    covers = true;
-                  tmax = double_int_add (tem, double_int_minus_one);
+                  tmax = tem + double_int_minus_one;
                  if (double_int_cmp (tmax, tem, uns) > 0)
                    covers = true;
                  /* If the anti-range would cover nothing, drop to varying.
                     Likewise if the anti-range bounds are outside of the
                     types values.  */
-                  if (covers || double_int_cmp (tmin, tmax, uns) > 0)
+                  if (covers || tmin.cmp (tmax, uns) > 0)
                    {
                      set_value_range_to_varying (vr);
                      return;
@@ -2605,8 +2599,8 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
                     prod2l, prod2h, prod3l, prod3h;
          bool uns0, uns1, uns;

-          sizem1 = double_int_max_value (TYPE_PRECISION (expr_type), true);
-          size = double_int_add (sizem1, double_int_one);
+          sizem1 = double_int::max_value (TYPE_PRECISION (expr_type), true);
+          size = sizem1 + double_int_one;

          min0 = tree_to_double_int (vr0.min);
          max0 = tree_to_double_int (vr0.max);
@@ -2619,19 +2613,19 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
          /* Canonicalize the intervals.  */
          if (TYPE_UNSIGNED (expr_type))
            {
-              double_int min2 = double_int_sub (size, min0);
-              if (double_int_cmp (min2, max0, true) < 0)
+              double_int min2 = size - min0;
+              if (min2.cmp (max0, true) < 0)
                {
-                  min0 = double_int_neg (min2);
-                  max0 = double_int_sub (max0, size);
+                  min0 = -min2;
+                  max0 -= size;
                  uns0 = false;
                }

-              min2 = double_int_sub (size, min1);
-              if (double_int_cmp (min2, max1, true) < 0)
+              min2 = size - min1;
+              if (min2.cmp (max1, true) < 0)
                {
-                  min1 = double_int_neg (min2);
-                  max1 = double_int_sub (max1, size);
+                  min1 = -min2;
+                  max1 -= size;
                  uns1 = false;
                }
            }
@@ -2641,37 +2635,37 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
                                     min1.low, min1.high,
                                     &prod0l.low, &prod0l.high,
                                     &prod0h.low, &prod0h.high, true);
-          if (!uns0 && double_int_negative_p (min0))
-            prod0h = double_int_sub (prod0h, min1);
-          if (!uns1 && double_int_negative_p (min1))
-            prod0h = double_int_sub (prod0h, min0);
+          if (!uns0 && min0.is_negative ())
+            prod0h -= min1;
+          if (!uns1 && min1.is_negative ())
+            prod0h -= min0;

          mul_double_wide_with_sign (min0.low, min0.high,
                                     max1.low, max1.high,
                                     &prod1l.low, &prod1l.high,
                                     &prod1h.low, &prod1h.high, true);
-          if (!uns0 && double_int_negative_p (min0))
-            prod1h = double_int_sub (prod1h, max1);
-          if (!uns1 && double_int_negative_p (max1))
-            prod1h = double_int_sub (prod1h, min0);
+          if (!uns0 && min0.is_negative ())
+            prod1h -= max1;
+          if (!uns1 && max1.is_negative ())
+            prod1h -= min0;

          mul_double_wide_with_sign (max0.low, max0.high,
                                     min1.low, min1.high,
                                     &prod2l.low, &prod2l.high,
                                     &prod2h.low, &prod2h.high, true);
-          if (!uns0 && double_int_negative_p (max0))
-            prod2h = double_int_sub (prod2h, min1);
-          if (!uns1 && double_int_negative_p (min1))
-            prod2h = double_int_sub (prod2h, max0);
+          if (!uns0 && max0.is_negative ())
+            prod2h -= min1;
+          if (!uns1 && min1.is_negative ())
+            prod2h -= max0;

          mul_double_wide_with_sign (max0.low, max0.high,
                                     max1.low, max1.high,
                                     &prod3l.low, &prod3l.high,
                                     &prod3h.low, &prod3h.high, true);
-          if (!uns0 && double_int_negative_p (max0))
-            prod3h = double_int_sub (prod3h, max1);
-          if (!uns1 && double_int_negative_p (max1))
-            prod3h = double_int_sub (prod3h, max0);
+          if (!uns0 && max0.is_negative ())
+            prod3h -= max1;
+          if (!uns1 && max1.is_negative ())
+            prod3h -= max0;

          /* Sort the 4 products.  */
          quad_int_pair_sort (&prod0l, &prod0h, &prod3l, &prod3h, uns);
@@ -2680,23 +2674,23 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
          quad_int_pair_sort (&prod2l, &prod2h, &prod3l, &prod3h, uns);

          /* Max - min.  */
-          if (double_int_zero_p (prod0l))
+          if (prod0l.is_zero ())
            {
              prod1l = double_int_zero;
-              prod1h = double_int_neg (prod0h);
+              prod1h = -prod0h;
            }
          else
            {
-              prod1l = double_int_neg (prod0l);
-              prod1h = double_int_not (prod0h);
+              prod1l = -prod0l;
+              prod1h = ~prod0h;
            }
-          prod2l = double_int_add (prod3l, prod1l);
-          prod2h = double_int_add (prod3h, prod1h);
-          if (double_int_ucmp (prod2l, prod3l) < 0)
-            prod2h = double_int_add (prod2h, double_int_one); /* carry */
+          prod2l = prod3l + prod1l;
+          prod2h = prod3h + prod1h;
+          if (prod2l.ult (prod3l))
+            prod2h += double_int_one; /* carry */

-          if (!double_int_zero_p (prod2h)
-              || double_int_cmp (prod2l, sizem1, true) >= 0)
+          if (!prod2h.is_zero ()
+              || prod2l.cmp (sizem1, true) >= 0)
            {
              /* the range covers all values.  */
              set_value_range_to_varying (vr);
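
The "max - min" block above is plain two-word arithmetic on (low, high) pairs: negation is (-low, ~high) when the low word is nonzero (the implicit +1 of two's complement lands in the low word) and (0, -high) when it is zero, and the final add propagates an explicit carry. A compact model of both helpers (illustrative, not GCC code):

#include <cassert>
#include <cstdint>

struct dwide { uint64_t lo, hi; };

static dwide
dwide_neg (dwide x)
{
  if (x.lo == 0)
    return { 0, 0 - x.hi };      // no borrow out of the low word
  return { 0 - x.lo, ~x.hi };    // -x == ~x + 1, the +1 stays in lo
}

static dwide
dwide_add (dwide a, dwide b)
{
  dwide r;
  r.lo = a.lo + b.lo;
  r.hi = a.hi + b.hi + (r.lo < a.lo ? 1 : 0);  // carry from the low word
  return r;
}

int
main ()
{
  dwide min = { 3, 0 };          // 3
  dwide max = { 1, 1 };          // 2^64 + 1
  dwide spread = dwide_add (max, dwide_neg (min));
  assert (spread.hi == 0 && spread.lo == (uint64_t) -2);  // 2^64 - 2
}
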
@ -2755,11 +2749,9 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
vr1p.type = VR_RANGE; vr1p.type = VR_RANGE;
vr1p.min vr1p.min
= double_int_to_tree (expr_type, = double_int_to_tree (expr_type,
double_int_lshift double_int_one
(double_int_one, .llshift (TREE_INT_CST_LOW (vr1.min),
TREE_INT_CST_LOW (vr1.min), TYPE_PRECISION (expr_type)));
TYPE_PRECISION (expr_type),
false));
vr1p.max = vr1p.min; vr1p.max = vr1p.min;
/* We have to use a wrapping multiply though as signed overflow /* We have to use a wrapping multiply though as signed overflow
on lshifts is implementation defined in C89. */ on lshifts is implementation defined in C89. */
@ -2903,9 +2895,8 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
{ {
double_int dmax; double_int dmax;
min = double_int_to_tree (expr_type, min = double_int_to_tree (expr_type,
double_int_and (must_be_nonzero0, must_be_nonzero0 & must_be_nonzero1);
must_be_nonzero1)); dmax = may_be_nonzero0 & may_be_nonzero1;
dmax = double_int_and (may_be_nonzero0, may_be_nonzero1);
/* If both input ranges contain only negative values we can /* If both input ranges contain only negative values we can
truncate the result range maximum to the minimum of the truncate the result range maximum to the minimum of the
input range maxima. */ input range maxima. */
@ -2913,19 +2904,19 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
&& tree_int_cst_sgn (vr0.max) < 0 && tree_int_cst_sgn (vr0.max) < 0
&& tree_int_cst_sgn (vr1.max) < 0) && tree_int_cst_sgn (vr1.max) < 0)
{ {
dmax = double_int_min (dmax, tree_to_double_int (vr0.max), dmax = dmax.min (tree_to_double_int (vr0.max),
TYPE_UNSIGNED (expr_type)); TYPE_UNSIGNED (expr_type));
dmax = double_int_min (dmax, tree_to_double_int (vr1.max), dmax = dmax.min (tree_to_double_int (vr1.max),
TYPE_UNSIGNED (expr_type)); TYPE_UNSIGNED (expr_type));
} }
/* If either input range contains only non-negative values /* If either input range contains only non-negative values
we can truncate the result range maximum to the respective we can truncate the result range maximum to the respective
maximum of the input range. */ maximum of the input range. */
if (int_cst_range0 && tree_int_cst_sgn (vr0.min) >= 0) if (int_cst_range0 && tree_int_cst_sgn (vr0.min) >= 0)
dmax = double_int_min (dmax, tree_to_double_int (vr0.max), dmax = dmax.min (tree_to_double_int (vr0.max),
TYPE_UNSIGNED (expr_type)); TYPE_UNSIGNED (expr_type));
if (int_cst_range1 && tree_int_cst_sgn (vr1.min) >= 0) if (int_cst_range1 && tree_int_cst_sgn (vr1.min) >= 0)
dmax = double_int_min (dmax, tree_to_double_int (vr1.max), dmax = dmax.min (tree_to_double_int (vr1.max),
TYPE_UNSIGNED (expr_type)); TYPE_UNSIGNED (expr_type));
max = double_int_to_tree (expr_type, dmax); max = double_int_to_tree (expr_type, dmax);
} }
@@ -2933,9 +2924,8 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
 	{
 	  double_int dmin;
 	  max = double_int_to_tree (expr_type,
-				    double_int_ior (may_be_nonzero0,
-						    may_be_nonzero1));
-	  dmin = double_int_ior (must_be_nonzero0, must_be_nonzero1);
+				    may_be_nonzero0 | may_be_nonzero1);
+	  dmin = must_be_nonzero0 | must_be_nonzero1;
 	  /* If the input ranges contain only positive values we can
	     truncate the minimum of the result range to the maximum
	     of the input range minima.  */
@@ -2943,40 +2933,30 @@ extract_range_from_binary_expr_1 (value_range_t *vr,
 	      && tree_int_cst_sgn (vr0.min) >= 0
 	      && tree_int_cst_sgn (vr1.min) >= 0)
 	    {
-	      dmin = double_int_max (dmin, tree_to_double_int (vr0.min),
-				     TYPE_UNSIGNED (expr_type));
-	      dmin = double_int_max (dmin, tree_to_double_int (vr1.min),
-				     TYPE_UNSIGNED (expr_type));
+	      dmin = dmin.max (tree_to_double_int (vr0.min),
+			       TYPE_UNSIGNED (expr_type));
+	      dmin = dmin.max (tree_to_double_int (vr1.min),
+			       TYPE_UNSIGNED (expr_type));
 	    }
 	  /* If either input range contains only negative values
	     we can truncate the minimum of the result range to the
	     respective minimum range.  */
 	  if (int_cst_range0 && tree_int_cst_sgn (vr0.max) < 0)
-	    dmin = double_int_max (dmin, tree_to_double_int (vr0.min),
-				   TYPE_UNSIGNED (expr_type));
+	    dmin = dmin.max (tree_to_double_int (vr0.min),
+			     TYPE_UNSIGNED (expr_type));
 	  if (int_cst_range1 && tree_int_cst_sgn (vr1.max) < 0)
-	    dmin = double_int_max (dmin, tree_to_double_int (vr1.min),
-				   TYPE_UNSIGNED (expr_type));
+	    dmin = dmin.max (tree_to_double_int (vr1.min),
+			     TYPE_UNSIGNED (expr_type));
 	  min = double_int_to_tree (expr_type, dmin);
 	}
       else if (code == BIT_XOR_EXPR)
 	{
 	  double_int result_zero_bits, result_one_bits;
-	  result_zero_bits
-	    = double_int_ior (double_int_and (must_be_nonzero0,
-					      must_be_nonzero1),
-			      double_int_not
-			        (double_int_ior (may_be_nonzero0,
-						 may_be_nonzero1)));
-	  result_one_bits
-	    = double_int_ior (double_int_and
-			        (must_be_nonzero0,
-				 double_int_not (may_be_nonzero1)),
-			      double_int_and
-			        (must_be_nonzero1,
-				 double_int_not (may_be_nonzero0)));
-	  max = double_int_to_tree (expr_type,
-				    double_int_not (result_zero_bits));
+	  result_zero_bits = (must_be_nonzero0 & must_be_nonzero1)
+			     | ~(may_be_nonzero0 | may_be_nonzero1);
+	  result_one_bits = must_be_nonzero0.and_not (may_be_nonzero1)
+			    | must_be_nonzero1.and_not (may_be_nonzero0);
+	  max = double_int_to_tree (expr_type, ~result_zero_bits);
 	  min = double_int_to_tree (expr_type, result_one_bits);
 	  /* If the range has all positive or all negative values the
	     result is better than VARYING.  */
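(The rewritten BIT_XOR_EXPR block computes known-bit masks for x ^ y.  The identities are easier to see on plain machine words; the sketch below is an illustrative model, with 'must' holding certainly-one bits and 'may' holding possibly-one bits, matching must_be_nonzero*/may_be_nonzero* above.)

    #include <cstdint>
    #include <cassert>

    // Known-bits model for x ^ y; struct and function names are made up.
    struct known_bits { uint64_t must, may; };

    known_bits xor_bits (known_bits a, known_bits b)
    {
      // A result bit is certainly 0 when both inputs agree for sure:
      // both certainly 1, or both certainly 0.
      uint64_t zero = (a.must & b.must) | ~(a.may | b.may);
      // A result bit is certainly 1 when exactly one side is certainly 1
      // and the other certainly 0 ("and_not" in double_int terms).
      uint64_t one = (a.must & ~b.may) | (b.must & ~a.may);
      return { one, ~zero };
    }

    int main ()
    {
      known_bits a = { 0b1000, 0b1010 };  // bit 3 is 1, bit 1 unknown
      known_bits b = { 0b0001, 0b0011 };  // bit 0 is 1, bit 1 unknown
      known_bits r = xor_bits (a, b);
      assert (r.must == 0b1001);          // bits 3 and 0 certainly 1
      assert ((~r.may & 0b0100) != 0);    // bit 2 certainly 0
      return 0;
    }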
@@ -3606,10 +3586,10 @@ adjust_range_with_scev (value_range_t *vr, struct loop *loop,
 	  value_range_t maxvr = VR_INITIALIZER;
 	  double_int dtmp;
 	  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (step));
-	  int overflow = 0;
+	  bool overflow = false;
 
-	  dtmp = double_int_mul_with_sign (tree_to_double_int (step), nit,
-					   unsigned_p, &overflow);
+	  dtmp = tree_to_double_int (step)
+		 .mul_with_sign (nit, unsigned_p, &overflow);
 	  /* If the multiplication overflowed we can't do a meaningful
	     adjustment.  Likewise if the result doesn't fit in the type
	     of the induction variable.  For a signed type we have to
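(This hunk is the interface point named in the ChangeLog: mul_with_sign now reports overflow through a bool* instead of an int*.  A single-word model of that contract, assuming the GCC/Clang __builtin_mul_overflow builtin for the signed case; the function name is illustrative:)

    #include <cstdint>
    #include <iostream>

    // Wrapped product plus an overflow flag, on one 64-bit word; GCC's
    // double_int version does the same on a 128-bit pair.
    uint64_t mul_with_sign (uint64_t a, uint64_t b, bool unsigned_p,
                            bool *overflow)
    {
      if (unsigned_p)
        {
          uint64_t r = a * b;
          *overflow = b != 0 && r / b != a;  // exact product did not fit
          return r;
        }
      int64_t sa = (int64_t) a, sb = (int64_t) b, sr;
      *overflow = __builtin_mul_overflow (sa, sb, &sr);
      return (uint64_t) sr;
    }

    int main ()
    {
      bool ovf;
      mul_with_sign (UINT64_MAX, 2, true, &ovf);
      std::cout << ovf << "\n";  // 1: unsigned product wrapped
    }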
@@ -4519,19 +4499,19 @@ masked_increment (double_int val, double_int mask, double_int sgnbit,
   double_int bit = double_int_one, res;
   unsigned int i;
 
-  val = double_int_xor (val, sgnbit);
-  for (i = 0; i < prec; i++, bit = double_int_add (bit, bit))
+  val ^= sgnbit;
+  for (i = 0; i < prec; i++, bit += bit)
     {
       res = mask;
-      if (double_int_zero_p (double_int_and (res, bit)))
+      if ((res & bit).is_zero ())
 	continue;
-      res = double_int_sub (bit, double_int_one);
-      res = double_int_and_not (double_int_add (val, bit), res);
-      res = double_int_and (res, mask);
-      if (double_int_ucmp (res, val) > 0)
-	return double_int_xor (res, sgnbit);
+      res = bit - double_int_one;
+      res = (val + bit).and_not (res);
+      res &= mask;
+      if (res.ugt (val))
+	return res ^ sgnbit;
     }
-  return double_int_xor (val, sgnbit);
+  return val ^ sgnbit;
 }
 
 /* Try to register an edge assertion for SSA name NAME on edge E for
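(The rewritten loop transliterates directly onto a plain 64-bit word, which makes it easy to test in isolation.  The function below is that transliteration, not GCC code: it finds the smallest value greater than VAL all of whose set bits lie within MASK, with SGNBIT pre-xored in to handle signed comparisons; pass 0 for the unsigned case.)

    #include <cstdint>

    // Direct uint64_t transliteration of the masked_increment loop above.
    uint64_t masked_increment (uint64_t val, uint64_t mask,
                               uint64_t sgnbit, unsigned prec)
    {
      uint64_t bit = 1, res;
      val ^= sgnbit;
      for (unsigned i = 0; i < prec; i++, bit += bit)
        {
          res = mask;
          if ((res & bit) == 0)     // this mask bit can't help, skip it
            continue;
          res = bit - 1;
          res = (val + bit) & ~res; // round val up past this bit
          res &= mask;              // keep only bits permitted by mask
          if (res > val)
            return res ^ sgnbit;
        }
      return val ^ sgnbit;          // no larger masked value exists
    }

    int main ()
    {
      // Smallest (x & 0b1100) strictly greater than 0b0101 is 0b1000.
      return masked_increment (0b0101, 0b1100, 0, 8) == 0b1000 ? 0 : 1;
    }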
@@ -4735,7 +4715,7 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 	      && live_on_edge (e, name2)
 	      && !has_single_use (name2))
 	    {
-	      mask = double_int_mask (tree_low_cst (cst2, 1));
+	      mask = double_int::mask (tree_low_cst (cst2, 1));
 	      val2 = fold_binary (LSHIFT_EXPR, TREE_TYPE (val), val, cst2);
 	    }
 	}
@@ -4766,9 +4746,9 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 	  else
 	    {
 	      double_int maxval
-		= double_int_max_value (prec, TYPE_UNSIGNED (TREE_TYPE (val)));
-	      mask = double_int_ior (tree_to_double_int (val2), mask);
-	      if (double_int_equal_p (mask, maxval))
+		= double_int::max_value (prec, TYPE_UNSIGNED (TREE_TYPE (val)));
+	      mask |= tree_to_double_int (val2);
+	      if (mask == maxval)
 		new_val = NULL_TREE;
 	      else
 		new_val = double_int_to_tree (TREE_TYPE (val2), mask);
@@ -4835,12 +4815,12 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 	  bool valid_p = false, valn = false, cst2n = false;
 	  enum tree_code ccode = comp_code;
 
-	  valv = double_int_zext (tree_to_double_int (val), prec);
-	  cst2v = double_int_zext (tree_to_double_int (cst2), prec);
+	  valv = tree_to_double_int (val).zext (prec);
+	  cst2v = tree_to_double_int (cst2).zext (prec);
 	  if (!TYPE_UNSIGNED (TREE_TYPE (val)))
 	    {
-	      valn = double_int_negative_p (double_int_sext (valv, prec));
-	      cst2n = double_int_negative_p (double_int_sext (cst2v, prec));
+	      valn = valv.sext (prec).is_negative ();
+	      cst2n = cst2v.sext (prec).is_negative ();
 	    }
 	  /* If CST2 doesn't have most significant bit set,
	     but VAL is negative, we have comparison like
@@ -4848,12 +4828,10 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 	  if (!cst2n && valn)
 	    ccode = ERROR_MARK;
 	  if (cst2n)
-	    sgnbit = double_int_zext (double_int_lshift (double_int_one,
-							 prec - 1, prec,
-							 false), prec);
+	    sgnbit = double_int_one.llshift (prec - 1, prec).zext (prec);
 	  else
 	    sgnbit = double_int_zero;
-	  minv = double_int_and (valv, cst2v);
+	  minv = valv & cst2v;
 	  switch (ccode)
 	    {
 	    case EQ_EXPR:
@@ -4861,15 +4839,15 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 		 (should be equal to VAL, otherwise we probably should
		 have folded the comparison into false) and
		 maximum unsigned value is VAL | ~CST2.  */
-	      maxv = double_int_ior (valv, double_int_not (cst2v));
-	      maxv = double_int_zext (maxv, prec);
+	      maxv = valv | ~cst2v;
+	      maxv = maxv.zext (prec);
 	      valid_p = true;
 	      break;
 	    case NE_EXPR:
-	      tem = double_int_ior (valv, double_int_not (cst2v));
-	      tem = double_int_zext (tem, prec);
+	      tem = valv | ~cst2v;
+	      tem = tem.zext (prec);
 	      /* If VAL is 0, handle (X & CST2) != 0 as (X & CST2) > 0U.  */
-	      if (double_int_zero_p (valv))
+	      if (valv.is_zero ())
 		{
 		  cst2n = false;
 		  sgnbit = double_int_zero;
@@ -4877,7 +4855,7 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 		}
 	      /* If (VAL | ~CST2) is all ones, handle it as
		 (X & CST2) < VAL.  */
-	      if (double_int_equal_p (tem, double_int_mask (prec)))
+	      if (tem == double_int::mask (prec))
 		{
 		  cst2n = false;
 		  valn = false;
@@ -4885,19 +4863,17 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 		  goto lt_expr;
 		}
 	      if (!cst2n
-		  && double_int_negative_p (double_int_sext (cst2v, prec)))
-		sgnbit = double_int_zext (double_int_lshift (double_int_one,
-							     prec - 1, prec,
-							     false), prec);
-	      if (!double_int_zero_p (sgnbit))
+		  && cst2v.sext (prec).is_negative ())
+		sgnbit = double_int_one.llshift (prec - 1, prec).zext (prec);
+	      if (!sgnbit.is_zero ())
 		{
-		  if (double_int_equal_p (valv, sgnbit))
+		  if (valv == sgnbit)
 		    {
 		      cst2n = true;
 		      valn = true;
 		      goto gt_expr;
 		    }
-		  if (double_int_equal_p (tem, double_int_mask (prec - 1)))
+		  if (tem == double_int::mask (prec - 1))
 		    {
 		      cst2n = true;
 		      goto lt_expr;
@@ -4912,15 +4888,15 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 		 comparison, if CST2 doesn't have most significant bit
		 set, handle it similarly.  If CST2 has MSB set,
		 the minimum is the same, and maximum is ~0U/2.  */
-	      if (!double_int_equal_p (minv, valv))
+	      if (minv != valv)
 		{
 		  /* If (VAL & CST2) != VAL, X & CST2 can't be equal to
		     VAL.  */
 		  minv = masked_increment (valv, cst2v, sgnbit, prec);
-		  if (double_int_equal_p (minv, valv))
+		  if (minv == valv)
 		    break;
 		}
-	      maxv = double_int_mask (prec - (cst2n ? 1 : 0));
+	      maxv = double_int::mask (prec - (cst2n ? 1 : 0));
 	      valid_p = true;
 	      break;
 	    case GT_EXPR:
@@ -4929,9 +4905,9 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 		 && (MINV & CST2) == MINV, if any.  If VAL is signed and
		 CST2 has MSB set, compute it biased by 1 << (prec - 1).  */
 	      minv = masked_increment (valv, cst2v, sgnbit, prec);
-	      if (double_int_equal_p (minv, valv))
+	      if (minv == valv)
 		break;
-	      maxv = double_int_mask (prec - (cst2n ? 1 : 0));
+	      maxv = double_int::mask (prec - (cst2n ? 1 : 0));
 	      valid_p = true;
 	      break;
 	    case LE_EXPR:
@@ -4943,17 +4919,17 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 		 For signed comparison, if CST2 doesn't have most
		 significant bit set, handle it similarly.  If CST2 has
		 MSB set, the maximum is the same and minimum is INT_MIN.  */
-	      if (double_int_equal_p (minv, valv))
+	      if (minv == valv)
 		maxv = valv;
 	      else
 		{
 		  maxv = masked_increment (valv, cst2v, sgnbit, prec);
-		  if (double_int_equal_p (maxv, valv))
+		  if (maxv == valv)
 		    break;
-		  maxv = double_int_sub (maxv, double_int_one);
+		  maxv -= double_int_one;
 		}
-	      maxv = double_int_ior (maxv, double_int_not (cst2v));
-	      maxv = double_int_zext (maxv, prec);
+	      maxv |= ~cst2v;
+	      maxv = maxv.zext (prec);
 	      minv = sgnbit;
 	      valid_p = true;
 	      break;
@@ -4967,21 +4943,21 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 		 For signed comparison, if CST2 doesn't have most
		 significant bit set, handle it similarly.  If CST2 has
		 MSB set, the maximum is the same and minimum is INT_MIN.  */
-	      if (double_int_equal_p (minv, valv))
+	      if (minv == valv)
 		{
-		  if (double_int_equal_p (valv, sgnbit))
+		  if (valv == sgnbit)
 		    break;
 		  maxv = valv;
 		}
 	      else
 		{
 		  maxv = masked_increment (valv, cst2v, sgnbit, prec);
-		  if (double_int_equal_p (maxv, valv))
+		  if (maxv == valv)
 		    break;
 		}
-	      maxv = double_int_sub (maxv, double_int_one);
-	      maxv = double_int_ior (maxv, double_int_not (cst2v));
-	      maxv = double_int_zext (maxv, prec);
+	      maxv -= double_int_one;
+	      maxv |= ~cst2v;
+	      maxv = maxv.zext (prec);
 	      minv = sgnbit;
 	      valid_p = true;
 	      break;
@@ -4989,10 +4965,7 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 	      break;
 	    }
 	  if (valid_p
-	      && !double_int_equal_p (double_int_zext (double_int_sub (maxv,
-								       minv),
-						       prec),
-				      double_int_mask (prec)))
+	      && (maxv - minv).zext (prec) != double_int::mask (prec))
 	    {
 	      tree tmp, new_val, type;
 	      int i;
@@ -5008,12 +4981,11 @@ register_edge_assert_for_2 (tree name, edge e, gimple_stmt_iterator bsi,
 		  type = build_nonstandard_integer_type (prec, 1);
 		  tmp = build1 (NOP_EXPR, type, names[i]);
 		}
-	      if (!double_int_zero_p (minv))
+	      if (!minv.is_zero ())
 		{
 		  tmp = build2 (PLUS_EXPR, type, tmp,
-				double_int_to_tree (type,
-						    double_int_neg (minv)));
-		  maxv2 = double_int_sub (maxv, minv);
+				double_int_to_tree (type, -minv));
+		  maxv2 = maxv - minv;
 		}
 	      new_val = double_int_to_tree (type, maxv2);
@@ -5947,20 +5919,16 @@ search_for_addr_array (tree t, location_t location)
     return;
 
   idx = mem_ref_offset (t);
-  idx = double_int_sdiv (idx, tree_to_double_int (el_sz), TRUNC_DIV_EXPR);
-  if (double_int_scmp (idx, double_int_zero) < 0)
+  idx = idx.sdiv (tree_to_double_int (el_sz), TRUNC_DIV_EXPR);
+  if (idx.slt (double_int_zero))
     {
       warning_at (location, OPT_Warray_bounds,
		  "array subscript is below array bounds");
       TREE_NO_WARNING (t) = 1;
     }
-  else if (double_int_scmp (idx,
-			    double_int_add
-			      (double_int_add
-				(tree_to_double_int (up_bound),
-				 double_int_neg
-				   (tree_to_double_int (low_bound))),
-			      double_int_one)) > 0)
+  else if (idx.sgt (tree_to_double_int (up_bound)
+		    - tree_to_double_int (low_bound)
+		    + double_int_one))
     {
       warning_at (location, OPT_Warray_bounds,
		  "array subscript is above array bounds");
@@ -8221,28 +8189,28 @@ simplify_bit_ops_using_ranges (gimple_stmt_iterator *gsi, gimple stmt)
   switch (gimple_assign_rhs_code (stmt))
     {
     case BIT_AND_EXPR:
-      mask = double_int_and_not (may_be_nonzero0, must_be_nonzero1);
-      if (double_int_zero_p (mask))
+      mask = may_be_nonzero0.and_not (must_be_nonzero1);
+      if (mask.is_zero ())
 	{
 	  op = op0;
 	  break;
 	}
-      mask = double_int_and_not (may_be_nonzero1, must_be_nonzero0);
-      if (double_int_zero_p (mask))
+      mask = may_be_nonzero1.and_not (must_be_nonzero0);
+      if (mask.is_zero ())
 	{
 	  op = op1;
 	  break;
 	}
       break;
     case BIT_IOR_EXPR:
-      mask = double_int_and_not (may_be_nonzero0, must_be_nonzero1);
-      if (double_int_zero_p (mask))
+      mask = may_be_nonzero0.and_not (must_be_nonzero1);
+      if (mask.is_zero ())
 	{
 	  op = op1;
 	  break;
 	}
-      mask = double_int_and_not (may_be_nonzero1, must_be_nonzero0);
-      if (double_int_zero_p (mask))
+      mask = may_be_nonzero1.and_not (must_be_nonzero0);
+      if (mask.is_zero ())
 	{
 	  op = op0;
 	  break;
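(Both cases rely on the same observation: X & Y simplifies to X when every possibly-set bit of X is certainly set in Y, and X | Y dually simplifies to Y under the same condition.  A sketch of that test on plain masks; names are illustrative:)

    #include <cstdint>
    #include <cassert>

    // 'may_x' = possibly-one bits of X, 'must_y' = certainly-one bits
    // of Y; the and_not above becomes (may_x & ~must_y).
    bool and_simplifies_to_first (uint64_t may_x, uint64_t must_y)
    {
      return (may_x & ~must_y) == 0;
    }

    int main ()
    {
      // X in [0, 0b0011]; Y's low two bits certainly set: X & Y == X.
      assert (and_simplifies_to_first (0b0011, 0b0111));
      // If Y's bit 1 is not guaranteed, X & Y can differ from X.
      assert (!and_simplifies_to_first (0b0011, 0b0101));
      return 0;
    }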
@@ -8549,42 +8517,34 @@ simplify_conversion_using_ranges (gimple stmt)
   /* If the first conversion is not injective, the second must not
      be widening.  */
-  if (double_int_cmp (double_int_sub (innermax, innermin),
-		      double_int_mask (middle_prec), true) > 0
+  if ((innermax - innermin).ugt (double_int::mask (middle_prec))
       && middle_prec < final_prec)
     return false;
   /* We also want a medium value so that we can track the effect that
      narrowing conversions with sign change have.  */
   inner_unsigned_p = TYPE_UNSIGNED (TREE_TYPE (innerop));
   if (inner_unsigned_p)
-    innermed = double_int_rshift (double_int_mask (inner_prec),
-				  1, inner_prec, false);
+    innermed = double_int::mask (inner_prec).lrshift (1, inner_prec);
   else
     innermed = double_int_zero;
-  if (double_int_cmp (innermin, innermed, inner_unsigned_p) >= 0
-      || double_int_cmp (innermed, innermax, inner_unsigned_p) >= 0)
+  if (innermin.cmp (innermed, inner_unsigned_p) >= 0
+      || innermed.cmp (innermax, inner_unsigned_p) >= 0)
     innermed = innermin;
 
   middle_unsigned_p = TYPE_UNSIGNED (TREE_TYPE (middleop));
-  middlemin = double_int_ext (innermin, middle_prec, middle_unsigned_p);
-  middlemed = double_int_ext (innermed, middle_prec, middle_unsigned_p);
-  middlemax = double_int_ext (innermax, middle_prec, middle_unsigned_p);
+  middlemin = innermin.ext (middle_prec, middle_unsigned_p);
+  middlemed = innermed.ext (middle_prec, middle_unsigned_p);
+  middlemax = innermax.ext (middle_prec, middle_unsigned_p);
 
   /* Require that the final conversion applied to both the original
      and the intermediate range produces the same result.  */
   final_unsigned_p = TYPE_UNSIGNED (finaltype);
-  if (!double_int_equal_p (double_int_ext (middlemin,
-					   final_prec, final_unsigned_p),
-			   double_int_ext (innermin,
-					   final_prec, final_unsigned_p))
-      || !double_int_equal_p (double_int_ext (middlemed,
-					      final_prec, final_unsigned_p),
-			      double_int_ext (innermed,
-					      final_prec, final_unsigned_p))
-      || !double_int_equal_p (double_int_ext (middlemax,
-					      final_prec, final_unsigned_p),
-			      double_int_ext (innermax,
-					      final_prec, final_unsigned_p)))
+  if (middlemin.ext (final_prec, final_unsigned_p)
+      != innermin.ext (final_prec, final_unsigned_p)
+      || middlemed.ext (final_prec, final_unsigned_p)
+	 != innermed.ext (final_prec, final_unsigned_p)
+      || middlemax.ext (final_prec, final_unsigned_p)
+	 != innermax.ext (final_prec, final_unsigned_p))
     return false;
 
   gimple_assign_set_rhs1 (stmt, innerop);
@@ -8629,11 +8589,11 @@ range_fits_type_p (value_range_t *vr, unsigned precision, bool unsigned_p)
   /* Then we can perform the conversion on both ends and compare
      the result for equality.  */
-  tem = double_int_ext (tree_to_double_int (vr->min), precision, unsigned_p);
-  if (!double_int_equal_p (tree_to_double_int (vr->min), tem))
+  tem = tree_to_double_int (vr->min).ext (precision, unsigned_p);
+  if (tree_to_double_int (vr->min) != tem)
     return false;
-  tem = double_int_ext (tree_to_double_int (vr->max), precision, unsigned_p);
-  if (!double_int_equal_p (tree_to_double_int (vr->max), tem))
+  tem = tree_to_double_int (vr->max).ext (precision, unsigned_p);
+  if (tree_to_double_int (vr->max) != tem)
     return false;
 
   return true;
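(The core of range_fits_type_p is the ext round trip: a value fits in 'precision' bits exactly when extending its low bits back to full width reproduces it.  Modelled on uint64_t, with ext an illustrative stand-in for double_int::ext:)

    #include <cstdint>
    #include <cassert>

    // Keep the low 'prec' bits and zero-extend (uns) or sign-extend
    // them back to the full word.
    uint64_t ext (uint64_t v, unsigned prec, bool uns)
    {
      if (prec >= 64)
        return v;
      uint64_t mask = (UINT64_C (1) << prec) - 1;
      v &= mask;
      if (!uns && ((v >> (prec - 1)) & 1))  // sign bit set: fill with ones
        v |= ~mask;
      return v;
    }

    int main ()
    {
      assert (ext (100, 8, false) == 100);  // 100 fits in 8 signed bits
      assert (ext ((uint64_t) -200, 8, false) != (uint64_t) -200);  // -200 doesn't
      return 0;
    }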

Index: gcc/tree.c

@@ -1041,7 +1041,7 @@ build_int_cst (tree type, HOST_WIDE_INT low)
   if (!type)
     type = integer_type_node;
 
-  return double_int_to_tree (type, shwi_to_double_int (low));
+  return double_int_to_tree (type, double_int::from_shwi (low));
 }
 
 /* Create an INT_CST node with a LOW value sign extended to TYPE.  */
@@ -1051,7 +1051,7 @@ build_int_cst_type (tree type, HOST_WIDE_INT low)
 {
   gcc_assert (type);
 
-  return double_int_to_tree (type, shwi_to_double_int (low));
+  return double_int_to_tree (type, double_int::from_shwi (low));
 }
 
 /* Constructs tree in type TYPE from with value given by CST.  Signedness
@@ -1062,7 +1062,7 @@ double_int_to_tree (tree type, double_int cst)
 {
   bool sign_extended_type = !TYPE_UNSIGNED (type);
 
-  cst = double_int_ext (cst, TYPE_PRECISION (type), !sign_extended_type);
+  cst = cst.ext (TYPE_PRECISION (type), !sign_extended_type);
 
   return build_int_cst_wide (type, cst.low, cst.high);
 }
@@ -1077,9 +1077,9 @@ double_int_fits_to_tree_p (const_tree type, double_int cst)
   bool sign_extended_type = !TYPE_UNSIGNED (type);
 
   double_int ext
-    = double_int_ext (cst, TYPE_PRECISION (type), !sign_extended_type);
+    = cst.ext (TYPE_PRECISION (type), !sign_extended_type);
 
-  return double_int_equal_p (cst, ext);
+  return cst == ext;
 }
 
 /* We force the double_int CST to the range of the type TYPE by sign or
@@ -1114,7 +1114,7 @@ force_fit_type_double (tree type, double_int cst, int overflowable,
 	  || (overflowable > 0 && sign_extended_type))
 	{
 	  tree t = make_node (INTEGER_CST);
-	  TREE_INT_CST (t) = double_int_ext (cst, TYPE_PRECISION (type),
-					     !sign_extended_type);
+	  TREE_INT_CST (t) = cst.ext (TYPE_PRECISION (type),
+				      !sign_extended_type);
 	  TREE_TYPE (t) = type;
 	  TREE_OVERFLOW (t) = 1;
@@ -1285,7 +1285,7 @@ build_low_bits_mask (tree type, unsigned bits)
     /* Sign extended all-ones mask.  */
     mask = double_int_minus_one;
   else
-    mask = double_int_mask (bits);
+    mask = double_int::mask (bits);
 
   return build_int_cst_wide (type, mask.low, mask.high);
 }
@@ -1910,7 +1910,7 @@ int
 fixed_zerop (const_tree expr)
 {
   return (TREE_CODE (expr) == FIXED_CST
-	  && double_int_zero_p (TREE_FIXED_CST (expr).data));
+	  && TREE_FIXED_CST (expr).data.is_zero ());
 }
 
 /* Return the power of two represented by a tree node known to be a
@@ -3998,8 +3998,7 @@ double_int
 mem_ref_offset (const_tree t)
 {
   tree toff = TREE_OPERAND (t, 1);
-  return double_int_sext (tree_to_double_int (toff),
-			  TYPE_PRECISION (TREE_TYPE (toff)));
+  return tree_to_double_int (toff).sext (TYPE_PRECISION (TREE_TYPE (toff)));
 }
 
 /* Return the pointer-type relevant for TBAA purposes from the
@@ -6557,7 +6556,7 @@ HOST_WIDE_INT
 size_low_cst (const_tree t)
 {
   double_int d = tree_to_double_int (t);
-  return double_int_sext (d, TYPE_PRECISION (TREE_TYPE (t))).low;
+  return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
 }
 
 /* Return the most significant (sign) bit of T.  */
@@ -8295,15 +8294,15 @@ retry:
       dd = tree_to_double_int (type_low_bound);
       if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_low_bound)))
 	{
-	  int c_neg = (!unsc && double_int_negative_p (dc));
-	  int t_neg = (unsc && double_int_negative_p (dd));
+	  int c_neg = (!unsc && dc.is_negative ());
+	  int t_neg = (unsc && dd.is_negative ());
 
 	  if (c_neg && !t_neg)
 	    return false;
-	  if ((c_neg || !t_neg) && double_int_ucmp (dc, dd) < 0)
+	  if ((c_neg || !t_neg) && dc.ult (dd))
 	    return false;
 	}
-      else if (double_int_cmp (dc, dd, unsc) < 0)
+      else if (dc.cmp (dd, unsc) < 0)
 	return false;
       ok_for_low_bound = true;
     }
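(The mixed-signedness branch above is subtle enough to restate in isolation: when the constant and the bound disagree in signedness, the sign tests must run before any unsigned comparison.  An illustrative single-word model, with below_low_bound a made-up name returning true when the constant fails the low-bound test:)

    #include <cstdint>
    #include <cassert>

    // dc is the constant, 'unsc' its unsignedness, dd the low bound of
    // the opposite signedness.
    bool below_low_bound (uint64_t dc, bool unsc, uint64_t dd)
    {
      bool c_neg = !unsc && (int64_t) dc < 0;  // dc.is_negative ()
      bool t_neg = unsc && (int64_t) dd < 0;   // dd.is_negative ()
      if (c_neg && !t_neg)
        return true;                 // negative constant, non-negative bound
      if ((c_neg || !t_neg) && dc < dd)        // dc.ult (dd)
        return true;
      return false;
    }

    int main ()
    {
      // -1 as a signed constant is below an unsigned low bound of 0.
      assert (below_low_bound ((uint64_t) -1, false, 0));
      assert (!below_low_bound (5, false, 0));
      return 0;
    }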
@@ -8316,15 +8315,15 @@ retry:
       dd = tree_to_double_int (type_high_bound);
       if (unsc != TYPE_UNSIGNED (TREE_TYPE (type_high_bound)))
 	{
-	  int c_neg = (!unsc && double_int_negative_p (dc));
-	  int t_neg = (unsc && double_int_negative_p (dd));
+	  int c_neg = (!unsc && dc.is_negative ());
+	  int t_neg = (unsc && dd.is_negative ());
 
 	  if (t_neg && !c_neg)
 	    return false;
-	  if ((t_neg || !c_neg) && double_int_ucmp (dc, dd) > 0)
+	  if ((t_neg || !c_neg) && dc.ugt (dd))
 	    return false;
 	}
-      else if (double_int_cmp (dc, dd, unsc) > 0)
+      else if (dc.cmp (dd, unsc) > 0)
 	return false;
       ok_for_high_bound = true;
     }
@@ -8338,7 +8337,7 @@ retry:
   /* Perform some generic filtering which may allow making a decision
      even if the bounds are not constant.  First, negative integers
     never fit in unsigned types, */
-  if (TYPE_UNSIGNED (type) && !unsc && double_int_negative_p (dc))
+  if (TYPE_UNSIGNED (type) && !unsc && dc.is_negative ())
     return false;
 
   /* Second, narrower types always fit in wider ones.  */
@@ -8393,9 +8392,8 @@ get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
       else
 	{
 	  double_int mn;
-	  mn = double_int_mask (TYPE_PRECISION (type) - 1);
-	  mn = double_int_sext (double_int_add (mn, double_int_one),
-				TYPE_PRECISION (type));
+	  mn = double_int::mask (TYPE_PRECISION (type) - 1);
+	  mn = (mn + double_int_one).sext (TYPE_PRECISION (type));
 	  mpz_set_double_int (min, mn, false);
 	}
     }
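(The two-step minimum computation above, mask (prec - 1) plus one then sign-extend, produces the most negative prec-bit value.  On a single word, assuming prec < 64; signed_min is an illustrative name:)

    #include <cstdint>
    #include <cassert>

    int64_t signed_min (unsigned prec)
    {
      assert (prec >= 1 && prec < 64);
      uint64_t mn = (UINT64_C (1) << (prec - 1)) - 1;  // mask (prec - 1)
      mn = mn + 1;                                     // mn + double_int_one
      // sext (prec): the sign bit is now set, so fill the upper bits.
      mn |= ~((UINT64_C (1) << prec) - 1);
      return (int64_t) mn;
    }

    int main ()
    {
      assert (signed_min (8) == -128);
      assert (signed_min (16) == -32768);
      return 0;
    }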
@@ -8407,10 +8405,10 @@ get_type_static_bounds (const_tree type, mpz_t min, mpz_t max)
       else
 	{
 	  if (TYPE_UNSIGNED (type))
-	    mpz_set_double_int (max, double_int_mask (TYPE_PRECISION (type)),
+	    mpz_set_double_int (max, double_int::mask (TYPE_PRECISION (type)),
				true);
 	  else
-	    mpz_set_double_int (max, double_int_mask (TYPE_PRECISION (type) - 1),
+	    mpz_set_double_int (max, double_int::mask (TYPE_PRECISION (type) - 1),
				true);
 	}
     }

Index: gcc/tree.h

@@ -4718,7 +4718,7 @@ extern tree force_fit_type_double (tree, double_int, int, bool);
 static inline tree
 build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
 {
-  return double_int_to_tree (type, uhwi_to_double_int (cst));
+  return double_int_to_tree (type, double_int::from_uhwi (cst));
 }
 
 extern tree build_int_cst (tree, HOST_WIDE_INT);
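(from_shwi and from_uhwi differ only in how the high word is filled.  A sketch of that distinction with an illustrative two-word struct mirroring double_int's {low, high} layout; not GCC code:)

    #include <cstdint>
    #include <cassert>

    struct pair_int { uint64_t low; int64_t high; };

    // Signed constructor replicates the sign into the high word ...
    pair_int from_shwi (int64_t cst)  { return { (uint64_t) cst, cst < 0 ? -1 : 0 }; }
    // ... the unsigned one zero-fills it.
    pair_int from_uhwi (uint64_t cst) { return { cst, 0 }; }

    int main ()
    {
      assert (from_shwi (-1).high == -1);            // sign-extended
      assert (from_uhwi ((uint64_t) -1).high == 0);  // zero-extended
      return 0;
    }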

Index: gcc/varasm.c

@@ -4649,14 +4649,13 @@ array_size_for_constructor (tree val)
 
   /* Compute the total number of array elements.  */
   tmp = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (val)));
-  i = double_int_sub (tree_to_double_int (max_index), tree_to_double_int (tmp));
-  i = double_int_add (i, double_int_one);
+  i = tree_to_double_int (max_index) - tree_to_double_int (tmp);
+  i += double_int_one;
 
   /* Multiply by the array element unit size to find number of bytes.  */
-  i = double_int_mul (i, tree_to_double_int
-			   (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val)))));
+  i *= tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (val))));
 
-  gcc_assert (double_int_fits_in_uhwi_p (i));
+  gcc_assert (i.fits_uhwi ());
   return i.low;
 }
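(The arithmetic above, restated on uint64_t: the element count is max_index - min_index + 1, scaled by the element byte size.  In this illustrative model fits_uhwi () becomes an explicit overflow check, since a single word can wrap where gcc_assert on the double_int product would fire instead; array_size is a made-up name:)

    #include <cstdint>
    #include <cassert>

    bool array_size (uint64_t max_index, uint64_t min_index,
                     uint64_t elt_size, uint64_t *bytes)
    {
      uint64_t n = max_index - min_index + 1;   // i = max - min; i += 1
      if (elt_size != 0 && n > UINT64_MAX / elt_size)
        return false;                           // product would not fit
      *bytes = n * elt_size;                    // i *= elt_size
      return true;
    }

    int main ()
    {
      uint64_t b;
      assert (array_size (9, 0, 4, &b) && b == 40);  // ten 4-byte elements
      return 0;
    }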
@@ -4740,9 +4739,9 @@ output_constructor_regular_field (oc_local_state *local)
 	 sign-extend the result because Ada has negative DECL_FIELD_OFFSETs
	 but we are using an unsigned sizetype.  */
       unsigned prec = TYPE_PRECISION (sizetype);
-      double_int idx = double_int_sub (tree_to_double_int (local->index),
-				       tree_to_double_int (local->min_index));
-      idx = double_int_sext (idx, prec);
+      double_int idx = tree_to_double_int (local->index)
+		       - tree_to_double_int (local->min_index);
+      idx = idx.sext (prec);
       fieldpos = (tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (local->val)), 1)
		  * idx.low);
     }