utils.c (scale_by_factor_of): New function.

* gcc-interface/utils.c (scale_by_factor_of): New function.
	(rest_of_record_type_compilation): Use scale_by_factor_of in order to
	scale the original offset for both rounding cases; in the second case,
	take into account the addend to compute the alignment.  Tidy up.

From-SVN: r203852
This commit is contained in:
Eric Botcazou 2013-10-19 11:11:03 +00:00 committed by Eric Botcazou
parent 457f72ac38
commit 5c475ba98b
2 changed files with 71 additions and 56 deletions

View File

@ -1,3 +1,10 @@
2013-10-19 Eric Botcazou <ebotcazou@adacore.com>
* gcc-interface/utils.c (scale_by_factor_of): New function.
(rest_of_record_type_compilation): Use scale_by_factor_of in order to
scale the original offset for both rounding cases; in the second case,
take into account the addend to compute the alignment. Tidy up.
2013-10-19 Eric Botcazou <ebotcazou@adacore.com> 2013-10-19 Eric Botcazou <ebotcazou@adacore.com>
* gcc-interface/cuintp.c: Remove useless include directives. * gcc-interface/cuintp.c: Remove useless include directives.

View File

@ -232,6 +232,7 @@ static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *); static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode); static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree); static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree); static bool potential_alignment_gap (tree, tree, tree);
/* Initialize data structures of the utils.c module. */ /* Initialize data structures of the utils.c module. */
@ -1708,93 +1709,74 @@ rest_of_record_type_compilation (tree record_type)
TYPE_SIZE_UNIT (new_record_type) TYPE_SIZE_UNIT (new_record_type)
= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT); = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
/* Now scan all the fields, replacing each field with a new /* Now scan all the fields, replacing each field with a new field
field corresponding to the new encoding. */ corresponding to the new encoding. */
for (old_field = TYPE_FIELDS (record_type); old_field; for (old_field = TYPE_FIELDS (record_type); old_field;
old_field = DECL_CHAIN (old_field)) old_field = DECL_CHAIN (old_field))
{ {
tree field_type = TREE_TYPE (old_field); tree field_type = TREE_TYPE (old_field);
tree field_name = DECL_NAME (old_field); tree field_name = DECL_NAME (old_field);
tree new_field;
tree curpos = bit_position (old_field); tree curpos = bit_position (old_field);
tree pos, new_field;
bool var = false; bool var = false;
unsigned int align = 0; unsigned int align = 0;
tree pos;
/* We're going to do some pattern matching below so remove as many
conversions as possible. */
curpos = remove_conversions (curpos, true);
/* See how the position was modified from the last position. /* See how the position was modified from the last position.
There are two basic cases we support: a value was added There are two basic cases we support: a value was added
to the last position or the last position was rounded to to the last position or the last position was rounded to
a boundary and then something was added. Check for the
first case first. If not, see if there is any evidence first case first. If not, see if there is any evidence
of rounding. If so, round the last position and try of rounding. If so, round the last position and retry.
again.
If this is a union, the position can be taken as zero. */
/* Some computations depend on the shape of the position expression,
so strip conversions to make sure it's exposed. */
curpos = remove_conversions (curpos, true);
If this is a union, the position can be taken as zero. */
if (TREE_CODE (new_record_type) == UNION_TYPE) if (TREE_CODE (new_record_type) == UNION_TYPE)
pos = bitsize_zero_node, align = 0; pos = bitsize_zero_node;
else else
pos = compute_related_constant (curpos, last_pos); pos = compute_related_constant (curpos, last_pos);
if (!pos && TREE_CODE (curpos) == MULT_EXPR if (!pos
&& TREE_CODE (curpos) == MULT_EXPR
&& host_integerp (TREE_OPERAND (curpos, 1), 1)) && host_integerp (TREE_OPERAND (curpos, 1), 1))
{ {
tree offset = TREE_OPERAND (curpos, 0); tree offset = TREE_OPERAND (curpos, 0);
align = tree_low_cst (TREE_OPERAND (curpos, 1), 1); align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
align = scale_by_factor_of (offset, align);
/* An offset which is a bitwise AND with a mask increases the last_pos = round_up (last_pos, align);
alignment according to the number of trailing zeros. */ pos = compute_related_constant (curpos, last_pos);
offset = remove_conversions (offset, true);
if (TREE_CODE (offset) == BIT_AND_EXPR
&& TREE_CODE (TREE_OPERAND (offset, 1)) == INTEGER_CST)
{
unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (TREE_OPERAND (offset, 1));
unsigned int i;
for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
{
if (mask & 1)
break;
mask >>= 1;
align *= 2;
}
}
pos = compute_related_constant (curpos,
round_up (last_pos, align));
} }
else if (!pos && TREE_CODE (curpos) == PLUS_EXPR else if (!pos
&& TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST && TREE_CODE (curpos) == PLUS_EXPR
&& host_integerp (TREE_OPERAND (curpos, 1), 1)
&& TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
&& host_integerp (TREE_OPERAND && host_integerp
(TREE_OPERAND (curpos, 0), 1), (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1))
1))
{ {
tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
unsigned HOST_WIDE_INT addend
= tree_low_cst (TREE_OPERAND (curpos, 1), 1);
align align
= tree_low_cst = tree_low_cst (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
(TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1); align = scale_by_factor_of (offset, align);
pos = compute_related_constant (curpos, align = MIN (align, addend & -addend);
round_up (last_pos, align)); last_pos = round_up (last_pos, align);
pos = compute_related_constant (curpos, last_pos);
} }
else if (potential_alignment_gap (prev_old_field, old_field, else if (potential_alignment_gap (prev_old_field, old_field, pos))
pos))
{ {
align = TYPE_ALIGN (field_type); align = TYPE_ALIGN (field_type);
pos = compute_related_constant (curpos, last_pos = round_up (last_pos, align);
round_up (last_pos, align)); pos = compute_related_constant (curpos, last_pos);
} }
/* If we can't compute a position, set it to zero. /* If we can't compute a position, set it to zero.
??? We really should abort here, but it's too much work ??? We really should abort here, but it's too much work
to get this correct for all cases. */ to get this correct for all cases. */
if (!pos) if (!pos)
pos = bitsize_zero_node; pos = bitsize_zero_node;
@ -2576,6 +2558,32 @@ value_factor_p (tree value, HOST_WIDE_INT factor)
return false; return false;
} }
/* Return VALUE scaled by the biggest power-of-2 factor of EXPR.  */

static unsigned int
scale_by_factor_of (tree expr, unsigned int value)
{
  /* Strip conversions so the pattern match below sees the real shape.  */
  expr = remove_conversions (expr, true);

  /* An expression that is a bitwise AND with an INTEGER_CST mask has a
     power-of-2 factor given by the number of trailing zero bits of the
     mask, so double VALUE once per trailing zero.  The counter bounds the
     loop in case the mask is entirely zero.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      unsigned int count;

      for (count = 0;
	   count < HOST_BITS_PER_WIDE_INT && (mask & 1) == 0;
	   count++, mask >>= 1)
	value *= 2;
    }

  return value;
}
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
unless we can prove these 2 fields are laid out in such a way that no gap unless we can prove these 2 fields are laid out in such a way that no gap
exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET