mirror of git://gcc.gnu.org/git/gcc.git
[27/46] Remove duplicated stmt_vec_info lookups
Various places called vect_dr_stmt or vinfo_for_stmt multiple times
on the same input.  This patch makes them reuse the earlier result.
It also splits a couple of single vinfo_for_stmt calls out into
separate statements so that they can be reused in later patches.

2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>

gcc/
	* tree-vect-data-refs.c (vect_analyze_data_ref_dependence)
	(vect_slp_analyze_node_dependences, vect_analyze_data_ref_accesses)
	(vect_permute_store_chain, vect_permute_load_chain)
	(vect_shift_permute_load_chain, vect_transform_grouped_load): Avoid
	repeated stmt_vec_info lookups.
	* tree-vect-loop-manip.c (vect_can_advance_ivs_p): Likewise.
	(vect_update_ivs_after_vectorizer): Likewise.
	* tree-vect-loop.c (vect_is_simple_reduction): Likewise.
	(vect_create_epilog_for_reduction, vectorizable_reduction): Likewise.
	* tree-vect-patterns.c (adjust_bool_stmts): Likewise.
	* tree-vect-slp.c (vect_analyze_slp_instance): Likewise.
	(vect_bb_slp_scalar_cost): Likewise.
	* tree-vect-stmts.c (get_group_alias_ptr_type): Likewise.

From-SVN: r263142
This commit is contained in:
parent beb456c375
commit 91987857e6
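The refactoring is the same in every hunk below: look up the stmt_vec_info once, keep it in a named local, and reuse it instead of calling vinfo_for_stmt or vect_dr_stmt again on the same statement. As a minimal, self-contained C sketch of that idea (info_t, lookup_info and table are hypothetical stand-ins for stmt_vec_info and vinfo_for_stmt, not GCC APIs):

#include <stdio.h>

/* Hypothetical stand-in for stmt_vec_info; not a GCC type.  */
typedef struct { int vectype; int def_type; } info_t;

static info_t table[16];

/* Hypothetical stand-in for vinfo_for_stmt/vect_dr_stmt: map a
   statement id to its auxiliary info.  */
static info_t *
lookup_info (int stmt)
{
  return &table[stmt];
}

/* Before: the same statement is looked up twice.  */
static int
fields_before (int stmt)
{
  return lookup_info (stmt)->vectype + lookup_info (stmt)->def_type;
}

/* After: look up once and reuse the result, mirroring the
   "stmt_vec_info stmt_info = vinfo_for_stmt (stmt);" hunks below.  */
static int
fields_after (int stmt)
{
  info_t *stmt_info = lookup_info (stmt);
  return stmt_info->vectype + stmt_info->def_type;
}

int
main (void)
{
  table[3].vectype = 4;
  table[3].def_type = 2;
  printf ("%d %d\n", fields_before (3), fields_after (3));
  return 0;
}

Besides avoiding the repeated lookup, the named local is what the later patches in this series build on, as the commit message notes.
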
gcc/ChangeLog
@@ -1,3 +1,19 @@
+2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>
+
+	* tree-vect-data-refs.c (vect_analyze_data_ref_dependence)
+	(vect_slp_analyze_node_dependences, vect_analyze_data_ref_accesses)
+	(vect_permute_store_chain, vect_permute_load_chain)
+	(vect_shift_permute_load_chain, vect_transform_grouped_load): Avoid
+	repeated stmt_vec_info lookups.
+	* tree-vect-loop-manip.c (vect_can_advance_ivs_p): Likewise.
+	(vect_update_ivs_after_vectorizer): Likewise.
+	* tree-vect-loop.c (vect_is_simple_reduction): Likewise.
+	(vect_create_epilog_for_reduction, vectorizable_reduction): Likewise.
+	* tree-vect-patterns.c (adjust_bool_stmts): Likewise.
+	* tree-vect-slp.c (vect_analyze_slp_instance): Likewise.
+	(vect_bb_slp_scalar_cost): Likewise.
+	* tree-vect-stmts.c (get_group_alias_ptr_type): Likewise.
+
 2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>
 
 	* tree-vect-data-refs.c (vect_check_gather_scatter): Pass the

gcc/tree-vect-data-refs.c
@@ -472,8 +472,7 @@ vect_analyze_data_ref_dependence (struct data_dependence_relation *ddr,
 	     ... = a[i];
 	     a[i+1] = ...;
 	   where loads from the group interleave with the store.  */
-      if (!vect_preserves_scalar_order_p (vect_dr_stmt(dra),
-					  vect_dr_stmt (drb)))
+      if (!vect_preserves_scalar_order_p (stmtinfo_a, stmtinfo_b))
 	{
 	  if (dump_enabled_p ())
 	    dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -673,6 +672,7 @@ vect_slp_analyze_node_dependences (slp_instance instance, slp_tree node,
      in NODE verifying we can sink them up to the last stmt in the
      group.  */
   stmt_vec_info last_access_info = vect_find_last_scalar_stmt_in_slp (node);
+  vec_info *vinfo = last_access_info->vinfo;
   for (unsigned k = 0; k < SLP_INSTANCE_GROUP_SIZE (instance); ++k)
     {
       stmt_vec_info access_info = SLP_TREE_SCALAR_STMTS (node)[k];
@@ -691,7 +691,8 @@ vect_slp_analyze_node_dependences (slp_instance instance, slp_tree node,
 
 	  /* If we couldn't record a (single) data reference for this
 	     stmt we have to resort to the alias oracle.  */
-	  data_reference *dr_b = STMT_VINFO_DATA_REF (vinfo_for_stmt (stmt));
+	  stmt_vec_info stmt_info = vinfo->lookup_stmt (stmt);
+	  data_reference *dr_b = STMT_VINFO_DATA_REF (stmt_info);
 	  if (!dr_b)
 	    {
 	      /* We are moving a store or sinking a load - this means
@@ -2951,7 +2952,7 @@ vect_analyze_data_ref_accesses (vec_info *vinfo)
 	      || data_ref_compare_tree (DR_BASE_ADDRESS (dra),
 					DR_BASE_ADDRESS (drb)) != 0
 	      || data_ref_compare_tree (DR_OFFSET (dra), DR_OFFSET (drb)) != 0
-	      || !can_group_stmts_p (vect_dr_stmt (dra), vect_dr_stmt (drb)))
+	      || !can_group_stmts_p (stmtinfo_a, stmtinfo_b))
 	    break;
 
 	  /* Check that the data-refs have the same constant size.  */
@@ -3040,11 +3041,11 @@ vect_analyze_data_ref_accesses (vec_info *vinfo)
 	  /* Link the found element into the group list.  */
 	  if (!DR_GROUP_FIRST_ELEMENT (stmtinfo_a))
 	    {
-	      DR_GROUP_FIRST_ELEMENT (stmtinfo_a) = vect_dr_stmt (dra);
+	      DR_GROUP_FIRST_ELEMENT (stmtinfo_a) = stmtinfo_a;
 	      lastinfo = stmtinfo_a;
 	    }
-	  DR_GROUP_FIRST_ELEMENT (stmtinfo_b) = vect_dr_stmt (dra);
-	  DR_GROUP_NEXT_ELEMENT (lastinfo) = vect_dr_stmt (drb);
+	  DR_GROUP_FIRST_ELEMENT (stmtinfo_b) = stmtinfo_a;
+	  DR_GROUP_NEXT_ELEMENT (lastinfo) = stmtinfo_b;
 	  lastinfo = stmtinfo_b;
 	}
     }
@@ -5219,9 +5220,10 @@ vect_permute_store_chain (vec<tree> dr_chain,
 			  gimple_stmt_iterator *gsi,
 			  vec<tree> *result_chain)
 {
+  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vect1, vect2, high, low;
   gimple *perm_stmt;
-  tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
+  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   tree perm_mask_low, perm_mask_high;
   tree data_ref;
   tree perm3_mask_low, perm3_mask_high;
@@ -5840,11 +5842,12 @@ vect_permute_load_chain (vec<tree> dr_chain,
 			 gimple_stmt_iterator *gsi,
 			 vec<tree> *result_chain)
 {
+  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree data_ref, first_vect, second_vect;
   tree perm_mask_even, perm_mask_odd;
   tree perm3_mask_low, perm3_mask_high;
   gimple *perm_stmt;
-  tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
+  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   unsigned int i, j, log_length = exact_log2 (length);
 
   result_chain->quick_grow (length);
@@ -6043,14 +6046,14 @@ vect_shift_permute_load_chain (vec<tree> dr_chain,
 			       gimple_stmt_iterator *gsi,
 			       vec<tree> *result_chain)
 {
+  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   tree vect[3], vect_shift[3], data_ref, first_vect, second_vect;
   tree perm2_mask1, perm2_mask2, perm3_mask;
   tree select_mask, shift1_mask, shift2_mask, shift3_mask, shift4_mask;
   gimple *perm_stmt;
 
-  tree vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
+  tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   unsigned int i;
-  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
 
   unsigned HOST_WIDE_INT nelt, vf;
@@ -6310,6 +6313,7 @@ void
 vect_transform_grouped_load (gimple *stmt, vec<tree> dr_chain, int size,
 			     gimple_stmt_iterator *gsi)
 {
+  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
   machine_mode mode;
   vec<tree> result_chain = vNULL;
 
@@ -6321,7 +6325,7 @@ vect_transform_grouped_load (gimple *stmt, vec<tree> dr_chain, int size,
   /* If reassociation width for vector type is 2 or greater target machine can
      execute 2 or more vector instructions in parallel.  Otherwise try to
      get chain for loads group using vect_shift_permute_load_chain.  */
-  mode = TYPE_MODE (STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt)));
+  mode = TYPE_MODE (STMT_VINFO_VECTYPE (stmt_info));
   if (targetm.sched.reassociation_width (VEC_PERM_EXPR, mode) > 1
       || pow2p_hwi (size)
      || !vect_shift_permute_load_chain (dr_chain, size, stmt,

gcc/tree-vect-loop-manip.c
@@ -1377,6 +1377,7 @@ vect_can_advance_ivs_p (loop_vec_info loop_vinfo)
       tree evolution_part;
 
       gphi *phi = gsi.phi ();
+      stmt_vec_info phi_info = loop_vinfo->lookup_stmt (phi);
       if (dump_enabled_p ())
 	{
 	  dump_printf_loc (MSG_NOTE, vect_location, "Analyze phi: ");
@@ -1397,8 +1398,7 @@ vect_can_advance_ivs_p (loop_vec_info loop_vinfo)
 
       /* Analyze the evolution function.  */
 
-      evolution_part
-	= STMT_VINFO_LOOP_PHI_EVOLUTION_PART (vinfo_for_stmt (phi));
+      evolution_part = STMT_VINFO_LOOP_PHI_EVOLUTION_PART (phi_info);
       if (evolution_part == NULL_TREE)
 	{
 	  if (dump_enabled_p ())
@@ -1500,6 +1500,7 @@ vect_update_ivs_after_vectorizer (loop_vec_info loop_vinfo,
 
       gphi *phi = gsi.phi ();
       gphi *phi1 = gsi1.phi ();
+      stmt_vec_info phi_info = loop_vinfo->lookup_stmt (phi);
       if (dump_enabled_p ())
 	{
 	  dump_printf_loc (MSG_NOTE, vect_location,
@@ -1517,7 +1518,7 @@ vect_update_ivs_after_vectorizer (loop_vec_info loop_vinfo,
 	}
 
       type = TREE_TYPE (gimple_phi_result (phi));
-      step_expr = STMT_VINFO_LOOP_PHI_EVOLUTION_PART (vinfo_for_stmt (phi));
+      step_expr = STMT_VINFO_LOOP_PHI_EVOLUTION_PART (phi_info);
       step_expr = unshare_expr (step_expr);
 
       /* FORNOW: We do not support IVs whose evolution function is a polynomial

gcc/tree-vect-loop.c
@@ -3252,7 +3252,7 @@ vect_is_simple_reduction (loop_vec_info loop_info, stmt_vec_info phi_info,
     }
 
   /* Dissolve group eventually half-built by vect_is_slp_reduction.  */
-  stmt_vec_info first = REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (def_stmt));
+  stmt_vec_info first = REDUC_GROUP_FIRST_ELEMENT (def_stmt_info);
   while (first)
     {
       stmt_vec_info next = REDUC_GROUP_NEXT_ELEMENT (first);
@@ -4784,7 +4784,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
         # b1 = phi <b2, b0>
         a2 = operation (a1)
         b2 = operation (b1)  */
-  slp_reduc = (slp_node && !REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)));
+  slp_reduc = (slp_node && !REDUC_GROUP_FIRST_ELEMENT (stmt_info));
 
   /* True if we should implement SLP_REDUC using native reduction operations
      instead of scalar operations.  */
@@ -4799,7 +4799,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
 
      we may end up with more than one vector result.  Here we reduce them to
      one vector.  */
-  if (REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)) || direct_slp_reduc)
+  if (REDUC_GROUP_FIRST_ELEMENT (stmt_info) || direct_slp_reduc)
     {
       tree first_vect = PHI_RESULT (new_phis[0]);
       gassign *new_vec_stmt = NULL;
@@ -5544,7 +5544,7 @@ vect_finalize_reduction:
      necessary, hence we set here REDUC_GROUP_SIZE to 1.  SCALAR_DEST is the
      LHS of the last stmt in the reduction chain, since we are looking for
      the loop exit phi node.  */
-  if (REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
+  if (REDUC_GROUP_FIRST_ELEMENT (stmt_info))
     {
       stmt_vec_info dest_stmt_info
	= SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1];
@@ -6095,8 +6095,8 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
   tree cond_reduc_val = NULL_TREE;
 
   /* Make sure it was already recognized as a reduction computation.  */
-  if (STMT_VINFO_DEF_TYPE (vinfo_for_stmt (stmt)) != vect_reduction_def
-      && STMT_VINFO_DEF_TYPE (vinfo_for_stmt (stmt)) != vect_nested_cycle)
+  if (STMT_VINFO_DEF_TYPE (stmt_info) != vect_reduction_def
+      && STMT_VINFO_DEF_TYPE (stmt_info) != vect_nested_cycle)
     return false;
 
   if (nested_in_vect_loop_p (loop, stmt))
@@ -6789,7 +6789,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
 
   if (reduction_type == FOLD_LEFT_REDUCTION
       && slp_node
-      && !REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
+      && !REDUC_GROUP_FIRST_ELEMENT (stmt_info))
     {
       /* We cannot use in-order reductions in this case because there is
	 an implicit reassociation of the operations involved.  */
@@ -6818,7 +6818,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
 
   /* Check extra constraints for variable-length unchained SLP reductions.  */
   if (STMT_SLP_TYPE (stmt_info)
-      && !REDUC_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt))
+      && !REDUC_GROUP_FIRST_ELEMENT (stmt_info)
       && !nunits_out.is_constant ())
     {
       /* We checked above that we could build the initial vector when

gcc/tree-vect-patterns.c
@@ -3505,6 +3505,8 @@ static tree
 adjust_bool_stmts (hash_set <gimple *> &bool_stmt_set,
		   tree out_type, gimple *stmt)
 {
+  stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
+
   /* Gather original stmts in the bool pattern in their order of appearance
      in the IL.  */
   auto_vec<gimple *> bool_stmts (bool_stmt_set.elements ());
@@ -3517,11 +3519,11 @@ adjust_bool_stmts (hash_set <gimple *> &bool_stmt_set,
   hash_map <tree, tree> defs;
   for (unsigned i = 0; i < bool_stmts.length (); ++i)
     adjust_bool_pattern (gimple_assign_lhs (bool_stmts[i]),
-			 out_type, vinfo_for_stmt (stmt), defs);
+			 out_type, stmt_info, defs);
 
   /* Pop the last pattern seq stmt and install it as pattern root for STMT.  */
   gimple *pattern_stmt
-    = gimple_seq_last_stmt (STMT_VINFO_PATTERN_DEF_SEQ (vinfo_for_stmt (stmt)));
+    = gimple_seq_last_stmt (STMT_VINFO_PATTERN_DEF_SEQ (stmt_info));
   return gimple_assign_lhs (pattern_stmt);
 }
 

gcc/tree-vect-slp.c
@@ -2157,8 +2157,8 @@ vect_analyze_slp_instance (vec_info *vinfo,
	 vector size.  */
       unsigned HOST_WIDE_INT const_nunits;
       if (is_a <bb_vec_info> (vinfo)
-	  && STMT_VINFO_GROUPED_ACCESS (vinfo_for_stmt (stmt))
-	  && DR_GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt))
+	  && STMT_VINFO_GROUPED_ACCESS (stmt_info)
+	  && DR_GROUP_FIRST_ELEMENT (stmt_info)
	  && nunits.is_constant (&const_nunits))
	{
	  /* We consider breaking the group only on VF boundaries from the existing
@@ -2693,6 +2693,7 @@ vect_bb_slp_scalar_cost (basic_block bb,
   FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt_info)
     {
       gimple *stmt = stmt_info->stmt;
+      vec_info *vinfo = stmt_info->vinfo;
       ssa_op_iter op_iter;
       def_operand_p def_p;
 
@@ -2709,12 +2710,14 @@ vect_bb_slp_scalar_cost (basic_block bb,
	    imm_use_iterator use_iter;
	    gimple *use_stmt;
	    FOR_EACH_IMM_USE_STMT (use_stmt, use_iter, DEF_FROM_PTR (def_p))
-	      if (!is_gimple_debug (use_stmt)
-		  && (! vect_stmt_in_region_p (stmt_info->vinfo, use_stmt)
-		      || ! PURE_SLP_STMT (vinfo_for_stmt (use_stmt))))
+	      if (!is_gimple_debug (use_stmt))
		{
-		  (*life)[i] = true;
-		  BREAK_FROM_IMM_USE_STMT (use_iter);
+		  stmt_vec_info use_stmt_info = vinfo->lookup_stmt (use_stmt);
+		  if (!use_stmt_info || !PURE_SLP_STMT (use_stmt_info))
+		    {
+		      (*life)[i] = true;
+		      BREAK_FROM_IMM_USE_STMT (use_iter);
+		    }
		}
	  }
       if ((*life)[i])

gcc/tree-vect-stmts.c
@@ -6193,11 +6193,11 @@ ensure_base_align (struct data_reference *dr)
 static tree
 get_group_alias_ptr_type (gimple *first_stmt)
 {
+  stmt_vec_info first_stmt_info = vinfo_for_stmt (first_stmt);
   struct data_reference *first_dr, *next_dr;
 
-  first_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
-  stmt_vec_info next_stmt_info
-    = DR_GROUP_NEXT_ELEMENT (vinfo_for_stmt (first_stmt));
+  first_dr = STMT_VINFO_DATA_REF (first_stmt_info);
+  stmt_vec_info next_stmt_info = DR_GROUP_NEXT_ELEMENT (first_stmt_info);
   while (next_stmt_info)
     {
       next_dr = STMT_VINFO_DATA_REF (next_stmt_info);