[31/46] Use stmt_vec_info in function interfaces (part 1)
This first (less mechanical) part handles cases that involve changes in the
callers or non-trivial changes in the functions themselves.

2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>

gcc/
	* tree-vect-data-refs.c (vect_describe_gather_scatter_call): Take
	a stmt_vec_info instead of a gcall.
	(vect_check_gather_scatter): Update call accordingly.
	* tree-vect-loop-manip.c (iv_phi_p): Take a stmt_vec_info instead
	of a gphi.
	(vect_can_advance_ivs_p, vect_update_ivs_after_vectorizer)
	(slpeel_update_phi_nodes_for_loops): Update calls accordingly.
	* tree-vect-loop.c (vect_transform_loop_stmt): Take a stmt_vec_info
	instead of a gimple stmt.
	(vect_transform_loop): Update calls accordingly.
	* tree-vect-slp.c (vect_split_slp_store_group): Take and return
	stmt_vec_infos instead of gimple stmts.
	(vect_analyze_slp_instance): Update use accordingly.
	* tree-vect-stmts.c (read_vector_array, write_vector_array)
	(vect_clobber_variable, vect_stmt_relevant_p, permute_vec_elements)
	(vect_use_strided_gather_scatters_p, vect_build_all_ones_mask)
	(vect_build_zero_merge_argument, vect_get_gather_scatter_ops)
	(vect_gen_widened_results_half, vect_get_loop_based_defs)
	(vect_create_vectorized_promotion_stmts, can_vectorize_live_stmts):
	Take a stmt_vec_info instead of a gimple stmt and pass stmt_vec_infos
	down to subroutines.

From-SVN: r263146
parent eca52fdd6c
commit 825702749a
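The pattern behind every hunk below is the same: functions that used to take a
raw gimple, gphi, or gcall pointer, and fetch the vectorizer's per-statement
bookkeeping with vinfo_for_stmt, now take the stmt_vec_info itself and recover
the typed statement with a checked downcast. A minimal self-contained sketch of
that before/after shape follows; gimple, gphi, and stmt_vec_info here are
simplified stand-ins for the real GCC classes, not the actual declarations.

    #include <cassert>

    // Simplified stand-ins for the GCC statement classes.
    struct gimple { bool is_phi; };
    struct gphi : gimple { bool virtual_result; };

    // Per-statement vectorizer bookkeeping; the real GCC type wraps
    // these fields in accessor macros like STMT_VINFO_DEF_TYPE.
    enum vect_def_type { vect_internal_def, vect_reduction_def };
    struct stmt_vec_info_s
    {
      gimple *stmt;
      vect_def_type def_type;
    };
    typedef stmt_vec_info_s *stmt_vec_info;

    // After the patch: the caller hands us the stmt_vec_info directly
    // and we downcast the statement when the PHI-specific view is
    // needed.  (GCC spells the checked cast as_a <gphi *> (...).)
    static bool
    iv_phi_p (stmt_vec_info stmt_info)
    {
      gphi *phi = static_cast<gphi *> (stmt_info->stmt);
      if (phi->virtual_result)
        return false;
      return stmt_info->def_type != vect_reduction_def;
    }

    int
    main ()
    {
      gphi phi;
      phi.is_phi = true;
      phi.virtual_result = false;
      stmt_vec_info_s info = { &phi, vect_internal_def };
      assert (iv_phi_p (&info));
    }

The old signatures instead took gphi *phi and did the reverse lookup inside the
callee (stmt_vec_info stmt_info = vinfo_for_stmt (phi)), as the deleted lines
in the diff show.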
gcc/ChangeLog
@@ -1,3 +1,27 @@
+2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>
+
+	* tree-vect-data-refs.c (vect_describe_gather_scatter_call): Take
+	a stmt_vec_info instead of a gcall.
+	(vect_check_gather_scatter): Update call accordingly.
+	* tree-vect-loop-manip.c (iv_phi_p): Take a stmt_vec_info instead
+	of a gphi.
+	(vect_can_advance_ivs_p, vect_update_ivs_after_vectorizer)
+	(slpeel_update_phi_nodes_for_loops): Update calls accordingly.
+	* tree-vect-loop.c (vect_transform_loop_stmt): Take a stmt_vec_info
+	instead of a gimple stmt.
+	(vect_transform_loop): Update calls accordingly.
+	* tree-vect-slp.c (vect_split_slp_store_group): Take and return
+	stmt_vec_infos instead of gimple stmts.
+	(vect_analyze_slp_instance): Update use accordingly.
+	* tree-vect-stmts.c (read_vector_array, write_vector_array)
+	(vect_clobber_variable, vect_stmt_relevant_p, permute_vec_elements)
+	(vect_use_strided_gather_scatters_p, vect_build_all_ones_mask)
+	(vect_build_zero_merge_argument, vect_get_gather_scatter_ops)
+	(vect_gen_widened_results_half, vect_get_loop_based_defs)
+	(vect_create_vectorized_promotion_stmts, can_vectorize_live_stmts):
+	Take a stmt_vec_info instead of a gimple stmt and pass stmt_vec_infos
+	down to subroutines.
+
 2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>
 
 	* tree-vect-loop.c (vect_analyze_scalar_cycles_1): Change the type
gcc/tree-vect-data-refs.c
@@ -3621,13 +3621,14 @@ vect_gather_scatter_fn_p (bool read_p, bool masked_p, tree vectype,
   return true;
 }
 
-/* CALL is a call to an internal gather load or scatter store function.
+/* STMT_INFO is a call to an internal gather load or scatter store function.
    Describe the operation in INFO.  */
 
 static void
-vect_describe_gather_scatter_call (gcall *call, gather_scatter_info *info)
+vect_describe_gather_scatter_call (stmt_vec_info stmt_info,
+				   gather_scatter_info *info)
 {
-  stmt_vec_info stmt_info = vinfo_for_stmt (call);
+  gcall *call = as_a <gcall *> (stmt_info->stmt);
   tree vectype = STMT_VINFO_VECTYPE (stmt_info);
   data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
@@ -3672,7 +3673,7 @@ vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo,
   ifn = gimple_call_internal_fn (call);
   if (internal_gather_scatter_fn_p (ifn))
     {
-      vect_describe_gather_scatter_call (call, info);
+      vect_describe_gather_scatter_call (stmt_info, info);
       return true;
     }
   masked_p = (ifn == IFN_MASK_LOAD || ifn == IFN_MASK_STORE);
gcc/tree-vect-loop-manip.c
@@ -1335,16 +1335,16 @@ find_loop_location (struct loop *loop)
   return dump_user_location_t ();
 }
 
-/* Return true if PHI defines an IV of the loop to be vectorized.  */
+/* Return true if the phi described by STMT_INFO defines an IV of the
+   loop to be vectorized.  */
 
 static bool
-iv_phi_p (gphi *phi)
+iv_phi_p (stmt_vec_info stmt_info)
 {
+  gphi *phi = as_a <gphi *> (stmt_info->stmt);
   if (virtual_operand_p (PHI_RESULT (phi)))
     return false;
 
-  stmt_vec_info stmt_info = vinfo_for_stmt (phi);
-  gcc_assert (stmt_info != NULL_STMT_VEC_INFO);
   if (STMT_VINFO_DEF_TYPE (stmt_info) == vect_reduction_def
       || STMT_VINFO_DEF_TYPE (stmt_info) == vect_double_reduction_def)
     return false;
@@ -1388,7 +1388,7 @@ vect_can_advance_ivs_p (loop_vec_info loop_vinfo)
	 virtual defs/uses (i.e., memory accesses) are analyzed elsewhere.
 
	 Skip reduction phis.  */
-      if (!iv_phi_p (phi))
+      if (!iv_phi_p (phi_info))
	{
	  if (dump_enabled_p ())
	    dump_printf_loc (MSG_NOTE, vect_location,
@@ -1509,7 +1509,7 @@ vect_update_ivs_after_vectorizer (loop_vec_info loop_vinfo,
	}
 
       /* Skip reduction and virtual phis.  */
-      if (!iv_phi_p (phi))
+      if (!iv_phi_p (phi_info))
	{
	  if (dump_enabled_p ())
	    dump_printf_loc (MSG_NOTE, vect_location,
@@ -2088,7 +2088,8 @@ slpeel_update_phi_nodes_for_loops (loop_vec_info loop_vinfo,
       tree arg = PHI_ARG_DEF_FROM_EDGE (orig_phi, first_latch_e);
       /* Generate lcssa PHI node for the first loop.  */
       gphi *vect_phi = (loop == first) ? orig_phi : update_phi;
-      if (create_lcssa_for_iv_phis || !iv_phi_p (vect_phi))
+      stmt_vec_info vect_phi_info = loop_vinfo->lookup_stmt (vect_phi);
+      if (create_lcssa_for_iv_phis || !iv_phi_p (vect_phi_info))
	{
	  tree new_res = copy_ssa_name (PHI_RESULT (orig_phi));
	  gphi *lcssa_phi = create_phi_node (new_res, between_bb);
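On the caller side, as the slpeel_update_phi_nodes_for_loops hunk above shows,
the stmt_vec_info now comes from the owning loop_vec_info's lookup_stmt method
rather than the global vinfo_for_stmt. A rough self-contained model of that
lookup, where std::map stands in for whatever indexing the real loop_vec_info
uses (an assumption for illustration only):

    #include <cstdio>
    #include <map>

    struct gimple { int uid; };
    struct stmt_vec_info_s { gimple *stmt; };
    typedef stmt_vec_info_s *stmt_vec_info;

    // Stand-in for loop_vec_info: it owns the statement -> info mapping,
    // so lookups are scoped to one vectorization context, not global.
    struct loop_vec_info_s
    {
      std::map<gimple *, stmt_vec_info> infos;

      stmt_vec_info lookup_stmt (gimple *stmt)
      {
        auto it = infos.find (stmt);
        return it == infos.end () ? nullptr : it->second;
      }
    };

    int
    main ()
    {
      gimple phi = { 42 };
      stmt_vec_info_s phi_info = { &phi };
      loop_vec_info_s loop_vinfo;
      loop_vinfo.infos[&phi] = &phi_info;

      // Caller pattern after the patch: look up once, pass the info down.
      stmt_vec_info vect_phi_info = loop_vinfo.lookup_stmt (&phi);
      std::printf ("found info for stmt %d: %s\n", phi.uid,
                   vect_phi_info ? "yes" : "no");
    }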
gcc/tree-vect-loop.c
@@ -8207,21 +8207,18 @@ scale_profile_for_vect_loop (struct loop *loop, unsigned vf)
   scale_bbs_frequencies (&loop->latch, 1, exit_l->probability / prob);
 }
 
-/* Vectorize STMT if relevant, inserting any new instructions before GSI.
-   When vectorizing STMT as a store, set *SEEN_STORE to its stmt_vec_info.
+/* Vectorize STMT_INFO if relevant, inserting any new instructions before GSI.
+   When vectorizing STMT_INFO as a store, set *SEEN_STORE to its stmt_vec_info.
    *SLP_SCHEDULED is a running record of whether we have called
    vect_schedule_slp.  */
 
 static void
-vect_transform_loop_stmt (loop_vec_info loop_vinfo, gimple *stmt,
+vect_transform_loop_stmt (loop_vec_info loop_vinfo, stmt_vec_info stmt_info,
			  gimple_stmt_iterator *gsi,
			  stmt_vec_info *seen_store, bool *slp_scheduled)
 {
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
   poly_uint64 vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
-  stmt_vec_info stmt_info = loop_vinfo->lookup_stmt (stmt);
-  if (!stmt_info)
-    return;
 
   if (dump_enabled_p ())
     {
@@ -8476,15 +8473,19 @@ vect_transform_loop (loop_vec_info loop_vinfo)
	      gimple *def_seq = STMT_VINFO_PATTERN_DEF_SEQ (stmt_info);
	      for (gimple_stmt_iterator subsi = gsi_start (def_seq);
		   !gsi_end_p (subsi); gsi_next (&subsi))
-		vect_transform_loop_stmt (loop_vinfo,
-					  gsi_stmt (subsi), &si,
-					  &seen_store,
-					  &slp_scheduled);
-	      gimple *pat_stmt = STMT_VINFO_RELATED_STMT (stmt_info);
-	      vect_transform_loop_stmt (loop_vinfo, pat_stmt, &si,
-					&seen_store, &slp_scheduled);
+		{
+		  stmt_vec_info pat_stmt_info
+		    = loop_vinfo->lookup_stmt (gsi_stmt (subsi));
+		  vect_transform_loop_stmt (loop_vinfo, pat_stmt_info,
+					    &si, &seen_store,
+					    &slp_scheduled);
+		}
+	      stmt_vec_info pat_stmt_info
+		= STMT_VINFO_RELATED_STMT (stmt_info);
+	      vect_transform_loop_stmt (loop_vinfo, pat_stmt_info, &si,
+					&seen_store, &slp_scheduled);
	    }
-	  vect_transform_loop_stmt (loop_vinfo, stmt, &si,
+	  vect_transform_loop_stmt (loop_vinfo, stmt_info, &si,
				    &seen_store, &slp_scheduled);
	}
       if (seen_store)
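Worth noting in the vect_transform_loop hunks: vect_transform_loop_stmt used to
look up the stmt_vec_info itself and silently return when a statement had none,
so that decision now has to live wherever the lookup happens. The diff does not
show where the callers re-check for a missing info, so the caller below is an
assumption for illustration; the types are the same mock-ups as before.

    #include <cstdio>

    struct gimple { int uid; };
    struct stmt_vec_info_s { gimple *stmt; };
    typedef stmt_vec_info_s *stmt_vec_info;

    // After: the callee may assume a valid info; no lookup can fail here.
    static void
    vect_transform_loop_stmt (stmt_vec_info stmt_info)
    {
      std::printf ("transforming stmt %d\n", stmt_info->stmt->uid);
    }

    // Hypothetical caller: the lookup result and the "no info, skip"
    // decision now live together on this side of the interface.
    static void
    process (stmt_vec_info stmt_info)
    {
      if (!stmt_info)  // was an early return inside the callee
        return;
      vect_transform_loop_stmt (stmt_info);
    }

    int
    main ()
    {
      gimple stmt = { 7 };
      stmt_vec_info_s info = { &stmt };
      process (&info);
      process (nullptr);  // skipped, as the old internal check would do
    }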
gcc/tree-vect-slp.c
@@ -1856,16 +1856,15 @@ vect_find_last_scalar_stmt_in_slp (slp_tree node)
   return last;
 }
 
-/* Splits a group of stores, currently beginning at FIRST_STMT, into two groups:
-   one (still beginning at FIRST_STMT) of size GROUP1_SIZE (also containing
-   the first GROUP1_SIZE stmts, since stores are consecutive), the second
-   containing the remainder.
+/* Splits a group of stores, currently beginning at FIRST_VINFO, into
+   two groups: one (still beginning at FIRST_VINFO) of size GROUP1_SIZE
+   (also containing the first GROUP1_SIZE stmts, since stores are
+   consecutive), the second containing the remainder.
    Return the first stmt in the second group.  */
 
-static gimple *
-vect_split_slp_store_group (gimple *first_stmt, unsigned group1_size)
+static stmt_vec_info
+vect_split_slp_store_group (stmt_vec_info first_vinfo, unsigned group1_size)
 {
-  stmt_vec_info first_vinfo = vinfo_for_stmt (first_stmt);
   gcc_assert (DR_GROUP_FIRST_ELEMENT (first_vinfo) == first_vinfo);
   gcc_assert (group1_size > 0);
   int group2_size = DR_GROUP_SIZE (first_vinfo) - group1_size;
@@ -2174,7 +2173,8 @@ vect_analyze_slp_instance (vec_info *vinfo,
	  gcc_assert ((const_nunits & (const_nunits - 1)) == 0);
	  unsigned group1_size = i & ~(const_nunits - 1);
 
-	  gimple *rest = vect_split_slp_store_group (stmt_info, group1_size);
+	  stmt_vec_info rest = vect_split_slp_store_group (stmt_info,
+							   group1_size);
	  bool res = vect_analyze_slp_instance (vinfo, stmt_info,
						max_tree_size);
	  /* If the first non-match was in the middle of a vector,
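The tree-vect-slp.c change also flips a return type: vect_split_slp_store_group
now hands back the stmt_vec_info heading the second group, so the result can
feed further stmt_vec_info-based processing without a re-lookup. A toy,
self-contained model of splitting a linked store group and returning the typed
head; the two list fields are simplified stand-ins for the DR_GROUP_NEXT_ELEMENT
and DR_GROUP_FIRST_ELEMENT accessors, not the real representation.

    #include <cassert>

    struct stmt_vec_info_s
    {
      stmt_vec_info_s *next_element;   // models DR_GROUP_NEXT_ELEMENT
      stmt_vec_info_s *first_element;  // models DR_GROUP_FIRST_ELEMENT
    };
    typedef stmt_vec_info_s *stmt_vec_info;

    // Split the group headed by FIRST_VINFO after GROUP1_SIZE stmts and
    // return the head of the second group, already as a stmt_vec_info.
    static stmt_vec_info
    vect_split_slp_store_group (stmt_vec_info first_vinfo,
                                unsigned group1_size)
    {
      assert (first_vinfo->first_element == first_vinfo);
      stmt_vec_info cursor = first_vinfo;
      for (unsigned i = 1; i < group1_size; i++)
        cursor = cursor->next_element;
      stmt_vec_info rest = cursor->next_element;
      cursor->next_element = nullptr;
      for (stmt_vec_info s = rest; s; s = s->next_element)
        s->first_element = rest;   // REST now heads the second group
      return rest;
    }

    int
    main ()
    {
      stmt_vec_info_s a{}, b{}, c{};
      a.next_element = &b;
      b.next_element = &c;
      a.first_element = b.first_element = c.first_element = &a;

      stmt_vec_info rest = vect_split_slp_store_group (&a, 2);
      assert (rest == &c);
      assert (c.first_element == &c && b.next_element == nullptr);
    }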
gcc/tree-vect-stmts.c
@@ -117,12 +117,12 @@ create_vector_array (tree elem_type, unsigned HOST_WIDE_INT nelems)
 
 /* ARRAY is an array of vectors created by create_vector_array.
    Return an SSA_NAME for the vector in index N.  The reference
-   is part of the vectorization of STMT and the vector is associated
+   is part of the vectorization of STMT_INFO and the vector is associated
    with scalar destination SCALAR_DEST.  */
 
 static tree
-read_vector_array (gimple *stmt, gimple_stmt_iterator *gsi, tree scalar_dest,
-		   tree array, unsigned HOST_WIDE_INT n)
+read_vector_array (stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
+		   tree scalar_dest, tree array, unsigned HOST_WIDE_INT n)
 {
   tree vect_type, vect, vect_name, array_ref;
   gimple *new_stmt;
@@ -137,18 +137,18 @@ read_vector_array (gimple *stmt, gimple_stmt_iterator *gsi, tree scalar_dest,
   new_stmt = gimple_build_assign (vect, array_ref);
   vect_name = make_ssa_name (vect, new_stmt);
   gimple_assign_set_lhs (new_stmt, vect_name);
-  vect_finish_stmt_generation (stmt, new_stmt, gsi);
+  vect_finish_stmt_generation (stmt_info, new_stmt, gsi);
 
   return vect_name;
 }
 
 /* ARRAY is an array of vectors created by create_vector_array.
    Emit code to store SSA_NAME VECT in index N of the array.
-   The store is part of the vectorization of STMT.  */
+   The store is part of the vectorization of STMT_INFO.  */
 
 static void
-write_vector_array (gimple *stmt, gimple_stmt_iterator *gsi, tree vect,
-		    tree array, unsigned HOST_WIDE_INT n)
+write_vector_array (stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
+		    tree vect, tree array, unsigned HOST_WIDE_INT n)
 {
   tree array_ref;
   gimple *new_stmt;
@@ -158,7 +158,7 @@ write_vector_array (gimple *stmt, gimple_stmt_iterator *gsi, tree vect,
				  NULL_TREE, NULL_TREE);
 
   new_stmt = gimple_build_assign (array_ref, vect);
-  vect_finish_stmt_generation (stmt, new_stmt, gsi);
+  vect_finish_stmt_generation (stmt_info, new_stmt, gsi);
 }
 
 /* PTR is a pointer to an array of type TYPE.  Return a representation
@@ -176,15 +176,16 @@ create_array_ref (tree type, tree ptr, tree alias_ptr_type)
   return mem_ref;
 }
 
-/* Add a clobber of variable VAR to the vectorization of STMT.
+/* Add a clobber of variable VAR to the vectorization of STMT_INFO.
    Emit the clobber before *GSI.  */
 
 static void
-vect_clobber_variable (gimple *stmt, gimple_stmt_iterator *gsi, tree var)
+vect_clobber_variable (stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
+		       tree var)
 {
   tree clobber = build_clobber (TREE_TYPE (var));
   gimple *new_stmt = gimple_build_assign (var, clobber);
-  vect_finish_stmt_generation (stmt, new_stmt, gsi);
+  vect_finish_stmt_generation (stmt_info, new_stmt, gsi);
 }
 
 /* Utility functions used by vect_mark_stmts_to_be_vectorized.  */
@@ -281,8 +282,8 @@ is_simple_and_all_uses_invariant (gimple *stmt, loop_vec_info loop_vinfo)
 
 /* Function vect_stmt_relevant_p.
 
-   Return true if STMT in loop that is represented by LOOP_VINFO is
-   "relevant for vectorization".
+   Return true if STMT_INFO, in the loop that is represented by LOOP_VINFO,
+   is "relevant for vectorization".
 
    A stmt is considered "relevant for vectorization" if:
    - it has uses outside the loop.
@@ -292,7 +293,7 @@ is_simple_and_all_uses_invariant (gimple *stmt, loop_vec_info loop_vinfo)
   CHECKME: what other side effects would the vectorizer allow?  */
 
 static bool
-vect_stmt_relevant_p (gimple *stmt, loop_vec_info loop_vinfo,
+vect_stmt_relevant_p (stmt_vec_info stmt_info, loop_vec_info loop_vinfo,
		      enum vect_relevant *relevant, bool *live_p)
 {
   struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
@@ -305,15 +306,14 @@ vect_stmt_relevant_p (gimple *stmt, loop_vec_info loop_vinfo,
   *live_p = false;
 
   /* cond stmt other than loop exit cond.  */
-  if (is_ctrl_stmt (stmt)
-      && STMT_VINFO_TYPE (vinfo_for_stmt (stmt))
-	 != loop_exit_ctrl_vec_info_type)
+  if (is_ctrl_stmt (stmt_info->stmt)
+      && STMT_VINFO_TYPE (stmt_info) != loop_exit_ctrl_vec_info_type)
     *relevant = vect_used_in_scope;
 
   /* changing memory.  */
-  if (gimple_code (stmt) != GIMPLE_PHI)
-    if (gimple_vdef (stmt)
-	&& !gimple_clobber_p (stmt))
+  if (gimple_code (stmt_info->stmt) != GIMPLE_PHI)
+    if (gimple_vdef (stmt_info->stmt)
+	&& !gimple_clobber_p (stmt_info->stmt))
       {
	if (dump_enabled_p ())
	  dump_printf_loc (MSG_NOTE, vect_location,
@@ -322,7 +322,7 @@ vect_stmt_relevant_p (gimple *stmt, loop_vec_info loop_vinfo,
     }
 
   /* uses outside the loop.  */
-  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
+  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt_info->stmt, op_iter, SSA_OP_DEF)
     {
       FOR_EACH_IMM_USE_FAST (use_p, imm_iter, DEF_FROM_PTR (def_p))
	{
@@ -347,7 +347,7 @@ vect_stmt_relevant_p (gimple *stmt, loop_vec_info loop_vinfo,
     }
 
   if (*live_p && *relevant == vect_unused_in_scope
-      && !is_simple_and_all_uses_invariant (stmt, loop_vinfo))
+      && !is_simple_and_all_uses_invariant (stmt_info, loop_vinfo))
     {
       if (dump_enabled_p ())
	dump_printf_loc (MSG_NOTE, vect_location,
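A recurring micro-pattern in the vect_stmt_relevant_p hunks above: generic
gimple queries (gimple_code, gimple_vdef, gimple_clobber_p) now read the
statement through stmt_info->stmt instead of a separately passed pointer, while
vectorizer-specific queries like STMT_VINFO_TYPE take the info directly. A
sketch of the two views, with mock types and a mock gimple_vdef standing in for
the real API (the real one returns a tree, not a bool):

    #include <cstdio>

    struct gimple
    {
      bool is_phi;
      bool has_vdef;   // stands in for a real virtual-definition operand
    };

    struct stmt_vec_info_s { gimple *stmt; };
    typedef stmt_vec_info_s *stmt_vec_info;

    // Mock of the generic IL query.
    static bool
    gimple_vdef (const gimple *stmt) { return stmt->has_vdef; }

    // Pattern after the patch: one parameter, two views of it.
    static bool
    stmt_changes_memory_p (stmt_vec_info stmt_info)
    {
      if (stmt_info->stmt->is_phi)   // generic query via stmt_info->stmt
        return false;
      return gimple_vdef (stmt_info->stmt);
    }

    int
    main ()
    {
      gimple store = { false, true };
      stmt_vec_info_s info = { &store };
      std::printf ("changes memory: %d\n", stmt_changes_memory_p (&info));
    }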
@@ -1838,7 +1838,7 @@ vectorizable_internal_function (combined_fn cfn, tree fndecl,
 }
 
 
-static tree permute_vec_elements (tree, tree, tree, gimple *,
+static tree permute_vec_elements (tree, tree, tree, stmt_vec_info,
				  gimple_stmt_iterator *);
 
 /* Check whether a load or store statement in the loop described by
@@ -2072,19 +2072,19 @@ vect_truncate_gather_scatter_offset (gimple *stmt, loop_vec_info loop_vinfo,
 }
 
 /* Return true if we can use gather/scatter internal functions to
-   vectorize STMT, which is a grouped or strided load or store.
+   vectorize STMT_INFO, which is a grouped or strided load or store.
    MASKED_P is true if load or store is conditional.  When returning
    true, fill in GS_INFO with the information required to perform the
    operation.  */
 
 static bool
-vect_use_strided_gather_scatters_p (gimple *stmt, loop_vec_info loop_vinfo,
-				    bool masked_p,
+vect_use_strided_gather_scatters_p (stmt_vec_info stmt_info,
+				    loop_vec_info loop_vinfo, bool masked_p,
				    gather_scatter_info *gs_info)
 {
-  if (!vect_check_gather_scatter (stmt, loop_vinfo, gs_info)
+  if (!vect_check_gather_scatter (stmt_info, loop_vinfo, gs_info)
       || gs_info->decl)
-    return vect_truncate_gather_scatter_offset (stmt, loop_vinfo,
+    return vect_truncate_gather_scatter_offset (stmt_info, loop_vinfo,
						masked_p, gs_info);
 
   scalar_mode element_mode = SCALAR_TYPE_MODE (gs_info->element_type);
@@ -2613,12 +2613,12 @@ vect_check_store_rhs (gimple *stmt, tree rhs, vect_def_type *rhs_dt_out,
   return true;
 }
 
-/* Build an all-ones vector mask of type MASKTYPE while vectorizing STMT.
+/* Build an all-ones vector mask of type MASKTYPE while vectorizing STMT_INFO.
    Note that we support masks with floating-point type, in which case the
    floats are interpreted as a bitmask.  */
 
 static tree
-vect_build_all_ones_mask (gimple *stmt, tree masktype)
+vect_build_all_ones_mask (stmt_vec_info stmt_info, tree masktype)
 {
   if (TREE_CODE (masktype) == INTEGER_TYPE)
     return build_int_cst (masktype, -1);
@@ -2626,7 +2626,7 @@ vect_build_all_ones_mask (gimple *stmt, tree masktype)
     {
       tree mask = build_int_cst (TREE_TYPE (masktype), -1);
       mask = build_vector_from_val (masktype, mask);
-      return vect_init_vector (stmt, mask, masktype, NULL);
+      return vect_init_vector (stmt_info, mask, masktype, NULL);
     }
   else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (masktype)))
     {
@@ -2637,16 +2637,16 @@ vect_build_all_ones_mask (gimple *stmt, tree masktype)
       real_from_target (&r, tmp, TYPE_MODE (TREE_TYPE (masktype)));
       tree mask = build_real (TREE_TYPE (masktype), r);
       mask = build_vector_from_val (masktype, mask);
-      return vect_init_vector (stmt, mask, masktype, NULL);
+      return vect_init_vector (stmt_info, mask, masktype, NULL);
     }
   gcc_unreachable ();
 }
 
 /* Build an all-zero merge value of type VECTYPE while vectorizing
-   STMT as a gather load.  */
+   STMT_INFO as a gather load.  */
 
 static tree
-vect_build_zero_merge_argument (gimple *stmt, tree vectype)
+vect_build_zero_merge_argument (stmt_vec_info stmt_info, tree vectype)
 {
   tree merge;
   if (TREE_CODE (TREE_TYPE (vectype)) == INTEGER_TYPE)
@@ -2663,7 +2663,7 @@ vect_build_zero_merge_argument (gimple *stmt, tree vectype)
   else
     gcc_unreachable ();
   merge = build_vector_from_val (vectype, merge);
-  return vect_init_vector (stmt, merge, vectype, NULL);
+  return vect_init_vector (stmt_info, merge, vectype, NULL);
 }
 
 /* Build a gather load call while vectorizing STMT.  Insert new instructions
@@ -2871,11 +2871,12 @@ vect_build_gather_load_calls (gimple *stmt, gimple_stmt_iterator *gsi,
 
 /* Prepare the base and offset in GS_INFO for vectorization.
    Set *DATAREF_PTR to the loop-invariant base address and *VEC_OFFSET
-   to the vectorized offset argument for the first copy of STMT.  STMT
-   is the statement described by GS_INFO and LOOP is the containing loop.  */
+   to the vectorized offset argument for the first copy of STMT_INFO.
+   STMT_INFO is the statement described by GS_INFO and LOOP is the
+   containing loop.  */
 
 static void
-vect_get_gather_scatter_ops (struct loop *loop, gimple *stmt,
+vect_get_gather_scatter_ops (struct loop *loop, stmt_vec_info stmt_info,
			     gather_scatter_info *gs_info,
			     tree *dataref_ptr, tree *vec_offset)
 {
@@ -2890,7 +2891,7 @@ vect_get_gather_scatter_ops (struct loop *loop, gimple *stmt,
     }
   tree offset_type = TREE_TYPE (gs_info->offset);
   tree offset_vectype = get_vectype_for_scalar_type (offset_type);
-  *vec_offset = vect_get_vec_def_for_operand (gs_info->offset, stmt,
+  *vec_offset = vect_get_vec_def_for_operand (gs_info->offset, stmt_info,
					      offset_vectype);
 }
 
@@ -4403,14 +4404,14 @@ vectorizable_simd_clone_call (gimple *stmt, gimple_stmt_iterator *gsi,
   VEC_OPRND0 and VEC_OPRND1.  The new vector stmt is to be inserted at BSI.
   In the case that CODE is a CALL_EXPR, this means that a call to DECL
   needs to be created (DECL is a function-decl of a target-builtin).
-   STMT is the original scalar stmt that we are vectorizing.  */
+   STMT_INFO is the original scalar stmt that we are vectorizing.  */
 
 static gimple *
 vect_gen_widened_results_half (enum tree_code code,
			       tree decl,
			       tree vec_oprnd0, tree vec_oprnd1, int op_type,
			       tree vec_dest, gimple_stmt_iterator *gsi,
-			       gimple *stmt)
+			       stmt_vec_info stmt_info)
 {
   gimple *new_stmt;
   tree new_temp;
@@ -4436,22 +4437,23 @@ vect_gen_widened_results_half (enum tree_code code,
       new_temp = make_ssa_name (vec_dest, new_stmt);
       gimple_assign_set_lhs (new_stmt, new_temp);
     }
-  vect_finish_stmt_generation (stmt, new_stmt, gsi);
+  vect_finish_stmt_generation (stmt_info, new_stmt, gsi);
 
   return new_stmt;
 }
 
 
-/* Get vectorized definitions for loop-based vectorization.  For the first
-   operand we call vect_get_vec_def_for_operand() (with OPRND containing
-   scalar operand), and for the rest we get a copy with
+/* Get vectorized definitions for loop-based vectorization of STMT_INFO.
+   For the first operand we call vect_get_vec_def_for_operand (with OPRND
+   containing scalar operand), and for the rest we get a copy with
   vect_get_vec_def_for_stmt_copy() using the previous vector definition
   (stored in OPRND).  See vect_get_vec_def_for_stmt_copy() for details.
   The vectors are collected into VEC_OPRNDS.  */
 
 static void
-vect_get_loop_based_defs (tree *oprnd, gimple *stmt, enum vect_def_type dt,
-			  vec<tree> *vec_oprnds, int multi_step_cvt)
+vect_get_loop_based_defs (tree *oprnd, stmt_vec_info stmt_info,
+			  enum vect_def_type dt, vec<tree> *vec_oprnds,
+			  int multi_step_cvt)
 {
   tree vec_oprnd;
 
@@ -4459,7 +4461,7 @@ vect_get_loop_based_defs (tree *oprnd, gimple *stmt, enum vect_def_type dt,
   /* All the vector operands except the very first one (that is scalar oprnd)
      are stmt copies.  */
   if (TREE_CODE (TREE_TYPE (*oprnd)) != VECTOR_TYPE)
-    vec_oprnd = vect_get_vec_def_for_operand (*oprnd, stmt);
+    vec_oprnd = vect_get_vec_def_for_operand (*oprnd, stmt_info);
   else
     vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, *oprnd);
 
@@ -4474,7 +4476,8 @@ vect_get_loop_based_defs (tree *oprnd, gimple *stmt, enum vect_def_type dt,
   /* For conversion in multiple steps, continue to get operands
      recursively.  */
   if (multi_step_cvt)
-    vect_get_loop_based_defs (oprnd, stmt, dt, vec_oprnds, multi_step_cvt - 1);
+    vect_get_loop_based_defs (oprnd, stmt_info, dt, vec_oprnds,
+			      multi_step_cvt - 1);
 }
 
 
@@ -4549,13 +4552,14 @@ vect_create_vectorized_demotion_stmts (vec<tree> *vec_oprnds,
 
 
 /* Create vectorized promotion statements for vector operands from VEC_OPRNDS0
-   and VEC_OPRNDS1 (for binary operations).  For multi-step conversions store
-   the resulting vectors and call the function recursively.  */
+   and VEC_OPRNDS1, for a binary operation associated with scalar statement
+   STMT_INFO.  For multi-step conversions store the resulting vectors and
+   call the function recursively.  */
 
 static void
 vect_create_vectorized_promotion_stmts (vec<tree> *vec_oprnds0,
					vec<tree> *vec_oprnds1,
-					gimple *stmt, tree vec_dest,
+					stmt_vec_info stmt_info, tree vec_dest,
					gimple_stmt_iterator *gsi,
					enum tree_code code1,
					enum tree_code code2, tree decl1,
@@ -4576,9 +4580,11 @@ vect_create_vectorized_promotion_stmts (vec<tree> *vec_oprnds0,
 
       /* Generate the two halves of promotion operation.  */
       new_stmt1 = vect_gen_widened_results_half (code1, decl1, vop0, vop1,
-						 op_type, vec_dest, gsi, stmt);
+						 op_type, vec_dest, gsi,
+						 stmt_info);
       new_stmt2 = vect_gen_widened_results_half (code2, decl2, vop0, vop1,
-						 op_type, vec_dest, gsi, stmt);
+						 op_type, vec_dest, gsi,
+						 stmt_info);
       if (is_gimple_call (new_stmt1))
	{
	  new_tmp1 = gimple_call_lhs (new_stmt1);
@@ -7318,19 +7324,19 @@ vect_gen_perm_mask_checked (tree vectype, const vec_perm_indices &sel)
 }
 
 /* Given a vector variable X and Y, that was generated for the scalar
-   STMT, generate instructions to permute the vector elements of X and Y
+   STMT_INFO, generate instructions to permute the vector elements of X and Y
   using permutation mask MASK_VEC, insert them at *GSI and return the
   permuted vector variable.  */
 
 static tree
-permute_vec_elements (tree x, tree y, tree mask_vec, gimple *stmt,
+permute_vec_elements (tree x, tree y, tree mask_vec, stmt_vec_info stmt_info,
		      gimple_stmt_iterator *gsi)
 {
   tree vectype = TREE_TYPE (x);
   tree perm_dest, data_ref;
   gimple *perm_stmt;
 
-  tree scalar_dest = gimple_get_lhs (stmt);
+  tree scalar_dest = gimple_get_lhs (stmt_info->stmt);
   if (TREE_CODE (scalar_dest) == SSA_NAME)
     perm_dest = vect_create_destination_var (scalar_dest, vectype);
   else
@@ -7339,7 +7345,7 @@ permute_vec_elements (tree x, tree y, tree mask_vec, gimple *stmt,
 
   /* Generate the permute statement.  */
   perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, x, y, mask_vec);
-  vect_finish_stmt_generation (stmt, perm_stmt, gsi);
+  vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
 
   return data_ref;
 }
@@ -9409,11 +9415,11 @@ vectorizable_comparison (gimple *stmt, gimple_stmt_iterator *gsi,
 
 /* If SLP_NODE is nonnull, return true if vectorizable_live_operation
   can handle all live statements in the node.  Otherwise return true
-   if STMT is not live or if vectorizable_live_operation can handle it.
+   if STMT_INFO is not live or if vectorizable_live_operation can handle it.
   GSI and VEC_STMT are as for vectorizable_live_operation.  */
 
 static bool
-can_vectorize_live_stmts (gimple *stmt, gimple_stmt_iterator *gsi,
+can_vectorize_live_stmts (stmt_vec_info stmt_info, gimple_stmt_iterator *gsi,
			  slp_tree slp_node, stmt_vec_info *vec_stmt,
			  stmt_vector_for_cost *cost_vec)
 {
@@ -9429,9 +9435,9 @@ can_vectorize_live_stmts (gimple *stmt, gimple_stmt_iterator *gsi,
	  return false;
	}
     }
-  else if (STMT_VINFO_LIVE_P (vinfo_for_stmt (stmt))
-	   && !vectorizable_live_operation (stmt, gsi, slp_node, -1, vec_stmt,
-					    cost_vec))
+  else if (STMT_VINFO_LIVE_P (stmt_info)
+	   && !vectorizable_live_operation (stmt_info, gsi, slp_node, -1,
+					    vec_stmt, cost_vec))
     return false;
 
   return true;