[28/46] Use stmt_vec_info instead of gimple stmts internally (part 1)

This first part makes functions use stmt_vec_infos instead of
gimple stmts in cases where the stmt_vec_info was already available
and where the change is mechanical.  Most of it is just replacing
"stmt" with "stmt_info".

2018-07-31  Richard Sandiford  <richard.sandiford@arm.com>

gcc/
	* tree-vect-data-refs.c (vect_slp_analyze_node_dependences):
	(vect_check_gather_scatter, vect_create_data_ref_ptr, bump_vector_ptr)
	(vect_permute_store_chain, vect_setup_realignment)
	(vect_permute_load_chain, vect_shift_permute_load_chain)
	(vect_transform_grouped_load): Use stmt_vec_info rather than gimple
	stmts internally, and when passing values to other vectorizer routines.
	* tree-vect-loop-manip.c (vect_can_advance_ivs_p): Likewise.
	* tree-vect-loop.c (vect_analyze_scalar_cycles_1)
	(vect_analyze_loop_operations, get_initial_def_for_reduction)
	(vect_create_epilog_for_reduction, vectorize_fold_left_reduction)
	(vectorizable_reduction, vectorizable_induction)
	(vectorizable_live_operation, vect_transform_loop_stmt)
	(vect_transform_loop): Likewise.
	* tree-vect-patterns.c (vect_reassociating_reduction_p)
	(vect_recog_widen_op_pattern, vect_recog_mixed_size_cond_pattern)
	(vect_recog_bool_pattern, vect_recog_gather_scatter_pattern): Likewise.
	* tree-vect-slp.c (vect_analyze_slp_instance): Likewise.
	(vect_slp_analyze_node_operations_1): Likewise.
	* tree-vect-stmts.c (vect_mark_relevant, process_use)
	(exist_non_indexing_operands_for_use_p, vect_init_vector_1)
	(vect_mark_stmts_to_be_vectorized, vect_get_vec_def_for_operand)
	(vect_finish_stmt_generation_1, get_group_load_store_type)
	(get_load_store_type, vect_build_gather_load_calls)
	(vectorizable_bswap, vectorizable_call, vectorizable_simd_clone_call)
	(vect_create_vectorized_demotion_stmts, vectorizable_conversion)
	(vectorizable_assignment, vectorizable_shift, vectorizable_operation)
	(vectorizable_store, vectorizable_load, vectorizable_condition)
	(vectorizable_comparison, vect_analyze_stmt, vect_transform_stmt)
	(supportable_widening_operation): Likewise.
	(vect_get_vector_types_for_stmt): Likewise.
	* tree-vectorizer.h (vect_dr_behavior): Likewise.

From-SVN: r263143
This commit is contained in:
Richard Sandiford 2018-07-31 14:24:27 +00:00 committed by Richard Sandiford
parent 91987857e6
commit 86a91c0a7d
8 changed files with 459 additions and 378 deletions

View File

@@ -1,3 +1,37 @@
2018-07-31 Richard Sandiford <richard.sandiford@arm.com>
* tree-vect-data-refs.c (vect_slp_analyze_node_dependences):
(vect_check_gather_scatter, vect_create_data_ref_ptr, bump_vector_ptr)
(vect_permute_store_chain, vect_setup_realignment)
(vect_permute_load_chain, vect_shift_permute_load_chain)
(vect_transform_grouped_load): Use stmt_vec_info rather than gimple
stmts internally, and when passing values to other vectorizer routines.
* tree-vect-loop-manip.c (vect_can_advance_ivs_p): Likewise.
* tree-vect-loop.c (vect_analyze_scalar_cycles_1)
(vect_analyze_loop_operations, get_initial_def_for_reduction)
(vect_create_epilog_for_reduction, vectorize_fold_left_reduction)
(vectorizable_reduction, vectorizable_induction)
(vectorizable_live_operation, vect_transform_loop_stmt)
(vect_transform_loop): Likewise.
* tree-vect-patterns.c (vect_reassociating_reduction_p)
(vect_recog_widen_op_pattern, vect_recog_mixed_size_cond_pattern)
(vect_recog_bool_pattern, vect_recog_gather_scatter_pattern): Likewise.
* tree-vect-slp.c (vect_analyze_slp_instance): Likewise.
(vect_slp_analyze_node_operations_1): Likewise.
* tree-vect-stmts.c (vect_mark_relevant, process_use)
(exist_non_indexing_operands_for_use_p, vect_init_vector_1)
(vect_mark_stmts_to_be_vectorized, vect_get_vec_def_for_operand)
(vect_finish_stmt_generation_1, get_group_load_store_type)
(get_load_store_type, vect_build_gather_load_calls)
(vectorizable_bswap, vectorizable_call, vectorizable_simd_clone_call)
(vect_create_vectorized_demotion_stmts, vectorizable_conversion)
(vectorizable_assignment, vectorizable_shift, vectorizable_operation)
(vectorizable_store, vectorizable_load, vectorizable_condition)
(vectorizable_comparison, vect_analyze_stmt, vect_transform_stmt)
(supportable_widening_operation): Likewise.
(vect_get_vector_types_for_stmt): Likewise.
* tree-vectorizer.h (vect_dr_behavior): Likewise.
2018-07-31 Richard Sandiford <richard.sandiford@arm.com> 2018-07-31 Richard Sandiford <richard.sandiford@arm.com>
* tree-vect-data-refs.c (vect_analyze_data_ref_dependence) * tree-vect-data-refs.c (vect_analyze_data_ref_dependence)

View File

@@ -712,7 +712,7 @@ vect_slp_analyze_node_dependences (slp_instance instance, slp_tree node,
been sunk to (and we verify if we can do that as well). */ been sunk to (and we verify if we can do that as well). */
if (gimple_visited_p (stmt)) if (gimple_visited_p (stmt))
{ {
if (stmt != last_store) if (stmt_info != last_store)
continue; continue;
unsigned i; unsigned i;
stmt_vec_info store_info; stmt_vec_info store_info;
@@ -3666,7 +3666,7 @@ vect_check_gather_scatter (gimple *stmt, loop_vec_info loop_vinfo,
/* See whether this is already a call to a gather/scatter internal function. /* See whether this is already a call to a gather/scatter internal function.
If not, see whether it's a masked load or store. */ If not, see whether it's a masked load or store. */
gcall *call = dyn_cast <gcall *> (stmt); gcall *call = dyn_cast <gcall *> (stmt_info->stmt);
if (call && gimple_call_internal_p (call)) if (call && gimple_call_internal_p (call))
{ {
ifn = gimple_call_internal_fn (call); ifn = gimple_call_internal_fn (call);
@@ -4677,8 +4677,8 @@ vect_create_data_ref_ptr (gimple *stmt, tree aggr_type, struct loop *at_loop,
if (loop_vinfo) if (loop_vinfo)
{ {
loop = LOOP_VINFO_LOOP (loop_vinfo); loop = LOOP_VINFO_LOOP (loop_vinfo);
nested_in_vect_loop = nested_in_vect_loop_p (loop, stmt); nested_in_vect_loop = nested_in_vect_loop_p (loop, stmt_info);
containing_loop = (gimple_bb (stmt))->loop_father; containing_loop = (gimple_bb (stmt_info->stmt))->loop_father;
pe = loop_preheader_edge (loop); pe = loop_preheader_edge (loop);
} }
else else
@@ -4786,7 +4786,7 @@ vect_create_data_ref_ptr (gimple *stmt, tree aggr_type, struct loop *at_loop,
/* Create: (&(base[init_val+offset]+byte_offset) in the loop preheader. */ /* Create: (&(base[init_val+offset]+byte_offset) in the loop preheader. */
new_temp = vect_create_addr_base_for_vector_ref (stmt, &new_stmt_list, new_temp = vect_create_addr_base_for_vector_ref (stmt_info, &new_stmt_list,
offset, byte_offset); offset, byte_offset);
if (new_stmt_list) if (new_stmt_list)
{ {
@@ -4934,7 +4934,7 @@ bump_vector_ptr (tree dataref_ptr, gimple *ptr_incr, gimple_stmt_iterator *gsi,
new_dataref_ptr = make_ssa_name (TREE_TYPE (dataref_ptr)); new_dataref_ptr = make_ssa_name (TREE_TYPE (dataref_ptr));
incr_stmt = gimple_build_assign (new_dataref_ptr, POINTER_PLUS_EXPR, incr_stmt = gimple_build_assign (new_dataref_ptr, POINTER_PLUS_EXPR,
dataref_ptr, update); dataref_ptr, update);
vect_finish_stmt_generation (stmt, incr_stmt, gsi); vect_finish_stmt_generation (stmt_info, incr_stmt, gsi);
/* Copy the points-to information if it exists. */ /* Copy the points-to information if it exists. */
if (DR_PTR_INFO (dr)) if (DR_PTR_INFO (dr))
@@ -5282,7 +5282,7 @@ vect_permute_store_chain (vec<tree> dr_chain,
data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle3_low"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle3_low");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, vect1, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, vect1,
vect2, perm3_mask_low); vect2, perm3_mask_low);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
vect1 = data_ref; vect1 = data_ref;
vect2 = dr_chain[2]; vect2 = dr_chain[2];
@@ -5293,7 +5293,7 @@ vect_permute_store_chain (vec<tree> dr_chain,
data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle3_high"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle3_high");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, vect1, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, vect1,
vect2, perm3_mask_high); vect2, perm3_mask_high);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[j] = data_ref; (*result_chain)[j] = data_ref;
} }
} }
@@ -5332,7 +5332,7 @@ vect_permute_store_chain (vec<tree> dr_chain,
high = make_temp_ssa_name (vectype, NULL, "vect_inter_high"); high = make_temp_ssa_name (vectype, NULL, "vect_inter_high");
perm_stmt = gimple_build_assign (high, VEC_PERM_EXPR, vect1, perm_stmt = gimple_build_assign (high, VEC_PERM_EXPR, vect1,
vect2, perm_mask_high); vect2, perm_mask_high);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[2*j] = high; (*result_chain)[2*j] = high;
/* Create interleaving stmt: /* Create interleaving stmt:
@@ -5342,7 +5342,7 @@ vect_permute_store_chain (vec<tree> dr_chain,
low = make_temp_ssa_name (vectype, NULL, "vect_inter_low"); low = make_temp_ssa_name (vectype, NULL, "vect_inter_low");
perm_stmt = gimple_build_assign (low, VEC_PERM_EXPR, vect1, perm_stmt = gimple_build_assign (low, VEC_PERM_EXPR, vect1,
vect2, perm_mask_low); vect2, perm_mask_low);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[2*j+1] = low; (*result_chain)[2*j+1] = low;
} }
memcpy (dr_chain.address (), result_chain->address (), memcpy (dr_chain.address (), result_chain->address (),
@@ -5415,7 +5415,7 @@ vect_setup_realignment (gimple *stmt, gimple_stmt_iterator *gsi,
struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info); struct data_reference *dr = STMT_VINFO_DATA_REF (stmt_info);
struct loop *loop = NULL; struct loop *loop = NULL;
edge pe = NULL; edge pe = NULL;
tree scalar_dest = gimple_assign_lhs (stmt); tree scalar_dest = gimple_assign_lhs (stmt_info->stmt);
tree vec_dest; tree vec_dest;
gimple *inc; gimple *inc;
tree ptr; tree ptr;
@@ -5429,13 +5429,13 @@ vect_setup_realignment (gimple *stmt, gimple_stmt_iterator *gsi,
bool inv_p; bool inv_p;
bool compute_in_loop = false; bool compute_in_loop = false;
bool nested_in_vect_loop = false; bool nested_in_vect_loop = false;
struct loop *containing_loop = (gimple_bb (stmt))->loop_father; struct loop *containing_loop = (gimple_bb (stmt_info->stmt))->loop_father;
struct loop *loop_for_initial_load = NULL; struct loop *loop_for_initial_load = NULL;
if (loop_vinfo) if (loop_vinfo)
{ {
loop = LOOP_VINFO_LOOP (loop_vinfo); loop = LOOP_VINFO_LOOP (loop_vinfo);
nested_in_vect_loop = nested_in_vect_loop_p (loop, stmt); nested_in_vect_loop = nested_in_vect_loop_p (loop, stmt_info);
} }
gcc_assert (alignment_support_scheme == dr_explicit_realign gcc_assert (alignment_support_scheme == dr_explicit_realign
@@ -5518,9 +5518,9 @@ vect_setup_realignment (gimple *stmt, gimple_stmt_iterator *gsi,
gcc_assert (!compute_in_loop); gcc_assert (!compute_in_loop);
vec_dest = vect_create_destination_var (scalar_dest, vectype); vec_dest = vect_create_destination_var (scalar_dest, vectype);
ptr = vect_create_data_ref_ptr (stmt, vectype, loop_for_initial_load, ptr = vect_create_data_ref_ptr (stmt_info, vectype,
NULL_TREE, &init_addr, NULL, &inc, loop_for_initial_load, NULL_TREE,
true, &inv_p); &init_addr, NULL, &inc, true, &inv_p);
if (TREE_CODE (ptr) == SSA_NAME) if (TREE_CODE (ptr) == SSA_NAME)
new_temp = copy_ssa_name (ptr); new_temp = copy_ssa_name (ptr);
else else
@@ -5562,7 +5562,7 @@ vect_setup_realignment (gimple *stmt, gimple_stmt_iterator *gsi,
if (!init_addr) if (!init_addr)
{ {
/* Generate the INIT_ADDR computation outside LOOP. */ /* Generate the INIT_ADDR computation outside LOOP. */
init_addr = vect_create_addr_base_for_vector_ref (stmt, &stmts, init_addr = vect_create_addr_base_for_vector_ref (stmt_info, &stmts,
NULL_TREE); NULL_TREE);
if (loop) if (loop)
{ {
@@ -5890,7 +5890,7 @@ vect_permute_load_chain (vec<tree> dr_chain,
data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle3_low"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle3_low");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, first_vect, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, first_vect,
second_vect, perm3_mask_low); second_vect, perm3_mask_low);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
/* Create interleaving stmt (high part of): /* Create interleaving stmt (high part of):
high = VEC_PERM_EXPR <first_vect, second_vect2, {k, 3 + k, 6 + k, high = VEC_PERM_EXPR <first_vect, second_vect2, {k, 3 + k, 6 + k,
@@ -5900,7 +5900,7 @@ vect_permute_load_chain (vec<tree> dr_chain,
data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle3_high"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle3_high");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, first_vect, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, first_vect,
second_vect, perm3_mask_high); second_vect, perm3_mask_high);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[k] = data_ref; (*result_chain)[k] = data_ref;
} }
} }
@@ -5935,7 +5935,7 @@ vect_permute_load_chain (vec<tree> dr_chain,
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR,
first_vect, second_vect, first_vect, second_vect,
perm_mask_even); perm_mask_even);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[j/2] = data_ref; (*result_chain)[j/2] = data_ref;
/* data_ref = permute_odd (first_data_ref, second_data_ref); */ /* data_ref = permute_odd (first_data_ref, second_data_ref); */
@@ -5943,7 +5943,7 @@ vect_permute_load_chain (vec<tree> dr_chain,
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR,
first_vect, second_vect, first_vect, second_vect,
perm_mask_odd); perm_mask_odd);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[j/2+length/2] = data_ref; (*result_chain)[j/2+length/2] = data_ref;
} }
memcpy (dr_chain.address (), result_chain->address (), memcpy (dr_chain.address (), result_chain->address (),
@@ -6143,26 +6143,26 @@ vect_shift_permute_load_chain (vec<tree> dr_chain,
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR,
first_vect, first_vect, first_vect, first_vect,
perm2_mask1); perm2_mask1);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
vect[0] = data_ref; vect[0] = data_ref;
data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle2"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_shuffle2");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR,
second_vect, second_vect, second_vect, second_vect,
perm2_mask2); perm2_mask2);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
vect[1] = data_ref; vect[1] = data_ref;
data_ref = make_temp_ssa_name (vectype, NULL, "vect_shift"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_shift");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR,
vect[0], vect[1], shift1_mask); vect[0], vect[1], shift1_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[j/2 + length/2] = data_ref; (*result_chain)[j/2 + length/2] = data_ref;
data_ref = make_temp_ssa_name (vectype, NULL, "vect_select"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_select");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR,
vect[0], vect[1], select_mask); vect[0], vect[1], select_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[j/2] = data_ref; (*result_chain)[j/2] = data_ref;
} }
memcpy (dr_chain.address (), result_chain->address (), memcpy (dr_chain.address (), result_chain->address (),
@@ -6259,7 +6259,7 @@ vect_shift_permute_load_chain (vec<tree> dr_chain,
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR,
dr_chain[k], dr_chain[k], dr_chain[k], dr_chain[k],
perm3_mask); perm3_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
vect[k] = data_ref; vect[k] = data_ref;
} }
@@ -6269,7 +6269,7 @@ vect_shift_permute_load_chain (vec<tree> dr_chain,
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR,
vect[k % 3], vect[(k + 1) % 3], vect[k % 3], vect[(k + 1) % 3],
shift1_mask); shift1_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
vect_shift[k] = data_ref; vect_shift[k] = data_ref;
} }
@@ -6280,7 +6280,7 @@ vect_shift_permute_load_chain (vec<tree> dr_chain,
vect_shift[(4 - k) % 3], vect_shift[(4 - k) % 3],
vect_shift[(3 - k) % 3], vect_shift[(3 - k) % 3],
shift2_mask); shift2_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
vect[k] = data_ref; vect[k] = data_ref;
} }
@@ -6289,13 +6289,13 @@ vect_shift_permute_load_chain (vec<tree> dr_chain,
data_ref = make_temp_ssa_name (vectype, NULL, "vect_shift3"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_shift3");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, vect[0], perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, vect[0],
vect[0], shift3_mask); vect[0], shift3_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[nelt % 3] = data_ref; (*result_chain)[nelt % 3] = data_ref;
data_ref = make_temp_ssa_name (vectype, NULL, "vect_shift4"); data_ref = make_temp_ssa_name (vectype, NULL, "vect_shift4");
perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, vect[1], perm_stmt = gimple_build_assign (data_ref, VEC_PERM_EXPR, vect[1],
vect[1], shift4_mask); vect[1], shift4_mask);
vect_finish_stmt_generation (stmt, perm_stmt, gsi); vect_finish_stmt_generation (stmt_info, perm_stmt, gsi);
(*result_chain)[0] = data_ref; (*result_chain)[0] = data_ref;
return true; return true;
} }
@@ -6328,10 +6328,10 @@ vect_transform_grouped_load (gimple *stmt, vec<tree> dr_chain, int size,
mode = TYPE_MODE (STMT_VINFO_VECTYPE (stmt_info)); mode = TYPE_MODE (STMT_VINFO_VECTYPE (stmt_info));
if (targetm.sched.reassociation_width (VEC_PERM_EXPR, mode) > 1 if (targetm.sched.reassociation_width (VEC_PERM_EXPR, mode) > 1
|| pow2p_hwi (size) || pow2p_hwi (size)
|| !vect_shift_permute_load_chain (dr_chain, size, stmt, || !vect_shift_permute_load_chain (dr_chain, size, stmt_info,
gsi, &result_chain)) gsi, &result_chain))
vect_permute_load_chain (dr_chain, size, stmt, gsi, &result_chain); vect_permute_load_chain (dr_chain, size, stmt_info, gsi, &result_chain);
vect_record_grouped_load_vectors (stmt, result_chain); vect_record_grouped_load_vectors (stmt_info, result_chain);
result_chain.release (); result_chain.release ();
} }

View File

@@ -1380,8 +1380,8 @@ vect_can_advance_ivs_p (loop_vec_info loop_vinfo)
stmt_vec_info phi_info = loop_vinfo->lookup_stmt (phi); stmt_vec_info phi_info = loop_vinfo->lookup_stmt (phi);
if (dump_enabled_p ()) if (dump_enabled_p ())
{ {
dump_printf_loc (MSG_NOTE, vect_location, "Analyze phi: "); dump_printf_loc (MSG_NOTE, vect_location, "Analyze phi: ");
dump_gimple_stmt (MSG_NOTE, TDF_SLIM, phi, 0); dump_gimple_stmt (MSG_NOTE, TDF_SLIM, phi_info->stmt, 0);
} }
/* Skip virtual phi's. The data dependences that are associated with /* Skip virtual phi's. The data dependences that are associated with

View File

@@ -526,7 +526,7 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
|| (LOOP_VINFO_LOOP (loop_vinfo) != loop || (LOOP_VINFO_LOOP (loop_vinfo) != loop
&& TREE_CODE (step) != INTEGER_CST)) && TREE_CODE (step) != INTEGER_CST))
{ {
worklist.safe_push (phi); worklist.safe_push (stmt_vinfo);
continue; continue;
} }
@@ -1595,11 +1595,12 @@ vect_analyze_loop_operations (loop_vec_info loop_vinfo)
need_to_vectorize = true; need_to_vectorize = true;
if (STMT_VINFO_DEF_TYPE (stmt_info) == vect_induction_def if (STMT_VINFO_DEF_TYPE (stmt_info) == vect_induction_def
&& ! PURE_SLP_STMT (stmt_info)) && ! PURE_SLP_STMT (stmt_info))
ok = vectorizable_induction (phi, NULL, NULL, NULL, &cost_vec); ok = vectorizable_induction (stmt_info, NULL, NULL, NULL,
&cost_vec);
else if ((STMT_VINFO_DEF_TYPE (stmt_info) == vect_reduction_def else if ((STMT_VINFO_DEF_TYPE (stmt_info) == vect_reduction_def
|| STMT_VINFO_DEF_TYPE (stmt_info) == vect_nested_cycle) || STMT_VINFO_DEF_TYPE (stmt_info) == vect_nested_cycle)
&& ! PURE_SLP_STMT (stmt_info)) && ! PURE_SLP_STMT (stmt_info))
ok = vectorizable_reduction (phi, NULL, NULL, NULL, NULL, ok = vectorizable_reduction (stmt_info, NULL, NULL, NULL, NULL,
&cost_vec); &cost_vec);
} }
@@ -1607,7 +1608,7 @@ vect_analyze_loop_operations (loop_vec_info loop_vinfo)
if (ok if (ok
&& STMT_VINFO_LIVE_P (stmt_info) && STMT_VINFO_LIVE_P (stmt_info)
&& !PURE_SLP_STMT (stmt_info)) && !PURE_SLP_STMT (stmt_info))
ok = vectorizable_live_operation (phi, NULL, NULL, -1, NULL, ok = vectorizable_live_operation (stmt_info, NULL, NULL, -1, NULL,
&cost_vec); &cost_vec);
if (!ok) if (!ok)
@@ -4045,7 +4046,7 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val,
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo); struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
tree scalar_type = TREE_TYPE (init_val); tree scalar_type = TREE_TYPE (init_val);
tree vectype = get_vectype_for_scalar_type (scalar_type); tree vectype = get_vectype_for_scalar_type (scalar_type);
enum tree_code code = gimple_assign_rhs_code (stmt); enum tree_code code = gimple_assign_rhs_code (stmt_vinfo->stmt);
tree def_for_init; tree def_for_init;
tree init_def; tree init_def;
REAL_VALUE_TYPE real_init_val = dconst0; REAL_VALUE_TYPE real_init_val = dconst0;
@@ -4057,8 +4058,8 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val,
gcc_assert (POINTER_TYPE_P (scalar_type) || INTEGRAL_TYPE_P (scalar_type) gcc_assert (POINTER_TYPE_P (scalar_type) || INTEGRAL_TYPE_P (scalar_type)
|| SCALAR_FLOAT_TYPE_P (scalar_type)); || SCALAR_FLOAT_TYPE_P (scalar_type));
gcc_assert (nested_in_vect_loop_p (loop, stmt) gcc_assert (nested_in_vect_loop_p (loop, stmt_vinfo)
|| loop == (gimple_bb (stmt))->loop_father); || loop == (gimple_bb (stmt_vinfo->stmt))->loop_father);
vect_reduction_type reduction_type vect_reduction_type reduction_type
= STMT_VINFO_VEC_REDUCTION_TYPE (stmt_vinfo); = STMT_VINFO_VEC_REDUCTION_TYPE (stmt_vinfo);
@@ -4127,7 +4128,7 @@ get_initial_def_for_reduction (gimple *stmt, tree init_val,
if (reduction_type != COND_REDUCTION if (reduction_type != COND_REDUCTION
&& reduction_type != EXTRACT_LAST_REDUCTION) && reduction_type != EXTRACT_LAST_REDUCTION)
{ {
init_def = vect_get_vec_def_for_operand (init_val, stmt); init_def = vect_get_vec_def_for_operand (init_val, stmt_vinfo);
break; break;
} }
} }
@@ -4406,7 +4407,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
tree vec_dest; tree vec_dest;
tree new_temp = NULL_TREE, new_dest, new_name, new_scalar_dest; tree new_temp = NULL_TREE, new_dest, new_name, new_scalar_dest;
gimple *epilog_stmt = NULL; gimple *epilog_stmt = NULL;
enum tree_code code = gimple_assign_rhs_code (stmt); enum tree_code code = gimple_assign_rhs_code (stmt_info->stmt);
gimple *exit_phi; gimple *exit_phi;
tree bitsize; tree bitsize;
tree adjustment_def = NULL; tree adjustment_def = NULL;
@@ -4435,7 +4436,7 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
if (slp_node) if (slp_node)
group_size = SLP_TREE_SCALAR_STMTS (slp_node).length (); group_size = SLP_TREE_SCALAR_STMTS (slp_node).length ();
if (nested_in_vect_loop_p (loop, stmt)) if (nested_in_vect_loop_p (loop, stmt_info))
{ {
outer_loop = loop; outer_loop = loop;
loop = loop->inner; loop = loop->inner;
@@ -4504,11 +4505,13 @@ vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple *stmt,
/* Do not use an adjustment def as that case is not supported /* Do not use an adjustment def as that case is not supported
correctly if ncopies is not one. */ correctly if ncopies is not one. */
vect_is_simple_use (initial_def, loop_vinfo, &initial_def_dt); vect_is_simple_use (initial_def, loop_vinfo, &initial_def_dt);
vec_initial_def = vect_get_vec_def_for_operand (initial_def, stmt); vec_initial_def = vect_get_vec_def_for_operand (initial_def,
stmt_info);
} }
else else
vec_initial_def = get_initial_def_for_reduction (stmt, initial_def, vec_initial_def
&adjustment_def); = get_initial_def_for_reduction (stmt_info, initial_def,
&adjustment_def);
vec_initial_defs.create (1); vec_initial_defs.create (1);
vec_initial_defs.quick_push (vec_initial_def); vec_initial_defs.quick_push (vec_initial_def);
} }
@@ -5676,7 +5679,7 @@ vect_finalize_reduction:
preheader_arg = PHI_ARG_DEF_FROM_EDGE (use_stmt, preheader_arg = PHI_ARG_DEF_FROM_EDGE (use_stmt,
loop_preheader_edge (outer_loop)); loop_preheader_edge (outer_loop));
vect_phi_init = get_initial_def_for_reduction vect_phi_init = get_initial_def_for_reduction
(stmt, preheader_arg, NULL); (stmt_info, preheader_arg, NULL);
/* Update phi node arguments with vs0 and vs2. */ /* Update phi node arguments with vs0 and vs2. */
add_phi_arg (vect_phi, vect_phi_init, add_phi_arg (vect_phi, vect_phi_init,
@@ -5841,7 +5844,7 @@ vectorize_fold_left_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
else else
ncopies = vect_get_num_copies (loop_vinfo, vectype_in); ncopies = vect_get_num_copies (loop_vinfo, vectype_in);
gcc_assert (!nested_in_vect_loop_p (loop, stmt)); gcc_assert (!nested_in_vect_loop_p (loop, stmt_info));
gcc_assert (ncopies == 1); gcc_assert (ncopies == 1);
gcc_assert (TREE_CODE_LENGTH (code) == binary_op); gcc_assert (TREE_CODE_LENGTH (code) == binary_op);
gcc_assert (reduc_index == (code == MINUS_EXPR ? 0 : 1)); gcc_assert (reduc_index == (code == MINUS_EXPR ? 0 : 1));
@@ -5859,13 +5862,14 @@ vectorize_fold_left_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
auto_vec<tree> vec_oprnds0; auto_vec<tree> vec_oprnds0;
if (slp_node) if (slp_node)
{ {
vect_get_vec_defs (op0, NULL_TREE, stmt, &vec_oprnds0, NULL, slp_node); vect_get_vec_defs (op0, NULL_TREE, stmt_info, &vec_oprnds0, NULL,
slp_node);
group_size = SLP_TREE_SCALAR_STMTS (slp_node).length (); group_size = SLP_TREE_SCALAR_STMTS (slp_node).length ();
scalar_dest_def_info = SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1]; scalar_dest_def_info = SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1];
} }
else else
{ {
tree loop_vec_def0 = vect_get_vec_def_for_operand (op0, stmt); tree loop_vec_def0 = vect_get_vec_def_for_operand (op0, stmt_info);
vec_oprnds0.create (1); vec_oprnds0.create (1);
vec_oprnds0.quick_push (loop_vec_def0); vec_oprnds0.quick_push (loop_vec_def0);
scalar_dest_def_info = stmt_info; scalar_dest_def_info = stmt_info;
@@ -6099,7 +6103,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
&& STMT_VINFO_DEF_TYPE (stmt_info) != vect_nested_cycle) && STMT_VINFO_DEF_TYPE (stmt_info) != vect_nested_cycle)
return false; return false;
if (nested_in_vect_loop_p (loop, stmt)) if (nested_in_vect_loop_p (loop, stmt_info))
{ {
loop = loop->inner; loop = loop->inner;
nested_cycle = true; nested_cycle = true;
@@ -6109,7 +6113,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
gcc_assert (slp_node gcc_assert (slp_node
&& REDUC_GROUP_FIRST_ELEMENT (stmt_info) == stmt_info); && REDUC_GROUP_FIRST_ELEMENT (stmt_info) == stmt_info);
if (gphi *phi = dyn_cast <gphi *> (stmt)) if (gphi *phi = dyn_cast <gphi *> (stmt_info->stmt))
{ {
tree phi_result = gimple_phi_result (phi); tree phi_result = gimple_phi_result (phi);
/* Analysis is fully done on the reduction stmt invocation. */ /* Analysis is fully done on the reduction stmt invocation. */
@@ -6164,7 +6168,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
&& STMT_VINFO_RELEVANT (reduc_stmt_info) <= vect_used_only_live && STMT_VINFO_RELEVANT (reduc_stmt_info) <= vect_used_only_live
&& (use_stmt_info = loop_vinfo->lookup_single_use (phi_result)) && (use_stmt_info = loop_vinfo->lookup_single_use (phi_result))
&& (use_stmt_info == reduc_stmt_info && (use_stmt_info == reduc_stmt_info
|| STMT_VINFO_RELATED_STMT (use_stmt_info) == reduc_stmt)) || STMT_VINFO_RELATED_STMT (use_stmt_info) == reduc_stmt_info))
single_defuse_cycle = true; single_defuse_cycle = true;
/* Create the destination vector */ /* Create the destination vector */
@@ -6548,7 +6552,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
{ {
/* Only call during the analysis stage, otherwise we'll lose /* Only call during the analysis stage, otherwise we'll lose
STMT_VINFO_TYPE. */ STMT_VINFO_TYPE. */
if (!vec_stmt && !vectorizable_condition (stmt, gsi, NULL, if (!vec_stmt && !vectorizable_condition (stmt_info, gsi, NULL,
ops[reduc_index], 0, NULL, ops[reduc_index], 0, NULL,
cost_vec)) cost_vec))
{ {
@@ -6935,7 +6939,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
&& (STMT_VINFO_RELEVANT (stmt_info) <= vect_used_only_live) && (STMT_VINFO_RELEVANT (stmt_info) <= vect_used_only_live)
&& (use_stmt_info = loop_vinfo->lookup_single_use (reduc_phi_result)) && (use_stmt_info = loop_vinfo->lookup_single_use (reduc_phi_result))
&& (use_stmt_info == stmt_info && (use_stmt_info == stmt_info
|| STMT_VINFO_RELATED_STMT (use_stmt_info) == stmt)) || STMT_VINFO_RELATED_STMT (use_stmt_info) == stmt_info))
{ {
single_defuse_cycle = true; single_defuse_cycle = true;
epilog_copies = 1; epilog_copies = 1;
@@ -7015,13 +7019,13 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
if (reduction_type == FOLD_LEFT_REDUCTION) if (reduction_type == FOLD_LEFT_REDUCTION)
return vectorize_fold_left_reduction return vectorize_fold_left_reduction
(stmt, gsi, vec_stmt, slp_node, reduc_def_phi, code, (stmt_info, gsi, vec_stmt, slp_node, reduc_def_phi, code,
reduc_fn, ops, vectype_in, reduc_index, masks); reduc_fn, ops, vectype_in, reduc_index, masks);
if (reduction_type == EXTRACT_LAST_REDUCTION) if (reduction_type == EXTRACT_LAST_REDUCTION)
{ {
gcc_assert (!slp_node); gcc_assert (!slp_node);
return vectorizable_condition (stmt, gsi, vec_stmt, return vectorizable_condition (stmt_info, gsi, vec_stmt,
NULL, reduc_index, NULL, NULL); NULL, reduc_index, NULL, NULL);
} }
@@ -7053,7 +7057,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
if (code == COND_EXPR) if (code == COND_EXPR)
{ {
gcc_assert (!slp_node); gcc_assert (!slp_node);
vectorizable_condition (stmt, gsi, vec_stmt, vectorizable_condition (stmt_info, gsi, vec_stmt,
PHI_RESULT (phis[0]->stmt), PHI_RESULT (phis[0]->stmt),
reduc_index, NULL, NULL); reduc_index, NULL, NULL);
/* Multiple types are not supported for condition. */ /* Multiple types are not supported for condition. */
@@ -7090,12 +7094,12 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
else else
{ {
vec_oprnds0.quick_push vec_oprnds0.quick_push
(vect_get_vec_def_for_operand (ops[0], stmt)); (vect_get_vec_def_for_operand (ops[0], stmt_info));
vec_oprnds1.quick_push vec_oprnds1.quick_push
(vect_get_vec_def_for_operand (ops[1], stmt)); (vect_get_vec_def_for_operand (ops[1], stmt_info));
if (op_type == ternary_op) if (op_type == ternary_op)
vec_oprnds2.quick_push vec_oprnds2.quick_push
(vect_get_vec_def_for_operand (ops[2], stmt)); (vect_get_vec_def_for_operand (ops[2], stmt_info));
} }
} }
else else
@ -7144,7 +7148,8 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
new_temp = make_ssa_name (vec_dest, call); new_temp = make_ssa_name (vec_dest, call);
gimple_call_set_lhs (call, new_temp); gimple_call_set_lhs (call, new_temp);
gimple_call_set_nothrow (call, true); gimple_call_set_nothrow (call, true);
new_stmt_info = vect_finish_stmt_generation (stmt, call, gsi); new_stmt_info
= vect_finish_stmt_generation (stmt_info, call, gsi);
} }
else else
{ {
@ -7156,7 +7161,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
new_temp = make_ssa_name (vec_dest, new_stmt); new_temp = make_ssa_name (vec_dest, new_stmt);
gimple_assign_set_lhs (new_stmt, new_temp); gimple_assign_set_lhs (new_stmt, new_temp);
new_stmt_info new_stmt_info
= vect_finish_stmt_generation (stmt, new_stmt, gsi); = vect_finish_stmt_generation (stmt_info, new_stmt, gsi);
} }
if (slp_node) if (slp_node)
@ -7184,7 +7189,7 @@ vectorizable_reduction (gimple *stmt, gimple_stmt_iterator *gsi,
if ((!single_defuse_cycle || code == COND_EXPR) && !slp_node) if ((!single_defuse_cycle || code == COND_EXPR) && !slp_node)
vect_defs[0] = gimple_get_lhs ((*vec_stmt)->stmt); vect_defs[0] = gimple_get_lhs ((*vec_stmt)->stmt);
vect_create_epilog_for_reduction (vect_defs, stmt, reduc_def_phi, vect_create_epilog_for_reduction (vect_defs, stmt_info, reduc_def_phi,
epilog_copies, reduc_fn, phis, epilog_copies, reduc_fn, phis,
double_reduc, slp_node, slp_node_instance, double_reduc, slp_node, slp_node_instance,
cond_reduc_val, cond_reduc_op_code, cond_reduc_val, cond_reduc_op_code,
@ -7293,7 +7298,7 @@ vectorizable_induction (gimple *phi,
gcc_assert (ncopies >= 1); gcc_assert (ncopies >= 1);
/* FORNOW. These restrictions should be relaxed. */ /* FORNOW. These restrictions should be relaxed. */
if (nested_in_vect_loop_p (loop, phi)) if (nested_in_vect_loop_p (loop, stmt_info))
{ {
imm_use_iterator imm_iter; imm_use_iterator imm_iter;
use_operand_p use_p; use_operand_p use_p;
@ -7443,10 +7448,10 @@ vectorizable_induction (gimple *phi,
new_name = fold_build2 (MULT_EXPR, TREE_TYPE (step_expr), new_name = fold_build2 (MULT_EXPR, TREE_TYPE (step_expr),
expr, step_expr); expr, step_expr);
if (! CONSTANT_CLASS_P (new_name)) if (! CONSTANT_CLASS_P (new_name))
new_name = vect_init_vector (phi, new_name, new_name = vect_init_vector (stmt_info, new_name,
TREE_TYPE (step_expr), NULL); TREE_TYPE (step_expr), NULL);
new_vec = build_vector_from_val (vectype, new_name); new_vec = build_vector_from_val (vectype, new_name);
vec_step = vect_init_vector (phi, new_vec, vectype, NULL); vec_step = vect_init_vector (stmt_info, new_vec, vectype, NULL);
/* Now generate the IVs. */ /* Now generate the IVs. */
unsigned group_size = SLP_TREE_SCALAR_STMTS (slp_node).length (); unsigned group_size = SLP_TREE_SCALAR_STMTS (slp_node).length ();
@ -7513,10 +7518,10 @@ vectorizable_induction (gimple *phi,
new_name = fold_build2 (MULT_EXPR, TREE_TYPE (step_expr), new_name = fold_build2 (MULT_EXPR, TREE_TYPE (step_expr),
expr, step_expr); expr, step_expr);
if (! CONSTANT_CLASS_P (new_name)) if (! CONSTANT_CLASS_P (new_name))
new_name = vect_init_vector (phi, new_name, new_name = vect_init_vector (stmt_info, new_name,
TREE_TYPE (step_expr), NULL); TREE_TYPE (step_expr), NULL);
new_vec = build_vector_from_val (vectype, new_name); new_vec = build_vector_from_val (vectype, new_name);
vec_step = vect_init_vector (phi, new_vec, vectype, NULL); vec_step = vect_init_vector (stmt_info, new_vec, vectype, NULL);
for (; ivn < nvects; ++ivn) for (; ivn < nvects; ++ivn)
{ {
gimple *iv = SLP_TREE_VEC_STMTS (slp_node)[ivn - nivs]->stmt; gimple *iv = SLP_TREE_VEC_STMTS (slp_node)[ivn - nivs]->stmt;
@ -7549,7 +7554,7 @@ vectorizable_induction (gimple *phi,
/* iv_loop is nested in the loop to be vectorized. init_expr had already /* iv_loop is nested in the loop to be vectorized. init_expr had already
been created during vectorization of previous stmts. We obtain it been created during vectorization of previous stmts. We obtain it
from the STMT_VINFO_VEC_STMT of the defining stmt. */ from the STMT_VINFO_VEC_STMT of the defining stmt. */
vec_init = vect_get_vec_def_for_operand (init_expr, phi); vec_init = vect_get_vec_def_for_operand (init_expr, stmt_info);
/* If the initial value is not of proper type, convert it. */ /* If the initial value is not of proper type, convert it. */
if (!useless_type_conversion_p (vectype, TREE_TYPE (vec_init))) if (!useless_type_conversion_p (vectype, TREE_TYPE (vec_init)))
{ {
@ -7651,7 +7656,7 @@ vectorizable_induction (gimple *phi,
gcc_assert (CONSTANT_CLASS_P (new_name) gcc_assert (CONSTANT_CLASS_P (new_name)
|| TREE_CODE (new_name) == SSA_NAME); || TREE_CODE (new_name) == SSA_NAME);
new_vec = build_vector_from_val (vectype, t); new_vec = build_vector_from_val (vectype, t);
vec_step = vect_init_vector (phi, new_vec, vectype, NULL); vec_step = vect_init_vector (stmt_info, new_vec, vectype, NULL);
/* Create the following def-use cycle: /* Create the following def-use cycle:
@ -7717,7 +7722,7 @@ vectorizable_induction (gimple *phi,
gcc_assert (CONSTANT_CLASS_P (new_name) gcc_assert (CONSTANT_CLASS_P (new_name)
|| TREE_CODE (new_name) == SSA_NAME); || TREE_CODE (new_name) == SSA_NAME);
new_vec = build_vector_from_val (vectype, t); new_vec = build_vector_from_val (vectype, t);
vec_step = vect_init_vector (phi, new_vec, vectype, NULL); vec_step = vect_init_vector (stmt_info, new_vec, vectype, NULL);
vec_def = induc_def; vec_def = induc_def;
prev_stmt_vinfo = induction_phi_info; prev_stmt_vinfo = induction_phi_info;
@ -7815,7 +7820,7 @@ vectorizable_live_operation (gimple *stmt,
return false; return false;
/* FORNOW. CHECKME. */ /* FORNOW. CHECKME. */
if (nested_in_vect_loop_p (loop, stmt)) if (nested_in_vect_loop_p (loop, stmt_info))
return false; return false;
/* If STMT is not relevant and it is a simple assignment and its inputs are /* If STMT is not relevant and it is a simple assignment and its inputs are
@ -7823,7 +7828,7 @@ vectorizable_live_operation (gimple *stmt,
scalar value that it computes will be used. */ scalar value that it computes will be used. */
if (!STMT_VINFO_RELEVANT_P (stmt_info)) if (!STMT_VINFO_RELEVANT_P (stmt_info))
{ {
gcc_assert (is_simple_and_all_uses_invariant (stmt, loop_vinfo)); gcc_assert (is_simple_and_all_uses_invariant (stmt_info, loop_vinfo));
if (dump_enabled_p ()) if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, dump_printf_loc (MSG_NOTE, vect_location,
"statement is simple and uses invariant. Leaving in " "statement is simple and uses invariant. Leaving in "
@ -8222,11 +8227,11 @@ vect_transform_loop_stmt (loop_vec_info loop_vinfo, gimple *stmt,
{ {
dump_printf_loc (MSG_NOTE, vect_location, dump_printf_loc (MSG_NOTE, vect_location,
"------>vectorizing statement: "); "------>vectorizing statement: ");
dump_gimple_stmt (MSG_NOTE, TDF_SLIM, stmt, 0); dump_gimple_stmt (MSG_NOTE, TDF_SLIM, stmt_info->stmt, 0);
} }
if (MAY_HAVE_DEBUG_BIND_STMTS && !STMT_VINFO_LIVE_P (stmt_info)) if (MAY_HAVE_DEBUG_BIND_STMTS && !STMT_VINFO_LIVE_P (stmt_info))
vect_loop_kill_debug_uses (loop, stmt); vect_loop_kill_debug_uses (loop, stmt_info);
if (!STMT_VINFO_RELEVANT_P (stmt_info) if (!STMT_VINFO_RELEVANT_P (stmt_info)
&& !STMT_VINFO_LIVE_P (stmt_info)) && !STMT_VINFO_LIVE_P (stmt_info))
@ -8267,7 +8272,7 @@ vect_transform_loop_stmt (loop_vec_info loop_vinfo, gimple *stmt,
dump_printf_loc (MSG_NOTE, vect_location, "transform statement.\n"); dump_printf_loc (MSG_NOTE, vect_location, "transform statement.\n");
bool grouped_store = false; bool grouped_store = false;
if (vect_transform_stmt (stmt, gsi, &grouped_store, NULL, NULL)) if (vect_transform_stmt (stmt_info, gsi, &grouped_store, NULL, NULL))
*seen_store = stmt_info; *seen_store = stmt_info;
} }
@ -8422,7 +8427,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
continue; continue;
if (MAY_HAVE_DEBUG_BIND_STMTS && !STMT_VINFO_LIVE_P (stmt_info)) if (MAY_HAVE_DEBUG_BIND_STMTS && !STMT_VINFO_LIVE_P (stmt_info))
vect_loop_kill_debug_uses (loop, phi); vect_loop_kill_debug_uses (loop, stmt_info);
if (!STMT_VINFO_RELEVANT_P (stmt_info) if (!STMT_VINFO_RELEVANT_P (stmt_info)
&& !STMT_VINFO_LIVE_P (stmt_info)) && !STMT_VINFO_LIVE_P (stmt_info))
@ -8441,7 +8446,7 @@ vect_transform_loop (loop_vec_info loop_vinfo)
{ {
if (dump_enabled_p ()) if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "transform phi.\n"); dump_printf_loc (MSG_NOTE, vect_location, "transform phi.\n");
vect_transform_stmt (phi, NULL, NULL, NULL, NULL); vect_transform_stmt (stmt_info, NULL, NULL, NULL, NULL);
} }
} }

View File

@ -842,7 +842,7 @@ vect_reassociating_reduction_p (stmt_vec_info stmt_info, tree_code code,
/* We don't allow changing the order of the computation in the inner-loop /* We don't allow changing the order of the computation in the inner-loop
when doing outer-loop vectorization. */ when doing outer-loop vectorization. */
struct loop *loop = LOOP_VINFO_LOOP (loop_info); struct loop *loop = LOOP_VINFO_LOOP (loop_info);
if (loop && nested_in_vect_loop_p (loop, assign)) if (loop && nested_in_vect_loop_p (loop, stmt_info))
return false; return false;
if (!vect_reassociating_reduction_p (stmt_info)) if (!vect_reassociating_reduction_p (stmt_info))
@ -1196,7 +1196,7 @@ vect_recog_widen_op_pattern (stmt_vec_info last_stmt_info, tree *type_out,
auto_vec<tree> dummy_vec; auto_vec<tree> dummy_vec;
if (!vectype if (!vectype
|| !vecitype || !vecitype
|| !supportable_widening_operation (wide_code, last_stmt, || !supportable_widening_operation (wide_code, last_stmt_info,
vecitype, vectype, vecitype, vectype,
&dummy_code, &dummy_code, &dummy_code, &dummy_code,
&dummy_int, &dummy_vec)) &dummy_int, &dummy_vec))
@ -3118,11 +3118,11 @@ vect_recog_mixed_size_cond_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
return NULL; return NULL;
if ((TREE_CODE (then_clause) != INTEGER_CST if ((TREE_CODE (then_clause) != INTEGER_CST
&& !type_conversion_p (then_clause, last_stmt, false, &orig_type0, && !type_conversion_p (then_clause, stmt_vinfo, false, &orig_type0,
&def_stmt0, &promotion)) &def_stmt0, &promotion))
|| (TREE_CODE (else_clause) != INTEGER_CST || (TREE_CODE (else_clause) != INTEGER_CST
&& !type_conversion_p (else_clause, last_stmt, false, &orig_type1, && !type_conversion_p (else_clause, stmt_vinfo, false, &orig_type1,
&def_stmt1, &promotion))) &def_stmt1, &promotion)))
return NULL; return NULL;
if (orig_type0 && orig_type1 if (orig_type0 && orig_type1
@ -3709,7 +3709,7 @@ vect_recog_bool_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
if (check_bool_pattern (var, vinfo, bool_stmts)) if (check_bool_pattern (var, vinfo, bool_stmts))
{ {
rhs = adjust_bool_stmts (bool_stmts, TREE_TYPE (lhs), last_stmt); rhs = adjust_bool_stmts (bool_stmts, TREE_TYPE (lhs), stmt_vinfo);
lhs = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL); lhs = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL);
if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs))) if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
pattern_stmt = gimple_build_assign (lhs, SSA_NAME, rhs); pattern_stmt = gimple_build_assign (lhs, SSA_NAME, rhs);
@ -3776,7 +3776,7 @@ vect_recog_bool_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
if (!check_bool_pattern (var, vinfo, bool_stmts)) if (!check_bool_pattern (var, vinfo, bool_stmts))
return NULL; return NULL;
rhs = adjust_bool_stmts (bool_stmts, type, last_stmt); rhs = adjust_bool_stmts (bool_stmts, type, stmt_vinfo);
lhs = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL); lhs = vect_recog_temp_ssa_var (TREE_TYPE (lhs), NULL);
pattern_stmt pattern_stmt
@ -3800,7 +3800,7 @@ vect_recog_bool_pattern (stmt_vec_info stmt_vinfo, tree *type_out)
return NULL; return NULL;
if (check_bool_pattern (var, vinfo, bool_stmts)) if (check_bool_pattern (var, vinfo, bool_stmts))
rhs = adjust_bool_stmts (bool_stmts, TREE_TYPE (vectype), last_stmt); rhs = adjust_bool_stmts (bool_stmts, TREE_TYPE (vectype), stmt_vinfo);
else else
{ {
tree type = search_type_for_mask (var, vinfo); tree type = search_type_for_mask (var, vinfo);
@ -4234,13 +4234,12 @@ vect_recog_gather_scatter_pattern (stmt_vec_info stmt_info, tree *type_out)
/* Get the boolean that controls whether the load or store happens. /* Get the boolean that controls whether the load or store happens.
This is null if the operation is unconditional. */ This is null if the operation is unconditional. */
gimple *stmt = stmt_info->stmt; tree mask = vect_get_load_store_mask (stmt_info);
tree mask = vect_get_load_store_mask (stmt);
/* Make sure that the target supports an appropriate internal /* Make sure that the target supports an appropriate internal
function for the gather/scatter operation. */ function for the gather/scatter operation. */
gather_scatter_info gs_info; gather_scatter_info gs_info;
if (!vect_check_gather_scatter (stmt, loop_vinfo, &gs_info) if (!vect_check_gather_scatter (stmt_info, loop_vinfo, &gs_info)
|| gs_info.decl) || gs_info.decl)
return NULL; return NULL;
@ -4273,7 +4272,7 @@ vect_recog_gather_scatter_pattern (stmt_vec_info stmt_info, tree *type_out)
} }
else else
{ {
tree rhs = vect_get_store_rhs (stmt); tree rhs = vect_get_store_rhs (stmt_info);
if (mask != NULL) if (mask != NULL)
pattern_stmt = gimple_build_call_internal (IFN_MASK_SCATTER_STORE, 5, pattern_stmt = gimple_build_call_internal (IFN_MASK_SCATTER_STORE, 5,
base, offset, scale, rhs, base, offset, scale, rhs,
@ -4295,7 +4294,7 @@ vect_recog_gather_scatter_pattern (stmt_vec_info stmt_info, tree *type_out)
tree vectype = STMT_VINFO_VECTYPE (stmt_info); tree vectype = STMT_VINFO_VECTYPE (stmt_info);
*type_out = vectype; *type_out = vectype;
vect_pattern_detected ("gather/scatter pattern", stmt); vect_pattern_detected ("gather/scatter pattern", stmt_info->stmt);
return pattern_stmt; return pattern_stmt;
} }

View File

@ -2096,8 +2096,8 @@ vect_analyze_slp_instance (vec_info *vinfo,
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location, dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"Build SLP failed: unsupported load " "Build SLP failed: unsupported load "
"permutation "); "permutation ");
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, dump_gimple_stmt (MSG_MISSED_OPTIMIZATION,
TDF_SLIM, stmt, 0); TDF_SLIM, stmt_info->stmt, 0);
} }
vect_free_slp_instance (new_instance, false); vect_free_slp_instance (new_instance, false);
return false; return false;
@ -2172,8 +2172,9 @@ vect_analyze_slp_instance (vec_info *vinfo,
gcc_assert ((const_nunits & (const_nunits - 1)) == 0); gcc_assert ((const_nunits & (const_nunits - 1)) == 0);
unsigned group1_size = i & ~(const_nunits - 1); unsigned group1_size = i & ~(const_nunits - 1);
gimple *rest = vect_split_slp_store_group (stmt, group1_size); gimple *rest = vect_split_slp_store_group (stmt_info, group1_size);
bool res = vect_analyze_slp_instance (vinfo, stmt, max_tree_size); bool res = vect_analyze_slp_instance (vinfo, stmt_info,
max_tree_size);
/* If the first non-match was in the middle of a vector, /* If the first non-match was in the middle of a vector,
skip the rest of that vector. */ skip the rest of that vector. */
if (group1_size < i) if (group1_size < i)
@ -2513,7 +2514,6 @@ vect_slp_analyze_node_operations_1 (vec_info *vinfo, slp_tree node,
stmt_vector_for_cost *cost_vec) stmt_vector_for_cost *cost_vec)
{ {
stmt_vec_info stmt_info = SLP_TREE_SCALAR_STMTS (node)[0]; stmt_vec_info stmt_info = SLP_TREE_SCALAR_STMTS (node)[0];
gimple *stmt = stmt_info->stmt;
gcc_assert (STMT_SLP_TYPE (stmt_info) != loop_vect); gcc_assert (STMT_SLP_TYPE (stmt_info) != loop_vect);
/* For BB vectorization vector types are assigned here. /* For BB vectorization vector types are assigned here.
@ -2567,7 +2567,7 @@ vect_slp_analyze_node_operations_1 (vec_info *vinfo, slp_tree node,
} }
bool dummy; bool dummy;
return vect_analyze_stmt (stmt, &dummy, node, node_instance, cost_vec); return vect_analyze_stmt (stmt_info, &dummy, node, node_instance, cost_vec);
} }
/* Analyze statements contained in SLP tree NODE after recursively analyzing /* Analyze statements contained in SLP tree NODE after recursively analyzing

File diff suppressed because it is too large Load Diff

View File

@ -1325,7 +1325,7 @@ vect_dr_behavior (data_reference *dr)
stmt_vec_info stmt_info = vinfo_for_stmt (stmt); stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info); loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
if (loop_vinfo == NULL if (loop_vinfo == NULL
|| !nested_in_vect_loop_p (LOOP_VINFO_LOOP (loop_vinfo), stmt)) || !nested_in_vect_loop_p (LOOP_VINFO_LOOP (loop_vinfo), stmt_info))
return &DR_INNERMOST (dr); return &DR_INNERMOST (dr);
else else
return &STMT_VINFO_DR_WRT_VEC_LOOP (stmt_info); return &STMT_VINFO_DR_WRT_VEC_LOOP (stmt_info);