mirror of git://gcc.gnu.org/git/gcc.git
ipa-cp.c (ipcp_cloning_candidate_p): Use opt_for_fn.

	* ipa-cp.c (ipcp_cloning_candidate_p): Use opt_for_fn.
	(ipa_value_from_jfunc, ipa_context_from_jfunc): Skip sanity check.
	(ipa_get_indirect_edge_target_1): Use opt_for_fn.
	(good_cloning_opportunity_p): Likewise.
	(ipa-cp gate): Enable ipa-cp with LTO.
	* ipa-profile.c (ipa_propagate_frequency): Use opt_for_fn.
	* ipa.c (symbol_table::remove_unreachable_nodes): Always build type
	inheritance.
	* ipa-inline-transform.c (inline_transform): Check if there are inlines
	to apply even at -O0.
	* cgraphunit.c (cgraph_node::finalize_function): Use opt_for_fn.
	(analyze_functions): Build type inheritance graph.
	* ipa-inline.c (can_inline_edge_p): Use opt_for_fn.
	(want_early_inline_function_p, want_inline_small_function_p):
	Likewise.
	(check_callers): Likewise.
	(edge_badness): Likewise.
	(inline_small_functions): Always be ready for indirect inlining
	to happen.
	(ipa_inline): Always use want_inline_function_to_all_callers_p.
	(early_inline_small_functions): Use opt_for_fn.
	* ipa-inline-analysis.c (estimate_function_body_sizes): Use opt_for_fn.
	(estimate_function_body_sizes): Likewise.
	(compute_inline_parameters): Likewise.
	(estimate_edge_devirt_benefit): Likewise.
	(inline_analyze_function): Likewise.
	* ipa-devirt.c (ipa_devirt): Likewise.
	(gate): Use in_lto_p.
	* ipa-prop.c (ipa_func_spec_opts_forbid_analysis_p): Use opt_for_fn.
	(try_make_edge_direct_virtual_call): Likewise.
	(update_indirect_edges_after_inlining): Likewise.
	(ipa_free_all_structures_after_ipa_cp): Add in_lto_p check.
	* common.opt (findirect-inlining): Turn into optimization.
	* ipa-pure-const.c (add_new_function): Use opt_for_fn.
	(pure_const_generate_summary): Likewise.
	(gate_pure_const): Always enable with in_lto_p.

From-SVN: r217737
parent bb59f396f8
commit 2bf86c845a
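The recurring change throughout this patch is replacing tests of global option flags with per-function lookups through opt_for_fn, so the IPA passes consult the options recorded on each function's decl; this matters once the passes also run under LTO, where individual functions may carry different optimization settings. A minimal illustration of the pattern, lifted from the ipa-cp.c hunk below (shown out of context; the surrounding function and GCC-internal declarations are assumed):

  /* Before: consult the global flag, which ignores per-function settings.  */
  if (!flag_devirtualize)
    return NULL_TREE;

  /* After: read the flag recorded for the caller's decl via opt_for_fn.  */
  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL_TREE;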
gcc/ChangeLog
@@ -1,3 +1,42 @@
+2014-11-18  Jan Hubicka  <hubicka@ucw.cz>
+
+	* ipa-cp.c (ipcp_cloning_candidate_p): Use opt_for_fn.
+	(ipa_value_from_jfunc, ipa_context_from_jfunc): Skip sanity check.
+	(ipa_get_indirect_edge_target_1): Use opt_for_fn.
+	(good_cloning_opportunity_p): Likewise.
+	(ipa-cp gate): Enable ipa-cp with LTO.
+	* ipa-profile.c (ipa_propagate_frequency): Use opt_for_fn.
+	* ipa.c (symbol_table::remove_unreachable_nodes): Always build type
+	inheritance.
+	* ipa-inline-transform.c (inline_transform): Check if there are inlines
+	to apply even at -O0.
+	* cgraphunit.c (cgraph_node::finalize_function): Use opt_for_fn.
+	(analyze_functions): Build type inheritance graph.
+	* ipa-inline.c (can_inline_edge_p): Use opt_for_fn.
+	(want_early_inline_function_p, want_inline_small_function_p):
+	Likewise.
+	(check_callers): Likewise.
+	(edge_badness): Likewise.
+	(inline_small_functions): Always be ready for indirect inlining
+	to happen.
+	(ipa_inline): Always use want_inline_function_to_all_callers_p.
+	(early_inline_small_functions): Use opt_for_fn.
+	* ipa-inline-analysis.c (estimate_function_body_sizes): Use opt_for_fn.
+	(estimate_function_body_sizes): Likewise.
+	(compute_inline_parameters): Likewise.
+	(estimate_edge_devirt_benefit): Likewise.
+	(inline_analyze_function): Likewise.
+	* ipa-devirt.c (ipa_devirt): Likewise.
+	(gate): Use in_lto_p.
+	* ipa-prop.c (ipa_func_spec_opts_forbid_analysis_p): Use opt_for_fn.
+	(try_make_edge_direct_virtual_call): Likewise.
+	(update_indirect_edges_after_inlining): Likewise.
+	(ipa_free_all_structures_after_ipa_cp): Add in_lto_p check.
+	* common.opt (findirect-inlining): Turn into optimization.
+	* ipa-pure-const.c (add_new_function): Use opt_for_fn.
+	(pure_const_generate_summary): Likewise.
+	(gate_pure_const): Always enable with in_lto_p.
+
 2014-11-18  Maciej W. Rozycki  <macro@codesourcery.com>
 
 	* config/mips/mips.md (compression): Add `micromips32' setting.
gcc/cgraphunit.c
@@ -450,7 +450,7 @@ cgraph_node::finalize_function (tree decl, bool no_collect)
     declared inline and nested functions.  These were optimized out
     in the original implementation and it is unclear whether we want
     to change the behavior here.  */
-  if ((!optimize
+  if ((!opt_for_fn (decl, optimize)
       && !node->cpp_implicit_alias
       && !DECL_DISREGARD_INLINE_LIMITS (decl)
      && !DECL_DECLARED_INLINE_P (decl)
@@ -929,8 +929,7 @@ analyze_functions (void)
   FOR_EACH_SYMBOL (node)
     if (node->cpp_implicit_alias)
       node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
-  if (optimize && flag_devirtualize)
-    build_type_inheritance_graph ();
+  build_type_inheritance_graph ();

   /* Analysis adds static variables that in turn adds references to new functions.
      So we need to iterate the process until it stabilize. */
@@ -1001,7 +1000,8 @@ analyze_functions (void)
 	  for (edge = cnode->callees; edge; edge = edge->next_callee)
 	    if (edge->callee->definition)
 	      enqueue_node (edge->callee);
-	  if (optimize && opt_for_fn (cnode->decl, flag_devirtualize))
+	  if (opt_for_fn (cnode->decl, optimize)
+	      && opt_for_fn (cnode->decl, flag_devirtualize))
 	    {
 	      cgraph_edge *next;

@@ -1046,8 +1046,7 @@ analyze_functions (void)
 	  symtab->process_new_functions ();
 	}
     }
-  if (optimize && flag_devirtualize)
-    update_type_inheritance_graph ();
+  update_type_inheritance_graph ();

   /* Collect entry points to the unit. */
   if (symtab->dump_file)
gcc/common.opt
@@ -1392,7 +1392,7 @@ Common Report Var(flag_inhibit_size_directive)
 Do not generate .size directives

 findirect-inlining
-Common Report Var(flag_indirect_inlining)
+Common Report Var(flag_indirect_inlining) Optimization
 Perform indirect inlining

 ; General flag to enable inlining. Specifying -fno-inline will disable
gcc/ipa-cp.c
@@ -566,7 +566,7 @@ ipcp_cloning_candidate_p (struct cgraph_node *node)

   gcc_checking_assert (node->has_gimple_body_p ());

-  if (!flag_ipa_cp_clone)
+  if (!opt_for_fn (node->decl, flag_ipa_cp_clone))
     {
       if (dump_file)
         fprintf (dump_file, "Not considering %s for cloning; "
@@ -902,10 +902,7 @@ ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
   ipcp_lattice<tree> *lat;

   if (!info->lattices)
-    {
-      gcc_checking_assert (!flag_ipa_cp);
-      return NULL_TREE;
-    }
+    return NULL_TREE;
   lat = ipa_get_scalar_lat (info, idx);
   if (!lat->is_single_const ())
     return NULL_TREE;
@@ -967,10 +964,7 @@ ipa_context_from_jfunc (ipa_node_params *info, cgraph_edge *cs, int csidx,
   else
     {
       if (!info->lattices)
-	{
-	  gcc_checking_assert (!flag_ipa_cp);
-	  return ctx;
-	}
+	return ctx;
       ipcp_lattice<ipa_polymorphic_call_context> *lat;
       lat = ipa_get_poly_ctx_lat (info, srcidx);
       if (!lat->is_single_const ())
@@ -1786,7 +1780,7 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
 	  return NULL_TREE;
 	}

-  if (!flag_devirtualize)
+  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
     return NULL_TREE;

   gcc_assert (!ie->indirect_info->agg_contents);
@@ -1884,8 +1878,8 @@ ipa_get_indirect_edge_target_1 (struct cgraph_edge *ie,
 	  struct cgraph_node *node;
 	  if (*speculative)
 	    return target;
-	  if (!flag_devirtualize_speculatively || ie->speculative
-	      || !ie->maybe_hot_p ())
+	  if (!opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
+	      || ie->speculative || !ie->maybe_hot_p ())
 	    return NULL;
 	  node = try_speculative_devirtualization (ie->indirect_info->otr_type,
 						   ie->indirect_info->otr_token,
@@ -2003,7 +1997,7 @@ good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
 			    int freq_sum, gcov_type count_sum, int size_cost)
 {
   if (time_benefit == 0
-      || !flag_ipa_cp_clone
+      || !opt_for_fn (node->decl, flag_ipa_cp_clone)
      || !optimize_function_for_speed_p (DECL_STRUCT_FUNCTION (node->decl)))
     return false;

@@ -4315,7 +4309,7 @@ public:
   {
     /* FIXME: We should remove the optimize check after we ensure we never run
       IPA passes when not optimizing. */
-    return flag_ipa_cp && optimize;
+    return (flag_ipa_cp && optimize) || in_lto_p;
   }

   virtual unsigned int execute (function *) { return ipcp_driver (); }
gcc/ipa-devirt.c
@@ -2818,6 +2818,8 @@ ipa_devirt (void)
   FOR_EACH_DEFINED_FUNCTION (n)
     {
       bool update = false;
+      if (!opt_for_fn (n->decl, flag_devirtualize))
+	continue;
       if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcesing function %s/%i\n",
		 n->name (), n->order);
@@ -2846,7 +2848,7 @@ ipa_devirt (void)

	  npolymorphic++;

-	  if (!flag_devirtualize_speculatively)
+	  if (!opt_for_fn (n->decl, flag_devirtualize_speculatively))
	    continue;

	  if (!e->maybe_hot_p ())
@@ -3116,6 +3118,10 @@ public:
   /* opt_pass methods: */
   virtual bool gate (function *)
     {
+      /* In LTO, always run the IPA passes and decide on function basis if the
+	 pass is enabled. */
+      if (in_lto_p)
+	return true;
       return (flag_devirtualize
	      && (flag_devirtualize_speculatively
		  || (warn_suggest_final_methods
gcc/ipa-inline-analysis.c
@@ -2474,7 +2474,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
   info->conds = NULL;
   info->entry = NULL;

-  if (optimize && !early)
+  if (opt_for_fn (node->decl, optimize) && !early)
     {
       calculate_dominance_info (CDI_DOMINATORS);
       loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
@@ -2815,7 +2815,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
   inline_summary (node)->self_time = time;
   inline_summary (node)->self_size = size;
   nonconstant_names.release ();
-  if (optimize && !early)
+  if (opt_for_fn (node->decl, optimize) && !early)
     {
       loop_optimizer_finalize ();
       free_dominance_info (CDI_DOMINATORS);
@@ -2872,8 +2872,9 @@ compute_inline_parameters (struct cgraph_node *node, bool early)
   info->stack_frame_offset = 0;

   /* Can this function be inlined at all? */
-  if (!optimize && !lookup_attribute ("always_inline",
-				      DECL_ATTRIBUTES (node->decl)))
+  if (!opt_for_fn (node->decl, optimize)
+      && !lookup_attribute ("always_inline",
+			    DECL_ATTRIBUTES (node->decl)))
     info->inlinable = false;
   else
     info->inlinable = tree_inlinable_function_p (node->decl);
@@ -2990,7 +2991,7 @@ estimate_edge_devirt_benefit (struct cgraph_edge *ie,

   if (!known_vals.exists () && !known_contexts.exists ())
     return false;
-  if (!flag_indirect_inlining)
+  if (!opt_for_fn (ie->caller->decl, flag_indirect_inlining))
     return false;

   target = ipa_get_indirect_edge_target (ie, known_vals, known_contexts,
@@ -3986,7 +3987,7 @@ inline_analyze_function (struct cgraph_node *node)
   if (dump_file)
     fprintf (dump_file, "\nAnalyzing function: %s/%u\n",
	     node->name (), node->order);
-  if (optimize && !node->thunk.thunk_p)
+  if (opt_for_fn (node->decl, optimize) && !node->thunk.thunk_p)
     inline_indirect_intraprocedural_analysis (node);
   compute_inline_parameters (node, false);
   if (!optimize)
gcc/ipa-inline-transform.c
@@ -467,6 +467,7 @@ inline_transform (struct cgraph_node *node)
 {
   unsigned int todo = 0;
   struct cgraph_edge *e, *next;
+  bool has_inline = false;

   /* FIXME: Currently the pass manager is adding inline transform more than
      once to some clones. This needs revisiting after WPA cleanups. */
@@ -480,13 +481,15 @@ inline_transform (struct cgraph_node *node)

   for (e = node->callees; e; e = next)
     {
+      if (!e->inline_failed)
+	has_inline = true;
       next = e->next_callee;
       e->redirect_call_stmt_to_callee ();
     }
   node->remove_all_references ();

   timevar_push (TV_INTEGRATION);
-  if (node->callees && optimize)
+  if (node->callees && (optimize || has_inline))
     todo = optimize_inline_calls (current_function_decl);
   timevar_pop (TV_INTEGRATION);

gcc/ipa-inline.c
@@ -378,18 +378,10 @@ can_inline_edge_p (struct cgraph_edge *e, bool report,
      optimization attribute. */
   else if (caller_tree != callee_tree)
     {
-      struct cl_optimization *caller_opt
-	= TREE_OPTIMIZATION ((caller_tree)
-			     ? caller_tree
-			     : optimization_default_node);
-
-      struct cl_optimization *callee_opt
-	= TREE_OPTIMIZATION ((callee_tree)
-			     ? callee_tree
-			     : optimization_default_node);
-
-      if (((caller_opt->x_optimize > callee_opt->x_optimize)
-	   || (caller_opt->x_optimize_size != callee_opt->x_optimize_size))
+      if (((opt_for_fn (e->caller->decl, optimize)
+	    > opt_for_fn (e->callee->decl, optimize))
+	   || (opt_for_fn (e->caller->decl, optimize_size)
+	       != opt_for_fn (e->callee->decl, optimize_size)))
	  /* gcc.dg/pr43564.c. Look at forced inline even in -O0. */
	  && !DECL_DISREGARD_INLINE_LIMITS (e->callee->decl))
	{
@@ -469,7 +461,7 @@ want_early_inline_function_p (struct cgraph_edge *e)
   else if (flag_auto_profile && afdo_callsite_hot_enough_for_early_inline (e))
     ;
   else if (!DECL_DECLARED_INLINE_P (callee->decl)
-	   && !flag_inline_small_functions)
+	   && !opt_for_fn (e->caller->decl, flag_inline_small_functions))
     {
       e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
       report_inline_failed_reason (e);
@@ -587,7 +579,7 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
   if (DECL_DISREGARD_INLINE_LIMITS (callee->decl))
     ;
   else if (!DECL_DECLARED_INLINE_P (callee->decl)
-	   && !flag_inline_small_functions)
+	   && !opt_for_fn (e->caller->decl, flag_inline_small_functions))
     {
       e->inline_failed = CIF_FUNCTION_NOT_INLINE_CANDIDATE;
       want_inline = false;
@@ -639,7 +631,7 @@ want_inline_small_function_p (struct cgraph_edge *e, bool report)
	  want_inline = false;
	}
       else if (!DECL_DECLARED_INLINE_P (callee->decl)
-	       && !flag_inline_functions)
+	       && !opt_for_fn (e->caller->decl, flag_inline_functions))
	{
	  /* growth_likely_positive is expensive, always test it last. */
	  if (growth >= MAX_INLINE_INSNS_SINGLE
@@ -816,6 +808,8 @@ check_callers (struct cgraph_node *node, void *has_hot_call)
   struct cgraph_edge *e;
   for (e = node->callers; e; e = e->next_caller)
     {
+      if (!opt_for_fn (e->caller->decl, flag_inline_functions_called_once))
+	return true;
       if (!can_inline_edge_p (e, true))
	return true;
       if (!(*(bool *)has_hot_call) && e->maybe_hot_p ())
@@ -1010,6 +1004,8 @@ edge_badness (struct cgraph_edge *edge, bool dump)

      compensated by the inline hints.
   */
+  /* TODO: We ought suport mixing units where some functions are profiled
+     and some not. */
   else if (flag_guess_branch_prob)
     {
       badness = (relative_time_benefit (callee_info, edge, edge_time)
@@ -1575,8 +1571,7 @@ inline_small_functions (void)
   int initial_size = 0;
   struct cgraph_node **order = XCNEWVEC (cgraph_node *, symtab->cgraph_count);
   struct cgraph_edge_hook_list *edge_removal_hook_holder;
-  if (flag_indirect_inlining)
-    new_indirect_edges.create (8);
+  new_indirect_edges.create (8);

   edge_removal_hook_holder
     = symtab->add_edge_removal_hook (&heap_edge_removal_hook, &edge_heap);
@@ -1773,7 +1768,8 @@ inline_small_functions (void)
	  if (where->global.inlined_to)
	    where = where->global.inlined_to;
	  if (!recursive_inlining (edge,
-				   flag_indirect_inlining
+				   opt_for_fn (edge->caller->decl,
+					       flag_indirect_inlining)
				   ? &new_indirect_edges : NULL))
	    {
	      edge->inline_failed = CIF_RECURSIVE_INLINING;
@@ -1783,7 +1779,7 @@ inline_small_functions (void)
	  reset_edge_caches (where);
	  /* Recursive inliner inlines all recursive calls of the function
	     at once. Consequently we need to update all callee keys. */
-	  if (flag_indirect_inlining)
+	  if (opt_for_fn (edge->caller->decl, flag_indirect_inlining))
	    add_new_edges_to_heap (&edge_heap, new_indirect_edges);
	  update_callee_keys (&edge_heap, where, updated_nodes);
	  bitmap_clear (updated_nodes);
@@ -1821,8 +1817,7 @@ inline_small_functions (void)

	  gcc_checking_assert (!callee->global.inlined_to);
	  inline_call (edge, true, &new_indirect_edges, &overall_size, true);
-	  if (flag_indirect_inlining)
-	    add_new_edges_to_heap (&edge_heap, new_indirect_edges);
+	  add_new_edges_to_heap (&edge_heap, new_indirect_edges);

	  reset_edge_caches (edge->callee);
	  reset_node_growth_cache (callee);
@@ -2246,8 +2241,7 @@ ipa_inline (void)
	      reset_edge_caches (where);
	      inline_update_overall_summary (where);
	    }
-	  if (flag_inline_functions_called_once
-	      && want_inline_function_to_all_callers_p (node, cold))
+	  if (want_inline_function_to_all_callers_p (node, cold))
	    {
	      int num_calls = 0;
	      node->call_for_symbol_thunks_and_aliases (sum_callers, &num_calls,
@@ -2345,8 +2339,8 @@ early_inline_small_functions (struct cgraph_node *node)

       /* Do not consider functions not declared inline. */
       if (!DECL_DECLARED_INLINE_P (callee->decl)
-	  && !flag_inline_small_functions
-	  && !flag_inline_functions)
+	  && !opt_for_fn (node->decl, flag_inline_small_functions)
+	  && !opt_for_fn (node->decl, flag_inline_functions))
	continue;

       if (dump_file)
gcc/ipa-profile.c
@@ -418,7 +418,8 @@ ipa_propagate_frequency (struct cgraph_node *node)
      nor about virtuals. */
   if (!node->local.local
       || node->alias
-      || (flag_devirtualize && DECL_VIRTUAL_P (node->decl)))
+      || (opt_for_fn (node->decl, flag_devirtualize)
+	  && DECL_VIRTUAL_P (node->decl)))
     return false;
   gcc_assert (node->analyzed);
   if (dump_file && (dump_flags & TDF_DETAILS))
@@ -754,7 +755,7 @@ public:
   {}

   /* opt_pass methods: */
-  virtual bool gate (function *) { return flag_ipa_profile; }
+  virtual bool gate (function *) { return flag_ipa_profile || in_lto_p; }
   virtual unsigned int execute (function *) { return ipa_profile (); }

 }; // class pass_ipa_profile
gcc/ipa-prop.c
@@ -168,12 +168,10 @@ static bool
 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
 {
   tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
-  struct cl_optimization *os;

   if (!fs_opts)
     return false;
-  os = TREE_OPTIMIZATION (fs_opts);
-  return !os->x_optimize || !os->x_flag_ipa_cp;
+  return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
 }

 /* Return index of the formal whose tree is PTREE in function which corresponds
@@ -2896,13 +2894,14 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
   tree target = NULL;
   bool speculative = false;

-  if (!flag_devirtualize)
+  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
     return NULL;

   gcc_assert (!ie->indirect_info->by_ref);

   /* Try to do lookup via known virtual table pointer value. */
-  if (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively)
+  if (!ie->indirect_info->vptr_changed
+      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
     {
       tree vtable;
       unsigned HOST_WIDE_INT offset;
@@ -2953,7 +2952,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
	  else
	    target = ipa_impossible_devirt_target (ie, NULL_TREE);
	}
-  else if (!target && flag_devirtualize_speculatively
+  else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
     {
       cgraph_node *n;
@@ -3025,7 +3024,7 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
       param_index = ici->param_index;
       jfunc = ipa_get_ith_jump_func (top, param_index);

-      if (!flag_indirect_inlining)
+      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
       else if (ici->polymorphic)
	{
@@ -3579,7 +3578,7 @@ ipa_unregister_cgraph_hooks (void)
 void
 ipa_free_all_structures_after_ipa_cp (void)
 {
-  if (!optimize)
+  if (!optimize && !in_lto_p)
     {
       ipa_free_all_edge_args ();
       ipa_free_all_node_params ();
gcc/ipa-pure-const.c
@@ -914,7 +914,8 @@ add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
      static declarations. We do not need to scan them more than once
      since all we would be interested in are the addressof
      operations. */
-  if (node->get_availability () > AVAIL_INTERPOSABLE)
+  if (node->get_availability () > AVAIL_INTERPOSABLE
+      && opt_for_fn (node->decl, flag_ipa_pure_const))
     set_function_state (node, analyze_function (node, true));
 }

@@ -984,7 +985,8 @@ pure_const_generate_summary (void)
      when function got cloned and the clone is AVAILABLE. */

   FOR_EACH_DEFINED_FUNCTION (node)
-    if (node->get_availability () >= AVAIL_INTERPOSABLE)
+    if (node->get_availability () >= AVAIL_INTERPOSABLE
+	&& opt_for_fn (node->decl, flag_ipa_pure_const))
       set_function_state (node, analyze_function (node, true));
 }

@@ -1595,9 +1597,7 @@ execute (function *)
 static bool
 gate_pure_const (void)
 {
-  return (flag_ipa_pure_const
-	  /* Don't bother doing anything if the program has errors. */
-	  && !seen_error ());
+  return flag_ipa_pure_const || in_lto_p;
 }

 pass_ipa_pure_const::pass_ipa_pure_const(gcc::context *ctxt)
gcc/ipa.c
@@ -304,8 +304,7 @@ symbol_table::remove_unreachable_nodes (bool before_inlining_p, FILE *file)
   hash_set<void *> reachable_call_targets;

   timevar_push (TV_IPA_UNREACHABLE);
-  if (optimize && flag_devirtualize)
-    build_type_inheritance_graph ();
+  build_type_inheritance_graph ();
   if (file)
     fprintf (file, "\nReclaiming functions:");
 #ifdef ENABLE_CHECKING
@@ -391,7 +390,8 @@ symbol_table::remove_unreachable_nodes (bool before_inlining_p, FILE *file)
	{
	  struct cgraph_edge *e;
	  /* Keep alive possible targets for devirtualization. */
-	  if (optimize && flag_devirtualize)
+	  if (opt_for_fn (cnode->decl, optimize)
+	      && opt_for_fn (cnode->decl, flag_devirtualize))
	    {
	      struct cgraph_edge *next;
	      for (e = cnode->indirect_calls; e; e = next)