mirror of git://gcc.gnu.org/git/gcc.git
* config/avr/avr.c, config/bfin/bfin.c, config/c6x/c6x.c, config/epiphany/epiphany.c, config/frv/frv.c, config/ia64/ia64.c, config/iq2000/iq2000.c, config/mcore/mcore.c, config/mep/mep.c, config/mmix/mmix.c, config/pa/pa.c, config/rs6000/rs6000.c, config/s390/s390.c, config/sparc/sparc.c, config/spu/spu.c, config/stormy16/stormy16.c, config/v850/v850.c, config/xtensa/xtensa.c, dwarf2out.c, hw-doloop.c, resource.c, rtl.h: Where applicable, use the predicates NOTE_P, NONJUMP_INSN_P, JUMP_P, CALL_P, LABEL_P, and BARRIER_P instead of GET_CODE.

From-SVN: r197005
parent b9aaf52e25
commit b64925dc4b
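The six predicates named in the message are thin wrappers around GET_CODE, so every hunk in this commit is a mechanical, behavior-preserving substitution. As a reading aid, they are defined in rtl.h roughly as follows (a paraphrase, not the exact upstream text):

    /* Sketch of the rtl.h predicates: each one just compares the RTX code,
       e.g. CALL_P (insn) reads the same as GET_CODE (insn) == CALL_INSN.  */
    #define NOTE_P(X)          (GET_CODE (X) == NOTE)
    #define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
    #define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
    #define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
    #define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)
    #define BARRIER_P(X)       (GET_CODE (X) == BARRIER)

A negated test changes shape accordingly: "if (GET_CODE (insn) != INSN)" becomes "if (! NONJUMP_INSN_P (insn))".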
@@ -1,3 +1,15 @@
+2013-03-23  Steven Bosscher  <steven@gcc.gnu.org>
+
+	* config/avr/avr.c, config/bfin/bfin.c, config/c6x/c6x.c,
+	config/epiphany/epiphany.c, config/frv/frv.c, config/ia64/ia64.c,
+	config/iq2000/iq2000.c, config/mcore/mcore.c, config/mep/mep.c,
+	config/mmix/mmix.c, config/pa/pa.c, config/rs6000/rs6000.c,
+	config/s390/s390.c, config/sparc/sparc.c, config/spu/spu.c,
+	config/stormy16/stormy16.c, config/v850/v850.c, config/xtensa/xtensa.c,
+	dwarf2out.c, hw-doloop.c, resource.c, rtl.h : Where applicable, use
+	the predicates NOTE_P, NONJUMP_INSN_P, JUMP_P, CALL_P, LABEL_P, and
+	BARRIER_P instead of GET_CODE.
+
 2013-03-23  Eric Botcazou  <ebotcazou@adacore.com>
 
 	* config/sparc/sparc.c (sparc_emit_probe_stack_range): Fix small
@@ -7629,9 +7629,9 @@ _reg_unused_after (rtx insn, rtx reg)
 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
 rtx set = single_set (this_insn);
 
-if (GET_CODE (this_insn) == CALL_INSN)
+if (CALL_P (this_insn))
 code = CALL_INSN;
-else if (GET_CODE (this_insn) == JUMP_INSN)
+else if (JUMP_P (this_insn))
 {
 if (INSN_ANNULLED_BRANCH_P (this_insn))
 return 0;
@@ -3887,8 +3887,7 @@ gen_one_bundle (rtx slot[3])
 rtx t = NEXT_INSN (slot[0]);
 while (t != slot[1])
 {
-if (GET_CODE (t) != NOTE
-|| NOTE_KIND (t) != NOTE_INSN_DELETED)
+if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
 return false;
 t = NEXT_INSN (t);
 }
@@ -3898,8 +3897,7 @@ gen_one_bundle (rtx slot[3])
 rtx t = NEXT_INSN (slot[1]);
 while (t != slot[2])
 {
-if (GET_CODE (t) != NOTE
-|| NOTE_KIND (t) != NOTE_INSN_DELETED)
+if (! NOTE_P (t) || NOTE_KIND (t) != NOTE_INSN_DELETED)
 return false;
 t = NEXT_INSN (t);
 }
@@ -4848,7 +4848,7 @@ reorg_split_calls (rtx *call_labels)
 {
 unsigned int reservation_mask = 0;
 rtx insn = get_insns ();
-gcc_assert (GET_CODE (insn) == NOTE);
+gcc_assert (NOTE_P (insn));
 insn = next_real_insn (insn);
 while (insn)
 {
@@ -2386,7 +2386,7 @@ epiphany_mode_after (int entity, int last_mode, rtx insn)
 calls. */
 if (entity == EPIPHANY_MSW_ENTITY_AND || entity == EPIPHANY_MSW_ENTITY_OR)
 {
-if (GET_CODE (insn) == CALL_INSN)
+if (CALL_P (insn))
 return 0;
 return last_mode;
 }
@@ -1408,7 +1408,7 @@ frv_function_contains_far_jump (void)
 {
 rtx insn = get_insns ();
 while (insn != NULL
-&& !(GET_CODE (insn) == JUMP_INSN
+&& !(JUMP_P (insn)
 /* Ignore tablejump patterns. */
 && GET_CODE (PATTERN (insn)) != ADDR_VEC
 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
@@ -1446,7 +1446,7 @@ frv_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
 simply emit a different assembly directive because bralr and jmpl
 execute in different units. */
 for (insn = get_insns(); insn != NULL; insn = NEXT_INSN (insn))
-if (GET_CODE (insn) == JUMP_INSN)
+if (JUMP_P (insn))
 {
 rtx pattern = PATTERN (insn);
 if (GET_CODE (pattern) == PARALLEL
@@ -2649,7 +2649,7 @@ frv_print_operand_jump_hint (rtx insn)
 HOST_WIDE_INT prob = -1;
 enum { UNKNOWN, BACKWARD, FORWARD } jump_type = UNKNOWN;
 
-gcc_assert (GET_CODE (insn) == JUMP_INSN);
+gcc_assert (JUMP_P (insn));
 
 /* Assume any non-conditional jump is likely. */
 if (! any_condjump_p (insn))
@@ -7387,7 +7387,7 @@ frv_pack_insn_p (rtx insn)
 - There's no point putting a call in its own packet unless
 we have to. */
 if (frv_packet.num_insns > 0
-&& GET_CODE (insn) == INSN
+&& NONJUMP_INSN_P (insn)
 && GET_MODE (insn) == TImode
 && GET_CODE (PATTERN (insn)) != COND_EXEC)
 return false;
@@ -7430,7 +7430,7 @@ frv_insert_nop_in_packet (rtx insn)
 
 packet_group = &frv_packet.groups[frv_unit_groups[frv_insn_unit (insn)]];
 last = frv_packet.insns[frv_packet.num_insns - 1];
-if (GET_CODE (last) != INSN)
+if (! NONJUMP_INSN_P (last))
 {
 insn = emit_insn_before (PATTERN (insn), last);
 frv_packet.insns[frv_packet.num_insns - 1] = insn;
@@ -7492,7 +7492,7 @@ frv_for_each_packet (void (*handle_packet) (void))
 
 default:
 /* Calls mustn't be packed on a TOMCAT. */
-if (GET_CODE (insn) == CALL_INSN && frv_cpu_type == FRV_CPU_TOMCAT)
+if (CALL_P (insn) && frv_cpu_type == FRV_CPU_TOMCAT)
 frv_finish_packet (handle_packet);
 
 /* Since the last instruction in a packet determines the EH
@@ -7913,7 +7913,7 @@ frv_optimize_membar_local (basic_block bb, struct frv_io *next_io,
 CLEAR_HARD_REG_SET (used_regs);
 
 for (insn = BB_END (bb); insn != BB_HEAD (bb); insn = PREV_INSN (insn))
-if (GET_CODE (insn) == CALL_INSN)
+if (CALL_P (insn))
 {
 /* We can't predict what a call will do to volatile memory. */
 memset (next_io, 0, sizeof (struct frv_io));
@@ -5470,7 +5470,7 @@ ia64_print_operand (FILE * file, rtx x, int code)
 else
 which = ".sptk";
 }
-else if (GET_CODE (current_output_insn) == CALL_INSN)
+else if (CALL_P (current_output_insn))
 which = ".sptk";
 else
 which = ".dptk";
@@ -6811,8 +6811,7 @@ group_barrier_needed (rtx insn)
 memset (rws_insn, 0, sizeof (rws_insn));
 
 /* Don't bundle a call following another call. */
-if ((pat = prev_active_insn (insn))
-&& GET_CODE (pat) == CALL_INSN)
+if ((pat = prev_active_insn (insn)) && CALL_P (pat))
 {
 need_barrier = 1;
 break;
@@ -6826,8 +6825,7 @@ group_barrier_needed (rtx insn)
 flags.is_branch = 1;
 
 /* Don't bundle a jump following a call. */
-if ((pat = prev_active_insn (insn))
-&& GET_CODE (pat) == CALL_INSN)
+if ((pat = prev_active_insn (insn)) && CALL_P (pat))
 {
 need_barrier = 1;
 break;
@@ -6929,20 +6927,20 @@ emit_insn_group_barriers (FILE *dump)
 
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 {
-if (GET_CODE (insn) == CODE_LABEL)
+if (LABEL_P (insn))
 {
 if (insns_since_last_label)
 last_label = insn;
 insns_since_last_label = 0;
 }
-else if (GET_CODE (insn) == NOTE
+else if (NOTE_P (insn)
 && NOTE_KIND (insn) == NOTE_INSN_BASIC_BLOCK)
 {
 if (insns_since_last_label)
 last_label = insn;
 insns_since_last_label = 0;
 }
-else if (GET_CODE (insn) == INSN
+else if (NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
 && XINT (PATTERN (insn), 1) == UNSPECV_INSN_GROUP_BARRIER)
 {
@@ -6983,13 +6981,13 @@ emit_all_insn_group_barriers (FILE *dump ATTRIBUTE_UNUSED)
 
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 {
-if (GET_CODE (insn) == BARRIER)
+if (BARRIER_P (insn))
 {
 rtx last = prev_active_insn (insn);
 
 if (! last)
 continue;
-if (GET_CODE (last) == JUMP_INSN
+if (JUMP_P (last)
 && GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
 last = prev_active_insn (last);
 if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
@@ -7487,7 +7485,7 @@ ia64_variable_issue (FILE *dump ATTRIBUTE_UNUSED,
 int needed = group_barrier_needed (insn);
 
 gcc_assert (!needed);
-if (GET_CODE (insn) == CALL_INSN)
+if (CALL_P (insn))
 init_insn_group_barriers ();
 stops_p [INSN_UID (insn)] = stop_before_p;
 stop_before_p = 0;
@@ -7576,7 +7574,7 @@ ia64_dfa_new_cycle (FILE *dump, int verbose, rtx insn, int last_clock,
 && last_scheduled_insn
 && scheduled_good_insn (last_scheduled_insn))))
 || (last_scheduled_insn
-&& (GET_CODE (last_scheduled_insn) == CALL_INSN
+&& (CALL_P (last_scheduled_insn)
 || unknown_for_bundling_p (last_scheduled_insn))))
 {
 init_insn_group_barriers ();
@@ -7594,7 +7592,7 @@ ia64_dfa_new_cycle (FILE *dump, int verbose, rtx insn, int last_clock,
 state_transition (curr_state, dfa_stop_insn);
 if (TARGET_EARLY_STOP_BITS)
 *sort_p = (last_scheduled_insn == NULL_RTX
-|| GET_CODE (last_scheduled_insn) != CALL_INSN);
+|| ! CALL_P (last_scheduled_insn));
 else
 *sort_p = 0;
 return 1;
@@ -8936,9 +8934,9 @@ ia64_add_bundle_selector_before (int template0, rtx insn)
 {
 do
 insn = next_active_insn (insn);
-while (GET_CODE (insn) == INSN
+while (NONJUMP_INSN_P (insn)
 && get_attr_empty (insn) == EMPTY_YES);
-if (GET_CODE (insn) == CALL_INSN)
+if (CALL_P (insn))
 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
 else if (note)
 {
@@ -9372,13 +9370,13 @@ final_emit_insn_group_barriers (FILE *dump ATTRIBUTE_UNUSED)
 insn != current_sched_info->next_tail;
 insn = NEXT_INSN (insn))
 {
-if (GET_CODE (insn) == BARRIER)
+if (BARRIER_P (insn))
 {
 rtx last = prev_active_insn (insn);
 
 if (! last)
 continue;
-if (GET_CODE (last) == JUMP_INSN
+if (JUMP_P (last)
 && GET_CODE (PATTERN (last)) == ADDR_DIFF_VEC)
 last = prev_active_insn (last);
 if (recog_memoized (last) != CODE_FOR_insn_group_barrier)
@@ -9445,8 +9443,7 @@ final_emit_insn_group_barriers (FILE *dump ATTRIBUTE_UNUSED)
 else if (recog_memoized (insn) >= 0
 && important_for_bundling_p (insn))
 seen_good_insn = 1;
-need_barrier_p = (GET_CODE (insn) == CALL_INSN
-|| unknown_for_bundling_p (insn));
+need_barrier_p = (CALL_P (insn) || unknown_for_bundling_p (insn));
 }
 }
 }
@@ -9590,7 +9587,7 @@ emit_predicate_relation_info (void)
 rtx head = BB_HEAD (bb);
 
 /* We only need such notes at code labels. */
-if (GET_CODE (head) != CODE_LABEL)
+if (! LABEL_P (head))
 continue;
 if (NOTE_INSN_BASIC_BLOCK_P (NEXT_INSN (head)))
 head = NEXT_INSN (head);
@@ -9618,7 +9615,7 @@ emit_predicate_relation_info (void)
 
 while (1)
 {
-if (GET_CODE (insn) == CALL_INSN
+if (CALL_P (insn)
 && GET_CODE (PATTERN (insn)) == COND_EXEC
 && find_reg_note (insn, REG_NORETURN, NULL_RTX))
 {
@@ -9766,7 +9763,7 @@ ia64_reorg (void)
 if (insn)
 {
 /* Skip over insns that expand to nothing. */
-while (GET_CODE (insn) == INSN
+while (NONJUMP_INSN_P (insn)
 && get_attr_empty (insn) == EMPTY_YES)
 {
 if (GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
@@ -9774,7 +9771,7 @@ ia64_reorg (void)
 saw_stop = 1;
 insn = prev_active_insn (insn);
 }
-if (GET_CODE (insn) == CALL_INSN)
+if (CALL_P (insn))
 {
 if (! saw_stop)
 emit_insn (gen_insn_group_barrier (GEN_INT (3)));
@@ -10184,7 +10181,7 @@ ia64_asm_unwind_emit (FILE *asm_out_file, rtx insn)
 }
 }
 
-if (GET_CODE (insn) == NOTE || ! RTX_FRAME_RELATED_P (insn))
+if (NOTE_P (insn) || ! RTX_FRAME_RELATED_P (insn))
 return;
 
 /* Look for the ALLOC insn. */
@@ -381,8 +381,7 @@ iq2000_fill_delay_slot (const char *ret, enum delay_type type, rtx operands[],
 /* Make sure that we don't put nop's after labels. */
 next_insn = NEXT_INSN (cur_insn);
 while (next_insn != 0
-&& (GET_CODE (next_insn) == NOTE
-|| GET_CODE (next_insn) == CODE_LABEL))
+&& (NOTE_P (next_insn) || LABEL_P (next_insn)))
 next_insn = NEXT_INSN (next_insn);
 
 dslots_load_total += num_nops;
@@ -391,7 +390,7 @@ iq2000_fill_delay_slot (const char *ret, enum delay_type type, rtx operands[],
 || operands == 0
 || cur_insn == 0
 || next_insn == 0
-|| GET_CODE (next_insn) == CODE_LABEL
+|| LABEL_P (next_insn)
 || (set_reg = operands[0]) == 0)
 {
 dslots_number_nops = 0;
@@ -1533,8 +1532,8 @@ final_prescan_insn (rtx insn, rtx opvec[] ATTRIBUTE_UNUSED,
 iq2000_load_reg4 = 0;
 }
 
-if ( (GET_CODE (insn) == JUMP_INSN
-|| GET_CODE (insn) == CALL_INSN
+if ( (JUMP_P (insn)
+|| CALL_P (insn)
 || (GET_CODE (PATTERN (insn)) == RETURN))
 && NEXT_INSN (PREV_INSN (insn)) == insn)
 {
@@ -1544,7 +1543,7 @@ final_prescan_insn (rtx insn, rtx opvec[] ATTRIBUTE_UNUSED,
 }
 
 if (TARGET_STATS
-&& (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CALL_INSN))
+&& (JUMP_P (insn) || CALL_P (insn)))
 dslots_jump_total ++;
 }
 
@@ -2285,8 +2284,8 @@ iq2000_adjust_insn_length (rtx insn, int length)
 /* A unconditional jump has an unfilled delay slot if it is not part
 of a sequence. A conditional jump normally has a delay slot. */
 if (simplejump_p (insn)
-|| ( (GET_CODE (insn) == JUMP_INSN
-|| GET_CODE (insn) == CALL_INSN)))
+|| ( (JUMP_P (insn)
+|| CALL_P (insn))))
 length += 4;
 
 return length;
@@ -914,10 +914,10 @@ mcore_is_dead (rtx first, rtx reg)
 to assume that it is live. */
 for (insn = NEXT_INSN (first); insn; insn = NEXT_INSN (insn))
 {
-if (GET_CODE (insn) == JUMP_INSN)
+if (JUMP_P (insn))
 return 0; /* We lose track, assume it is alive. */
 
-else if (GET_CODE(insn) == CALL_INSN)
+else if (CALL_P (insn))
 {
 /* Call's might use it for target or register parms. */
 if (reg_referenced_p (reg, PATTERN (insn))
@@ -926,7 +926,7 @@ mcore_is_dead (rtx first, rtx reg)
 else if (dead_or_set_p (insn, reg))
 return 1;
 }
-else if (GET_CODE (insn) == INSN)
+else if (NONJUMP_INSN_P (insn))
 {
 if (reg_referenced_p (reg, PATTERN (insn)))
 return 0;
@@ -2254,7 +2254,7 @@ is_cond_candidate (rtx insn)
 changed into a conditional. Only bother with SImode items. If
 we wanted to be a little more aggressive, we could also do other
 modes such as DImode with reg-reg move or load 0. */
-if (GET_CODE (insn) == INSN)
+if (NONJUMP_INSN_P (insn))
 {
 rtx pat = PATTERN (insn);
 rtx src, dst;
@@ -2305,9 +2305,9 @@ is_cond_candidate (rtx insn)
 */
 
 }
-else if (GET_CODE (insn) == JUMP_INSN &&
-GET_CODE (PATTERN (insn)) == SET &&
-GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
+else if (JUMP_P (insn)
+&& GET_CODE (PATTERN (insn)) == SET
+&& GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
 return COND_BRANCH_INSN;
 
 return COND_NO;
@@ -2328,7 +2328,7 @@ emit_new_cond_insn (rtx insn, int cond)
 
 pat = PATTERN (insn);
 
-if (GET_CODE (insn) == INSN)
+if (NONJUMP_INSN_P (insn))
 {
 dst = SET_DEST (pat);
 src = SET_SRC (pat);
@@ -2449,9 +2449,9 @@ conditionalize_block (rtx first)
 /* Check that the first insn is a candidate conditional jump. This is
 the one that we'll eliminate. If not, advance to the next insn to
 try. */
-if (GET_CODE (first) != JUMP_INSN ||
-GET_CODE (PATTERN (first)) != SET ||
-GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
+if (! JUMP_P (first)
+|| GET_CODE (PATTERN (first)) != SET
+|| GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
 return NEXT_INSN (first);
 
 /* Extract some information we need. */
@@ -4882,7 +4882,7 @@ mep_reorg_regmove (rtx insns)
 
 if (dump_file)
 for (insn = insns; insn; insn = NEXT_INSN (insn))
-if (GET_CODE (insn) == INSN)
+if (NONJUMP_INSN_P (insn))
 before++;
 
 /* We're looking for (set r2 r1) moves where r1 dies, followed by a
@@ -4896,7 +4896,7 @@ mep_reorg_regmove (rtx insns)
 for (insn = insns; insn; insn = next)
 {
 next = next_nonnote_nondebug_insn (insn);
-if (GET_CODE (insn) != INSN)
+if (! NONJUMP_INSN_P (insn))
 continue;
 pat = PATTERN (insn);
 
@@ -4912,7 +4912,7 @@ mep_reorg_regmove (rtx insns)
 if (dump_file)
 fprintf (dump_file, "superfluous moves: considering %d\n", INSN_UID (insn));
 
-while (follow && GET_CODE (follow) == INSN
+while (follow && NONJUMP_INSN_P (follow)
 && GET_CODE (PATTERN (follow)) == SET
 && !dead_or_set_p (follow, SET_SRC (pat))
 && !mep_mentioned_p (PATTERN (follow), SET_SRC (pat), 0)
@@ -4925,7 +4925,7 @@ mep_reorg_regmove (rtx insns)
 
 if (dump_file)
 fprintf (dump_file, "\tfollow is %d\n", INSN_UID (follow));
-if (follow && GET_CODE (follow) == INSN
+if (follow && NONJUMP_INSN_P (follow)
 && GET_CODE (PATTERN (follow)) == SET
 && find_regno_note (follow, REG_DEAD, REGNO (SET_DEST (pat))))
 {
@@ -5523,8 +5523,7 @@ mep_reorg_erepeat (rtx insns)
 count = simplejump_p (insn) ? 0 : 1;
 for (prev = PREV_INSN (insn); prev; prev = PREV_INSN (prev))
 {
-if (GET_CODE (prev) == CALL_INSN
-|| BARRIER_P (prev))
+if (CALL_P (prev) || BARRIER_P (prev))
 break;
 
 if (prev == JUMP_LABEL (insn))
@@ -5543,10 +5542,10 @@ mep_reorg_erepeat (rtx insns)
 *after* the label. */
 rtx barrier;
 for (barrier = PREV_INSN (prev);
-barrier && GET_CODE (barrier) == NOTE;
+barrier && NOTE_P (barrier);
 barrier = PREV_INSN (barrier))
 ;
-if (barrier && GET_CODE (barrier) != BARRIER)
+if (barrier && ! BARRIER_P (barrier))
 break;
 }
 else
@@ -5590,10 +5589,9 @@ mep_reorg_erepeat (rtx insns)
 if (LABEL_NUSES (prev) == 1)
 {
 for (user = PREV_INSN (prev);
-user && (INSN_P (user) || GET_CODE (user) == NOTE);
+user && (INSN_P (user) || NOTE_P (user));
 user = PREV_INSN (user))
-if (GET_CODE (user) == JUMP_INSN
-&& JUMP_LABEL (user) == prev)
+if (JUMP_P (user) && JUMP_LABEL (user) == prev)
 {
 safe = INSN_UID (user);
 break;
@@ -5631,8 +5629,8 @@ mep_jmp_return_reorg (rtx insns)
 /* Find the fist real insn the jump jumps to. */
 label = ret = JUMP_LABEL (insn);
 while (ret
-&& (GET_CODE (ret) == NOTE
-|| GET_CODE (ret) == CODE_LABEL
+&& (NOTE_P (ret)
+|| LABEL_P (ret)
 || GET_CODE (PATTERN (ret)) == USE))
 ret = NEXT_INSN (ret);
 
@@ -7018,7 +7016,7 @@ mep_bundle_insns (rtx insns)
 if (recog_memoized (insn) >= 0
 && get_attr_slot (insn) == SLOT_COP)
 {
-if (GET_CODE (insn) == JUMP_INSN
+if (JUMP_P (insn)
 || ! last
 || recog_memoized (last) < 0
 || get_attr_slot (last) != SLOT_CORE
@@ -1728,7 +1728,7 @@ mmix_print_operand (FILE *stream, rtx x, int code)
 if (CONSTANT_P (modified_x)
 /* Strangely enough, this is not included in CONSTANT_P.
 FIXME: Ask/check about sanity here. */
-|| GET_CODE (modified_x) == CODE_LABEL)
+|| LABEL_P (modified_x))
 {
 output_addr_const (stream, modified_x);
 return;
@@ -3320,7 +3320,7 @@ remove_useless_addtr_insns (int check_notes)
 rtx tmp;
 
 /* Ignore anything that isn't an INSN or a JUMP_INSN. */
-if (GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
+if (! NONJUMP_INSN_P (insn) && ! JUMP_P (insn))
 continue;
 
 tmp = PATTERN (insn);
@@ -3359,7 +3359,7 @@ remove_useless_addtr_insns (int check_notes)
 rtx tmp, next;
 
 /* Ignore anything that isn't an INSN. */
-if (GET_CODE (insn) != INSN)
+if (! NONJUMP_INSN_P (insn))
 continue;
 
 tmp = PATTERN (insn);
@@ -3382,13 +3382,11 @@ remove_useless_addtr_insns (int check_notes)
 while (next)
 {
 /* Jumps, calls and labels stop our search. */
-if (GET_CODE (next) == JUMP_INSN
-|| GET_CODE (next) == CALL_INSN
-|| GET_CODE (next) == CODE_LABEL)
+if (JUMP_P (next) || CALL_P (next) || LABEL_P (next))
 break;
 
 /* As does another fcmp insn. */
-if (GET_CODE (next) == INSN
+if (NONJUMP_INSN_P (next)
 && GET_CODE (PATTERN (next)) == SET
 && GET_CODE (SET_DEST (PATTERN (next))) == REG
 && REGNO (SET_DEST (PATTERN (next))) == 0)
@@ -3398,8 +3396,7 @@ remove_useless_addtr_insns (int check_notes)
 }
 
 /* Is NEXT_INSN a branch? */
-if (next
-&& GET_CODE (next) == JUMP_INSN)
+if (next && JUMP_P (next))
 {
 rtx pattern = PATTERN (next);
 
@@ -4160,16 +4157,16 @@ pa_output_function_epilogue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
 always point to a valid instruction in the current function. */
 
 /* Get the last real insn. */
-if (GET_CODE (insn) == NOTE)
+if (NOTE_P (insn))
 insn = prev_real_insn (insn);
 
 /* If it is a sequence, then look inside. */
-if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
+if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
 insn = XVECEXP (PATTERN (insn), 0, 0);
 
 /* If insn is a CALL_INSN, then it must be a call to a volatile
 function (otherwise there would be epilogue insns). */
-if (insn && GET_CODE (insn) == CALL_INSN)
+if (insn && CALL_P (insn))
 {
 fputs ("\tnop\n", file);
 last_address += 4;
@@ -4930,12 +4927,12 @@ pa_adjust_insn_length (rtx insn, int length)
 
 /* Jumps inside switch tables which have unfilled delay slots need
 adjustment. */
-if (GET_CODE (insn) == JUMP_INSN
+if (JUMP_P (insn)
 && GET_CODE (pat) == PARALLEL
 && get_attr_type (insn) == TYPE_BTABLE_BRANCH)
 length += 4;
 /* Block move pattern. */
-else if (GET_CODE (insn) == INSN
+else if (NONJUMP_INSN_P (insn)
 && GET_CODE (pat) == PARALLEL
 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
 && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
@@ -4944,7 +4941,7 @@ pa_adjust_insn_length (rtx insn, int length)
 && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 1)) == BLKmode)
 length += compute_movmem_length (insn) - 4;
 /* Block clear pattern. */
-else if (GET_CODE (insn) == INSN
+else if (NONJUMP_INSN_P (insn)
 && GET_CODE (pat) == PARALLEL
 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
 && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
@@ -4952,7 +4949,7 @@ pa_adjust_insn_length (rtx insn, int length)
 && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode)
 length += compute_clrmem_length (insn) - 4;
 /* Conditional branch with an unfilled delay slot. */
-else if (GET_CODE (insn) == JUMP_INSN && ! simplejump_p (insn))
+else if (JUMP_P (insn) && ! simplejump_p (insn))
 {
 /* Adjust a short backwards conditional with an unfilled delay slot. */
 if (GET_CODE (pat) == SET
@@ -5846,7 +5843,7 @@ pa_output_arg_descriptor (rtx call_insn)
 return;
 }
 
-gcc_assert (GET_CODE (call_insn) == CALL_INSN);
+gcc_assert (CALL_P (call_insn));
 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
 link; link = XEXP (link, 1))
 {
@@ -6641,7 +6638,7 @@ pa_output_lbranch (rtx dest, rtx insn, int xdelay)
 if (xdelay && dbr_sequence_length () != 0)
 {
 /* We can't handle a jump in the delay slot. */
-gcc_assert (GET_CODE (NEXT_INSN (insn)) != JUMP_INSN);
+gcc_assert (! JUMP_P (NEXT_INSN (insn)));
 
 final_scan_insn (NEXT_INSN (insn), asm_out_file,
 optimize, 0, NULL);
@@ -7650,7 +7647,7 @@ pa_output_millicode_call (rtx insn, rtx call_dest)
 output_asm_insn ("nop", xoperands);
 
 /* We are done if there isn't a jump in the delay slot. */
-if (seq_length == 0 || GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
+if (seq_length == 0 || ! JUMP_P (NEXT_INSN (insn)))
 return "";
 
 /* This call has an unconditional jump in its delay slot. */
@@ -7708,7 +7705,7 @@ pa_attr_length_call (rtx insn, int sibcall)
 rtx pat = PATTERN (insn);
 unsigned long distance = -1;
 
-gcc_assert (GET_CODE (insn) == CALL_INSN);
+gcc_assert (CALL_P (insn));
 
 if (INSN_ADDRESSES_SET_P ())
 {
@@ -7822,7 +7819,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibcall)
 delay slot. We can't do this in a sibcall as we don't
 have a second call-clobbered scratch register available. */
 if (seq_length != 0
-&& GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+&& ! JUMP_P (NEXT_INSN (insn))
 && !sibcall)
 {
 final_scan_insn (NEXT_INSN (insn), asm_out_file,
@@ -7866,7 +7863,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibcall)
 indirect_call = 1;
 
 if (seq_length != 0
-&& GET_CODE (NEXT_INSN (insn)) != JUMP_INSN
+&& ! JUMP_P (NEXT_INSN (insn))
 && !sibcall
 && (!TARGET_PA_20
 || indirect_call
@@ -8032,7 +8029,7 @@ pa_output_call (rtx insn, rtx call_dest, int sibcall)
 /* We are done if there isn't a jump in the delay slot. */
 if (seq_length == 0
 || delay_insn_deleted
-|| GET_CODE (NEXT_INSN (insn)) != JUMP_INSN)
+|| ! JUMP_P (NEXT_INSN (insn)))
 return "";
 
 /* A sibcall should never have a branch in the delay slot. */
@@ -8826,12 +8823,12 @@ int
 pa_jump_in_call_delay (rtx insn)
 {
 
-if (GET_CODE (insn) != JUMP_INSN)
+if (! JUMP_P (insn))
 return 0;
 
 if (PREV_INSN (insn)
 && PREV_INSN (PREV_INSN (insn))
-&& GET_CODE (next_real_insn (PREV_INSN (PREV_INSN (insn)))) == INSN)
+&& NONJUMP_INSN_P (next_real_insn (PREV_INSN (PREV_INSN (insn)))))
 {
 rtx test_insn = next_real_insn (PREV_INSN (PREV_INSN (insn)));
 
@@ -8928,14 +8925,14 @@ pa_following_call (rtx insn)
 
 /* Find the previous real insn, skipping NOTEs. */
 insn = PREV_INSN (insn);
-while (insn && GET_CODE (insn) == NOTE)
+while (insn && NOTE_P (insn))
 insn = PREV_INSN (insn);
 
 /* Check for CALL_INSNs and millicode calls. */
 if (insn
-&& ((GET_CODE (insn) == CALL_INSN
+&& ((CALL_P (insn)
 && get_attr_type (insn) != TYPE_DYNCALL)
-|| (GET_CODE (insn) == INSN
+|| (NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) != SEQUENCE
 && GET_CODE (PATTERN (insn)) != USE
 && GET_CODE (PATTERN (insn)) != CLOBBER
@@ -9000,7 +8997,7 @@ pa_reorg (void)
 unsigned int length, i;
 
 /* Find an ADDR_VEC or ADDR_DIFF_VEC insn to explode. */
-if (GET_CODE (insn) != JUMP_INSN
+if (! JUMP_P (insn)
 || (GET_CODE (PATTERN (insn)) != ADDR_VEC
 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
 continue;
@@ -9059,7 +9056,7 @@ pa_reorg (void)
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 {
 /* Find an ADDR_VEC insn. */
-if (GET_CODE (insn) != JUMP_INSN
+if (! JUMP_P (insn)
 || (GET_CODE (PATTERN (insn)) != ADDR_VEC
 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC))
 continue;
@@ -9140,9 +9137,7 @@ pa_combine_instructions (void)
 
 /* We only care about INSNs, JUMP_INSNs, and CALL_INSNs.
 Also ignore any special USE insns. */
-if ((GET_CODE (anchor) != INSN
-&& GET_CODE (anchor) != JUMP_INSN
-&& GET_CODE (anchor) != CALL_INSN)
+if ((! NONJUMP_INSN_P (anchor) && ! JUMP_P (anchor) && ! CALL_P (anchor))
 || GET_CODE (PATTERN (anchor)) == USE
 || GET_CODE (PATTERN (anchor)) == CLOBBER
 || GET_CODE (PATTERN (anchor)) == ADDR_VEC
@@ -9162,14 +9157,14 @@ pa_combine_instructions (void)
 floater;
 floater = PREV_INSN (floater))
 {
-if (GET_CODE (floater) == NOTE
-|| (GET_CODE (floater) == INSN
+if (NOTE_P (floater)
+|| (NONJUMP_INSN_P (floater)
 && (GET_CODE (PATTERN (floater)) == USE
 || GET_CODE (PATTERN (floater)) == CLOBBER)))
 continue;
 
 /* Anything except a regular INSN will stop our search. */
-if (GET_CODE (floater) != INSN
+if (! NONJUMP_INSN_P (floater)
 || GET_CODE (PATTERN (floater)) == ADDR_VEC
 || GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
 {
@@ -9223,15 +9218,15 @@ pa_combine_instructions (void)
 {
 for (floater = anchor; floater; floater = NEXT_INSN (floater))
 {
-if (GET_CODE (floater) == NOTE
-|| (GET_CODE (floater) == INSN
+if (NOTE_P (floater)
+|| (NONJUMP_INSN_P (floater)
 && (GET_CODE (PATTERN (floater)) == USE
 || GET_CODE (PATTERN (floater)) == CLOBBER)))
 
 continue;
 
 /* Anything except a regular INSN will stop our search. */
-if (GET_CODE (floater) != INSN
+if (! NONJUMP_INSN_P (floater)
 || GET_CODE (PATTERN (floater)) == ADDR_VEC
 || GET_CODE (PATTERN (floater)) == ADDR_DIFF_VEC)
 {
@@ -9386,7 +9381,7 @@ pa_can_combine_p (rtx new_rtx, rtx anchor, rtx floater, int reversed, rtx dest,
 int
 pa_insn_refs_are_delayed (rtx insn)
 {
-return ((GET_CODE (insn) == INSN
+return ((NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) != SEQUENCE
 && GET_CODE (PATTERN (insn)) != USE
 && GET_CODE (PATTERN (insn)) != CLOBBER
@@ -17843,9 +17843,8 @@ compute_save_world_info (rs6000_stack_t *info_ptr)
 if (WORLD_SAVE_P (info_ptr))
 {
 rtx insn;
-for ( insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
-if ( GET_CODE (insn) == CALL_INSN
-&& SIBLING_CALL_P (insn))
+for (insn = get_last_insn_anywhere (); insn; insn = PREV_INSN (insn))
+if (CALL_P (insn) && SIBLING_CALL_P (insn))
 {
 info_ptr->world_save_p = 0;
 break;
@@ -23837,7 +23836,7 @@ is_load_insn (rtx insn, rtx *load_mem)
 if (!insn || !INSN_P (insn))
 return false;
 
-if (GET_CODE (insn) == CALL_INSN)
+if (CALL_P (insn))
 return false;
 
 return is_load_insn1 (PATTERN (insn), load_mem);
@@ -24232,7 +24231,7 @@ insn_must_be_first_in_group (rtx insn)
 enum attr_type type;
 
 if (!insn
-|| GET_CODE (insn) == NOTE
+|| NOTE_P (insn)
 || DEBUG_INSN_P (insn)
 || GET_CODE (PATTERN (insn)) == USE
 || GET_CODE (PATTERN (insn)) == CLOBBER)
@@ -24363,7 +24362,7 @@ insn_must_be_last_in_group (rtx insn)
 enum attr_type type;
 
 if (!insn
-|| GET_CODE (insn) == NOTE
+|| NOTE_P (insn)
 || DEBUG_INSN_P (insn)
 || GET_CODE (PATTERN (insn)) == USE
 || GET_CODE (PATTERN (insn)) == CLOBBER)
@@ -5738,7 +5738,7 @@ addr_generation_dependency_p (rtx dep_rtx, rtx insn)
 {
 rtx target, pat;
 
-if (GET_CODE (dep_rtx) == INSN)
+if (NONJUMP_INSN_P (dep_rtx))
 dep_rtx = PATTERN (dep_rtx);
 
 if (GET_CODE (dep_rtx) == SET)
@@ -5978,7 +5978,7 @@ s390_split_branches (void)
 
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 {
-if (GET_CODE (insn) != JUMP_INSN)
+if (! JUMP_P (insn))
 continue;
 
 pat = PATTERN (insn);
@@ -6398,7 +6398,7 @@ s390_find_constant (struct constant_pool *pool, rtx val,
 static rtx
 s390_execute_label (rtx insn)
 {
-if (GET_CODE (insn) == INSN
+if (NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) == PARALLEL
 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == UNSPEC
 && XINT (XVECEXP (PATTERN (insn), 0, 0), 1) == UNSPEC_EXECUTE)
@@ -6603,7 +6603,7 @@ s390_mainpool_start (void)
 
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 {
-if (GET_CODE (insn) == INSN
+if (NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) == SET
 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC_VOLATILE
 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPECV_MAIN_POOL)
@@ -6616,7 +6616,7 @@ s390_mainpool_start (void)
 {
 s390_add_execute (pool, insn);
 }
-else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
 {
 rtx pool_ref = NULL_RTX;
 find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6758,7 +6758,7 @@ s390_mainpool_finish (struct constant_pool *pool)
 if (INSN_P (insn))
 replace_ltrel_base (&PATTERN (insn));
 
-if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+if (NONJUMP_INSN_P (insn) || CALL_P (insn))
 {
 rtx addr, pool_ref = NULL_RTX;
 find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6840,7 +6840,7 @@ s390_chunkify_start (void)
 s390_add_execute (curr_pool, insn);
 s390_add_pool_insn (curr_pool, insn);
 }
-else if (GET_CODE (insn) == INSN || CALL_P (insn))
+else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
 {
 rtx pool_ref = NULL_RTX;
 find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -6867,7 +6867,7 @@ s390_chunkify_start (void)
 }
 }
 
-if (GET_CODE (insn) == JUMP_INSN || GET_CODE (insn) == CODE_LABEL)
+if (JUMP_P (insn) || LABEL_P (insn))
 {
 if (curr_pool)
 s390_add_pool_insn (curr_pool, insn);
@@ -6911,7 +6911,7 @@ s390_chunkify_start (void)
 Those will have an effect on code size, which we need to
 consider here. This calculation makes rather pessimistic
 worst-case assumptions. */
-if (GET_CODE (insn) == CODE_LABEL)
+if (LABEL_P (insn))
 extra_size += 6;
 
 if (chunk_size < S390_POOL_CHUNK_MIN
@@ -6920,7 +6920,7 @@ s390_chunkify_start (void)
 continue;
 
 /* Pool chunks can only be inserted after BARRIERs ... */
-if (GET_CODE (insn) == BARRIER)
+if (BARRIER_P (insn))
 {
 s390_end_pool (curr_pool, insn);
 curr_pool = NULL;
@@ -6937,7 +6937,7 @@ s390_chunkify_start (void)
 if (!section_switch_p)
 {
 /* We can insert the barrier only after a 'real' insn. */
-if (GET_CODE (insn) != INSN && GET_CODE (insn) != CALL_INSN)
+if (! NONJUMP_INSN_P (insn) && ! CALL_P (insn))
 continue;
 if (get_attr_length (insn) == 0)
 continue;
@@ -7009,11 +7009,11 @@ s390_chunkify_start (void)
 Don't do that, however, if it is the label before
 a jump table. */
 
-if (GET_CODE (insn) == CODE_LABEL
+if (LABEL_P (insn)
 && (LABEL_PRESERVE_P (insn) || LABEL_NAME (insn)))
 {
 rtx vec_insn = next_real_insn (insn);
-rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
+rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
 PATTERN (vec_insn) : NULL_RTX;
 if (!vec_pat
 || !(GET_CODE (vec_pat) == ADDR_VEC
@@ -7023,7 +7023,7 @@ s390_chunkify_start (void)
 
 /* If we have a direct jump (conditional or unconditional)
 or a casesi jump, check all potential targets. */
-else if (GET_CODE (insn) == JUMP_INSN)
+else if (JUMP_P (insn))
 {
 rtx pat = PATTERN (insn);
 if (GET_CODE (pat) == PARALLEL && XVECLEN (pat, 0) > 2)
@@ -7048,7 +7048,7 @@ s390_chunkify_start (void)
 /* Find the jump table used by this casesi jump. */
 rtx vec_label = XEXP (XEXP (XVECEXP (pat, 0, 1), 0), 0);
 rtx vec_insn = next_real_insn (vec_label);
-rtx vec_pat = vec_insn && GET_CODE (vec_insn) == JUMP_INSN ?
+rtx vec_pat = vec_insn && JUMP_P (vec_insn) ?
 PATTERN (vec_insn) : NULL_RTX;
 if (vec_pat
 && (GET_CODE (vec_pat) == ADDR_VEC
@@ -7082,7 +7082,7 @@ s390_chunkify_start (void)
 /* Insert base register reload insns at every far label. */
 
 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
-if (GET_CODE (insn) == CODE_LABEL
+if (LABEL_P (insn)
 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn)))
 {
 struct constant_pool *pool = s390_find_pool (pool_list, insn);
@@ -7128,7 +7128,7 @@ s390_chunkify_finish (struct constant_pool *pool_list)
 if (!curr_pool)
 continue;
 
-if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
+if (NONJUMP_INSN_P (insn) || CALL_P (insn))
 {
 rtx addr, pool_ref = NULL_RTX;
 find_constant_pool_ref (PATTERN (insn), &pool_ref);
@@ -7181,9 +7181,9 @@ s390_chunkify_cancel (struct constant_pool *pool_list)
 rtx jump = barrier? PREV_INSN (barrier) : NULL_RTX;
 rtx label = NEXT_INSN (curr_pool->pool_insn);
 
-if (jump && GET_CODE (jump) == JUMP_INSN
-&& barrier && GET_CODE (barrier) == BARRIER
-&& label && GET_CODE (label) == CODE_LABEL
+if (jump && JUMP_P (jump)
+&& barrier && BARRIER_P (barrier)
+&& label && LABEL_P (label)
 && GET_CODE (PATTERN (jump)) == SET
 && SET_DEST (PATTERN (jump)) == pc_rtx
 && GET_CODE (SET_SRC (PATTERN (jump))) == LABEL_REF
@@ -7203,7 +7203,7 @@ s390_chunkify_cancel (struct constant_pool *pool_list)
 {
 rtx next_insn = NEXT_INSN (insn);
 
-if (GET_CODE (insn) == INSN
+if (NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) == SET
 && GET_CODE (SET_SRC (PATTERN (insn))) == UNSPEC
 && XINT (SET_SRC (PATTERN (insn)), 1) == UNSPEC_RELOAD_BASE)
@@ -10080,7 +10080,7 @@ s390_optimize_prologue (void)
 
 next_insn = NEXT_INSN (insn);
 
-if (GET_CODE (insn) != INSN)
+if (! NONJUMP_INSN_P (insn))
 continue;
 
 if (GET_CODE (PATTERN (insn)) == PARALLEL
config/sparc/sparc.c
@@ -3063,10 +3063,10 @@ emit_cbcond_nop (rtx insn)
 if (!next)
 return 1;

-if (GET_CODE (next) == INSN
+if (NONJUMP_INSN_P (next)
 && GET_CODE (PATTERN (next)) == SEQUENCE)
 next = XVECEXP (PATTERN (next), 0, 0);
-else if (GET_CODE (next) == CALL_INSN
+else if (CALL_P (next)
 && GET_CODE (PATTERN (next)) == PARALLEL)
 {
 rtx delay = XVECEXP (PATTERN (next), 0, 1);
@@ -3222,7 +3222,7 @@ eligible_for_return_delay (rtx trial)
 int regno;
 rtx pat;

-if (GET_CODE (trial) != INSN)
+if (! NONJUMP_INSN_P (trial))
 return 0;

 if (get_attr_length (trial) != 1)
@@ -3293,7 +3293,7 @@ eligible_for_sibcall_delay (rtx trial)
 {
 rtx pat;

-if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET)
+if (! NONJUMP_INSN_P (trial) || GET_CODE (PATTERN (trial)) != SET)
 return 0;

 if (get_attr_length (trial) != 1)
@@ -5424,7 +5424,7 @@ sparc_asm_function_epilogue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)

 last_real_insn = prev_real_insn (insn);
 if (last_real_insn
-&& GET_CODE (last_real_insn) == INSN
+&& NONJUMP_INSN_P (last_real_insn)
 && GET_CODE (PATTERN (last_real_insn)) == SEQUENCE)
 last_real_insn = XVECEXP (PATTERN (last_real_insn), 0, 0);

config/spu/spu.c
@@ -1962,7 +1962,7 @@ struct spu_bb_info
 static struct spu_bb_info *spu_bb_info;

 #define STOP_HINT_P(INSN) \
-(GET_CODE(INSN) == CALL_INSN \
+(CALL_P(INSN) \
 || INSN_CODE(INSN) == CODE_FOR_divmodsi4 \
 || INSN_CODE(INSN) == CODE_FOR_udivmodsi4)

@@ -2163,7 +2163,7 @@ spu_emit_branch_hint (rtx before, rtx branch, rtx target,
 static rtx
 get_branch_target (rtx branch)
 {
-if (GET_CODE (branch) == JUMP_INSN)
+if (JUMP_P (branch))
 {
 rtx set, src;

@@ -2212,7 +2212,7 @@ get_branch_target (rtx branch)

 return src;
 }
-else if (GET_CODE (branch) == CALL_INSN)
+else if (CALL_P (branch))
 {
 rtx call;
 /* All of our call patterns are in a PARALLEL and the CALL is
config/stormy16/stormy16.c
@@ -2441,8 +2441,7 @@ combine_bnp (rtx insn)
 if (reg_mentioned_p (reg, and_insn))
 return;

-if (GET_CODE (and_insn) != NOTE
-&& GET_CODE (and_insn) != INSN)
+if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
 return;
 }
 }
@@ -2461,8 +2460,7 @@ combine_bnp (rtx insn)
 if (reg_mentioned_p (reg, and_insn))
 return;

-if (GET_CODE (and_insn) != NOTE
-&& GET_CODE (and_insn) != INSN)
+if (! NOTE_P (and_insn) && ! NONJUMP_INSN_P (and_insn))
 return;
 }

@@ -2486,8 +2484,7 @@ combine_bnp (rtx insn)
 break;

 if (reg_mentioned_p (reg, shift)
-|| (GET_CODE (shift) != NOTE
-&& GET_CODE (shift) != INSN))
+|| (! NOTE_P (shift) && ! NONJUMP_INSN_P (shift)))
 {
 shift = NULL_RTX;
 break;
@@ -2534,8 +2531,7 @@ combine_bnp (rtx insn)
 if (reg_mentioned_p (reg, load))
 return;

-if (GET_CODE (load) != NOTE
-&& GET_CODE (load) != INSN)
+if (! NOTE_P (load) && ! NONJUMP_INSN_P (load))
 return;
 }
 if (!load)
config/v850/v850.c
@@ -1133,13 +1133,13 @@ Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, end
 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
 INSN_UID (first_insn), INSN_UID (last_insn));

-if (GET_CODE (first_insn) == NOTE)
+if (NOTE_P (first_insn))
 first_insn = next_nonnote_insn (first_insn);

 last_insn = next_nonnote_insn (last_insn);
 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
 {
-if (GET_CODE (insn) == INSN)
+if (NONJUMP_INSN_P (insn))
 {
 rtx pattern = single_set (insn);

@@ -1199,7 +1199,7 @@ Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, end

 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
 insn = prev_nonnote_insn (first_insn);
-if (insn && GET_CODE (insn) == INSN
+if (insn && NONJUMP_INSN_P (insn)
 && GET_CODE (PATTERN (insn)) == SET
 && SET_DEST (PATTERN (insn)) == *p_ep
 && SET_SRC (PATTERN (insn)) == *p_r1)
config/xtensa/xtensa.c
@@ -1650,7 +1650,7 @@ xtensa_emit_loop_end (rtx insn, rtx *operands)
 {
 rtx body = PATTERN (insn);

-if (GET_CODE (body) == JUMP_INSN)
+if (JUMP_P (body))
 {
 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
 done = 1;
dwarf2out.c
@@ -5059,7 +5059,7 @@ add_var_loc_to_decl (tree decl, rtx loc_note, const char *label)
 if (temp->last
 && temp->first == temp->last
 && TREE_CODE (decl) == PARM_DECL
-&& GET_CODE (temp->first->loc) == NOTE
+&& NOTE_P (temp->first->loc)
 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
 && DECL_INCOMING_RTL (decl)
 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
@@ -13475,7 +13475,7 @@ dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
 *listp = new_loc_list (descr, node->label, endname, secname);
 if (TREE_CODE (decl) == PARM_DECL
 && node == loc_list->first
-&& GET_CODE (node->loc) == NOTE
+&& NOTE_P (node->loc)
 && strcmp (node->label, endname) == 0)
 (*listp)->force = true;
 listp = &(*listp)->dw_loc_next;
@@ -20702,7 +20702,7 @@ dwarf2out_var_location (rtx loc_note)
 next_note = NEXT_INSN (loc_note);
 if (! next_note
 || INSN_DELETED_P (next_note)
-|| GET_CODE (next_note) != NOTE
+|| ! NOTE_P (next_note)
 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
 && NOTE_KIND (next_note) != NOTE_INSN_CALL_ARG_LOCATION))
 next_note = NULL_RTX;
hw-doloop.c
@@ -365,7 +365,7 @@ discover_loops (bitmap_obstack *loop_stack, struct hw_doloop_hooks *hooks)
 rtx tail = BB_END (bb);
 rtx insn, reg;

-while (tail && GET_CODE (tail) == NOTE && tail != BB_HEAD (bb))
+while (tail && NOTE_P (tail) && tail != BB_HEAD (bb))
 tail = PREV_INSN (tail);

 if (tail == NULL_RTX)
resource.c
@@ -175,14 +175,12 @@ next_insn_no_annul (rtx insn)
 && NEXT_INSN (PREV_INSN (insn)) != insn)
 {
 rtx next = NEXT_INSN (insn);
-enum rtx_code code = GET_CODE (next);

-while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
+while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
 && INSN_FROM_TARGET_P (next))
 {
 insn = next;
 next = NEXT_INSN (insn);
-code = GET_CODE (next);
 }
 }

rtl.h
@@ -961,8 +961,7 @@ extern const char * const reg_note_name[];

 /* Nonzero if INSN is a note marking the beginning of a basic block. */
 #define NOTE_INSN_BASIC_BLOCK_P(INSN) \
-(GET_CODE (INSN) == NOTE \
-&& NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK)
+(NOTE_P (INSN) && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK)

 /* Variable declaration and the location of a variable. */
 #define PAT_VAR_LOCATION_DECL(PAT) (XCTREE ((PAT), 0, VAR_LOCATION))
@@ -1063,7 +1062,7 @@ enum label_kind
 /* Retrieve the kind of LABEL. */
 #define LABEL_KIND(LABEL) __extension__ \
 ({ __typeof (LABEL) const _label = (LABEL); \
-if (GET_CODE (_label) != CODE_LABEL) \
+if (! LABEL_P (_label)) \
 rtl_check_failed_flag ("LABEL_KIND", _label, __FILE__, __LINE__, \
 __FUNCTION__); \
 (enum label_kind) ((_label->jump << 1) | _label->call); })
@@ -1072,7 +1071,7 @@ enum label_kind
 #define SET_LABEL_KIND(LABEL, KIND) do { \
 __typeof (LABEL) const _label = (LABEL); \
 const unsigned int _kind = (KIND); \
-if (GET_CODE (_label) != CODE_LABEL) \
+if (! LABEL_P (_label)) \
 rtl_check_failed_flag ("SET_LABEL_KIND", _label, __FILE__, __LINE__, \
 __FUNCTION__); \
 _label->jump = ((_kind >> 1) & 1); \
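Note on the hunks above: each replacement is mechanical, because the insn predicates named in the ChangeLog are thin wrappers around the GET_CODE comparison they replace. The following standalone C sketch uses a mock rtx type and locally defined macros written for illustration only (not the real rtl.h declarations) to show the intended equivalence:

/* Illustrative sketch only: a miniature rtx with just enough structure to
   show that predicate-style tests expand to the GET_CODE comparisons they
   replace, so hunks of this kind do not change behavior.  */
#include <stdio.h>

enum rtx_code { NOTE, CODE_LABEL, INSN, JUMP_INSN, CALL_INSN, BARRIER };

typedef struct mock_rtx_def { enum rtx_code code; } *rtx;

#define GET_CODE(X)        ((X)->code)
#define NOTE_P(X)          (GET_CODE (X) == NOTE)
#define LABEL_P(X)         (GET_CODE (X) == CODE_LABEL)
#define NONJUMP_INSN_P(X)  (GET_CODE (X) == INSN)
#define JUMP_P(X)          (GET_CODE (X) == JUMP_INSN)
#define CALL_P(X)          (GET_CODE (X) == CALL_INSN)
#define BARRIER_P(X)       (GET_CODE (X) == BARRIER)

int
main (void)
{
  struct mock_rtx_def jump_insn = { JUMP_INSN };
  rtx insn = &jump_insn;

  /* Old style and new style give the same answer by macro expansion.  */
  printf ("old: %d  new: %d\n",
          GET_CODE (insn) == JUMP_INSN, JUMP_P (insn));
  return 0;
}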