/* Perform instruction reorganizations for delay slot filling.
   Copyright (C) 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
   Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu).
   Hacked by Michael Tiemann (tiemann@cygnus.com).

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* Instruction reorganization pass.

   This pass runs after register allocation and final jump
   optimization.  It should be the last pass to run before peephole.
   It serves primarily to fill delay slots of insns, typically branch
   and call insns.  Other insns typically involve more complicated
   interactions of data dependencies and resource constraints, and
   are better handled by scheduling before register allocation (by the
   function `schedule_insns').

   The Branch Penalty is the number of extra cycles that are needed to
   execute a branch insn.  On an ideal machine, branches take a single
   cycle, and the Branch Penalty is 0.  Several RISC machines approach
   branch delays differently:

   The MIPS and AMD 29000 have a single branch delay slot.  Most insns
   (except other branches) can be used to fill this slot.  When the
   slot is filled, two insns execute in two cycles, reducing the
   branch penalty to zero.
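
   As an illustrative sketch (hypothetical MIPS-style assembly, not
   taken from any particular compiler output), filling the slot hoists
   an independent insn from before the branch so no cycle is wasted on
   a nop:

	(before filling)		(after filling)
	addu	$4,$5,$6		beq	$2,$3,L1
	beq	$2,$3,L1		addu	$4,$5,$6	# delay slot
	nop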
   The Motorola 88000 conditionally exposes its branch delay slot,
   so code is shorter when it is turned off, but will run faster
   when useful insns are scheduled there.

   The IBM ROMP has two forms of branch and call insns, both with and
   without a delay slot.  Much like the 88k, insns not using the delay
   slot can be shorter (2 bytes vs. 4 bytes), but will run slower.

   The SPARC always has a branch delay slot, but its effects can be
   annulled when the branch is not taken.  This means that failing to
   find other sources of insns, we can hoist an insn from the branch
   target that would only be safe to execute knowing that the branch
   is taken.

   The HP-PA always has a branch delay slot.  For unconditional branches
   its effects can be annulled when the branch is taken.  The effects
   of the delay slot in a conditional branch can be nullified for forward
   taken branches, or for untaken backward branches.  This means
   we can hoist insns from the fall-through path for forward branches or
   steal insns from the target of backward branches.
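
   For illustration, a hedged SPARC-style sketch (hypothetical code):
   the ",a" (annul) form cancels the delay insn when the branch falls
   through, so an insn may be hoisted from the branch target into the
   slot and the branch redirected past it:

	(before)			(after)
	bne	L1			bne,a	L1'
	nop				ld	[%o0],%o1	! from L1
   L1:	ld	[%o0],%o1	   L1:	ld	[%o0],%o1
	...			   L1':	...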
   Three techniques for filling delay slots have been implemented so far:

   (1) `fill_simple_delay_slots' is the simplest, most efficient way
   to fill delay slots.  This pass first looks for insns which come
   from before the branch and which are safe to execute after the
   branch.  Then it searches after the insn requiring delay slots or,
   in the case of a branch, for insns that are after the point at
   which the branch merges into the fallthrough code, if such a point
   exists.  When such insns are found, the branch penalty decreases
   and no code expansion takes place.

   (2) `fill_eager_delay_slots' is more complicated: it is used for
   scheduling conditional jumps, or for scheduling jumps which cannot
   be filled using (1).  A machine need not have annulled jumps to use
   this strategy, but it helps (by keeping more options open).
   `fill_eager_delay_slots' tries to guess the direction the branch
   will go; if it guesses right 100% of the time, it can reduce the
   branch penalty as much as `fill_simple_delay_slots' does.  If it
   guesses wrong 100% of the time, it might as well schedule nops (or
   on the m88k, unexpose the branch slot).  When
   `fill_eager_delay_slots' takes insns from the fall-through path of
   the jump, usually there is no code expansion; when it takes insns
   from the branch target, there is code expansion if it is not the
   only way to reach that target.
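
   A minimal sketch of the choice (illustrative pseudo-flow, not code
   from this pass):

	if (branch predicted taken)
	  steal the first insn from the branch target; annul it on the
	  fall-through path;
	else
	  hoist the first insn from the fall-through path; annul it on
	  the taken path;

   Only the target-stealing case can grow the code, and only when the
   target has other predecessors and so must keep its own copy.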
   (3) `relax_delay_slots' uses a set of rules to simplify code that
   has been reorganized by (1) and (2).  It finds cases where a
   conditional test can be eliminated, jumps can be threaded, extra
   insns can be eliminated, etc.  It is the job of (1) and (2) to do a
   good job of scheduling locally; `relax_delay_slots' takes care of
   making the various individual schedules work well together.  It is
   especially tuned to handle the control flow interactions of branch
   insns.  It does nothing for insns with delay slots that do not
   branch.

   On machines that use CC0, we are very conservative.  We will not make
   a copy of an insn involving CC0 since we want to maintain a 1-1
   correspondence between the insn that sets CC0 and the insn that uses
   it.  The insns are allowed to be separated by placing an insn that
   sets CC0 (but not an insn that uses CC0; we could do this, but it
   doesn't seem worthwhile) in a delay slot.  In that case, we point each
   insn at the other with REG_CC_USER and REG_CC_SETTER notes.  Note that
   these restrictions affect very few machines because most RISC machines
   with delay slots will not use CC0 (the RT is the only known exception
   at this point).

   Not yet implemented:

   The Acorn Risc Machine can conditionally execute most insns, so
   it is profitable to move single insns into a position to execute
   based on the condition code of the previous insn.

   The HP-PA can conditionally nullify insns, providing a similar
   effect to the ARM, differing mostly in which insn is "in charge".  */
#include "config.h"
#include <stdio.h>
#include "rtl.h"
#include "insn-config.h"
#include "conditions.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "insn-flags.h"
#include "recog.h"
#include "flags.h"
#include "output.h"
#include "obstack.h"
#include "insn-attr.h"

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

#ifndef ANNUL_IFTRUE_SLOTS
#define eligible_for_annul_true(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
#ifndef ANNUL_IFFALSE_SLOTS
#define eligible_for_annul_false(INSN, SLOTS, TRIAL, FLAGS) 0
#endif
/* Insns which have delay slots that have not yet been filled.  */

static struct obstack unfilled_slots_obstack;
static rtx *unfilled_firstobj;

/* Define macros to refer to the first and last slot containing unfilled
   insns.  These are used because the list may move and its address
   should be recomputed at each use.  */

#define unfilled_slots_base	\
  ((rtx *) obstack_base (&unfilled_slots_obstack))

#define unfilled_slots_next	\
  ((rtx *) obstack_next_free (&unfilled_slots_obstack))
/* This structure is used to indicate which hardware resources are set or
   needed by insns so far.  */

struct resources
{
  char memory;		/* Insn sets or needs a memory location.  */
  char unch_memory;	/* Insn sets or needs an "unchanging" MEM.  */
  char volatil;		/* Insn sets or needs a volatile memory loc.  */
  char cc;		/* Insn sets or needs the condition codes.  */
  HARD_REG_SET regs;	/* Which registers are set or needed.  */
};

/* Macro to clear all resources.  */
#define CLEAR_RESOURCE(RES)	\
 do { (RES)->memory = (RES)->unch_memory = (RES)->volatil = (RES)->cc = 0; \
      CLEAR_HARD_REG_SET ((RES)->regs); } while (0)
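
/* A minimal usage sketch of the resource tracking above (illustrative;
   it mirrors how the routines below use these helpers, but this exact
   fragment is not part of the pass):

     struct resources set, needed;

     CLEAR_RESOURCE (&set);
     CLEAR_RESOURCE (&needed);
     mark_set_resources (insn, &set, 0, 1);
     mark_referenced_resources (insn, &needed, 1);
     if (resource_conflicts_p (&set, &needed))
       -- INSN both sets and uses some resource --  */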
/* Indicates what resources are required at the beginning of the epilogue.  */

static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */

static struct resources end_of_function_needs;

/* Points to the label before the end of the function.  */

static rtx end_of_function_label;
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};
#define TARGET_HASH_PRIME 257

/* Define the hash table itself.  */

static struct target_info **target_hash_table;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;
/* Mapping between INSN_UID's and position in the code since INSN_UID's do
   not always monotonically increase.  */

static int *uid_to_ruid;

/* Highest valid index in `uid_to_ruid'.  */

static int max_uid;
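
/* A sketch of how the mapping is typically initialized by this pass's
   driver (illustrative; the real initialization lives in `dbr_schedule',
   outside this excerpt):

     uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
     for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
       uid_to_ruid[INSN_UID (insn)] = i;  */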
static void mark_referenced_resources PROTO((rtx, struct resources *, int));
static void mark_set_resources PROTO((rtx, struct resources *, int, int));
static int stop_search_p PROTO((rtx, int));
static int resource_conflicts_p PROTO((struct resources *,
				       struct resources *));
static int insn_references_resource_p PROTO((rtx, struct resources *, int));
static int insn_sets_resource_p PROTO((rtx, struct resources *, int));
static rtx find_end_label PROTO((void));
static rtx emit_delay_sequence PROTO((rtx, rtx, int, int));
static rtx add_to_delay_list PROTO((rtx, rtx));
static void delete_from_delay_slot PROTO((rtx));
static void delete_scheduled_jump PROTO((rtx));
static void note_delay_statistics PROTO((int, int));
static rtx optimize_skip PROTO((rtx));
static int get_jump_flags PROTO((rtx, rtx));
static int rare_destination PROTO((rtx));
static int mostly_true_jump PROTO((rtx, rtx));
static rtx get_branch_condition PROTO((rtx, rtx));
static int condition_dominates_p PROTO((rtx, rtx));
static rtx steal_delay_list_from_target PROTO((rtx, rtx, rtx, rtx,
					       struct resources *,
					       struct resources *,
					       struct resources *,
					       int, int *, int *, rtx *));
static rtx steal_delay_list_from_fallthrough PROTO((rtx, rtx, rtx, rtx,
						    struct resources *,
						    struct resources *,
						    struct resources *,
						    int, int *, int *));
static void try_merge_delay_insns PROTO((rtx, rtx));
static rtx redundant_insn PROTO((rtx, rtx, rtx));
static int own_thread_p PROTO((rtx, rtx, int));
static int find_basic_block PROTO((rtx));
static void update_block PROTO((rtx, rtx));
static int reorg_redirect_jump PROTO((rtx, rtx));
static void update_reg_dead_notes PROTO((rtx, rtx));
static void update_reg_unused_notes PROTO((rtx, rtx));
static void update_live_status PROTO((rtx, rtx));
static rtx next_insn_no_annul PROTO((rtx));
static void mark_target_live_regs PROTO((rtx, struct resources *));
static void fill_simple_delay_slots PROTO((rtx, int));
static rtx fill_slots_from_thread PROTO((rtx, rtx, rtx, rtx, int, int,
					 int, int, int, int *));
static void fill_eager_delay_slots PROTO((rtx));
static void relax_delay_slots PROTO((rtx));
static void make_return_insns PROTO((rtx));
static int redirect_with_delay_slots_safe_p PROTO ((rtx, rtx, rtx));
static int redirect_with_delay_list_safe_p PROTO ((rtx, rtx, rtx));
/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

static void
mark_referenced_resources (x, res, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     register int include_delayed_effects;
{
  register enum rtx_code code = GET_CODE (x);
  register int i, j;
  register char *format_ptr;
  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
	{
	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);
	}
      return;

    case REG:
      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 anything.  */
      if (RTX_UNCHANGING_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil = MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case ASM_OPERANDS:
      res->volatil = MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */
      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  rtx next = NEXT_INSN (x);
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      next = NEXT_INSN (NEXT_INSN (insn));
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      if (GET_CODE (sequence) != SEQUENCE)
		abort ();
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns non-zero.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  SET_DEST (XEXP (link, 0))))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (SET_DEST (XEXP (link, 0)),
					       res, 0);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
/* Given X, a part of an insn, and a pointer to a `struct resources', RES,
   indicate which resources are modified by the insn.  If
   INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially set
   by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

static void
mark_set_resources (x, res, in_dest, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     int in_dest;
     int include_delayed_effects;
{
  register enum rtx_code code;
  register int i, j;
  register char *format_ptr;
 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CALL:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (include_delayed_effects)
	{
	  rtx next = NEXT_INSN (x);
	  rtx prev = PREV_INSN (x);
	  rtx link;

	  res->cc = res->memory = 1;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i] || global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* If X is part of a delay slot sequence, then NEXT should be
	     the first insn after the sequence.  */
	  if (NEXT_INSN (prev) != x)
	    next = NEXT_INSN (NEXT_INSN (prev));

	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (include_delayed_effects
			   || GET_CODE (SET_SRC (x)) != CALL),
			  0);

      mark_set_resources (SET_SRC (x), res, 0, 0);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0,
			      include_delayed_effects);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, 0);
      mark_set_resources (XEXP (x, 1), res, 0, 0);
      mark_set_resources (XEXP (x, 2), res, 0, 0);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory = RTX_UNCHANGING_P (x);
	  res->volatil = MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, 0);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res,
				in_dest, include_delayed_effects);
	  else
	    {
	      int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	      int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      for (i = regno; i < last_regno; i++)
		SET_HARD_REG_BIT (res->regs, i);
	    }
	}
      return;

    case REG:
      if (in_dest)
	for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	  SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest,
			      include_delayed_effects);
	break;
      }
}
/* Return TRUE if this insn should stop the search for insns to fill delay
   slots.  LABELS_P indicates that labels should terminate the search.
   In all cases, jumps terminate the search.  */

static int
stop_search_p (insn, labels_p)
     rtx insn;
     int labels_p;
{
  if (insn == 0)
    return 1;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CALL_INSN:
      return 0;

    case CODE_LABEL:
      return labels_p;

    case JUMP_INSN:
    case BARRIER:
      return 1;

    case INSN:
      /* OK unless it contains a delay slot or is an `asm' insn of some type.
	 We don't know anything about these.  */
      return (GET_CODE (PATTERN (insn)) == SEQUENCE
	      || GET_CODE (PATTERN (insn)) == ASM_INPUT
	      || asm_noperands (PATTERN (insn)) >= 0);

    default:
      abort ();
    }
}
/* Return TRUE if any resources are marked in both RES1 and RES2 or if either
   resource set contains a volatile memory reference.  Otherwise, return FALSE.  */

static int
resource_conflicts_p (res1, res2)
     struct resources *res1, *res2;
{
  if ((res1->cc && res2->cc) || (res1->memory && res2->memory)
      || (res1->unch_memory && res2->unch_memory)
      || res1->volatil || res2->volatil)
    return 1;

#ifdef HARD_REG_SET
  return (res1->regs & res2->regs) != HARD_CONST (0);
#else
  {
    int i;

    for (i = 0; i < HARD_REG_SET_LONGS; i++)
      if ((res1->regs[i] & res2->regs[i]) != 0)
	return 1;
    return 0;
  }
#endif
}
/* Return TRUE if any resource marked in RES, a `struct resources', is
   referenced by INSN.  If INCLUDE_DELAYED_EFFECTS is set, return TRUE if
   the called routine is using those resources.

   We compute this by computing all the resources referenced by INSN and
   seeing if this conflicts with RES.  It might be faster to directly check
   ourselves, and this is the way it used to work, but it means duplicating
   a large block of complex code.  */

static int
insn_references_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_res;

  CLEAR_RESOURCE (&insn_res);
  mark_referenced_resources (insn, &insn_res, include_delayed_effects);
  return resource_conflicts_p (&insn_res, res);
}
/* Return TRUE if INSN modifies resources that are marked in RES.
   INCLUDE_DELAYED_EFFECTS is set if the actions of that routine should be
   included.  CC0 is only modified if it is explicitly set; see comments
   in front of mark_set_resources for details.  */

static int
insn_sets_resource_p (insn, res, include_delayed_effects)
     register rtx insn;
     register struct resources *res;
     int include_delayed_effects;
{
  struct resources insn_sets;

  CLEAR_RESOURCE (&insn_sets);
  mark_set_resources (insn, &insn_sets, 0, include_delayed_effects);
  return resource_conflicts_p (&insn_sets, res);
}
/* Find a label at the end of the function or before a RETURN.  If there is
   none, make one.  */

static rtx
find_end_label ()
{
  rtx insn;

  /* If we found one previously, return it.  */
  if (end_of_function_label)
    return end_of_function_label;

  /* Otherwise, see if there is a label at the end of the function.  If there
     is, it must be that RETURN insns aren't needed, so that is our return
     label and we don't have to do anything else.  */

  insn = get_last_insn ();
  while (GET_CODE (insn) == NOTE
	 || (GET_CODE (insn) == INSN
	     && (GET_CODE (PATTERN (insn)) == USE
		 || GET_CODE (PATTERN (insn)) == CLOBBER)))
    insn = PREV_INSN (insn);

  /* When a target threads its epilogue we might already have a
     suitable return insn.  If so put a label before it for the
     end_of_function_label.  */
  if (GET_CODE (insn) == BARRIER
      && GET_CODE (PREV_INSN (insn)) == JUMP_INSN
      && GET_CODE (PATTERN (PREV_INSN (insn))) == RETURN)
    {
      rtx temp = PREV_INSN (PREV_INSN (insn));
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;

      /* Put the label before any USE insns that may precede the RETURN insn.  */
      while (GET_CODE (temp) == USE)
	temp = PREV_INSN (temp);

      emit_label_after (end_of_function_label, temp);
    }
  else if (GET_CODE (insn) == CODE_LABEL)
    end_of_function_label = insn;
  else
    {
      /* Otherwise, make a new label and emit a RETURN and BARRIER,
	 if needed.  */
      end_of_function_label = gen_label_rtx ();
      LABEL_NUSES (end_of_function_label) = 0;
      emit_label (end_of_function_label);
#ifdef HAVE_return
      if (HAVE_return)
	{
	  /* The return we make may have delay slots too.  */
	  rtx insn = gen_return ();
	  insn = emit_jump_insn (insn);
	  emit_barrier ();
	  if (num_delay_slots (insn) > 0)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
#endif
    }

  /* Show one additional use for this label so it won't go away until
     we are done.  */
  ++LABEL_NUSES (end_of_function_label);

  return end_of_function_label;
}
/* Put INSN and LIST together in a SEQUENCE rtx of LENGTH, and replace
   the pattern of INSN with the SEQUENCE.

   Chain the insns so that NEXT_INSN of each insn in the sequence points to
   the next and NEXT_INSN of the last insn in the sequence points to
   the first insn after the sequence.  Similarly for PREV_INSN.  This makes
   it easier to scan all insns.

   Returns the SEQUENCE that replaces INSN.  */
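
/* For illustration, a filled jump with two delay slots is represented
   roughly as the following nested rtl (a sketch, not exact output):

     (insn (sequence [ (jump_insn ... the insn with the slots ...)
		       (insn ... first delay slot insn ...)
		       (insn ... second delay slot insn ...) ]))

   The SEQUENCE itself carries no semantics; the wrapper insn stands in
   the chain where the original insn used to be.  */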
static rtx
emit_delay_sequence (insn, list, length, avail)
     rtx insn;
     rtx list;
     int length;
     int avail;
{
  register int i = 1;
  rtx li;
  int had_barrier = 0;

  /* Allocate the rtvec to hold the insns and the SEQUENCE.  */
  rtvec seqv = rtvec_alloc (length + 1);
  rtx seq = gen_rtx (SEQUENCE, VOIDmode, seqv);
  rtx seq_insn = make_insn_raw (seq);
  rtx first = get_insns ();
  rtx last = get_last_insn ();

  /* Make a copy of the insn having delay slots.  */
  rtx delay_insn = copy_rtx (insn);

  /* If INSN is followed by a BARRIER, delete the BARRIER since it will only
     confuse further processing.  Update LAST in case it was the last insn.
     We will put the BARRIER back in later.  */
  if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
    {
      delete_insn (NEXT_INSN (insn));
      last = get_last_insn ();
      had_barrier = 1;
    }

  /* Splice our SEQUENCE into the insn stream where INSN used to be.  */
  NEXT_INSN (seq_insn) = NEXT_INSN (insn);
  PREV_INSN (seq_insn) = PREV_INSN (insn);

  if (insn == last)
    set_new_first_and_last_insn (first, seq_insn);
  else
    PREV_INSN (NEXT_INSN (seq_insn)) = seq_insn;

  if (insn == first)
    set_new_first_and_last_insn (seq_insn, last);
  else
    NEXT_INSN (PREV_INSN (seq_insn)) = seq_insn;

  /* Build our SEQUENCE and rebuild the insn chain.  */
  XVECEXP (seq, 0, 0) = delay_insn;
  INSN_DELETED_P (delay_insn) = 0;
  PREV_INSN (delay_insn) = PREV_INSN (seq_insn);

  for (li = list; li; li = XEXP (li, 1), i++)
    {
      rtx tem = XEXP (li, 0);
      rtx note;

      /* Show that this copy of the insn isn't deleted.  */
      INSN_DELETED_P (tem) = 0;

      XVECEXP (seq, 0, i) = tem;
      PREV_INSN (tem) = XVECEXP (seq, 0, i - 1);
      NEXT_INSN (XVECEXP (seq, 0, i - 1)) = tem;

      /* Remove any REG_DEAD notes because we can't rely on them now
	 that the insn has been moved.  */
      for (note = REG_NOTES (tem); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD)
	  XEXP (note, 0) = const0_rtx;
    }

  NEXT_INSN (XVECEXP (seq, 0, length)) = NEXT_INSN (seq_insn);

  /* If the previous insn is a SEQUENCE, update the NEXT_INSN pointer on the
     last insn in that SEQUENCE to point to us.  Similarly for the first
     insn in the following insn if it is a SEQUENCE.  */

  if (PREV_INSN (seq_insn) && GET_CODE (PREV_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (PREV_INSN (seq_insn))) == SEQUENCE)
    NEXT_INSN (XVECEXP (PATTERN (PREV_INSN (seq_insn)), 0,
			XVECLEN (PATTERN (PREV_INSN (seq_insn)), 0) - 1))
      = seq_insn;

  if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
      && GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;

  /* If there used to be a BARRIER, put it back.  */
  if (had_barrier)
    emit_barrier_after (seq_insn);

  if (i != length + 1)
    abort ();

  return seq_insn;
}
/* Add INSN to DELAY_LIST and return the head of the new list.  The list must
   be in the order in which the insns are to be executed.  */

static rtx
add_to_delay_list (insn, delay_list)
     rtx insn;
     rtx delay_list;
{
  /* If we have an empty list, just make a new list element.  If
     INSN has its block number recorded, clear it since we may
     be moving the insn to a new block.  */

  if (delay_list == 0)
    {
      struct target_info *tinfo;

      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;

      return gen_rtx (INSN_LIST, VOIDmode, insn, NULL_RTX);
    }

  /* Otherwise this must be an INSN_LIST.  Add INSN to the end of the
     list.  */
  XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));

  return delay_list;
}
/* Delete INSN from the delay slot of the insn that it is in.  This may
   produce an insn without anything in its delay slots.  */

static void
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
	delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);
}
/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
	 insn is in the delay list of some other insn.  So delete it from
	 the delay list it was in.  */
      if (note)
	{
	  if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
	      && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
	    delete_from_delay_slot (XEXP (note, 0));
	}
      else
	{
	  /* The insn setting CC0 is our previous insn, but it may be in
	     a delay slot.  It will be the last insn in the delay slot, if
	     it is.  */
	  rtx trial = previous_insn (insn);
	  if (GET_CODE (trial) == NOTE)
	    trial = prev_nonnote_insn (trial);
	  if (sets_cc0_p (PATTERN (trial)) != 1
	      || FIND_REG_INC_NOTE (trial, 0))
	    return;
	  if (PREV_INSN (NEXT_INSN (trial)) == trial)
	    delete_insn (trial);
	  else
	    delete_from_delay_slot (trial);
	}
    }
#endif

  delete_insn (insn);
}
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];

static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];

static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

	(orig)		(skip)		(otherwise)
	Bcc.n L1	Bcc',a L1	Bcc,a L1'
	insn		insn		insn2
      L1:	      L1:	      L1:
	insn2		insn2		insn2
      L2:	      L2:	      L2:

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform a similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

	(orig)		(inverted)
	Bcc.n L1	Bcc',a L2
	insn		insn
      L1:	      L1:
	Bra L2		Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */
static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial, flags)
	  && ! eligible_for_annul_true (insn, 0, trial, flags)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */
  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
      || (next_trial != 0
	  && GET_CODE (next_trial) == JUMP_INSN
	  && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial, flags))
	{
	  if (invert_jump (insn, JUMP_LABEL (insn)))
	    INSN_FROM_TARGET_P (trial) = 1;
	  else if (! eligible_for_annul_true (insn, 0, trial, flags))
	    return 0;
	}

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
	 branch, thread our jump to the target of that branch.  Don't
	 change this into a RETURN here, because it may not accept what
	 we have in the delay slot.  We'll fix this up later.  */
      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
	  && (simplejump_p (next_trial)
	      || GET_CODE (PATTERN (next_trial)) == RETURN))
	{
	  target_label = JUMP_LABEL (next_trial);
	  if (target_label == 0)
	    target_label = find_end_label ();

	  /* Recompute the flags based on TARGET_LABEL since threading
	     the jump to TARGET_LABEL may change the direction of the
	     jump (which may change the circumstances in which the
	     delay slot is nullified).  */
	  flags = get_jump_flags (insn, target_label);
	  if (eligible_for_annul_true (insn, 0, trial, flags))
	    reorg_redirect_jump (insn, target_label);
	}

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
/* Encode and return branch direction and prediction information for
   INSN assuming it will jump to LABEL.

   Unconditional branches return no direction information and
   are predicted as very likely taken.  */
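
/* For example (illustrative, using the attribute flags tested below):
   a conditional branch to a LABEL that appears later in the insn stream
   and looks likely to be taken would yield
   ATTR_FLAG_forward | ATTR_FLAG_likely.  */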
static int
get_jump_flags (insn, label)
     rtx insn, label;
{
  int flags;

  /* get_jump_flags can be passed any insn with delay slots, these may
     be INSNs, CALL_INSNs, or JUMP_INSNs.  Only JUMP_INSNs have branch
     direction information, and only if they are conditional jumps.

     If LABEL is zero, then there is no way to determine the branch
     direction.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn))
      && INSN_UID (insn) <= max_uid
      && label != 0
      && INSN_UID (label) <= max_uid)
    flags
      = (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
	? ATTR_FLAG_forward : ATTR_FLAG_backward;
  /* No valid direction information.  */
  else
    flags = 0;

  /* If insn is a conditional branch, call mostly_true_jump to
     determine the branch prediction.

     Unconditional branches are predicted as very likely taken.  */
  if (GET_CODE (insn) == JUMP_INSN
      && (condjump_p (insn) || condjump_in_parallel_p (insn)))
    {
      int prediction;

      prediction = mostly_true_jump (insn, get_branch_condition (insn, label));
      switch (prediction)
	{
	case 2:
	  flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);
	  break;
	case 1:
	  flags |= ATTR_FLAG_likely;
	  break;
	case 0:
	  flags |= ATTR_FLAG_unlikely;
	  break;
	case -1:
	  flags |= (ATTR_FLAG_very_unlikely | ATTR_FLAG_unlikely);
	  break;

	default:
	  abort ();
	}
    }
  else
    flags |= (ATTR_FLAG_very_likely | ATTR_FLAG_likely);

  return flags;
}
/* Return 1 if INSN is a destination that will be branched to rarely (the
   return point of a function); return 2 if DEST will be branched to very
   rarely (a call to a function that doesn't return).  Otherwise,
   return 0.  */
static int
rare_destination (insn)
     rtx insn;
{
  int jump_count = 0;
  rtx next;

  for (; insn; insn = next)
    {
      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);

      next = NEXT_INSN (insn);

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  return 0;

	case BARRIER:
	  /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN.  We
	     don't scan past JUMP_INSNs, so any barrier we find here must
	     have been after a CALL_INSN and hence mean the call doesn't
	     return.  */
	  return 2;

	case JUMP_INSN:
	  if (GET_CODE (PATTERN (insn)) == RETURN)
	    return 1;
	  else if (simplejump_p (insn)
		   && jump_count++ < 10)
	    next = JUMP_LABEL (insn);
	  else
	    return 0;
	}
    }

  /* If we got here it means we hit the end of the function.  So this
     is an unlikely destination.  */

  return 1;
}
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  If the branch is slightly less likely to be taken,
   return 0 and if the branch is highly unlikely to be taken, return -1.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */
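
/* Worked example (illustrative): for a loop written as

       L0:	body
		bne  L0		-- backward conditional branch

   the scan back from the target label finds NOTE_INSN_LOOP_BEG, so the
   function returns 2 (extremely likely taken) and the filler will
   prefer insns from the branch target, i.e. the loop body.  */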
static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;
  int rare_dest = rare_destination (target_label);
  int rare_fallthrough = rare_destination (NEXT_INSN (jump_insn));

  /* If this is a branch outside a loop, it is highly unlikely.  */
  if (GET_CODE (PATTERN (jump_insn)) == SET
      && GET_CODE (SET_SRC (PATTERN (jump_insn))) == IF_THEN_ELSE
      && ((GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 1)) == LABEL_REF
	   && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 1)))
	  || (GET_CODE (XEXP (SET_SRC (PATTERN (jump_insn)), 2)) == LABEL_REF
	      && LABEL_OUTSIDE_LOOP_P (XEXP (SET_SRC (PATTERN (jump_insn)), 2)))))
    return -1;

  if (target_label)
    {
      /* If this is the test of a loop, it is very likely true.  We scan
	 backwards from the target label.  If we find a NOTE_INSN_LOOP_BEG
	 before the next real insn, we assume the branch is to the top of
	 the loop.  */
      for (insn = PREV_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = PREV_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
	  return 2;

      /* If this is a jump to the test of a loop, it is likely true.  We scan
	 forwards from the target label.  If we find a NOTE_INSN_LOOP_VTOP
	 before the next real insn, we assume the branch is to the loop branch
	 test.  */
      for (insn = NEXT_INSN (target_label);
	   insn && GET_CODE (insn) == NOTE;
	   insn = NEXT_INSN (insn))
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP)
	  return 1;
    }

  /* Look at the relative rarities of the fallthrough and destination.  If
     they differ, we can predict the branch that way.  */

  switch (rare_fallthrough - rare_dest)
    {
    case -2:
      return -1;
    case -1:
      return 0;
    case 0:
      break;
    case 1:
      return 1;
    case 2:
      return 2;
    }

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
	return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
	return 1;
      break;
    }

  /* Predict backward branches as usually taken and forward branches as
     usually not taken.  If we don't know whether this is forward or
     backward, assume the branch will be taken, since most are.  */
  return (target_label == 0 || INSN_UID (jump_insn) > max_uid
	  || INSN_UID (target_label) > max_uid
	  || (uid_to_ruid[INSN_UID (jump_insn)]
	      > uid_to_ruid[INSN_UID (target_label)]));
}
/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */
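
/* For example (illustrative RTL): given

     (set (pc) (if_then_else (eq (reg 1) (const_int 0))
			     (label_ref TARGET)
			     (pc)))

   we return (eq (reg 1) (const_int 0)); if the arms are swapped, we
   return the reversed condition (ne (reg 1) (const_int 0)).  */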
static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (condjump_in_parallel_p (insn))
    pat = XVECEXP (pat, 0, 0);

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
	       || (GET_CODE (XEXP (src, 1)) == LABEL_REF
		   && XEXP (XEXP (src, 1), 0) == target))
	   && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
	   && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
	       || (GET_CODE (XEXP (src, 2)) == LABEL_REF
		   && XEXP (XEXP (src, 2), 0) == target))
	   && XEXP (src, 1) == pc_rtx)
    return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
		    GET_MODE (XEXP (src, 0)),
		    XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns already in the delay slot of JUMP.  */

static int
redirect_with_delay_slots_safe_p (jump, newlabel, seq)
     rtx jump, newlabel, seq;
{
  int flags, slots, i;
  rtx pat = PATTERN (seq);

  /* Make sure all the delay slots of this jump would still
     be valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (i = 1; i < XVECLEN (pat, 0); i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_false (jump, i - 1,
				       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
	   ? eligible_for_annul_true (jump, i - 1,
				      XVECEXP (pat, 0, i), flags) :
#endif
	   eligible_for_delay (jump, i - 1, XVECEXP (pat, 0, i), flags)))
      break;

  return (i == XVECLEN (pat, 0));
}
/* Return non-zero if redirecting JUMP to NEWLABEL does not invalidate
   any insns we wish to place in the delay slot of JUMP.  */

static int
redirect_with_delay_list_safe_p (jump, newlabel, delay_list)
     rtx jump, newlabel, delay_list;
{
  int flags, i;
  rtx li;

  /* Make sure all the insns in DELAY_LIST would still be
     valid after threading the jump.  If they are still
     valid, then return non-zero.  */

  flags = get_jump_flags (jump, newlabel);
  for (li = delay_list, i = 0; li; li = XEXP (li, 1), i++)
    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_false (jump, i, XEXP (li, 0), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
	   (INSN_ANNULLED_BRANCH_P (jump)
	    && ! INSN_FROM_TARGET_P (XEXP (li, 0)))
	   ? eligible_for_annul_true (jump, i, XEXP (li, 0), flags) :
#endif
	   eligible_for_delay (jump, i, XEXP (li, 0), flags)))
      break;

  return (li == NULL);
}
/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).  SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */
static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
			      sets, needed, other_needed,
			      slots_to_fill, pslots_filled, pannul_p,
			      pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int i;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
     We know that it will be a taken branch if it is either an unconditional
     branch or a conditional branch with a stricter branch condition.

     Also, exit if the branch has more than one set, since then it is computing
     other results that can't be ignored, e.g. the HPPA mov&branch instruction.
     ??? It may be possible to move other sets into INSN in addition to
     moving the instructions in the delay slots.  */

  if (XVECLEN (seq, 0) - 1 > slots_remaining
      || ! condition_dominates_p (condition, XVECEXP (seq, 0, 0))
      || ! single_set (XVECEXP (seq, 0, 0)))
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);
      int flags;

      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  /* If TRIAL sets CC0, we can't copy it, so we can't steal this
	     delay slot.  */
	  || find_reg_note (trial, REG_CC_USER, NULL_RTX)
#endif
	  /* If TRIAL is from the fallthrough code of an annulled branch insn
	     in SEQ, we cannot use it.  */
	  || (INSN_ANNULLED_BRANCH_P (XVECEXP (seq, 0, 0))
	      && ! INSN_FROM_TARGET_P (trial)))
	return delay_list;

      /* If this insn was already done (usually in a previous delay slot),
	 pretend we put it in our delay slot.  */
      if (redundant_insn (trial, insn, new_delay_list))
	continue;

      /* We will end up re-vectoring this branch, so compute flags
	 based on jumping to the new label.  */
      flags = get_jump_flags (insn, JUMP_LABEL (XVECEXP (seq, 0, 0)));

      if (! must_annul
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, total_slots_filled, trial, flags)
	  : (must_annul = 1,
	     eligible_for_annul_false (insn, total_slots_filled, trial, flags)))
	{
	  temp = copy_rtx (trial);
	  INSN_FROM_TARGET_P (temp) = 1;
	  new_delay_list = add_to_delay_list (temp, new_delay_list);
	  total_slots_filled++;

	  if (--slots_remaining == 0)
	    break;
	}
      else
	return delay_list;
    }

  /* Show the place to which we will be branching.  */
  *pnew_thread = next_active_insn (JUMP_LABEL (XVECEXP (seq, 0, 0)));

  /* Add any new insns to the delay list and update the count of the
     number of slots filled.  */
  *pslots_filled = total_slots_filled;
  *pannul_p = must_annul;

  if (delay_list == 0)
    return new_delay_list;

  for (temp = new_delay_list; temp; temp = XEXP (temp, 1))
    delay_list = add_to_delay_list (XEXP (temp, 0), delay_list);

  return delay_list;
}
/* Similar to steal_delay_list_from_target except that SEQ is on the
   fallthrough path of INSN.  Here we only do something if the delay insn
   of SEQ is an unconditional branch.  In that case we steal its delay slot
   for INSN since unconditional branches are much easier to fill.  */

static rtx
steal_delay_list_from_fallthrough (insn, condition, seq,
				   delay_list, sets, needed, other_needed,
				   slots_to_fill, pslots_filled, pannul_p)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
{
  int i;
  int flags;

  flags = get_jump_flags (insn, JUMP_LABEL (insn));

  /* We can't do anything if SEQ's delay insn isn't an
     unconditional branch.  */

  if (! simplejump_p (XVECEXP (seq, 0, 0))
      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) != RETURN)
    return delay_list;

  for (i = 1; i < XVECLEN (seq, 0); i++)
    {
      rtx trial = XVECEXP (seq, 0, i);

      /* If TRIAL sets CC0, stealing it will move it too far from the use
	 of CC0.  */
      if (insn_references_resource_p (trial, sets, 0)
	  || insn_sets_resource_p (trial, needed, 0)
	  || insn_sets_resource_p (trial, sets, 0)
#ifdef HAVE_cc0
	  || sets_cc0_p (PATTERN (trial))
#endif
	  )
	break;

      /* If this insn was already done, we don't need it.  */
      if (redundant_insn (trial, insn, delay_list))
	{
	  delete_from_delay_slot (trial);
	  continue;
	}

      if (! *pannul_p
	  && ((condition == const_true_rtx
	       || (! insn_sets_resource_p (trial, other_needed, 0)
		   && ! may_trap_p (PATTERN (trial)))))
	  ? eligible_for_delay (insn, *pslots_filled, trial, flags)
	  : (*pannul_p = 1,
	     eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
	{
	  delete_from_delay_slot (trial);
	  delay_list = add_to_delay_list (trial, delay_list);

	  if (++(*pslots_filled) == slots_to_fill)
	    break;
	}
      else
	break;
    }

  return delay_list;
}
/* Try merging insns starting at THREAD which match exactly the insns in
   INSN's delay list.

   If all insns were matched and the insn was previously annulling, the
   annul bit will be cleared.

   For each insn that is merged, if the branch is or will be non-annulling,
   we delete the merged insn.  */
static void
try_merge_delay_insns (insn, thread)
     rtx insn, thread;
{
  rtx trial, next_trial;
  rtx delay_insn = XVECEXP (PATTERN (insn), 0, 0);
  int annul_p = INSN_ANNULLED_BRANCH_P (delay_insn);
  int slot_number = 1;
  int num_slots = XVECLEN (PATTERN (insn), 0);
  rtx next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
  struct resources set, needed;
  rtx merged_insns = 0;
  int i;
  int flags;

  flags = get_jump_flags (delay_insn, JUMP_LABEL (delay_insn));

  CLEAR_RESOURCE (&needed);
  CLEAR_RESOURCE (&set);

  /* If this is not an annulling branch, take into account anything needed in
     NEXT_TO_MATCH.  This prevents two increments from being incorrectly
     folded into one.  If we are annulling, this would be the correct
     thing to do.  (The alternative, looking at things set in NEXT_TO_MATCH
     will essentially disable this optimization.  This method is somewhat of
     a kludge, but I don't see a better way.)  */
  if (! annul_p)
    mark_referenced_resources (next_to_match, &needed, 1);

  for (trial = thread; !stop_search_p (trial, 1); trial = next_trial)
    {
      rtx pat = PATTERN (trial);
      rtx oldtrial = trial;

      next_trial = next_nonnote_insn (trial);

      /* TRIAL must be a CALL_INSN or INSN.  Skip USE and CLOBBER.  */
      if (GET_CODE (trial) == INSN
	  && (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER))
	continue;

      if (GET_CODE (next_to_match) == GET_CODE (trial)
#ifdef HAVE_cc0
	  /* We can't share an insn that sets cc0.  */
	  && ! sets_cc0_p (pat)
#endif
	  && ! insn_references_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &set, 1)
	  && ! insn_sets_resource_p (trial, &needed, 1)
	  && (trial = try_split (pat, trial, 0)) != 0
	  /* Update next_trial, in case try_split succeeded.  */
	  && (next_trial = next_nonnote_insn (trial))
	  /* Likewise THREAD.  */
	  && (thread = oldtrial == thread ? trial : thread)
	  && rtx_equal_p (PATTERN (next_to_match), PATTERN (trial))
	  /* Have to test this condition if annul condition is different
	     from (and less restrictive than) non-annulling one.  */
	  && eligible_for_delay (delay_insn, slot_number - 1, trial, flags))
	{
	  if (! annul_p)
	    {
	      update_block (trial, thread);
	      if (trial == thread)
		thread = next_active_insn (thread);

	      delete_insn (trial);
	      INSN_FROM_TARGET_P (next_to_match) = 0;
	    }
	  else
	    merged_insns = gen_rtx (INSN_LIST, VOIDmode, trial, merged_insns);

	  if (++slot_number == num_slots)
	    break;

	  next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	  if (! annul_p)
	    mark_referenced_resources (next_to_match, &needed, 1);
	}

      mark_set_resources (trial, &set, 0, 1);
      mark_referenced_resources (trial, &needed, 1);
    }

  /* See if we stopped on a filled insn.  If we did, try to see if its
     delay slots match.  */
  if (slot_number != num_slots
      && trial && GET_CODE (trial) == INSN
      && GET_CODE (PATTERN (trial)) == SEQUENCE
      && ! INSN_ANNULLED_BRANCH_P (XVECEXP (PATTERN (trial), 0, 0)))
    {
      rtx pat = PATTERN (trial);
      rtx filled_insn = XVECEXP (pat, 0, 0);

      /* Account for resources set/needed by the filled insn.  */
      mark_set_resources (filled_insn, &set, 0, 1);
      mark_referenced_resources (filled_insn, &needed, 1);

      for (i = 1; i < XVECLEN (pat, 0); i++)
	{
	  rtx dtrial = XVECEXP (pat, 0, i);

	  if (! insn_references_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &set, 1)
	      && ! insn_sets_resource_p (dtrial, &needed, 1)
#ifdef HAVE_cc0
	      && ! sets_cc0_p (PATTERN (dtrial))
#endif
	      && rtx_equal_p (PATTERN (next_to_match), PATTERN (dtrial))
	      && eligible_for_delay (delay_insn, slot_number - 1, dtrial, flags))
	    {
	      if (! annul_p)
		{
		  update_block (dtrial, thread);
		  delete_from_delay_slot (dtrial);
		  INSN_FROM_TARGET_P (next_to_match) = 0;
		}
	      else
		merged_insns = gen_rtx (INSN_LIST, SImode, dtrial,
					merged_insns);

	      if (++slot_number == num_slots)
		break;

	      next_to_match = XVECEXP (PATTERN (insn), 0, slot_number);
	    }
	}
    }

  /* If all insns in the delay slot have been matched and we were previously
     annulling the branch, we need not do so any more.  In that case delete
     all the merged insns.  Also clear the INSN_FROM_TARGET_P bit of each
     insn in the delay list so that we know that it isn't only being used at
     the target.  */
  if (slot_number == num_slots && annul_p)
    {
      for (; merged_insns; merged_insns = XEXP (merged_insns, 1))
	{
	  if (GET_MODE (merged_insns) == SImode)
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_from_delay_slot (XEXP (merged_insns, 0));
	    }
	  else
	    {
	      update_block (XEXP (merged_insns, 0), thread);
	      delete_insn (XEXP (merged_insns, 0));
	    }
	}

      INSN_ANNULLED_BRANCH_P (delay_insn) = 0;

      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i)) = 0;
    }
}
/* See if INSN is redundant with an insn in front of TARGET.  Often this
   is called when INSN is a candidate for a delay slot of TARGET.
   DELAY_LIST are insns that will be placed in delay slots of TARGET in front
   of INSN.  Often INSN will be redundant with an insn in a delay slot of
   some previous insn.  This happens when we have a series of branches to the
   same label; in that case the first insn at the target might want to go
   into each of the delay slots.

   If we are not careful, this routine can take up a significant fraction
   of the total compilation time (4%), but only wins rarely.  Hence we
   speed this routine up by making two passes.  The first pass goes back
   until it hits a label and sees if it finds an insn with an identical
   pattern.  Only in this (relatively rare) event does it check for
   data conflicts.

   We do not split insns we encounter.  This could cause us not to find a
   redundant insn, but the cost of splitting seems greater than the possible
   gain in rare cases.  */
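
/* A sketch of the situation this catches (illustrative pseudo-code):
   both branches below target L, and the first branch's delay slot was
   already filled with a copy of the first insn at L; when filling the
   second branch with the same candidate, that candidate is found
   redundant here and need not be duplicated again:

	b L  { r2 = r3 }	-- already filled
	...
	b L  { r2 = r3 }	-- candidate found redundant
     L:	r4 = r2
   */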
redundant_insn (insn, target, delay_list)
rtx target_main = target;
rtx ipat = PATTERN (insn);
struct resources needed, set;
/* Scan backwards looking for a match. */
for (trial = PREV_INSN (target); trial; trial = PREV_INSN (trial))
if (GET_CODE (trial) == CODE_LABEL
if (GET_RTX_CLASS (GET_CODE (trial)) != 'i')
pat = PATTERN (trial);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
if (GET_CODE (pat) == SEQUENCE)
/* Stop for a CALL and its delay slots because it is difficult to
track its resource needs correctly. */
if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
/* Stop for an INSN or JUMP_INSN with delayed effects and its delay
slots because it is difficult to track its resource needs
correctly. */
#ifdef INSN_SETS_ARE_DELAYED
if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
#ifdef INSN_REFERENCES_ARE_DELAYED
if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
/* See if any of the insns in the delay slot match, updating
resource requirements as we go. */
for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
if (GET_CODE (XVECEXP (pat, 0, i)) == GET_CODE (insn)
&& rtx_equal_p (PATTERN (XVECEXP (pat, 0, i)), ipat))
/* If found a match, exit this loop early. */
else if (GET_CODE (trial) == GET_CODE (insn) && rtx_equal_p (pat, ipat))
/* If we didn't find an insn that matches, return 0. */
/* See what resources this insn sets and needs. If they overlap, or
if this insn references CC0, it can't be redundant. */
CLEAR_RESOURCE (&needed);
CLEAR_RESOURCE (&set);
mark_set_resources (insn, &set, 0, 1);
mark_referenced_resources (insn, &needed, 1);
/* If TARGET is a SEQUENCE, get the main insn. */
if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
target_main = XVECEXP (PATTERN (target), 0, 0);
if (resource_conflicts_p (&needed, &set)
|| reg_mentioned_p (cc0_rtx, ipat)
/* The insn requiring the delay may not set anything needed or set by
INSN. */
|| insn_sets_resource_p (target_main, &needed, 1)
|| insn_sets_resource_p (target_main, &set, 1))
/* Insns we pass may not set either NEEDED or SET, so merge them for
speed. */
needed.memory |= set.memory;
needed.unch_memory |= set.unch_memory;
IOR_HARD_REG_SET (needed.regs, set.regs);
/* This insn isn't redundant if it conflicts with an insn that either is
or will be in a delay slot of TARGET. */
if (insn_sets_resource_p (XEXP (delay_list, 0), &needed, 1))
delay_list = XEXP (delay_list, 1);
if (GET_CODE (target) == INSN && GET_CODE (PATTERN (target)) == SEQUENCE)
for (i = 1; i < XVECLEN (PATTERN (target), 0); i++)
if (insn_sets_resource_p (XVECEXP (PATTERN (target), 0, i), &needed, 1))
/* Scan backwards until we reach a label or an insn that uses something
INSN sets or sets something INSN uses or sets. */
for (trial = PREV_INSN (target);
trial && GET_CODE (trial) != CODE_LABEL;
trial = PREV_INSN (trial))
if (GET_CODE (trial) != INSN && GET_CODE (trial) != CALL_INSN
&& GET_CODE (trial) != JUMP_INSN)
pat = PATTERN (trial);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
if (GET_CODE (pat) == SEQUENCE)
/* If this is a CALL_INSN and its delay slots, it is hard to track
the resource needs properly, so give up. */
if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
/* If this is an INSN or JUMP_INSN with delayed effects, it
is hard to track the resource needs properly, so give up. */
#ifdef INSN_SETS_ARE_DELAYED
if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
#ifdef INSN_REFERENCES_ARE_DELAYED
if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
/* See if any of the insns in the delay slot match, updating
resource requirements as we go. */
for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
rtx candidate = XVECEXP (pat, 0, i);
/* If an insn will be annulled if the branch is false, it isn't
considered as a possible duplicate insn. */
if (rtx_equal_p (PATTERN (candidate), ipat)
&& ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
&& INSN_FROM_TARGET_P (candidate)))
/* Show that this insn will be used in the sequel. */
INSN_FROM_TARGET_P (candidate) = 0;
/* Unless this is an annulled insn from the target of a branch,
we must stop if it sets anything needed or set by INSN. */
if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
|| ! INSN_FROM_TARGET_P (candidate))
&& insn_sets_resource_p (candidate, &needed, 1))
/* If the insn requiring the delay slot conflicts with INSN, we
lose. */
if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
/* See if TRIAL is the same as INSN. */
pat = PATTERN (trial);
if (rtx_equal_p (pat, ipat))
/* Can't go any further if TRIAL conflicts with INSN. */
if (insn_sets_resource_p (trial, &needed, 1))
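/* Illustrative sketch (not part of GNU CC) of the two-pass scheme the
comment before redundant_insn describes: a cheap backward scan for an
identical "pattern" first, and only on a hit the expensive conflict
check. Toy integers stand in for rtx patterns and resource sets;
every name is hypothetical. */
#if 0
static int
toy_redundant_p (int insns[], int n, int pat, unsigned sets[], unsigned need)
{
  int i;

  /* Pass 1: cheap equality scan, newest insn first.  */
  for (i = n - 1; i >= 0; i--)
    if (insns[i] == pat)
      break;
  if (i < 0)
    return 0;			/* no identical insn at all  */

  /* Pass 2: expensive check -- nothing between the match and the point
     of use may clobber what the candidate needs.  */
  for (i++; i < n; i++)
    if (sets[i] & need)
      return 0;
  return 1;
}
#endif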
/* Return 1 if THREAD can only be executed in one way. If LABEL is non-zero,
it is the target of the branch insn being scanned. If ALLOW_FALLTHROUGH
is non-zero, we are allowed to fall into this thread; otherwise, we are
not.
If LABEL is used more than once or we pass a label other than LABEL before
finding an active insn, we do not own this thread. */
own_thread_p (thread, label, allow_fallthrough)
int allow_fallthrough;
/* We don't own the function end. */
/* Get the first active insn, or THREAD, if it is an active insn. */
active_insn = next_active_insn (PREV_INSN (thread));
for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
if (GET_CODE (insn) == CODE_LABEL
&& (insn != label || LABEL_NUSES (insn) != 1))
if (allow_fallthrough)
/* Ensure that we reach a BARRIER before any insn or label. */
for (insn = prev_nonnote_insn (thread);
insn == 0 || GET_CODE (insn) != BARRIER;
insn = prev_nonnote_insn (insn))
|| GET_CODE (insn) == CODE_LABEL
|| (GET_CODE (insn) == INSN
&& GET_CODE (PATTERN (insn)) != USE
&& GET_CODE (PATTERN (insn)) != CLOBBER))
/* Find the number of the basic block that starts closest to INSN. Return -1
if we couldn't find such a basic block. */
find_basic_block (insn)
/* Scan backwards to the previous BARRIER. Then see if we can find a
label that starts a basic block. Return the basic block number. */
for (insn = prev_nonnote_insn (insn);
insn && GET_CODE (insn) != BARRIER;
insn = prev_nonnote_insn (insn))
/* The start of the function is basic block zero. */
/* See if any of the upcoming CODE_LABELs start a basic block. If we reach
anything other than a CODE_LABEL or note, we can't find this code. */
for (insn = next_nonnote_insn (insn);
insn && GET_CODE (insn) == CODE_LABEL;
insn = next_nonnote_insn (insn))
for (i = 0; i < n_basic_blocks; i++)
if (insn == basic_block_head[i])
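/* Illustrative sketch (not part of GNU CC) of the search shape used by
find_basic_block: walk backward to a barrier, then forward across
consecutive labels, looking each one up in a table of block heads.
The list node and the table are toy stand-ins; all names are
hypothetical. */
#if 0
struct toy_insn { int kind; struct toy_insn *prev, *next; };
#define TOY_BARRIER 1
#define TOY_LABEL   2

static int
toy_find_block (struct toy_insn *insn, struct toy_insn *heads[], int n)
{
  int i;

  while (insn && insn->kind != TOY_BARRIER)
    insn = insn->prev;		/* back up to the previous barrier  */
  if (insn == 0)
    return 0;			/* start of function: block zero  */

  for (insn = insn->next; insn && insn->kind == TOY_LABEL;
       insn = insn->next)
    for (i = 0; i < n; i++)
      if (insn == heads[i])
	return i;		/* this label starts block i  */

  return -1;			/* couldn't identify a block  */
}
#endif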
/* Called when INSN is being moved from a location near the target of a jump.
We leave a marker of the form (use (INSN)) immediately in front
of WHERE for mark_target_live_regs. These markers will be deleted when
reorg finishes.
We used to try to update the live status of registers if WHERE is at
the start of a basic block, but that can't work since we may remove a
BARRIER in relax_delay_slots. */
update_block (insn, where)
/* Ignore if this was in a delay slot and it came from the target of
a branch. */
if (INSN_FROM_TARGET_P (insn))
emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);
/* INSN might be making a value live in a block where it didn't use to
be. So recompute liveness information for this block. */
b = find_basic_block (insn);
/* Similar to REDIRECT_JUMP except that we update the BB_TICKS entry for
the basic block containing the jump. */
reorg_redirect_jump (jump, nlabel)
int b = find_basic_block (jump);
return redirect_jump (jump, nlabel);
/* Called when INSN is being moved forward into a delay slot of DELAYED_INSN.
We check every instruction between INSN and DELAYED_INSN for REG_DEAD notes
that reference values used in INSN. If we find one, then we move the
REG_DEAD note to INSN.
This is needed to handle the case where a later insn (after INSN) has a
REG_DEAD note for a register used by INSN, and this later insn subsequently
gets moved before a CODE_LABEL because it is a redundant insn. In this
case, mark_target_live_regs may be confused into thinking the register
is dead because it sees a REG_DEAD note immediately before a CODE_LABEL. */
update_reg_dead_notes (insn, delayed_insn)
rtx insn, delayed_insn;
for (p = next_nonnote_insn (insn); p != delayed_insn;
p = next_nonnote_insn (p))
for (link = REG_NOTES (p); link; link = next)
next = XEXP (link, 1);
if (REG_NOTE_KIND (link) != REG_DEAD
|| GET_CODE (XEXP (link, 0)) != REG)
if (reg_referenced_p (XEXP (link, 0), PATTERN (insn)))
/* Move the REG_DEAD note from P to INSN. */
remove_note (p, link);
XEXP (link, 1) = REG_NOTES (insn);
REG_NOTES (insn) = link;
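/* Illustrative sketch (not part of GNU CC) of the note-moving step
above: unlink a node from one singly linked note list and push it on
the front of another, the same shape as remove_note followed by the
two assignments to XEXP and REG_NOTES. Toy types; hypothetical
names. */
#if 0
struct toy_note { int regno; struct toy_note *next; };

static void
toy_move_note (struct toy_note **from, struct toy_note **to, int regno)
{
  struct toy_note **p, *n;

  for (p = from; *p; p = &(*p)->next)
    if ((*p)->regno == regno)
      {
	n = *p;
	*p = n->next;		/* unlink from the source list  */
	n->next = *to;		/* push on the destination list  */
	*to = n;
	return;
      }
}
#endif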
/* Delete any REG_UNUSED notes that exist on INSN but not on REDUNDANT_INSN.
This handles the case of udivmodXi4 instructions which optimize their
output depending on whether any REG_UNUSED notes are present.
We must make sure that INSN calculates as many results as REDUNDANT_INSN
does. */
update_reg_unused_notes (insn, redundant_insn)
rtx insn, redundant_insn;
for (link = REG_NOTES (insn); link; link = next)
next = XEXP (link, 1);
if (REG_NOTE_KIND (link) != REG_UNUSED
|| GET_CODE (XEXP (link, 0)) != REG)
if (! find_regno_note (redundant_insn, REG_UNUSED,
REGNO (XEXP (link, 0))))
remove_note (insn, link);
/* Marks registers possibly live at the current place being scanned by
mark_target_live_regs. Used only by the next two functions. */
static HARD_REG_SET current_live_regs;
/* Marks registers for which we have seen a REG_DEAD note but no assignment.
Also only used by the next two functions. */
static HARD_REG_SET pending_dead_regs;
/* Utility function called from mark_target_live_regs via note_stores.
It deadens any CLOBBERed registers and livens any SET registers. */
update_live_status (dest, x)
int first_regno, last_regno;
if (GET_CODE (dest) != REG
&& (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
if (GET_CODE (dest) == SUBREG)
first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
first_regno = REGNO (dest);
last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
if (GET_CODE (x) == CLOBBER)
for (i = first_regno; i < last_regno; i++)
CLEAR_HARD_REG_BIT (current_live_regs, i);
for (i = first_regno; i < last_regno; i++)
SET_HARD_REG_BIT (current_live_regs, i);
CLEAR_HARD_REG_BIT (pending_dead_regs, i);
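/* Illustrative sketch (not part of GNU CC): the regno-range update
performed by update_live_status, using a plain bitmask in place of a
HARD_REG_SET. A multi-word value occupies HARD_REGNO_NREGS
consecutive hard regs, hence the [first, first + nregs) loop. Toy
code for small register counts; hypothetical names throughout. */
#if 0
static void
toy_update_live (unsigned long *live, unsigned long *pending_dead,
		 int first, int nregs, int is_clobber)
{
  int i;

  for (i = first; i < first + nregs; i++)
    if (is_clobber)
      *live &= ~(1UL << i);	/* CLOBBER: the regs die here  */
    else
      {
	*live |= 1UL << i;	/* SET: the regs become live  */
	*pending_dead &= ~(1UL << i);
      }
}
#endif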
/* Similar to next_insn, but ignores insns in the delay slots of
an annulled branch. */
next_insn_no_annul (insn)
/* If INSN is an annulled branch, skip any insns from the target
of the branch. */
if (INSN_ANNULLED_BRANCH_P (insn)
&& NEXT_INSN (PREV_INSN (insn)) != insn)
while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
insn = NEXT_INSN (insn);
insn = NEXT_INSN (insn);
if (insn && GET_CODE (insn) == INSN
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
/* Set the resources that are live at TARGET.
If TARGET is zero, we refer to the end of the current function and can
return our precomputed value.
Otherwise, we try to find out what is live by consulting the basic block
information. This is tricky, because we must consider the actions of
reload and jump optimization, which occur after the basic block information
has been computed.
Accordingly, we proceed as follows:
We find the previous BARRIER and look at all immediately following labels
(with no intervening active insns) to see if any of them start a basic
block. If we hit the start of the function first, we use block 0.
Once we have found a basic block and a corresponding first insn, we can
accurately compute the live status from basic_block_live_regs and
reg_renumber. (By starting at a label following a BARRIER, we are immune
to actions taken by reload and jump.) Then we scan all insns between
that point and our target. For each CLOBBER (or for call-clobbered regs
when we pass a CALL_INSN), mark the appropriate registers as dead. For
a SET, mark them as live.
We have to be careful when using REG_DEAD notes because they are not
updated by such things as find_equiv_reg. So keep track of registers
marked as dead that haven't been assigned to, and mark them dead at the
next CODE_LABEL since reload and jump won't propagate values across labels.
If we cannot find the start of a basic block (should be a very rare
case, if it can happen at all), mark everything as potentially live.
Next, scan forward from TARGET looking for things set or clobbered
before they are used. These are not live.
Because we can be called many times on the same target, save our results
in a hash table indexed by INSN_UID. */
mark_target_live_regs (target, res)
struct resources *res;
struct target_info *tinfo;
HARD_REG_SET scratch;
struct resources set, needed;
/* Handle end of function. */
*res = end_of_function_needs;
/* We have to assume memory is needed, but the CC isn't. */
res->volatil = res->unch_memory = 0;
/* See if we have computed this value already. */
for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
tinfo; tinfo = tinfo->next)
if (tinfo->uid == INSN_UID (target))
/* Start by getting the basic block number. If we have saved information,
we can get it from there unless the insn at the start of the basic block
has been deleted. */
if (tinfo && tinfo->block != -1
&& ! INSN_DELETED_P (basic_block_head[tinfo->block]))
b = find_basic_block (target);
/* If the information is up-to-date, use it. Otherwise, we will
recompute it. */
if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
/* Allocate a place to put our results and chain it into the
hash table. */
tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
tinfo->uid = INSN_UID (target);
tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
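/* Illustrative sketch (not part of GNU CC) of the memoization used by
mark_target_live_regs: a chained hash table keyed by a UID modulo a
prime, the same lookup-then-insert shape as target_hash_table above.
Toy types and names; the caller supplies storage for a new entry. */
#if 0
#define TOY_PRIME 257

struct toy_tinfo { int uid; unsigned live; struct toy_tinfo *next; };
static struct toy_tinfo *toy_table[TOY_PRIME];

static struct toy_tinfo *
toy_lookup_or_add (int uid, struct toy_tinfo *storage)
{
  struct toy_tinfo *t;

  for (t = toy_table[uid % TOY_PRIME]; t; t = t->next)
    if (t->uid == uid)
      return t;			/* cache hit: reuse the prior answer  */

  storage->uid = uid;		/* miss: chain a fresh entry in  */
  storage->next = toy_table[uid % TOY_PRIME];
  toy_table[uid % TOY_PRIME] = storage;
  return storage;
}
#endif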
CLEAR_HARD_REG_SET (pending_dead_regs);
/* If we found a basic block, get the live registers from it and update
them with anything set or killed between its start and the insn before
TARGET. Otherwise, we must assume everything is live. */
regset regs_live = basic_block_live_at_start[b];
REGSET_ELT_TYPE bit;
rtx start_insn, stop_insn;
/* Compute hard regs live at start of block -- this is the real hard regs
marked live, plus live pseudo regs that have been renumbered to
hard regs. */
current_live_regs = *regs_live;
COPY_HARD_REG_SET (current_live_regs, regs_live);
for (offset = 0, i = 0; offset < regset_size; offset++)
if (regs_live[offset] == 0)
i += REGSET_ELT_BITS;
for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
if ((regs_live[offset] & bit)
&& (regno = reg_renumber[i]) >= 0)
j < regno + HARD_REGNO_NREGS (regno,
PSEUDO_REGNO_MODE (i));
SET_HARD_REG_BIT (current_live_regs, j);
/* Get starting and ending insn, handling the case where each might
be a SEQUENCE. */
start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
if (GET_CODE (start_insn) == INSN
&& GET_CODE (PATTERN (start_insn)) == SEQUENCE)
start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
if (GET_CODE (stop_insn) == INSN
&& GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
stop_insn = next_insn (PREV_INSN (stop_insn));
for (insn = start_insn; insn != stop_insn;
insn = next_insn_no_annul (insn))
rtx real_insn = insn;
/* If this insn is from the target of a branch, it isn't going to
be used in the sequel. If it is used in both cases, this
test will not be true. */
if (INSN_FROM_TARGET_P (insn))
/* If this insn is a USE made by update_block, we care about the
underlying insn. */
if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
&& GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
real_insn = XEXP (PATTERN (insn), 0);
if (GET_CODE (real_insn) == CALL_INSN)
/* CALL clobbers all call-used regs that aren't fixed except
sp, ap, and fp. Do this before setting the result of the
call live. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (call_used_regs[i]
&& i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
&& i != ARG_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
&& i != HARD_FRAME_POINTER_REGNUM
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
&& ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
#ifdef PIC_OFFSET_TABLE_REGNUM
&& ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
CLEAR_HARD_REG_BIT (current_live_regs, i);
/* A CALL_INSN sets any global register live, since it may
have been modified by the call. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
SET_HARD_REG_BIT (current_live_regs, i);
/* Mark anything killed in an insn to be deadened at the next
label. Ignore USE insns; the only REG_DEAD notes will be for
parameters. But they might be early. A CALL_INSN will usually
clobber registers used for parameters. It isn't worth bothering
with the unlikely case when it won't. */
if ((GET_CODE (real_insn) == INSN
&& GET_CODE (PATTERN (real_insn)) != USE
&& GET_CODE (PATTERN (real_insn)) != CLOBBER)
|| GET_CODE (real_insn) == JUMP_INSN
|| GET_CODE (real_insn) == CALL_INSN)
for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_DEAD
&& GET_CODE (XEXP (link, 0)) == REG
&& REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
int first_regno = REGNO (XEXP (link, 0));
+ HARD_REGNO_NREGS (first_regno,
GET_MODE (XEXP (link, 0)));
for (i = first_regno; i < last_regno; i++)
SET_HARD_REG_BIT (pending_dead_regs, i);
note_stores (PATTERN (real_insn), update_live_status);
/* If any registers were unused after this insn, kill them.
These notes will always be accurate. */
for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
if (REG_NOTE_KIND (link) == REG_UNUSED
&& GET_CODE (XEXP (link, 0)) == REG
&& REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
int first_regno = REGNO (XEXP (link, 0));
+ HARD_REGNO_NREGS (first_regno,
GET_MODE (XEXP (link, 0)));
for (i = first_regno; i < last_regno; i++)
CLEAR_HARD_REG_BIT (current_live_regs, i);
else if (GET_CODE (real_insn) == CODE_LABEL)
/* A label clobbers the pending dead registers since neither
reload nor jump will propagate a value across a label. */
AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
CLEAR_HARD_REG_SET (pending_dead_regs);
/* The beginning of the epilogue corresponds to the end of the
RTL chain when there are no epilogue insns. Certain resources
are implicitly required at that point. */
else if (GET_CODE (real_insn) == NOTE
&& NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
COPY_HARD_REG_SET (res->regs, current_live_regs);
tinfo->bb_tick = bb_ticks[b];
/* We didn't find the start of a basic block. Assume everything
in use. This should happen only extremely rarely. */
SET_HARD_REG_SET (res->regs);
/* Now step forward from TARGET looking for registers that are set before
they are used. These are dead. If we pass a label, any pending dead
registers that weren't yet used can be made dead. Stop when we pass a
conditional JUMP_INSN; follow the first few unconditional branches. */
CLEAR_RESOURCE (&set);
CLEAR_RESOURCE (&needed);
for (insn = target; insn; insn = next)
rtx this_jump_insn = insn;
next = NEXT_INSN (insn);
switch (GET_CODE (insn))
AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
CLEAR_HARD_REG_SET (pending_dead_regs);
if (GET_CODE (PATTERN (insn)) == USE)
/* If INSN is a USE made by update_block, we care about the
underlying insn. Any registers set by the underlying insn
are live since the insn is being done somewhere else. */
if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);
/* All other USE insns are to be ignored. */
else if (GET_CODE (PATTERN (insn)) == CLOBBER)
else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
/* An unconditional jump can be used to fill the delay slot
of a call, so search for a JUMP_INSN in any position. */
for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
if (GET_CODE (this_jump_insn) == JUMP_INSN)
if (GET_CODE (this_jump_insn) == JUMP_INSN)
if (jump_count++ < 10
&& (simplejump_p (this_jump_insn)
|| GET_CODE (PATTERN (this_jump_insn)) == RETURN))
next = next_active_insn (JUMP_LABEL (this_jump_insn));
jump_target = JUMP_LABEL (this_jump_insn);
mark_referenced_resources (insn, &needed, 1);
mark_set_resources (insn, &set, 0, 1);
COPY_HARD_REG_SET (scratch, set.regs);
AND_COMPL_HARD_REG_SET (scratch, needed.regs);
AND_COMPL_HARD_REG_SET (res->regs, scratch);
/* If we hit an unconditional branch, we have another way of finding out
what is live: we can see what is live at the branch target and include
anything used but not set before the branch. The only things that are
live are those that are live using the above test and the test below.
Don't try this if we expired our jump count above, since that would
mean there may be an infinite loop in the function being compiled. */
if (jump_insn && jump_count < 10)
struct resources new_resources;
rtx stop_insn = next_active_insn (jump_insn);
mark_target_live_regs (next_active_insn (jump_target), &new_resources);
CLEAR_RESOURCE (&set);
CLEAR_RESOURCE (&needed);
/* Include JUMP_INSN in the needed registers. */
for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
mark_referenced_resources (insn, &needed, 1);
COPY_HARD_REG_SET (scratch, needed.regs);
AND_COMPL_HARD_REG_SET (scratch, set.regs);
IOR_HARD_REG_SET (new_resources.regs, scratch);
mark_set_resources (insn, &set, 0, 1);
AND_HARD_REG_SET (res->regs, new_resources.regs);
COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
/* Scan a function looking for insns that need a delay slot and find insns to
put into the delay slot.
NON_JUMPS_P is non-zero if we are to only try to fill non-jump insns (such
as calls). We do these first since we don't want jump insns (that are
easier to fill) to get the only insns that could be used for non-jump insns.
When it is zero, only try to fill JUMP_INSNs.
When slots are filled in this manner, the insns (including the
delay_insn) are put together in a SEQUENCE rtx. In this fashion,
it is possible to tell whether a delay slot has really been filled
or not. `final' knows how to deal with this, by communicating
through FINAL_SEQUENCE. */
fill_simple_delay_slots (first, non_jumps_p)
register rtx insn, pat, trial, next_trial;
int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
struct resources needed, set;
register int slots_to_fill, slots_filled;
for (i = 0; i < num_unfilled_slots; i++)
/* Get the next insn to fill. If it has already had any slots assigned,
we can't do anything with it. Maybe we'll improve this later. */
insn = unfilled_slots_base[i];
|| INSN_DELETED_P (insn)
|| (GET_CODE (insn) == INSN
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
|| (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
|| (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
if (GET_CODE (insn) == JUMP_INSN)
flags = get_jump_flags (insn, JUMP_LABEL (insn));
flags = get_jump_flags (insn, NULL_RTX);
slots_to_fill = num_delay_slots (insn);
if (slots_to_fill == 0)
/* This insn needs, or can use, some delay slots. SLOTS_TO_FILL
says how many. After initialization, first try optimizing
call _foo		call _foo
nop			add %o7,.-L1,%o7
b,a L1
nop
If this case applies, the delay slot of the call is filled with
the unconditional jump. This is done first to avoid having the
delay slot of the call filled in the backward scan. Also, since
the unconditional jump is likely to also have a delay slot, that
insn must exist when it is subsequently scanned.
This is tried on each insn with delay slots as some machines
have insns which perform calls, but are not represented as
CALL_INSNs. */
if ((trial = next_active_insn (insn))
&& GET_CODE (trial) == JUMP_INSN
&& simplejump_p (trial)
&& eligible_for_delay (insn, slots_filled, trial, flags)
&& no_labels_between_p (insn, trial))
delay_list = add_to_delay_list (trial, delay_list);
/* TRIAL may have had its delay slot filled, then unfilled. When
the delay slot is unfilled, TRIAL is placed back on the unfilled
slots obstack. Unfortunately, it is placed on the end of the
obstack, not in its original location. Therefore, we must search
from entry i + 1 to the end of the unfilled slots obstack to
try and find TRIAL. */
tmp = &unfilled_slots_base[i + 1];
while (*tmp != trial && tmp != unfilled_slots_next)
/* Remove the unconditional jump from consideration for delay slot
filling and unthread it. */
rtx next = NEXT_INSN (trial);
rtx prev = PREV_INSN (trial);
NEXT_INSN (prev) = next;
PREV_INSN (next) = prev;
/* Now, scan backwards from the insn to search for a potential
delay-slot candidate. Stop searching when a label or jump is hit.
For each candidate, if it is to go into the delay slot (moved
forward in execution sequence), it must not need or set any resources
that were set by later insns and must not set any resources that
are needed for those insns.
The delay slot insn itself sets resources unless it is a call
(in which case the called routine, not the insn itself, is doing
the set). */
if (slots_filled < slots_to_fill)
CLEAR_RESOURCE (&needed);
CLEAR_RESOURCE (&set);
mark_set_resources (insn, &set, 0, 0);
mark_referenced_resources (insn, &needed, 0);
for (trial = prev_nonnote_insn (insn); ! stop_search_p (trial, 1);
next_trial = prev_nonnote_insn (trial);
/* This must be an INSN or CALL_INSN. */
pat = PATTERN (trial);
/* USE and CLOBBER at this level are just for flow; ignore them. */
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
/* Check for resource conflict first, to avoid unnecessary
splitting. */
if (! insn_references_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &needed, 1)
/* Can't separate set of cc0 from its use. */
&& ! (reg_mentioned_p (cc0_rtx, pat)
&& ! sets_cc0_p (pat))
trial = try_split (pat, trial, 1);
next_trial = prev_nonnote_insn (trial);
if (eligible_for_delay (insn, slots_filled, trial, flags))
/* In this case, we are searching backward, so if we
find insns to put on the delay list, we want
to put them at the head, rather than the
tail, of the list. */
update_reg_dead_notes (trial, insn);
delay_list = gen_rtx (INSN_LIST, VOIDmode,
update_block (trial, trial);
delete_insn (trial);
if (slots_to_fill == ++slots_filled)
mark_set_resources (trial, &set, 0, 1);
mark_referenced_resources (trial, &needed, 1);
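/* Illustrative sketch (not part of GNU CC) of the backward scan just
above: an earlier insn may move into the delay slot only if it
neither reads nor writes anything written by the insns it would be
moved past, nor writes anything they read. Plain bitmasks model the
resource sets; all names are hypothetical. */
#if 0
struct toy_cand { unsigned reads, writes; };

static int
toy_pick_backward (struct toy_cand c[], int n)
{
  unsigned set = 0, needed = 0;
  int i;

  /* c[n-1] is nearest the branch; scan away from it.  */
  for (i = n - 1; i >= 0; i--)
    {
      if ((c[i].reads & set) == 0
	  && (c[i].writes & set) == 0
	  && (c[i].writes & needed) == 0)
	return i;		/* safe to hoist into the slot  */

      set |= c[i].writes;	/* otherwise accumulate and keep going  */
      needed |= c[i].reads;
    }
  return -1;			/* no eligible candidate found  */
}
#endif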
/* If all needed slots haven't been filled, we come here. */
/* Try to optimize case of jumping around a single insn. */
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
if (slots_filled != slots_to_fill
&& GET_CODE (insn) == JUMP_INSN
&& (condjump_p (insn) || condjump_in_parallel_p (insn)))
delay_list = optimize_skip (insn);
/* Try to get insns from beyond the insn needing the delay slot.
These insns can neither set nor reference resources set in insns being
skipped, cannot set resources in the insn being skipped, and, if this
is a CALL_INSN (or a CALL_INSN is passed), cannot trap (because the
call might not return).
There used to be code which continued past the target label if
we saw all uses of the target label. This code did not work,
because it failed to account for some instructions which were
both annulled and marked as from the target. This can happen as a
result of optimize_skip. Since this code was redundant with
fill_eager_delay_slots anyways, it was just deleted. */
if (slots_filled != slots_to_fill
&& (GET_CODE (insn) != JUMP_INSN
|| ((condjump_p (insn) || condjump_in_parallel_p (insn))
&& ! simplejump_p (insn)
&& JUMP_LABEL (insn) != 0)))
int maybe_never = 0;
struct resources needed_at_jump;
CLEAR_RESOURCE (&needed);
CLEAR_RESOURCE (&set);
if (GET_CODE (insn) == CALL_INSN)
mark_set_resources (insn, &set, 0, 1);
mark_referenced_resources (insn, &needed, 1);
mark_set_resources (insn, &set, 0, 1);
mark_referenced_resources (insn, &needed, 1);
if (GET_CODE (insn) == JUMP_INSN)
target = JUMP_LABEL (insn);
for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
rtx pat, trial_delay;
next_trial = next_nonnote_insn (trial);
if (GET_CODE (trial) == CODE_LABEL
|| GET_CODE (trial) == BARRIER)
/* We must have an INSN, JUMP_INSN, or CALL_INSN. */
pat = PATTERN (trial);
/* Stand-alone USE and CLOBBER are just for flow. */
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
/* If this already has filled delay slots, get the insn needing
the delay slots. */
if (GET_CODE (pat) == SEQUENCE)
trial_delay = XVECEXP (pat, 0, 0);
trial_delay = trial;
/* If this is a jump insn to our target, indicate that we have
seen another jump to it. If we aren't handling a conditional
jump, stop our search. Otherwise, compute the needs at its
target and add them to NEEDED. */
if (GET_CODE (trial_delay) == JUMP_INSN)
else if (JUMP_LABEL (trial_delay) != target)
mark_target_live_regs
(next_active_insn (JUMP_LABEL (trial_delay)),
needed.memory |= needed_at_jump.memory;
needed.unch_memory |= needed_at_jump.unch_memory;
IOR_HARD_REG_SET (needed.regs, needed_at_jump.regs);
/* See if we have a resource problem before we try to
split. */
&& GET_CODE (pat) != SEQUENCE
&& ! insn_references_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &needed, 1)
&& ! (reg_mentioned_p (cc0_rtx, pat) && ! sets_cc0_p (pat))
&& ! (maybe_never && may_trap_p (pat))
&& (trial = try_split (pat, trial, 0))
&& eligible_for_delay (insn, slots_filled, trial, flags))
next_trial = next_nonnote_insn (trial);
delay_list = add_to_delay_list (trial, delay_list);
if (reg_mentioned_p (cc0_rtx, pat))
link_cc0_insns (trial);
delete_insn (trial);
if (slots_to_fill == ++slots_filled)
mark_set_resources (trial, &set, 0, 1);
mark_referenced_resources (trial, &needed, 1);
/* Ensure we don't put insns between the setting of cc and the
comparison by moving a setting of cc into an earlier delay
slot since these insns could clobber the condition code. */
/* If this is a call or jump, we might not get here. */
if (GET_CODE (trial_delay) == CALL_INSN
|| GET_CODE (trial_delay) == JUMP_INSN)
/* If there are slots left to fill and our search was stopped by an
unconditional branch, try the insn at the branch target. We can
redirect the branch if it works.
Don't do this if the insn at the branch target is a branch. */
if (slots_to_fill != slots_filled
&& GET_CODE (trial) == JUMP_INSN
&& simplejump_p (trial)
&& (target == 0 || JUMP_LABEL (trial) == target)
&& (next_trial = next_active_insn (JUMP_LABEL (trial))) != 0
&& ! (GET_CODE (next_trial) == INSN
&& GET_CODE (PATTERN (next_trial)) == SEQUENCE)
&& GET_CODE (next_trial) != JUMP_INSN
&& ! insn_references_resource_p (next_trial, &set, 1)
&& ! insn_sets_resource_p (next_trial, &set, 1)
&& ! insn_sets_resource_p (next_trial, &needed, 1)
&& ! reg_mentioned_p (cc0_rtx, PATTERN (next_trial))
&& ! (maybe_never && may_trap_p (PATTERN (next_trial)))
&& (next_trial = try_split (PATTERN (next_trial), next_trial, 0))
&& eligible_for_delay (insn, slots_filled, next_trial, flags))
rtx new_label = next_active_insn (next_trial);
new_label = get_label_before (new_label);
new_label = find_end_label ();
= add_to_delay_list (copy_rtx (next_trial), delay_list);
reorg_redirect_jump (trial, new_label);
/* If we merged because we both jumped to the same place,
redirect the original insn also. */
reorg_redirect_jump (insn, new_label);
unfilled_slots_base[i]
= emit_delay_sequence (insn, delay_list,
slots_filled, slots_to_fill);
if (slots_to_fill == slots_filled)
unfilled_slots_base[i] = 0;
note_delay_statistics (slots_filled, 0);
#ifdef DELAY_SLOTS_FOR_EPILOGUE
/* See if the epilogue needs any delay slots. Try to fill them if so.
The only thing we can do is scan backwards from the end of the
function. If we did this in a previous pass, it is incorrect to do it
again. */
if (current_function_epilogue_delay_list)
slots_to_fill = DELAY_SLOTS_FOR_EPILOGUE;
if (slots_to_fill == 0)
CLEAR_RESOURCE (&set);
/* The frame pointer and stack pointer are needed at the beginning of
the epilogue, so instructions setting them can not be put in the
epilogue delay slot. However, everything else needed at function
end is safe, so we don't want to use end_of_function_needs here. */
CLEAR_RESOURCE (&needed);
if (frame_pointer_needed)
SET_HARD_REG_BIT (needed.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
SET_HARD_REG_BIT (needed.regs, HARD_FRAME_POINTER_REGNUM);
#ifdef EXIT_IGNORE_STACK
if (! EXIT_IGNORE_STACK)
SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
SET_HARD_REG_BIT (needed.regs, STACK_POINTER_REGNUM);
for (trial = get_last_insn (); ! stop_search_p (trial, 1);
trial = PREV_INSN (trial))
if (GET_CODE (trial) == NOTE)
pat = PATTERN (trial);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
if (! insn_references_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &needed, 1)
&& ! insn_sets_resource_p (trial, &set, 1)
/* Don't want to mess with cc0 here. */
&& ! reg_mentioned_p (cc0_rtx, pat)
trial = try_split (pat, trial, 1);
if (ELIGIBLE_FOR_EPILOGUE_DELAY (trial, slots_filled))
/* Here as well we are searching backward, so put the
insns we find on the head of the list. */
current_function_epilogue_delay_list
= gen_rtx (INSN_LIST, VOIDmode, trial,
current_function_epilogue_delay_list);
mark_referenced_resources (trial, &end_of_function_needs, 1);
update_block (trial, trial);
delete_insn (trial);
/* Clear deleted bit so final.c will output the insn. */
INSN_DELETED_P (trial) = 0;
if (slots_to_fill == ++slots_filled)
mark_set_resources (trial, &set, 0, 1);
mark_referenced_resources (trial, &needed, 1);
note_delay_statistics (slots_filled, 0);
/* Try to find insns to place in delay slots.
INSN is the jump needing SLOTS_TO_FILL delay slots. It tests CONDITION
or is an unconditional branch if CONDITION is const_true_rtx.
*PSLOTS_FILLED is updated with the number of slots that we have filled.
THREAD is a flow-of-control thread: either the insns to be executed if
the branch is true or those to be executed if it is false; THREAD_IF_TRUE
says which.
OPPOSITE_THREAD is the thread in the opposite direction. It is used
to see if any potential delay slot insns set things needed there.
LIKELY is non-zero if it is extremely likely that the branch will be
taken and THREAD_IF_TRUE is set. This is used for the branch at the
end of a loop back up to the top.
OWN_THREAD and OWN_OPPOSITE_THREAD are true if we are the only user of the
thread. I.e., it is the fallthrough code of our jump or the target of the
jump when we are the only jump going there.
If OWN_THREAD is false, it must be the "true" thread of a jump. In that
case, we can only take insns from the head of the thread for our delay
slot. We then adjust the jump to point after the insns we have taken. */
fill_slots_from_thread (insn, condition, thread, opposite_thread, likely,
thread_if_true, own_thread, own_opposite_thread,
slots_to_fill, pslots_filled)
rtx thread, opposite_thread;
int own_thread, own_opposite_thread;
int slots_to_fill, *pslots_filled;
struct resources opposite_needed, set, needed;
/* Validate our arguments. */
if ((condition == const_true_rtx && ! thread_if_true)
|| (! own_thread && ! thread_if_true))
flags = get_jump_flags (insn, JUMP_LABEL (insn));
/* If our thread is the end of subroutine, we can't get any delay
insns from that. */
/* If this is an unconditional branch, nothing is needed at the
opposite thread. Otherwise, compute what is needed there. */
if (condition == const_true_rtx)
CLEAR_RESOURCE (&opposite_needed);
mark_target_live_regs (opposite_thread, &opposite_needed);
/* If the insn at THREAD can be split, do it here to avoid having to
update THREAD and NEW_THREAD if it is done in the loop below. Also
initialize NEW_THREAD. */
new_thread = thread = try_split (PATTERN (thread), thread, 0);
/* Scan insns at THREAD. We are looking for an insn that can be removed
from THREAD (it neither sets nor references resources that were set
ahead of it and it doesn't set anything needed by the insns ahead of
it) and that either can be placed in an annulling insn or isn't
needed at OPPOSITE_THREAD. */
CLEAR_RESOURCE (&needed);
CLEAR_RESOURCE (&set);
/* If we do not own this thread, we must stop as soon as we find
something that we can't put in a delay slot, since all we can do
is branch into THREAD at a later point. Therefore, labels stop
the search if this is not the `true' thread. */
for (trial = thread;
! stop_search_p (trial, ! thread_if_true) && (! lose || own_thread);
trial = next_nonnote_insn (trial))
/* If we have passed a label, we no longer own this thread. */
if (GET_CODE (trial) == CODE_LABEL)
pat = PATTERN (trial);
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
/* If TRIAL conflicts with the insns ahead of it, we lose. Also,
don't separate or copy insns that set and use CC0. */
if (! insn_references_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &needed, 1)
&& ! (reg_mentioned_p (cc0_rtx, pat)
&& (! own_thread || ! sets_cc0_p (pat)))
/* If TRIAL is redundant with some insn before INSN, we don't
actually need to add it to the delay list; we can merely pretend
we did. */
if (prior_insn = redundant_insn (trial, insn, delay_list))
update_block (trial, thread);
if (trial == thread)
thread = next_active_insn (thread);
if (new_thread == trial)
new_thread = thread;
delete_insn (trial);
update_reg_unused_notes (prior_insn, trial);
new_thread = next_active_insn (trial);
/* There are two ways we can win: If TRIAL doesn't set anything
needed at the opposite thread and can't trap, or if it can
go into an annulled delay slot. */
if (condition == const_true_rtx
|| (! insn_sets_resource_p (trial, &opposite_needed, 1)
&& ! may_trap_p (pat)))
trial = try_split (pat, trial, 0);
if (new_thread == old_trial)
if (thread == old_trial)
pat = PATTERN (trial);
if (eligible_for_delay (insn, *pslots_filled, trial, flags))
#ifdef ANNUL_IFTRUE_SLOTS
#ifdef ANNUL_IFFALSE_SLOTS
trial = try_split (pat, trial, 0);
if (new_thread == old_trial)
if (thread == old_trial)
pat = PATTERN (trial);
? eligible_for_annul_false (insn, *pslots_filled, trial, flags)
: eligible_for_annul_true (insn, *pslots_filled, trial, flags)))
if (reg_mentioned_p (cc0_rtx, pat))
link_cc0_insns (trial);
/* If we own this thread, delete the insn. If this is the
destination of a branch, show that a basic block status
may have been updated. In any case, mark the new
starting point of this thread. */
update_block (trial, thread);
delete_insn (trial);
new_thread = next_active_insn (trial);
temp = own_thread ? trial : copy_rtx (trial);
INSN_FROM_TARGET_P (temp) = 1;
delay_list = add_to_delay_list (temp, delay_list);
if (slots_to_fill == ++(*pslots_filled))
/* Even though we have filled all the slots, we
may be branching to a location that has a
redundant insn. Skip any if so. */
while (new_thread && ! own_thread
&& ! insn_sets_resource_p (new_thread, &set, 1)
&& ! insn_sets_resource_p (new_thread, &needed, 1)
&& ! insn_references_resource_p (new_thread,
&& redundant_insn (new_thread, insn, delay_list))
new_thread = next_active_insn (new_thread);
/* This insn can't go into a delay slot. */
mark_set_resources (trial, &set, 0, 1);
mark_referenced_resources (trial, &needed, 1);
/* Ensure we don't put insns between the setting of cc and the comparison
by moving a setting of cc into an earlier delay slot since these insns
could clobber the condition code. */
/* If this insn is a register-register copy and the next insn has
a use of our destination, change it to use our source. That way,
it will become a candidate for our delay slot the next time
through this loop. This case occurs commonly in loops that
branch to their exit test.
We could check for more complex cases than those tested below,
but it doesn't seem worth it. It might also be a good idea to try
to swap the two insns. That might do better.
We can't do this if the next insn modifies our destination, because
that would make the replacement into the insn invalid. We also can't
do this if it modifies our source, because it might be an earlyclobber
operand. This latter test also prevents updating the contents of
a PRE_INC. */
if (GET_CODE (trial) == INSN && GET_CODE (pat) == SET
&& GET_CODE (SET_SRC (pat)) == REG
&& GET_CODE (SET_DEST (pat)) == REG)
rtx next = next_nonnote_insn (trial);
if (next && GET_CODE (next) == INSN
&& GET_CODE (PATTERN (next)) != USE
&& ! reg_set_p (SET_DEST (pat), next)
&& ! reg_set_p (SET_SRC (pat), next)
&& reg_referenced_p (SET_DEST (pat), PATTERN (next)))
validate_replace_rtx (SET_DEST (pat), SET_SRC (pat), next);
/* If we stopped on a branch insn that has delay slots, see if we can
steal some of the insns in those slots. */
if (trial && GET_CODE (trial) == INSN
&& GET_CODE (PATTERN (trial)) == SEQUENCE
&& GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN)
/* If this is the `true' thread, we will want to follow the jump,
so we can only do this if we have taken everything up to here. */
if (thread_if_true && trial == new_thread)
= steal_delay_list_from_target (insn, condition, PATTERN (trial),
delay_list, &set, &needed,
&opposite_needed, slots_to_fill,
pslots_filled, &must_annul,
else if (! thread_if_true)
= steal_delay_list_from_fallthrough (insn, condition,
delay_list, &set, &needed,
&opposite_needed, slots_to_fill,
pslots_filled, &must_annul);
/* If we haven't found anything for this delay slot and it is very
likely that the branch will be taken, see if the insn at our target
increments or decrements a register with an increment that does not
depend on the destination register. If so, try to place the opposite
arithmetic insn after the jump insn and put the arithmetic insn in the
delay slot. If we can't do this, return. */
if (delay_list == 0 && likely && new_thread && GET_CODE (new_thread) == INSN)
rtx pat = PATTERN (new_thread);
pat = PATTERN (trial);
if (GET_CODE (trial) != INSN || GET_CODE (pat) != SET
|| ! eligible_for_delay (insn, 0, trial, flags))
dest = SET_DEST (pat), src = SET_SRC (pat);
if ((GET_CODE (src) == PLUS || GET_CODE (src) == MINUS)
&& rtx_equal_p (XEXP (src, 0), dest)
&& ! reg_overlap_mentioned_p (dest, XEXP (src, 1)))
rtx other = XEXP (src, 1);
/* If this is a constant adjustment, use the same code with
the negated constant. Otherwise, reverse the sense of the
arithmetic. */
if (GET_CODE (other) == CONST_INT)
new_arith = gen_rtx (GET_CODE (src), GET_MODE (src), dest,
negate_rtx (GET_MODE (src), other));
new_arith = gen_rtx (GET_CODE (src) == PLUS ? MINUS : PLUS,
GET_MODE (src), dest, other);
ninsn = emit_insn_after (gen_rtx (SET, VOIDmode, dest, new_arith),
if (recog_memoized (ninsn) < 0
|| (insn_extract (ninsn),
! constrain_operands (INSN_CODE (ninsn), 1)))
delete_insn (ninsn);
update_block (trial, thread);
delete_insn (trial);
new_thread = next_active_insn (trial);
ninsn = own_thread ? trial : copy_rtx (trial);
INSN_FROM_TARGET_P (ninsn) = 1;
delay_list = add_to_delay_list (ninsn, NULL_RTX);
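/* Illustrative sketch (not part of GNU CC) of the trick above: to steal
"dest = dest + other" from the branch target, place it in the delay
slot, where it executes on both paths, and emit the opposite
arithmetic after the jump so the fall-through path is unchanged.
Plain integers stand in for the rtl; hypothetical names. */
#if 0
static void
toy_steal_increment (long *dest, long other, int taken)
{
  *dest += other;		/* delay slot: executed either way  */
  if (! taken)
    *dest -= other;		/* compensating insn on fall-through  */
}
#endif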
if (delay_list && must_annul)
INSN_ANNULLED_BRANCH_P (insn) = 1;
/* If we are to branch into the middle of this thread, find an appropriate
label or make a new one if none, and redirect INSN to it. If we hit the
end of the function, use the end-of-function label. */
if (new_thread != thread)
if (! thread_if_true)
if (new_thread && GET_CODE (new_thread) == JUMP_INSN
&& (simplejump_p (new_thread)
|| GET_CODE (PATTERN (new_thread)) == RETURN)
&& redirect_with_delay_list_safe_p (insn,
JUMP_LABEL (new_thread),
new_thread = follow_jumps (JUMP_LABEL (new_thread));
if (new_thread == 0)
label = find_end_label ();
else if (GET_CODE (new_thread) == CODE_LABEL)
label = get_label_before (new_thread);
reorg_redirect_jump (insn, label);
/* Make another attempt to find insns to place in delay slots.
We previously looked for insns located in front of the delay insn
and, for non-jump delay insns, located behind the delay insn.
Here only try to schedule jump insns and try to move insns from either
the target or the following insns into the delay slot. If annulling is
supported, we will be likely to do this. Otherwise, we can do this only
if safe. */
fill_eager_delay_slots (first)
int num_unfilled_slots = unfilled_slots_next - unfilled_slots_base;
for (i = 0; i < num_unfilled_slots; i++)
rtx target_label, insn_at_target, fallthrough_insn;
int own_fallthrough;
int prediction, slots_to_fill, slots_filled;
insn = unfilled_slots_base[i];
|| INSN_DELETED_P (insn)
|| GET_CODE (insn) != JUMP_INSN
|| ! (condjump_p (insn) || condjump_in_parallel_p (insn)))
slots_to_fill = num_delay_slots (insn);
if (slots_to_fill == 0)
target_label = JUMP_LABEL (insn);
condition = get_branch_condition (insn, target_label);
/* Get the next active fallthrough and target insns and see if we own
them. Then see whether the branch is likely true. We don't need
to do a lot of this for unconditional branches. */
insn_at_target = next_active_insn (target_label);
own_target = own_thread_p (target_label, target_label, 0);
if (condition == const_true_rtx)
own_fallthrough = 0;
fallthrough_insn = 0;
fallthrough_insn = next_active_insn (insn);
own_fallthrough = own_thread_p (NEXT_INSN (insn), NULL_RTX, 1);
prediction = mostly_true_jump (insn, condition);
/* If this insn is expected to branch, first try to get insns from our
target, then our fallthrough insns. If it is not expected to branch,
try the other order. */
= fill_slots_from_thread (insn, condition, insn_at_target,
fallthrough_insn, prediction == 2, 1,
own_target, own_fallthrough,
slots_to_fill, &slots_filled);
if (delay_list == 0 && own_fallthrough)
/* Even though we didn't find anything for delay slots,
we might have found a redundant insn which we deleted
from the thread that was filled. So we have to recompute
the next insn at the target. */
target_label = JUMP_LABEL (insn);
insn_at_target = next_active_insn (target_label);
= fill_slots_from_thread (insn, condition, fallthrough_insn,
insn_at_target, 0, 0,
own_fallthrough, own_target,
slots_to_fill, &slots_filled);
if (own_fallthrough)
= fill_slots_from_thread (insn, condition, fallthrough_insn,
insn_at_target, 0, 0,
own_fallthrough, own_target,
slots_to_fill, &slots_filled);
if (delay_list == 0)
= fill_slots_from_thread (insn, condition, insn_at_target,
next_active_insn (insn), 0, 1,
own_target, own_fallthrough,
slots_to_fill, &slots_filled);
unfilled_slots_base[i]
= emit_delay_sequence (insn, delay_list,
slots_filled, slots_to_fill);
if (slots_to_fill == slots_filled)
unfilled_slots_base[i] = 0;
note_delay_statistics (slots_filled, 1);
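/* Illustrative sketch (not part of GNU CC) of the ordering policy in
fill_eager_delay_slots: try the thread the branch is predicted to
follow first, and fall back to the other thread only when that yields
nothing. Hypothetical names; the callback stands in for
fill_slots_from_thread. */
#if 0
static int
toy_fill_eager (int (*try_thread) (int which), int predict_taken)
{
  /* which == 1 means the branch target, 0 means fall-through.  */
  int got = try_thread (predict_taken ? 1 : 0);

  if (got == 0)
    got = try_thread (predict_taken ? 0 : 1);	/* other direction  */
  return got;
}
#endif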
/* Once we have tried two ways to fill a delay slot, make a pass over the
code to try to improve the results and to do such things as more jump
threading. */
relax_delay_slots (first)
register rtx insn, next, pat;
register rtx trial, delay_insn, target_label;
/* Look at every JUMP_INSN and see if we can improve it. */
for (insn = first; insn; insn = next)
next = next_active_insn (insn);
/* If this is a jump insn, see if it now jumps to a jump, jumps to
the next insn, or jumps to a label that is not the last of a
group of consecutive labels. */
if (GET_CODE (insn) == JUMP_INSN
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& (target_label = JUMP_LABEL (insn)) != 0)
target_label = follow_jumps (target_label);
target_label = prev_label (next_active_insn (target_label));
if (target_label == 0)
target_label = find_end_label ();
if (next_active_insn (target_label) == next
&& ! condjump_in_parallel_p (insn))
if (target_label != JUMP_LABEL (insn))
reorg_redirect_jump (insn, target_label);
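/* Illustrative sketch (not part of GNU CC) of jump threading in the
style of follow_jumps above: chase a chain of unconditional jumps to
its final destination, with a hop limit so a cycle of jumps cannot
loop forever. Toy types; hypothetical names. */
#if 0
struct toy_jump { struct toy_jump *target; };	/* 0 target = not a jump */

static struct toy_jump *
toy_follow_jumps (struct toy_jump *label)
{
  int depth;

  for (depth = 0; depth < 10 && label && label->target; depth++)
    label = label->target;	/* thread through one more jump  */
  return label;
}
#endif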
3784 /* See if this jump branches around a unconditional jump.
3785 If so, invert this jump and point it to the target of the
3787 if (next && GET_CODE (next) == JUMP_INSN
3788 && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
3789 && next_active_insn (target_label) == next_active_insn (next)
3790 && no_labels_between_p (insn, next))
3792 rtx label = JUMP_LABEL (next);
3794 /* Be careful how we do this to avoid deleting code or
3795 labels that are momentarily dead. See similar optimization
3798 We also need to ensure we properly handle the case when
3799 invert_jump fails. */
3801 ++LABEL_NUSES (target_label);
3803 ++LABEL_NUSES (label);
3805 if (invert_jump (insn, label))
3812 --LABEL_NUSES (label);
3814 if (--LABEL_NUSES (target_label) == 0)
3815 delete_insn (target_label);
3821 /* If this is an unconditional jump and the previous insn is a
3822 conditional jump, try reversing the condition of the previous
3823 insn and swapping our targets. The next pass might be able to
3826 Don't do this if we expect the conditional branch to be true, because
3827 we would then be making the more common case longer. */
3829 if (GET_CODE (insn) == JUMP_INSN
3830 && (simplejump_p (insn) || GET_CODE (PATTERN (insn)) == RETURN)
3831 && (other = prev_active_insn (insn)) != 0
3832 && (condjump_p (other) || condjump_in_parallel_p (other))
3833 && no_labels_between_p (other, insn)
3834 && 0 < mostly_true_jump (other,
3835 get_branch_condition (other,
3836 JUMP_LABEL (other))))
3838 rtx other_target = JUMP_LABEL (other);
3839 target_label = JUMP_LABEL (insn);
3841 /* Increment the count of OTHER_TARGET, so it doesn't get deleted
3842 as we move the label. */
3844 ++LABEL_NUSES (other_target);
3846 if (invert_jump (other, target_label))
3847 reorg_redirect_jump (insn, other_target);
3850 --LABEL_NUSES (other_target);
3853 /* Now look only at cases where we have filled a delay slot. */
3854 if (GET_CODE (insn) != INSN
3855 || GET_CODE (PATTERN (insn)) != SEQUENCE)
3858 pat = PATTERN (insn);
3859 delay_insn = XVECEXP (pat, 0, 0);
3861 /* See if the first insn in the delay slot is redundant with some
3862 previous insn. Remove it from the delay slot if so; then set up
3863 to reprocess this insn. */
3864 if (redundant_insn (XVECEXP (pat, 0, 1), delay_insn, 0))
3866 delete_from_delay_slot (XVECEXP (pat, 0, 1));
3867 next = prev_active_insn (next);
3871 /* Now look only at the cases where we have a filled JUMP_INSN. */
3872 if (GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
3873 || ! (condjump_p (XVECEXP (PATTERN (insn), 0, 0))
3874 || condjump_in_parallel_p (XVECEXP (PATTERN (insn), 0, 0))))
3877 target_label = JUMP_LABEL (delay_insn);
3881 /* If this jump goes to another unconditional jump, thread it, but
3882 don't convert a jump into a RETURN here. */
3883 trial = follow_jumps (target_label);
3884 /* We use next_real_insn instead of next_active_insn, so that
3885 the special USE insns emitted by reorg won't be ignored.
3886 If they are ignored, then they will get deleted if target_label
3887 is now unreachable, and that would cause mark_target_live_regs
3889 trial = prev_label (next_real_insn (trial));
3890 if (trial == 0 && target_label != 0)
3891 trial = find_end_label ();
3893 if (trial != target_label
3894 && redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
3896 reorg_redirect_jump (delay_insn, trial);
3897 target_label = trial;
3900 /* If the first insn at TARGET_LABEL is redundant with a previous
3901 insn, redirect the jump to the following insn process again. */
3902 trial = next_active_insn (target_label);
3903 if (trial && GET_CODE (PATTERN (trial)) != SEQUENCE
3904 && redundant_insn (trial, insn, 0))
3906 trial = next_active_insn (trial);
3908 target_label = find_end_label ();
3910 target_label = get_label_before (trial);
3911 reorg_redirect_jump (delay_insn, target_label);
3916 /* Similarly, if it is an unconditional jump with one insn in its
3917 delay list and that insn is redundant, thread the jump. */
3918 if (trial && GET_CODE (PATTERN (trial)) == SEQUENCE
3919 && XVECLEN (PATTERN (trial), 0) == 2
3920 && GET_CODE (XVECEXP (PATTERN (trial), 0, 0)) == JUMP_INSN
3921 && (simplejump_p (XVECEXP (PATTERN (trial), 0, 0))
3922 || GET_CODE (PATTERN (XVECEXP (PATTERN (trial), 0, 0))) == RETURN)
3923 && redundant_insn (XVECEXP (PATTERN (trial), 0, 1), insn, 0))
3925 target_label = JUMP_LABEL (XVECEXP (PATTERN (trial), 0, 0));
3926 if (target_label == 0)
3927 target_label = find_end_label ();
3929 if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
3932 reorg_redirect_jump (delay_insn, target_label);
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && prev_active_insn (target_label) == insn
	  && ! condjump_in_parallel_p (delay_insn)
#ifdef HAVE_cc0
	  /* If the last insn in the delay slot sets CC0 for some insn,
	     various code assumes that it is in a delay slot.  We could
	     put it back where it belonged and delete the register notes,
	     but it doesn't seem worthwhile in this uncommon case.  */
	  && ! find_reg_note (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1),
			      REG_CC_USER, NULL_RTX)
#endif
	  )
	{
	  int i;

	  /* All this insn does is execute its delay list and jump to the
	     following insn.  So delete the jump and just execute the delay
	     list insns.

	     We do this by deleting the INSN containing the SEQUENCE, then
	     re-emitting the insns separately, and then deleting the jump.
	     This allows the count of the jump target to be properly
	     decremented.  */

	  /* Clear the from target bit, since these insns are no longer
	     in delay slots.  */
	  for (i = 0; i < XVECLEN (pat, 0); i++)
	    INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)) = 0;

	  trial = PREV_INSN (insn);
	  delete_insn (insn);
	  emit_insn_after (pat, trial);
	  delete_scheduled_jump (delay_insn);
	  continue;
	}
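
      /* The case just simplified is, schematically (slot insn in braces,
	 on a hypothetical one-slot machine):

	     beq L1 {X}    ==>    X
	 L1: ...              L1: ...

	 Since the branch is not annulling, X executes either way, and a
	 branch to the immediately following insn does nothing.  */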
      /* See if this is an unconditional jump around a single insn which is
	 identical to the one in its delay slot.  In this case, we can just
	 delete the branch and the insn in its delay slot.  */
      if (next && GET_CODE (next) == INSN
	  && prev_label (next_active_insn (next)) == target_label
	  && simplejump_p (insn)
	  && XVECLEN (pat, 0) == 2
	  && rtx_equal_p (PATTERN (next), PATTERN (XVECEXP (pat, 0, 1))))
	{
	  delete_insn (insn);
	  continue;
	}
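
      /* Schematically (slot insn in braces):

	     jmp L1 {X}    ==>    X
	     X                L1: ...
	 L1: ...

	 Taking the jump executes X and lands at L1; falling through would
	 execute the identical X and reach L1 as well, so the jump and its
	 copy of X can both go.  */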
      /* See if this jump (with its delay slots) branches around another
	 jump (without delay slots).  If so, invert this jump and point
	 it to the target of the second jump.  We cannot do this for
	 annulled jumps, though.  Again, don't convert a jump to a RETURN
	 here.  */
      if (! INSN_ANNULLED_BRANCH_P (delay_insn)
	  && next && GET_CODE (next) == JUMP_INSN
	  && (simplejump_p (next) || GET_CODE (PATTERN (next)) == RETURN)
	  && next_active_insn (target_label) == next_active_insn (next)
	  && no_labels_between_p (insn, next))
	{
	  rtx label = JUMP_LABEL (next);
	  rtx old_label = JUMP_LABEL (delay_insn);

	  if (label == 0)
	    label = find_end_label ();

	  if (redirect_with_delay_slots_safe_p (delay_insn, label, insn))
	    {
	      /* Be careful how we do this to avoid deleting code or labels
		 that are momentarily dead.  See similar optimization in
		 jump.c.  */
	      if (old_label)
		++LABEL_NUSES (old_label);

	      if (invert_jump (delay_insn, label))
		{
		  int i;

		  /* Must update the INSN_FROM_TARGET_P bits now that
		     the branch is reversed, so that mark_target_live_regs
		     will handle the delay slot insn correctly.  */
		  for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
		    {
		      rtx slot = XVECEXP (PATTERN (insn), 0, i);
		      INSN_FROM_TARGET_P (slot) = ! INSN_FROM_TARGET_P (slot);
		    }

		  delete_insn (next);
		  next = insn;
		}

	      if (old_label && --LABEL_NUSES (old_label) == 0)
		delete_insn (old_label);
	      continue;
	    }
	}
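
      /* Schematically, for a condition C (slot insn in braces):

	     bC L1 {X}            b!C L2 {X}
	     jmp L2       ==>  L1: ...
	 L1: ...

	 which removes one jump from every path through this code.  */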
      /* If we own the thread opposite the way this insn branches, see if we
	 can merge its delay slots with following insns.  */
      if (INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	  && own_thread_p (NEXT_INSN (insn), 0, 1))
	try_merge_delay_insns (insn, next);
      else if (! INSN_FROM_TARGET_P (XVECEXP (pat, 0, 1))
	       && own_thread_p (target_label, target_label, 0))
	try_merge_delay_insns (insn, next_active_insn (target_label));

      /* If we get here, we haven't deleted INSN.  But we may have deleted
	 NEXT, so recompute it.  */
      next = next_active_insn (insn);
    }
}

#ifdef HAVE_return

/* Look for filled jumps to the end of function label.  We can try to convert
   them into RETURN insns if the insns in the delay slot are valid for the
   RETURN as well.  */

static void
make_return_insns (first)
     rtx first;
{
  rtx insn, jump_insn, pat;
  rtx real_return_label = end_of_function_label;
  int slots, i;
  /* See if there is a RETURN insn in the function other than the one we
     made for END_OF_FUNCTION_LABEL.  If so, set up anything we can't change
     into a RETURN to jump to it.  */
  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == JUMP_INSN && GET_CODE (PATTERN (insn)) == RETURN)
      {
	real_return_label = get_label_before (insn);
	break;
      }
  /* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
     was equal to END_OF_FUNCTION_LABEL.  */
  LABEL_NUSES (real_return_label)++;

  /* Clear the list of insns to fill so we can use it.  */
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);
  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      int flags;

      /* Only look at filled JUMP_INSNs that go to the end of function
	 label.  */
      if (GET_CODE (insn) != INSN
	  || GET_CODE (PATTERN (insn)) != SEQUENCE
	  || GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) != JUMP_INSN
	  || JUMP_LABEL (XVECEXP (PATTERN (insn), 0, 0)) != end_of_function_label)
	continue;

      pat = PATTERN (insn);
      jump_insn = XVECEXP (pat, 0, 0);
      /* If we can't make the jump into a RETURN, try to redirect it to the best
	 RETURN and go on to the next insn.  */
      if (! reorg_redirect_jump (jump_insn, NULL_RTX))
	{
	  /* Make sure redirecting the jump will not invalidate the delay
	     slot insns.  */
	  if (redirect_with_delay_slots_safe_p (jump_insn,
						real_return_label,
						insn))
	    reorg_redirect_jump (jump_insn, real_return_label);
	  continue;
	}
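
      /* Note that redirecting a jump to a zero label, as above, asks jump.c
	 to rewrite it as a RETURN insn; a zero result means that rewrite
	 was not possible, so the jump is pointed at the best existing
	 return point instead.  */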
      /* See if this RETURN can accept the insns currently in its delay slot.
	 It can if it has more or an equal number of slots and the contents
	 of each is valid.  */

      flags = get_jump_flags (jump_insn, JUMP_LABEL (jump_insn));
      slots = num_delay_slots (jump_insn);
      if (slots >= XVECLEN (pat, 0) - 1)
	{
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    if (! (
#ifdef ANNUL_IFFALSE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_false (jump_insn, i - 1,
					       XVECEXP (pat, 0, i), flags) :
#endif
#ifdef ANNUL_IFTRUE_SLOTS
		   (INSN_ANNULLED_BRANCH_P (jump_insn)
		    && ! INSN_FROM_TARGET_P (XVECEXP (pat, 0, i)))
		   ? eligible_for_annul_true (jump_insn, i - 1,
					      XVECEXP (pat, 0, i), flags) :
#endif
		   eligible_for_delay (jump_insn, i - 1, XVECEXP (pat, 0, i), flags)))
	      break;
	}
      else
	i = 0;

      if (i == XVECLEN (pat, 0))
	continue;
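
      /* eligible_for_delay and the eligible_for_annul_* predicates are
	 generated by genattrtab from the define_delay constructs in the
	 machine description, so "valid" above means whatever the port says
	 may occupy (or be annulled in) each slot of a RETURN.  */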
      /* We have to do something with this insn.  If it is an unconditional
	 RETURN, delete the SEQUENCE and output the individual insns,
	 followed by the RETURN.  Then set things up so we try to find
	 insns for its delay slots, if it needs some.  */
      if (GET_CODE (PATTERN (jump_insn)) == RETURN)
	{
	  rtx prev = PREV_INSN (insn);

	  delete_insn (insn);
	  for (i = 1; i < XVECLEN (pat, 0); i++)
	    prev = emit_insn_after (PATTERN (XVECEXP (pat, 0, i)), prev);

	  insn = emit_jump_insn_after (PATTERN (jump_insn), prev);
	  emit_barrier_after (insn);
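
	  /* The barrier records that control cannot drop past the new
	     RETURN.  If the RETURN itself takes delay slots, push it back
	     on the unfilled-slots list so the fill passes run below can
	     try to fill them.  */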
	  if (slots)
	    obstack_ptr_grow (&unfilled_slots_obstack, insn);
	}
      else
	/* It is probably more efficient to keep this with its current
	   delay slot as a branch to a RETURN.  */
	reorg_redirect_jump (jump_insn, real_return_label);
    }
  /* Now delete REAL_RETURN_LABEL if we never used it.  Then try to fill any
     new delay slots we have created.  */
  if (--LABEL_NUSES (real_return_label) == 0)
    delete_insn (real_return_label);

  fill_simple_delay_slots (first, 1);
  fill_simple_delay_slots (first, 0);
}
#endif

/* Try to find insns to place in delay slots.  */

void
dbr_schedule (first, file)
     rtx first;
     FILE *file;
{
  rtx insn, next, epilogue_insn = 0;
  int i;
#if 0
  int old_flag_no_peephole = flag_no_peephole;

  /* Execute `final' once in prescan mode to delete any insns that won't be
     used.  Don't let final try to do any peephole optimization--it will
     ruin dataflow information for this pass.  */

  flag_no_peephole = 1;
  final (first, 0, NO_DEBUG, 1, 1);
  flag_no_peephole = old_flag_no_peephole;
#endif
  /* If the current function has no insns other than the prologue and
     epilogue, then do not try to fill any delay slots.  */
  if (n_basic_blocks == 0)
    return;
  /* Find the highest INSN_UID and allocate and initialize our map from
     INSN_UID's to position in code.  */
  for (max_uid = 0, insn = first; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_UID (insn) > max_uid)
	max_uid = INSN_UID (insn);
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
	epilogue_insn = insn;
    }

  uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
  for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
    uid_to_ruid[INSN_UID (insn)] = i;
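
  /* A "ruid" is simply the ordinal position of an insn in the chain, so
     comparing two uid_to_ruid entries tells which insn comes first without
     walking the insn chain.  */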
  /* Initialize the list of insns that need filling.  */
  if (unfilled_firstobj == 0)
    {
      gcc_obstack_init (&unfilled_slots_obstack);
      unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);
    }
  for (insn = next_active_insn (first); insn; insn = next_active_insn (insn))
    {
      rtx target;

      INSN_ANNULLED_BRANCH_P (insn) = 0;
      INSN_FROM_TARGET_P (insn) = 0;

      /* Skip vector tables.  We can't get attributes for them.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	      || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
	continue;

      if (num_delay_slots (insn) > 0)
	obstack_ptr_grow (&unfilled_slots_obstack, insn);

      /* Ensure all jumps go to the last of a set of consecutive labels.  */
      if (GET_CODE (insn) == JUMP_INSN
	  && (condjump_p (insn) || condjump_in_parallel_p (insn))
	  && JUMP_LABEL (insn) != 0
	  && ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))
	      != JUMP_LABEL (insn)))
	redirect_jump (insn, target);
    }
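
  /* That is, given
	 L1:
	 L2:	insn
     a jump to L1 is retargeted at L2, so two branches to the same place
     always have the same JUMP_LABEL and can be compared directly.  */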
  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);
  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (current_function_return_rtx != 0
      && GET_CODE (current_function_return_rtx) == REG)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i])
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);
  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);
  /* Show we haven't computed an end-of-function label yet.  */
  end_of_function_label = 0;

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table
    = (struct target_info **) alloca ((TARGET_HASH_PRIME
				       * sizeof (struct target_info *)));
  bzero ((char *) target_hash_table,
	 TARGET_HASH_PRIME * sizeof (struct target_info *));

  bb_ticks = (int *) alloca (n_basic_blocks * sizeof (int));
  bzero ((char *) bb_ticks, n_basic_blocks * sizeof (int));
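
  /* target_hash_table caches, per target label, the live-register
     information computed by mark_target_live_regs; bb_ticks counts the
     changes made to each basic block, so a stale cache entry can be
     detected and recomputed.  */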
  /* Initialize the statistics for this function.  */
  bzero ((char *) num_insns_needing_delays, sizeof num_insns_needing_delays);
  bzero ((char *) num_filled_delays, sizeof num_filled_delays);

  /* Now do the delay slot filling.  Try everything twice in case earlier
     changes make more slots fillable.  */

  for (reorg_pass_number = 0;
       reorg_pass_number < MAX_REORG_PASSES;
       reorg_pass_number++)
    {
      fill_simple_delay_slots (first, 1);
      fill_simple_delay_slots (first, 0);
      fill_eager_delay_slots (first);
      relax_delay_slots (first);
    }
  /* Delete any USE insns made by update_block; subsequent passes don't need
     them or know how to deal with them.  */
  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);

      if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	  && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	next = delete_insn (insn);
    }
  /* If we made an end of function label, indicate that it is now
     safe to delete it by undoing our prior adjustment to LABEL_NUSES.
     If it is now unused, delete it.  */
  if (end_of_function_label && --LABEL_NUSES (end_of_function_label) == 0)
    delete_insn (end_of_function_label);

#ifdef HAVE_return
  if (HAVE_return && end_of_function_label != 0)
    make_return_insns (first);
#endif
  obstack_free (&unfilled_slots_obstack, unfilled_firstobj);

  /* It is not clear why the line below is needed, but it does seem to be.  */
  unfilled_firstobj = (rtx *) obstack_alloc (&unfilled_slots_obstack, 0);

  /* Reposition the prologue and epilogue notes in case we moved the
     prologue/epilogue insns.  */
  reposition_prologue_and_epilogue_notes (first);
  if (file)
    {
      register int i, j, need_comma;

      for (reorg_pass_number = 0;
	   reorg_pass_number < MAX_REORG_PASSES;
	   reorg_pass_number++)
	{
	  fprintf (file, ";; Reorg pass #%d:\n", reorg_pass_number + 1);
	  for (i = 0; i < NUM_REORG_FUNCTIONS; i++)
	    {
	      need_comma = 0;
	      fprintf (file, ";; Reorg function #%d\n", i);

	      fprintf (file, ";; %d insns needing delay slots\n;; ",
		       num_insns_needing_delays[i][reorg_pass_number]);

	      for (j = 0; j < MAX_DELAY_HISTOGRAM; j++)
		if (num_filled_delays[i][j][reorg_pass_number])
		  {
		    if (need_comma)
		      fprintf (file, ", ");
		    need_comma = 1;
		    fprintf (file, "%d got %d delays",
			     num_filled_delays[i][j][reorg_pass_number], j);
		  }
	      fprintf (file, "\n");
	    }
	}
    }
}
#endif /* DELAY_SLOTS */