1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Basic block manipulation
29 create_basic_block, flow_delete_block, split_block,
31 - Infrastructure to determine quickly basic block for insn
32 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
33 - Edge redirection with updating and optimizing of insn chain
34 block_label, redirect_edge_and_branch,
35 redirect_edge_and_branch_force, tidy_fallthru_edge, force_nonfallthru
36 - Edge splitting and committing to edges
37 split_edge, insert_insn_on_edge, commit_edge_insertions
38 - Dumping and debugging
39 print_rtl_with_bb, dump_bb, debug_bb, debug_bb_n
40 - Consistency checking
42 - CFG updating after constant propagation
43 purge_dead_edges, purge_all_dead_edges */
49 #include "hard-reg-set.h"
50 #include "basic-block.h"
59 #include "insn-config.h"
61 /* Stubs in case we don't have a return insn. */
64 #define gen_return() NULL_RTX
67 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
68 /* ??? Should probably be using LABEL_NUSES instead. It would take a
69 bit of surgery to be able to use or co-opt the routines in jump. */
71 rtx tail_recursion_label_list;
73 static int can_delete_note_p PARAMS ((rtx));
74 static int can_delete_label_p PARAMS ((rtx));
75 static void commit_one_edge_insertion PARAMS ((edge, int));
76 static bool try_redirect_by_replacing_jump PARAMS ((edge, basic_block));
77 static rtx last_loop_beg_note PARAMS ((rtx));
78 static bool back_edge_of_syntactic_loop_p PARAMS ((basic_block, basic_block));
79 static basic_block force_nonfallthru_and_redirect PARAMS ((edge, basic_block));
81 /* Return true if NOTE is not one of the ones that must be kept paired,
82 so that we may simply delete it. */
85 can_delete_note_p (note)
88 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
89 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
90 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
93 /* True if a given label can be deleted. */
96 can_delete_label_p (label)
99 return (!LABEL_PRESERVE_P (label)
100 /* User declared labels must be preserved. */
101 && LABEL_NAME (label) == 0
102 && !in_expr_list_p (forced_labels, label)
103 && !in_expr_list_p (label_value_list, label));
106 /* Delete INSN by patching it out. Return the next insn. */
112 rtx next = NEXT_INSN (insn);
114 bool really_delete = true;
116 if (GET_CODE (insn) == CODE_LABEL)
118 /* Some labels can't be directly removed from the INSN chain, as they
119 might be references via variables, constant pool etc.
120 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
121 if (! can_delete_label_p (insn))
123 const char *name = LABEL_NAME (insn);
125 really_delete = false;
126 PUT_CODE (insn, NOTE);
127 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
128 NOTE_SOURCE_FILE (insn) = name;
131 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
136 /* If this insn has already been deleted, something is very wrong. */
137 if (INSN_DELETED_P (insn))
140 INSN_DELETED_P (insn) = 1;
143 /* If deleting a jump, decrement the use count of the label. Deleting
144 the label itself should happen in the normal course of block merging. */
145 if (GET_CODE (insn) == JUMP_INSN
147 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
148 LABEL_NUSES (JUMP_LABEL (insn))--;
150 /* Also if deleting an insn that references a label. */
151 else if ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
152 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
153 LABEL_NUSES (XEXP (note, 0))--;
155 if (GET_CODE (insn) == JUMP_INSN
156 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
157 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
159 rtx pat = PATTERN (insn);
160 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
161 int len = XVECLEN (pat, diff_vec_p);
164 for (i = 0; i < len; i++)
166 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
168 /* When deleting code in bulk (e.g. removing many unreachable
169 blocks) we can delete a label that's a target of the vector
170 before deleting the vector itself. */
171 if (GET_CODE (label) != NOTE)
172 LABEL_NUSES (label)--;
179 /* Like delete_insn but also purge dead edges from BB. */
181 delete_insn_and_edges (insn)
188 && BLOCK_FOR_INSN (insn)
189 && BLOCK_FOR_INSN (insn)->end == insn)
191 x = delete_insn (insn);
193 purge_dead_edges (BLOCK_FOR_INSN (insn));
197 /* Unlink a chain of insns between START and FINISH, leaving notes
198 that must be paired. */
201 delete_insn_chain (start, finish)
206 /* Unchain the insns one by one. It would be quicker to delete all of these
207 with a single unchaining, rather than one at a time, but we need to keep
211 next = NEXT_INSN (start);
212 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
215 next = delete_insn (start);
223 /* Like delete_insn but also purge dead edges from BB. */
225 delete_insn_chain_and_edges (first, last)
231 && BLOCK_FOR_INSN (last)
232 && BLOCK_FOR_INSN (last)->end == last)
234 delete_insn_chain (first, last);
236 purge_dead_edges (BLOCK_FOR_INSN (last));
239 /* Create a new basic block consisting of the instructions between HEAD and END
240 inclusive. This function is designed to allow fast BB construction - reuses
241 the note and basic block struct in BB_NOTE, if any and do not grow
242 BASIC_BLOCK chain and should be used directly only by CFG construction code.
243 END can be NULL in to create new empty basic block before HEAD. Both END
244 and HEAD can be NULL to create basic block at the end of INSN chain.
245 AFTER is the basic block we should be put after. */
248 create_basic_block_structure (head, end, bb_note, after)
249 rtx head, end, bb_note;
255 && ! RTX_INTEGRATED_P (bb_note)
256 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
259 /* If we found an existing note, thread it back onto the chain. */
263 if (GET_CODE (head) == CODE_LABEL)
267 after = PREV_INSN (head);
271 if (after != bb_note && NEXT_INSN (after) != bb_note)
272 reorder_insns_nobb (bb_note, bb_note, after);
276 /* Otherwise we must create a note and a basic block structure. */
282 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
283 else if (GET_CODE (head) == CODE_LABEL && end)
285 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
291 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
297 NOTE_BASIC_BLOCK (bb_note) = bb;
300 /* Always include the bb note in the block. */
301 if (NEXT_INSN (end) == bb_note)
306 bb->index = last_basic_block++;
308 link_block (bb, after);
309 BASIC_BLOCK (bb->index) = bb;
310 update_bb_for_insn (bb);
312 /* Tag the block so that we know it has been used when considering
313 other basic block notes. */
319 /* Create new basic block consisting of instructions in between HEAD and END
320 and place it to the BB chain after block AFTER. END can be NULL in to
321 create new empty basic block before HEAD. Both END and HEAD can be NULL to
322 create basic block at the end of INSN chain. */
325 create_basic_block (head, end, after)
331 /* Place the new block just after the end. */
332 VARRAY_GROW (basic_block_info, last_basic_block+1);
336 bb = create_basic_block_structure (head, end, NULL, after);
341 /* Delete the insns in a (non-live) block. We physically delete every
342 non-deleted-note insn, and update the flow graph appropriately.
344 Return nonzero if we deleted an exception handler. */
346 /* ??? Preserving all such notes strikes me as wrong. It would be nice
347 to post-process the stream to remove empty blocks, loops, ranges, etc. */
350 flow_delete_block_noexpunge (b)
353 int deleted_handler = 0;
356 /* If the head of this block is a CODE_LABEL, then it might be the
357 label for an exception handler which can't be reached.
359 We need to remove the label from the exception_handler_label list
360 and remove the associated NOTE_INSN_EH_REGION_BEG and
361 NOTE_INSN_EH_REGION_END notes. */
363 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
364 hanging before the block. */
366 for (insn = PREV_INSN (b->head); insn; insn = PREV_INSN (insn))
368 if (GET_CODE (insn) != NOTE)
370 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
371 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
372 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
377 never_reached_warning (insn, b->end);
379 if (GET_CODE (insn) == CODE_LABEL)
380 maybe_remove_eh_handler (insn);
382 /* Include any jump table following the basic block. */
384 if (GET_CODE (end) == JUMP_INSN
385 && (tmp = JUMP_LABEL (end)) != NULL_RTX
386 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
387 && GET_CODE (tmp) == JUMP_INSN
388 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
389 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
392 /* Include any barrier that may follow the basic block. */
393 tmp = next_nonnote_insn (end);
394 if (tmp && GET_CODE (tmp) == BARRIER)
397 /* Selectively delete the entire chain. */
399 delete_insn_chain (insn, end);
401 /* Remove the edges into and out of this block. Note that there may
402 indeed be edges in, if we are removing an unreachable loop. */
403 while (b->pred != NULL)
404 remove_edge (b->pred);
405 while (b->succ != NULL)
406 remove_edge (b->succ);
411 return deleted_handler;
415 flow_delete_block (b)
418 int deleted_handler = flow_delete_block_noexpunge (b);
420 /* Remove the basic block from the array. */
423 return deleted_handler;
426 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
429 compute_bb_for_insn ()
438 for (insn = bb->head; ; insn = NEXT_INSN (insn))
440 BLOCK_FOR_INSN (insn) = bb;
447 /* Release the basic_block_for_insn array. */
453 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
454 if (GET_CODE (insn) != BARRIER)
455 BLOCK_FOR_INSN (insn) = NULL;
458 /* Update insns block within BB. */
461 update_bb_for_insn (bb)
466 for (insn = bb->head; ; insn = NEXT_INSN (insn))
468 set_block_for_insn (insn, bb);
474 /* Split a block BB after insn INSN creating a new fallthru edge.
475 Return the new edge. Note that to keep other parts of the compiler happy,
476 this function renumbers all the basic blocks so that the new
477 one has a number one greater than the block split. */
480 split_block (bb, insn)
488 /* There is no point splitting the block after its end. */
492 /* Create the new basic block. */
493 new_bb = create_basic_block (NEXT_INSN (insn), bb->end, bb);
494 new_bb->count = bb->count;
495 new_bb->frequency = bb->frequency;
496 new_bb->loop_depth = bb->loop_depth;
499 /* Redirect the outgoing edges. */
500 new_bb->succ = bb->succ;
502 for (e = new_bb->succ; e; e = e->succ_next)
505 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
507 if (bb->global_live_at_start)
509 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
510 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
511 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
513 /* We now have to calculate which registers are live at the end
514 of the split basic block and at the start of the new basic
515 block. Start with those registers that are known to be live
516 at the end of the original basic block and get
517 propagate_block to determine which registers are live. */
518 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
519 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
520 COPY_REG_SET (bb->global_live_at_end,
521 new_bb->global_live_at_start);
522 #ifdef HAVE_conditional_execution
523 /* In the presence of conditional execution we are not able to update
524 liveness precisely. */
525 if (reload_completed)
527 bb->flags |= BB_DIRTY;
528 new_bb->flags |= BB_DIRTY;
536 /* Blocks A and B are to be merged into a single block A. The insns
537 are already contiguous, hence `nomove'. */
540 merge_blocks_nomove (a, b)
543 rtx b_head = b->head, b_end = b->end, a_end = a->end;
544 rtx del_first = NULL_RTX, del_last = NULL_RTX;
548 /* If there was a CODE_LABEL beginning B, delete it. */
549 if (GET_CODE (b_head) == CODE_LABEL)
551 /* Detect basic blocks with nothing but a label. This can happen
552 in particular at the end of a function. */
556 del_first = del_last = b_head;
557 b_head = NEXT_INSN (b_head);
560 /* Delete the basic block note and handle blocks containing just that
562 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
570 b_head = NEXT_INSN (b_head);
573 /* If there was a jump out of A, delete it. */
574 if (GET_CODE (a_end) == JUMP_INSN)
578 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
579 if (GET_CODE (prev) != NOTE
580 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
587 /* If this was a conditional jump, we need to also delete
588 the insn that set cc0. */
589 if (only_sets_cc0_p (prev))
593 prev = prev_nonnote_insn (prev);
600 a_end = PREV_INSN (del_first);
602 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
603 del_first = NEXT_INSN (a_end);
605 /* Normally there should only be one successor of A and that is B, but
606 partway though the merge of blocks for conditional_execution we'll
607 be merging a TEST block with THEN and ELSE successors. Free the
608 whole lot of them and hope the caller knows what they're doing. */
610 remove_edge (a->succ);
612 /* Adjust the edges out of B for the new owner. */
613 for (e = b->succ; e; e = e->succ_next)
616 a->flags |= b->flags;
618 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
619 b->pred = b->succ = NULL;
620 a->global_live_at_end = b->global_live_at_end;
624 /* Delete everything marked above as well as crap that might be
625 hanging out between the two blocks. */
626 delete_insn_chain (del_first, del_last);
628 /* Reassociate the insns of B with A. */
633 for (x = a_end; x != b_end; x = NEXT_INSN (x))
634 set_block_for_insn (x, a);
636 set_block_for_insn (b_end, a);
644 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
651 if (block == EXIT_BLOCK_PTR)
654 if (GET_CODE (block->head) != CODE_LABEL)
656 block->head = emit_label_before (gen_label_rtx (), block->head);
662 /* Attempt to perform edge redirection by replacing possibly complex jump
663 instruction by unconditional jump or removing jump completely. This can
664 apply only if all edges now point to the same block. The parameters and
665 return values are equivalent to redirect_edge_and_branch. */
668 try_redirect_by_replacing_jump (e, target)
672 basic_block src = e->src;
673 rtx insn = src->end, kill_from;
678 /* Verify that all targets will be TARGET. */
679 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
680 if (tmp->dest != target && tmp != e)
683 if (tmp || !onlyjump_p (insn))
685 if (flow2_completed && JUMP_LABEL (insn)
686 && (table = NEXT_INSN (JUMP_LABEL (insn))) != NULL_RTX
687 && GET_CODE (table) == JUMP_INSN
688 && (GET_CODE (PATTERN (table)) == ADDR_VEC
689 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
692 /* Avoid removing branch with side effects. */
693 set = single_set (insn);
694 if (!set || side_effects_p (set))
697 /* In case we zap a conditional jump, we'll need to kill
698 the cc0 setter too. */
701 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
702 kill_from = PREV_INSN (insn);
705 /* See if we can create the fallthru edge. */
706 if (can_fallthru (src, target))
709 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
712 /* Selectively unlink whole insn chain. */
713 delete_insn_chain (kill_from, PREV_INSN (target->head));
716 /* If this already is simplejump, redirect it. */
717 else if (simplejump_p (insn))
719 if (e->dest == target)
722 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
723 INSN_UID (insn), e->dest->index, target->index);
724 if (!redirect_jump (insn, block_label (target), 0))
726 if (target == EXIT_BLOCK_PTR)
732 /* Cannot do anything for target exit block. */
733 else if (target == EXIT_BLOCK_PTR)
736 /* Or replace possibly complicated jump insn by simple jump insn. */
739 rtx target_label = block_label (target);
742 emit_jump_insn_after (gen_jump (target_label), insn);
743 JUMP_LABEL (src->end) = target_label;
744 LABEL_NUSES (target_label)++;
746 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
747 INSN_UID (insn), INSN_UID (src->end));
750 delete_insn_chain (kill_from, insn);
752 /* Recognize a tablejump that we are converting to a
753 simple jump and remove its associated CODE_LABEL
754 and ADDR_VEC or ADDR_DIFF_VEC. */
755 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
756 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
757 && GET_CODE (tmp) == JUMP_INSN
758 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
759 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
761 delete_insn_chain (JUMP_LABEL (insn), tmp);
764 barrier = next_nonnote_insn (src->end);
765 if (!barrier || GET_CODE (barrier) != BARRIER)
766 emit_barrier_after (src->end);
769 /* Keep only one edge out and set proper flags. */
770 while (src->succ->succ_next)
771 remove_edge (src->succ);
774 e->flags = EDGE_FALLTHRU;
778 e->probability = REG_BR_PROB_BASE;
779 e->count = src->count;
781 /* We don't want a block to end on a line-number note since that has
782 the potential of changing the code between -g and not -g. */
783 while (GET_CODE (e->src->end) == NOTE
784 && NOTE_LINE_NUMBER (e->src->end) >= 0)
785 delete_insn (e->src->end);
787 if (e->dest != target)
788 redirect_edge_succ (e, target);
793 /* Return last loop_beg note appearing after INSN, before start of next
794 basic block. Return INSN if there are no such notes.
796 When emitting jump to redirect a fallthru edge, it should always appear
797 after the LOOP_BEG notes, as loop optimizer expect loop to either start by
798 fallthru edge or jump following the LOOP_BEG note jumping to the loop exit
802 last_loop_beg_note (insn)
807 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
808 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
809 insn = NEXT_INSN (insn))
810 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
816 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
817 expense of adding new instructions or reordering basic blocks.
819 Function can be also called with edge destination equivalent to the TARGET.
820 Then it should try the simplifications and do nothing if none is possible.
822 Return true if transformation succeeded. We still return false in case E
823 already destinated TARGET and we didn't managed to simplify instruction
827 redirect_edge_and_branch (e, target)
832 rtx old_label = e->dest->head;
833 basic_block src = e->src;
836 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
839 if (try_redirect_by_replacing_jump (e, target))
842 /* Do this fast path late, as we want above code to simplify for cases
843 where called on single edge leaving basic block containing nontrivial
845 else if (e->dest == target)
848 /* We can only redirect non-fallthru edges of jump insn. */
849 if (e->flags & EDGE_FALLTHRU)
851 else if (GET_CODE (insn) != JUMP_INSN)
854 /* Recognize a tablejump and adjust all matching cases. */
855 if ((tmp = JUMP_LABEL (insn)) != NULL_RTX
856 && (tmp = NEXT_INSN (tmp)) != NULL_RTX
857 && GET_CODE (tmp) == JUMP_INSN
858 && (GET_CODE (PATTERN (tmp)) == ADDR_VEC
859 || GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC))
863 rtx new_label = block_label (target);
865 if (target == EXIT_BLOCK_PTR)
867 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
868 vec = XVEC (PATTERN (tmp), 0);
870 vec = XVEC (PATTERN (tmp), 1);
872 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
873 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
875 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
876 --LABEL_NUSES (old_label);
877 ++LABEL_NUSES (new_label);
880 /* Handle casesi dispatch insns */
881 if ((tmp = single_set (insn)) != NULL
882 && SET_DEST (tmp) == pc_rtx
883 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
884 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
885 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
887 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
889 --LABEL_NUSES (old_label);
890 ++LABEL_NUSES (new_label);
895 /* ?? We may play the games with moving the named labels from
896 one basic block to the other in case only one computed_jump is
898 if (computed_jump_p (insn)
899 /* A return instruction can't be redirected. */
900 || returnjump_p (insn))
903 /* If the insn doesn't go where we think, we're confused. */
904 if (JUMP_LABEL (insn) != old_label)
907 /* If the substitution doesn't succeed, die. This can happen
908 if the back end emitted unrecognizable instructions or if
909 target is exit block on some arches. */
910 if (!redirect_jump (insn, block_label (target), 0))
912 if (target == EXIT_BLOCK_PTR)
919 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
920 e->src->index, e->dest->index, target->index);
922 if (e->dest != target)
923 redirect_edge_succ_nodup (e, target);
928 /* Like force_nonfallthru below, but additionally performs redirection
929 Used by redirect_edge_and_branch_force. */
932 force_nonfallthru_and_redirect (e, target)
936 basic_block jump_block, new_bb = NULL, src = e->src;
939 int abnormal_edge_flags = 0;
941 /* In the case the last instruction is conditional jump to the next
942 instruction, first redirect the jump itself and then continue
943 by creating an basic block afterwards to redirect fallthru edge. */
944 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
945 && any_condjump_p (e->src->end)
946 /* When called from cfglayout, fallthru edges do not
947 neccessarily go to the next block. */
948 && e->src->next_bb == e->dest
949 && JUMP_LABEL (e->src->end) == e->dest->head)
952 edge b = unchecked_make_edge (e->src, target, 0);
954 if (!redirect_jump (e->src->end, block_label (target), 0))
956 note = find_reg_note (e->src->end, REG_BR_PROB, NULL_RTX);
959 int prob = INTVAL (XEXP (note, 0));
961 b->probability = prob;
962 b->count = e->count * prob / REG_BR_PROB_BASE;
963 e->probability -= e->probability;
964 e->count -= b->count;
965 if (e->probability < 0)
972 if (e->flags & EDGE_ABNORMAL)
974 /* Irritating special case - fallthru edge to the same block as abnormal
976 We can't redirect abnormal edge, but we still can split the fallthru
977 one and create separate abnormal edge to original destination.
978 This allows bb-reorder to make such edge non-fallthru. */
979 if (e->dest != target)
981 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
982 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
984 else if (!(e->flags & EDGE_FALLTHRU))
986 else if (e->src == ENTRY_BLOCK_PTR)
988 /* We can't redirect the entry block. Create an empty block at the
989 start of the function which we use to add the new jump. */
991 basic_block bb = create_basic_block (e->dest->head, NULL, ENTRY_BLOCK_PTR);
993 /* Change the existing edge's source to be the new block, and add
994 a new edge from the entry block to the new block. */
996 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
1004 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1007 if (e->src->succ->succ_next || abnormal_edge_flags)
1009 /* Create the new structures. */
1011 /* Position the new block correctly relative to loop notes. */
1012 note = last_loop_beg_note (e->src->end);
1013 note = NEXT_INSN (note);
1015 /* ... and ADDR_VECs. */
1017 && GET_CODE (note) == CODE_LABEL
1019 && GET_CODE (NEXT_INSN (note)) == JUMP_INSN
1020 && (GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_DIFF_VEC
1021 || GET_CODE (PATTERN (NEXT_INSN (note))) == ADDR_VEC))
1022 note = NEXT_INSN (NEXT_INSN (note));
1024 jump_block = create_basic_block (note, NULL, e->src);
1025 jump_block->count = e->count;
1026 jump_block->frequency = EDGE_FREQUENCY (e);
1027 jump_block->loop_depth = target->loop_depth;
1029 if (target->global_live_at_start)
1031 jump_block->global_live_at_start
1032 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1033 jump_block->global_live_at_end
1034 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1035 COPY_REG_SET (jump_block->global_live_at_start,
1036 target->global_live_at_start);
1037 COPY_REG_SET (jump_block->global_live_at_end,
1038 target->global_live_at_start);
1042 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1043 new_edge->probability = e->probability;
1044 new_edge->count = e->count;
1046 /* Redirect old edge. */
1047 redirect_edge_pred (e, jump_block);
1048 e->probability = REG_BR_PROB_BASE;
1050 new_bb = jump_block;
1053 jump_block = e->src;
1055 e->flags &= ~EDGE_FALLTHRU;
1056 if (target == EXIT_BLOCK_PTR)
1059 emit_jump_insn_after (gen_return (), jump_block->end);
1065 rtx label = block_label (target);
1066 emit_jump_insn_after (gen_jump (label), jump_block->end);
1067 JUMP_LABEL (jump_block->end) = label;
1068 LABEL_NUSES (label)++;
1071 emit_barrier_after (jump_block->end);
1072 redirect_edge_succ_nodup (e, target);
1074 if (abnormal_edge_flags)
1075 make_edge (src, target, abnormal_edge_flags);
1080 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1081 (and possibly create new basic block) to make edge non-fallthru.
1082 Return newly created BB or NULL if none. */
1085 force_nonfallthru (e)
1088 return force_nonfallthru_and_redirect (e, e->dest);
1091 /* Redirect edge even at the expense of creating new jump insn or
1092 basic block. Return new basic block if created, NULL otherwise.
1093 Abort if conversion is impossible. */
1096 redirect_edge_and_branch_force (e, target)
1100 if (redirect_edge_and_branch (e, target)
1101 || e->dest == target)
1104 /* In case the edge redirection failed, try to force it to be non-fallthru
1105 and redirect newly created simplejump. */
1106 return force_nonfallthru_and_redirect (e, target);
1109 /* The given edge should potentially be a fallthru edge. If that is in
1110 fact true, delete the jump and barriers that are in the way. */
1113 tidy_fallthru_edge (e, b, c)
1119 /* ??? In a late-running flow pass, other folks may have deleted basic
1120 blocks by nopping out blocks, leaving multiple BARRIERs between here
1121 and the target label. They ought to be chastized and fixed.
1123 We can also wind up with a sequence of undeletable labels between
1124 one block and the next.
1126 So search through a sequence of barriers, labels, and notes for
1127 the head of block C and assert that we really do fall through. */
1129 for (q = NEXT_INSN (b->end); q != c->head; q = NEXT_INSN (q))
1133 /* Remove what will soon cease being the jump insn from the source block.
1134 If block B consisted only of this single jump, turn it into a deleted
1137 if (GET_CODE (q) == JUMP_INSN
1139 && (any_uncondjump_p (q)
1140 || (b->succ == e && e->succ_next == NULL)))
1143 /* If this was a conditional jump, we need to also delete
1144 the insn that set cc0. */
1145 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1151 /* We don't want a block to end on a line-number note since that has
1152 the potential of changing the code between -g and not -g. */
1153 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1157 /* Selectively unlink the sequence. */
1158 if (q != PREV_INSN (c->head))
1159 delete_insn_chain (NEXT_INSN (q), PREV_INSN (c->head));
1161 e->flags |= EDGE_FALLTHRU;
1164 /* Fix up edges that now fall through, or rather should now fall through
1165 but previously required a jump around now deleted blocks. Simplify
1166 the search by only examining blocks numerically adjacent, since this
1167 is how find_basic_blocks created them. */
1170 tidy_fallthru_edges ()
1174 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1177 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1183 /* We care about simple conditional or unconditional jumps with
1186 If we had a conditional branch to the next instruction when
1187 find_basic_blocks was called, then there will only be one
1188 out edge for the block which ended with the conditional
1189 branch (since we do not create duplicate edges).
1191 Furthermore, the edge will be marked as a fallthru because we
1192 merge the flags for the duplicate edges. So we do not want to
1193 check that the edge is not a FALLTHRU edge. */
1195 if ((s = b->succ) != NULL
1196 && ! (s->flags & EDGE_COMPLEX)
1197 && s->succ_next == NULL
1199 /* If the jump insn has side effects, we can't tidy the edge. */
1200 && (GET_CODE (b->end) != JUMP_INSN
1201 || onlyjump_p (b->end)))
1202 tidy_fallthru_edge (s, b, c);
1206 /* Helper function for split_edge. Return true in case edge BB2 to BB1
1207 is back edge of syntactic loop. */
1210 back_edge_of_syntactic_loop_p (bb1, bb2)
1211 basic_block bb1, bb2;
1220 /* ??? Could we guarantee that bb indices are monotone, so that we could
1221 just compare them? */
1222 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1228 for (insn = bb1->end; insn != bb2->head && count >= 0;
1229 insn = NEXT_INSN (insn))
1230 if (GET_CODE (insn) == NOTE)
1232 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1234 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1241 /* Split a (typically critical) edge. Return the new block.
1242 Abort on abnormal edges.
1244 ??? The code generally expects to be called on critical edges.
1245 The case of a block ending in an unconditional jump to a
1246 block with multiple predecessors is not handled optimally. */
/* NOTE(review): this excerpt is elided -- interior source lines are missing
   (see the gaps in the retained original line numbers), so the code below is
   kept byte-identical and only comments are added or corrected.  */
/* Split EDGE_IN by inserting a new (empty) basic block immediately before
   its destination; the new block falls through to the old destination and
   EDGE_IN is redirected to target the new block.  Return value elided from
   this view -- presumably the new block; confirm against the full source.  */
1249 split_edge (edge_in)
1256 /* Abnormal edges cannot be split. */
1257 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1260 /* We are going to place the new block in front of edge destination.
1261 Avoid existence of fallthru predecessors. */
1262 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1266 for (e = edge_in->dest->pred; e; e = e->pred_next)
1267 if (e->flags & EDGE_FALLTHRU)
1271 force_nonfallthru (e);
1274 /* Create the basic block note.
1276 Where we place the note can have a noticeable impact on the generated
1277 code. Consider this cfg:
1287 If we need to insert an insn on the edge from block 0 to block 1,
1288 we want to ensure the instructions we insert are outside of any
1289 loop notes that physically sit between block 0 and block 1. Otherwise
1290 we confuse the loop optimizer into thinking the loop is a phony. */
1292 if (edge_in->dest != EXIT_BLOCK_PTR
1293 && PREV_INSN (edge_in->dest->head)
1294 && GET_CODE (PREV_INSN (edge_in->dest->head)) == NOTE
1295 && (NOTE_LINE_NUMBER (PREV_INSN (edge_in->dest->head))
1296 == NOTE_INSN_LOOP_BEG)
1297 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1298 before = PREV_INSN (edge_in->dest->head);
1299 else if (edge_in->dest != EXIT_BLOCK_PTR)
1300 before = edge_in->dest->head;
1304 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
/* The new block inherits the count/frequency of the edge being split.  */
1305 bb->count = edge_in->count;
1306 bb->frequency = EDGE_FREQUENCY (edge_in);
1308 /* ??? This info is likely going to be out of date very soon. */
1309 if (edge_in->dest->global_live_at_start)
1311 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1312 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
/* Both live sets are copied from the destination's live-at-start: the new
   block is empty, so nothing becomes live or dies inside it.  */
1313 COPY_REG_SET (bb->global_live_at_start,
1314 edge_in->dest->global_live_at_start);
1315 COPY_REG_SET (bb->global_live_at_end,
1316 edge_in->dest->global_live_at_start);
1319 edge_out = make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1321 /* For non-fallthru edges, we must adjust the predecessor's
1322 jump instruction to target our new block. */
1323 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1325 if (!redirect_edge_and_branch (edge_in, bb))
1329 redirect_edge_succ (edge_in, bb);
/* NOTE(review): elided excerpt -- interior lines missing; code kept
   byte-identical, comments only.  */
1334 /* Queue instructions for insertion on an edge between two basic blocks.
1335 The new instructions and basic blocks (if any) will not appear in the
1336 CFG until commit_edge_insertions is called. */
1339 insert_insn_on_edge (pattern, e)
1343 /* We cannot insert instructions on an abnormal critical edge.
1344 It will be easier to find the culprit if we die now. */
1345 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
/* Accumulate PATTERN onto the pending sequence stored in E->insns; an
   existing queued sequence is reopened so repeated calls append.  */
1348 if (e->insns == NULL_RTX)
1351 push_to_sequence (e->insns);
1353 emit_insn (pattern);
1355 e->insns = get_insns ();
/* NOTE(review): elided excerpt -- interior lines missing (gaps in retained
   line numbers); code kept byte-identical, comments only.  */
1359 /* Update the CFG for the instructions queued on edge E. */
1362 commit_one_edge_insertion (e, watch_calls)
1366 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1367 basic_block bb = NULL;
1369 /* Pull the insns off the edge now since the edge might go away. */
1371 e->insns = NULL_RTX;
1373 /* Special case -- avoid inserting code between call and storing
1374 its return value. */
1375 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1376 && e->src != ENTRY_BLOCK_PTR
1377 && GET_CODE (e->src->end) == CALL_INSN)
1379 rtx next = next_nonnote_insn (e->src->end);
1381 after = e->dest->head;
1382 /* The first insn after the call may be a stack pop, skip it. */
1384 && keep_with_call_p (next))
1387 next = next_nonnote_insn (next);
1391 if (!before && !after)
1393 /* Figure out where to put these things. If the destination has
1394 one predecessor, insert there. Except for the exit block. */
1395 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1399 /* Get the location correct wrt a code label, and "nice" wrt
1400 a basic block note, and before everything else. */
1402 if (GET_CODE (tmp) == CODE_LABEL)
1403 tmp = NEXT_INSN (tmp);
1404 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1405 tmp = NEXT_INSN (tmp);
1406 if (tmp == bb->head)
1409 after = PREV_INSN (tmp);
1411 after = get_last_insn ();
1414 /* If the source has one successor and the edge is not abnormal,
1415 insert there. Except for the entry block. */
1416 else if ((e->flags & EDGE_ABNORMAL) == 0
1417 && e->src->succ->succ_next == NULL
1418 && e->src != ENTRY_BLOCK_PTR)
1422 /* It is possible to have a non-simple jump here. Consider a target
1423 where some forms of unconditional jumps clobber a register. This
1424 happens on the fr30 for example.
1426 We know this block has a single successor, so we can just emit
1427 the queued insns before the jump. */
1428 if (GET_CODE (bb->end) == JUMP_INSN)
/* Back up over any NOTE_INSN_LOOP_BEG notes immediately preceding the
   jump so queued insns land outside the loop notes.  */
1429 for (before = bb->end;
1430 GET_CODE (PREV_INSN (before)) == NOTE
1431 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1432 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1436 /* We'd better be fallthru, or we've lost track of what's what. */
1437 if ((e->flags & EDGE_FALLTHRU) == 0)
1443 /* Otherwise we must split the edge. */
1446 bb = split_edge (e);
1451 /* Now that we've found the spot, do the insertion. */
1455 emit_insn_before (insns, before);
1456 last = prev_nonnote_insn (before);
1459 last = emit_insn_after (insns, after);
1461 if (returnjump_p (last))
1463 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1464 This is not currently a problem because this only happens
1465 for the (single) epilogue, which already has a fallthru edge
1469 if (e->dest != EXIT_BLOCK_PTR
1470 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1473 e->flags &= ~EDGE_FALLTHRU;
1474 emit_barrier_after (last);
1477 delete_insn (before);
1479 else if (GET_CODE (last) == JUMP_INSN)
1482 /* Mark the basic block for find_sub_basic_blocks. */
/* NOTE(review): elided excerpt -- interior lines missing; code kept
   byte-identical, comments only.  */
1486 /* Update the CFG for all queued instructions. */
1489 commit_edge_insertions ()
1493 bool changed = false;
1495 #ifdef ENABLE_CHECKING
1496 verify_flow_info ();
/* Walk every block between ENTRY and EXIT and commit queued insns on each
   outgoing edge; NEXT is cached because the edge may be freed/replaced.  */
1499 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1503 for (e = bb->succ; e; e = next)
1505 next = e->succ_next;
1509 commit_one_edge_insertion (e, false);
/* Rediscover sub-basic-blocks created by the insertions (elided guard
   presumably tests CHANGED -- confirm against full source).  */
1517 blocks = sbitmap_alloc (last_basic_block);
1518 sbitmap_zero (blocks);
1522 SET_BIT (blocks, bb->index);
1525 find_many_sub_basic_blocks (blocks);
1526 sbitmap_free (blocks);
/* NOTE(review): elided excerpt -- interior lines missing; code kept
   byte-identical, comments only.  Mirrors commit_edge_insertions but passes
   watch_calls = true so insertions avoid splitting a call from the insn that
   stores its return value.  */
1529 /* Update the CFG for all queued instructions, taking special care of inserting
1530 code on edges between call and storing its return value. */
1533 commit_edge_insertions_watch_calls ()
1537 bool changed = false;
1539 #ifdef ENABLE_CHECKING
1540 verify_flow_info ();
1543 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1547 for (e = bb->succ; e; e = next)
1549 next = e->succ_next;
1553 commit_one_edge_insertion (e, true);
1561 blocks = sbitmap_alloc (last_basic_block);
1562 sbitmap_zero (blocks);
1566 SET_BIT (blocks, bb->index);
1569 find_many_sub_basic_blocks (blocks);
1570 sbitmap_free (blocks);
/* NOTE(review): elided excerpt -- the dump_bb signature line itself is
   missing from this view; code kept byte-identical, comments only.  */
1573 /* Print out one basic block with live information at start and end. */
1584 fprintf (outf, ";; Basic block %d, loop depth %d, count ",
1585 bb->index, bb->loop_depth);
1586 fprintf (outf, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) bb->count);
1589 fputs (";; Predecessors: ", outf);
1590 for (e = bb->pred; e; e = e->pred_next)
1591 dump_edge_info (outf, e, 0);
1594 fputs (";; Registers live at start:", outf);
1595 dump_regset (bb->global_live_at_start, outf);
/* Print every insn of the block, inclusive of bb->end.  */
1598 for (insn = bb->head, last = NEXT_INSN (bb->end); insn != last;
1599 insn = NEXT_INSN (insn))
1600 print_rtl_single (outf, insn);
1602 fputs (";; Registers live at end:", outf);
1603 dump_regset (bb->global_live_at_end, outf);
1606 fputs (";; Successors: ", outf);
1607 for (e = bb->succ; e; e = e->succ_next)
1608 dump_edge_info (outf, e, 1);
/* NOTE(review): lone surviving body line of debug_bb (signature elided in
   this excerpt); dumps BB to stderr via dump_bb.  */
1616 dump_bb (bb, stderr);
/* NOTE(review): lone surviving body line of debug_bb_n (signature elided in
   this excerpt); looks up block N by index and dumps it to stderr.  */
1623 dump_bb (BASIC_BLOCK (n), stderr);
/* NOTE(review): elided excerpt -- interior lines missing; code kept
   byte-identical, comments only.  */
1626 /* Like print_rtl, but also print out live information for the start of each
1630 print_rtl_with_bb (outf, rtx_first)
1637 fprintf (outf, "(nil)\n");
1640 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1641 int max_uid = get_max_uid ();
/* START/END map INSN_UID -> basic block whose head/end is that insn;
   IN_BB_P tracks whether a UID appears in zero, one, or multiple blocks.  */
1643 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1645 = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1646 enum bb_state *in_bb_p
1647 = (enum bb_state *) xcalloc (max_uid, sizeof (enum bb_state));
1651 FOR_EACH_BB_REVERSE (bb)
1655 start[INSN_UID (bb->head)] = bb;
1656 end[INSN_UID (bb->end)] = bb;
1657 for (x = bb->head; x != NULL_RTX; x = NEXT_INSN (x))
1659 enum bb_state state = IN_MULTIPLE_BB;
1661 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB
1663 in_bb_p[INSN_UID (x)] = state;
/* Main pass: walk the whole insn chain, annotating block starts/ends and
   flagging insns outside or shared between blocks.  */
1670 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1674 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1676 fprintf (outf, ";; Start of basic block %d, registers live:",
1678 dump_regset (bb->global_live_at_start, outf);
1682 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1683 && GET_CODE (tmp_rtx) != NOTE
1684 && GET_CODE (tmp_rtx) != BARRIER)
1685 fprintf (outf, ";; Insn is not within a basic block\n");
1686 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1687 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1689 did_output = print_rtl_single (outf, tmp_rtx);
1691 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1693 fprintf (outf, ";; End of basic block %d, registers live:\n",
1695 dump_regset (bb->global_live_at_end, outf);
1708 if (current_function_epilogue_delay_list != 0)
1710 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1711 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1712 tmp_rtx = XEXP (tmp_rtx, 1))
1713 print_rtl_single (outf, XEXP (tmp_rtx, 0));
/* NOTE(review): elided excerpt -- interior lines missing; code kept
   byte-identical, comments only.  Syncs the REG_BR_PROB note on BB's ending
   jump with the CFG's branch-edge probability; no-op when the block does not
   end in a jump, has no note, or the note already matches.  */
1718 update_br_prob_note (bb)
1722 if (GET_CODE (bb->end) != JUMP_INSN)
1724 note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX);
1725 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1727 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
/* NOTE(review): elided excerpt -- many interior lines are missing (gaps in
   the retained line numbers); code kept byte-identical, comments only
   corrected or added.  */
1730 /* Verify the CFG consistency. This function check some CFG invariants and
1731 aborts when something is wrong. Hope that this function will help to
1732 convert many optimization passes to preserve CFG consistent.
1734 Currently it does following checks:
1736 - test head/end pointers
1737 - overlapping of basic blocks
1738 - edge list correctness
1739 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1740 - tails of basic blocks (ensure that boundary is necessary)
1741 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1742 and NOTE_INSN_BASIC_BLOCK
1743 - check that all insns are in the basic blocks
1744 (except the switch handling code, barriers and notes)
1745 - check that all returns are followed by barriers
1747 In future it can be extended check a lot of other stuff as well
1748 (reachability of basic blocks, life information, etc. etc.). */
1753 const int max_uid = get_max_uid ();
1754 const rtx rtx_first = get_insns ();
1755 rtx last_head = get_last_insn ();
1756 basic_block *bb_info, *last_visited;
1757 size_t *edge_checksum;
1759 int num_bb_notes, err = 0;
1760 basic_block bb, last_bb_seen;
/* Index arrays: BB_INFO maps INSN_UID -> owning block; LAST_VISITED and
   EDGE_CHECKSUM are indexed by block index + 2 to make room for the ENTRY
   and EXIT pseudo-blocks (whose indices are negative).  */
1762 bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
1763 last_visited = (basic_block *) xcalloc (last_basic_block + 2,
1764 sizeof (basic_block));
1765 edge_checksum = (size_t *) xcalloc (last_basic_block + 2, sizeof (size_t));
1767 /* Check bb chain & numbers. */
1768 last_bb_seen = ENTRY_BLOCK_PTR;
1769 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
1771 if (bb != EXIT_BLOCK_PTR
1772 && bb != BASIC_BLOCK (bb->index))
1774 error ("bb %d on wrong place", bb->index);
1778 if (bb->prev_bb != last_bb_seen)
1780 error ("prev_bb of %d should be %d, not %d",
1781 bb->index, last_bb_seen->index, bb->prev_bb->index);
1788 FOR_EACH_BB_REVERSE (bb)
1790 rtx head = bb->head;
1793 /* Verify the end of the basic block is in the INSN chain. */
1794 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1800 error ("end insn %d for block %d not found in the insn stream",
1801 INSN_UID (end), bb->index);
1805 /* Work backwards from the end to the head of the basic block
1806 to verify the head is in the RTL chain. */
1807 for (; x != NULL_RTX; x = PREV_INSN (x))
1809 /* While walking over the insn chain, verify insns appear
1810 in only one basic block and initialize the BB_INFO array
1811 used by other passes. */
1812 if (bb_info[INSN_UID (x)] != NULL)
1814 error ("insn %d is in multiple basic blocks (%d and %d)",
1815 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1819 bb_info[INSN_UID (x)] = bb;
1826 error ("head insn %d for block %d not found in the insn stream",
1827 INSN_UID (head), bb->index);
1834 /* Now check the basic blocks (boundaries etc.) */
1835 FOR_EACH_BB_REVERSE (bb)
1837 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1841 if (INSN_P (bb->end)
1842 && (note = find_reg_note (bb->end, REG_BR_PROB, NULL_RTX))
1843 && bb->succ && bb->succ->succ_next
1844 && any_condjump_p (bb->end))
1846 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1848 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
1849 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1855 error ("verify_flow_info: Wrong count of block %i %i",
1856 bb->index, (int)bb->count);
1859 if (bb->frequency < 0)
1861 error ("verify_flow_info: Wrong frequency of block %i %i",
1862 bb->index, bb->frequency);
/* Check every successor edge: no duplicates, sane probability/count,
   and tally the edge-kind counters used below.  */
1865 for (e = bb->succ; e; e = e->succ_next)
1867 if (last_visited [e->dest->index + 2] == bb)
1869 error ("verify_flow_info: Duplicate edge %i->%i",
1870 e->src->index, e->dest->index);
1873 if (e->probability < 0 || e->probability > REG_BR_PROB_BASE)
1875 error ("verify_flow_info: Wrong probability of edge %i->%i %i",
1876 e->src->index, e->dest->index, e->probability);
1881 error ("verify_flow_info: Wrong count of edge %i->%i %i",
1882 e->src->index, e->dest->index, (int)e->count);
1886 last_visited [e->dest->index + 2] = bb;
1888 if (e->flags & EDGE_FALLTHRU)
1891 if ((e->flags & ~EDGE_DFS_BACK) == 0)
1894 if (e->flags & EDGE_ABNORMAL_CALL)
1897 if (e->flags & EDGE_EH)
1899 else if (e->flags & EDGE_ABNORMAL)
1902 if ((e->flags & EDGE_FALLTHRU)
1903 && e->src != ENTRY_BLOCK_PTR
1904 && e->dest != EXIT_BLOCK_PTR)
1908 if (e->src->next_bb != e->dest)
1911 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
1912 e->src->index, e->dest->index);
/* A fallthru edge may only have barriers/notes (and, depending on
   CASE_DROPS_THROUGH, jump-table data) between src end and dest head.  */
1916 for (insn = NEXT_INSN (e->src->end); insn != e->dest->head;
1917 insn = NEXT_INSN (insn))
1918 if (GET_CODE (insn) == BARRIER
1919 #ifndef CASE_DROPS_THROUGH
1922 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
1926 error ("verify_flow_info: Incorrect fallthru %i->%i",
1927 e->src->index, e->dest->index);
1928 fatal_insn ("wrong insn in the fallthru edge", insn);
1935 error ("verify_flow_info: Basic block %d succ edge is corrupted",
1937 fprintf (stderr, "Predecessor: ");
1938 dump_edge_info (stderr, e, 0);
1939 fprintf (stderr, "\nSuccessor: ");
1940 dump_edge_info (stderr, e, 1);
1941 fprintf (stderr, "\n");
/* Checksum: each edge is added here on the successor walk and subtracted
   on the predecessor walk below; a nonzero residue means the succ and
   pred lists disagree.  */
1945 edge_checksum[e->dest->index + 2] += (size_t) e;
1948 if (n_eh && GET_CODE (PATTERN (bb->end)) != RESX
1949 && !find_reg_note (bb->end, REG_EH_REGION, NULL_RTX))
1951 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1955 && (GET_CODE (bb->end) != JUMP_INSN
1956 || (n_branch > 1 && (any_uncondjump_p (bb->end)
1957 || any_condjump_p (bb->end)))))
1959 error ("Too many outgoing branch edges from bb %i", bb->index);
1962 if (n_fallthru && any_uncondjump_p (bb->end))
1964 error ("Fallthru edge after unconditional jump %i", bb->index);
1967 if (n_branch != 1 && any_uncondjump_p (bb->end))
1969 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1972 if (n_branch != 1 && any_condjump_p (bb->end)
1973 && JUMP_LABEL (bb->end) != bb->next_bb->head)
1975 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1978 if (n_call && GET_CODE (bb->end) != CALL_INSN)
1980 error ("Call edges for non-call insn in bb %i", bb->index);
1984 && (GET_CODE (bb->end) != CALL_INSN && n_call != n_abnormal)
1985 && (GET_CODE (bb->end) != JUMP_INSN
1986 || any_condjump_p (bb->end)
1987 || any_uncondjump_p (bb->end)))
1989 error ("Abnormal edges for no purpose in bb %i", bb->index);
1997 /* Ensure existence of barrier in BB with no fallthru edges. */
1998 for (insn = bb->end; !insn || GET_CODE (insn) != BARRIER;
1999 insn = NEXT_INSN (insn))
2001 || (GET_CODE (insn) == NOTE
2002 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
2004 error ("missing barrier after block %i", bb->index);
2010 for (e = bb->pred; e; e = e->pred_next)
2014 error ("basic block %d pred edge is corrupted", bb->index);
2015 fputs ("Predecessor: ", stderr);
2016 dump_edge_info (stderr, e, 0);
2017 fputs ("\nSuccessor: ", stderr);
2018 dump_edge_info (stderr, e, 1);
2019 fputc ('\n', stderr);
2022 edge_checksum[e->dest->index + 2] -= (size_t) e;
2025 for (x = bb->head; x != NEXT_INSN (bb->end); x = NEXT_INSN (x))
2026 if (BLOCK_FOR_INSN (x) != bb)
2029 if (! BLOCK_FOR_INSN (x))
2031 ("insn %d inside basic block %d but block_for_insn is NULL",
2032 INSN_UID (x), bb->index);
2035 ("insn %d inside basic block %d but block_for_insn is %i",
2036 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2041 /* OK pointers are correct. Now check the header of basic
2042 block. It ought to contain optional CODE_LABEL followed
2043 by NOTE_BASIC_BLOCK. */
2045 if (GET_CODE (x) == CODE_LABEL)
2049 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2057 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2059 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2065 /* Do checks for empty blocks here. */
2068 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2070 if (NOTE_INSN_BASIC_BLOCK_P (x))
2072 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2073 INSN_UID (x), bb->index);
2080 if (GET_CODE (x) == JUMP_INSN
2081 || GET_CODE (x) == CODE_LABEL
2082 || GET_CODE (x) == BARRIER)
2084 error ("in basic block %d:", bb->index);
2085 fatal_insn ("flow control insn inside a basic block", x);
2090 /* Complete edge checksumming for ENTRY and EXIT. */
2094 for (e = ENTRY_BLOCK_PTR->succ; e ; e = e->succ_next)
2095 edge_checksum[e->dest->index + 2] += (size_t) e;
2097 for (e = EXIT_BLOCK_PTR->pred; e ; e = e->pred_next)
2098 edge_checksum[e->dest->index + 2] -= (size_t) e;
2101 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
2102 if (edge_checksum[bb->index + 2])
2104 error ("basic block %i edge lists are corrupted", bb->index);
2109 last_bb_seen = ENTRY_BLOCK_PTR;
/* Final chain walk: count NOTE_INSN_BASIC_BLOCK notes, verify block
   numbering is consecutive, and catch insns outside any block.  */
2111 for (x = rtx_first; x; x = NEXT_INSN (x))
2113 if (NOTE_INSN_BASIC_BLOCK_P (x))
2115 bb = NOTE_BASIC_BLOCK (x);
2118 if (bb != last_bb_seen->next_bb)
2119 internal_error ("basic blocks not numbered consecutively");
2124 if (!bb_info[INSN_UID (x)])
2126 switch (GET_CODE (x))
2133 /* An addr_vec is placed outside any basic block. */
2135 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2136 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2137 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2140 /* But in any case, non-deletable labels can appear anywhere. */
2144 fatal_insn ("insn outside basic block", x);
2149 && GET_CODE (x) == JUMP_INSN
2150 && returnjump_p (x) && ! condjump_p (x)
2151 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2152 fatal_insn ("return not followed by barrier", x);
2155 if (num_bb_notes != n_basic_blocks)
2157 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2158 num_bb_notes, n_basic_blocks);
2161 internal_error ("verify_flow_info failed");
2165 free (last_visited);
2166 free (edge_checksum);
/* NOTE(review): elided excerpt -- interior lines missing; code kept
   byte-identical, comments only corrected or added.  */
2169 /* Assume that the preceding pass has possibly eliminated jump instructions
2170 or converted the unconditional jumps. Eliminate the edges from CFG.
2171 Return true if any edges are eliminated. */
2174 purge_dead_edges (bb)
2178 rtx insn = bb->end, note;
2179 bool purged = false;
2181 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2182 if (GET_CODE (insn) == INSN
2183 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2187 if (! may_trap_p (PATTERN (insn))
2188 || ((eqnote = find_reg_equal_equiv_note (insn))
2189 && ! may_trap_p (XEXP (eqnote, 0))))
2190 remove_note (insn, note);
2193 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2194 for (e = bb->succ; e; e = next)
2196 next = e->succ_next;
2197 if (e->flags & EDGE_EH)
2199 if (can_throw_internal (bb->end))
2202 else if (e->flags & EDGE_ABNORMAL_CALL)
2204 if (GET_CODE (bb->end) == CALL_INSN
2205 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2206 || INTVAL (XEXP (note, 0)) >= 0))
2213 bb->flags |= BB_DIRTY;
2217 if (GET_CODE (insn) == JUMP_INSN)
2222 /* We care only about conditional jumps and simplejumps. */
2223 if (!any_condjump_p (insn)
2224 && !returnjump_p (insn)
2225 && !simplejump_p (insn))
2228 /* Branch probability/prediction notes are defined only for
2229 condjumps. We've possibly turned condjump into simplejump. */
2230 if (simplejump_p (insn))
2232 note = find_reg_note (insn, REG_BR_PROB, NULL);
2234 remove_note (insn, note);
2235 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2236 remove_note (insn, note);
2239 for (e = bb->succ; e; e = next)
2241 next = e->succ_next;
2243 /* Avoid letting abnormal flags leak from computed jumps turned
2244 into simplejumps. */
2246 e->flags &= ~EDGE_ABNORMAL;
2248 /* See if this edge is one we should keep. */
2249 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2250 /* A conditional jump can fall through into the next
2251 block, so we should keep the edge. */
2253 else if (e->dest != EXIT_BLOCK_PTR
2254 && e->dest->head == JUMP_LABEL (insn))
2255 /* If the destination block is the target of the jump,
2258 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2259 /* If the destination block is the exit block, and this
2260 instruction is a return, then keep the edge. */
2262 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2263 /* Keep the edges that correspond to exceptions thrown by
2264 this instruction and rematerialize the EDGE_ABNORMAL flag
2265 we just cleared above. */
2267 e->flags |= EDGE_ABNORMAL;
2271 /* We do not need this edge. */
2272 bb->flags |= BB_DIRTY;
2277 if (!bb->succ || !purged)
2281 fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);
2286 /* Redistribute probabilities. */
2287 if (!bb->succ->succ_next)
2289 bb->succ->probability = REG_BR_PROB_BASE;
2290 bb->succ->count = bb->count;
2294 note = find_reg_note (insn, REG_BR_PROB, NULL);
/* Two successors remain: split count/probability between the branch and
   fallthru edges according to the REG_BR_PROB note.  */
2298 b = BRANCH_EDGE (bb);
2299 f = FALLTHRU_EDGE (bb);
2300 b->probability = INTVAL (XEXP (note, 0));
2301 f->probability = REG_BR_PROB_BASE - b->probability;
2302 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2303 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2309 /* If we don't see a jump insn, we don't know exactly why the block would
2310 have been broken at this point. Look for a simple, non-fallthru edge,
2311 as these are only created by conditional branches. If we find such an
2312 edge we know that there used to be a jump here and can then safely
2313 remove all non-fallthru edges. */
2314 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2321 for (e = bb->succ; e; e = next)
2323 next = e->succ_next;
2324 if (!(e->flags & EDGE_FALLTHRU))
2326 bb->flags |= BB_DIRTY;
2332 if (!bb->succ || bb->succ->succ_next)
2335 bb->succ->probability = REG_BR_PROB_BASE;
2336 bb->succ->count = bb->count;
2339 fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
/* NOTE(review): elided excerpt -- interior lines missing; code kept
   byte-identical, comments only.  */
2344 /* Search all basic blocks for potentially dead edges and purge them. Return
2345 true if some edge has been eliminated. */
2348 purge_all_dead_edges (update_life_p)
2357 blocks = sbitmap_alloc (last_basic_block);
2358 sbitmap_zero (blocks);
/* Purge each block; remember which blocks changed so liveness can be
   recomputed for just those blocks afterwards.  */
2363 bool purged_here = purge_dead_edges (bb);
2365 purged |= purged_here;
2366 if (purged_here && update_life_p)
2367 SET_BIT (blocks, bb->index);
2370 if (update_life_p && purged)
2371 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2372 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2373 | PROP_KILL_DEAD_CODE);
2376 sbitmap_free (blocks);