/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"

/* Verify that there is exactly one jump instruction emitted since LAST and
   attach a REG_BR_PROB note to it specifying PROBABILITY.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */
add_reg_br_prob_note (rtx last, int probability)
  if (profile_status == PROFILE_ABSENT)
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
      /* It is common to emit a condjump-around-jump sequence when we don't
	 know how to reverse the conditional.  Special case this.  */
      if (!any_condjump_p (last)
	  || !JUMP_P (NEXT_INSN (last))
	  || !simplejump_p (NEXT_INSN (last))
	  || !NEXT_INSN (NEXT_INSN (last))
	  || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
	  || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
	  || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
	  || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
      gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	= gen_rtx_EXPR_LIST (REG_BR_PROB,
			     GEN_INT (REG_BR_PROB_BASE - probability),

  if (!last || !JUMP_P (last) || !any_condjump_p (last))
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
			 GEN_INT (probability), REG_NOTES (last));
  fprintf (dump_file, "Failed to add probability note\n");
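
/* Illustrative sketch (not part of the original source): REG_BR_PROB notes
   store branch probabilities as fixed-point values scaled by
   REG_BR_PROB_BASE (10000), so a branch predicted taken 80% of the time is
   recorded as GEN_INT (8000).  In the condjump-around-jump special case
   above, the conditional jump branches *around* the simple jump, i.e. it is
   taken exactly when the original edge is NOT, so the inverted value
   REG_BR_PROB_BASE - probability is attached instead:

	jne .L2		; gets REG_BR_PROB 10000 - 8000 = 2000
	jmp .Ltarget	; the 80% path
     .L2:							*/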
#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
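
/* Worked example (illustrative, not from the original source): with a
   16-byte preferred boundary and STARTING_FRAME_OFFSET == 8, the
   computation in expand_used_vars gives off = 8 % 16 = 8, hence
   frame_phase = 16 - 8 = 8, and any frame_offset satisfying
   (frame_offset + 8) % 16 == 0 yields a properly aligned slot.  */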

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore any alignment we
   cannot honor, given the expected alignment of the stack boundary.  */
get_decl_align_unit (tree decl)
  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;
  return align / BITS_PER_UNIT;
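
/* Illustrative example (not from the original source): if a decl asks for
   256-bit alignment but PREFERRED_STACK_BOUNDARY is 128, the request is
   clamped to 128 bits and the function returns 128 / BITS_PER_UNIT = 16
   bytes; cfun->stack_alignment_needed is raised to 128 so the prologue
   knows the frame itself must be at least that aligned.  */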

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;

      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;

  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;
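
/* Worked example (illustrative, not from the original source): on a
   downward-growing frame with frame_phase == 0, frame_offset == -20,
   size == 12 and align == 8, we compute -20 - 12 = -32; -32 & -8 is
   already -32, so the variable lives at offset -32 and frame_offset
   advances to -32.  The &= -align step works because -align is a mask
   with the low log2(align) bits clear, rounding toward more-negative
   (i.e. safely larger) offsets.  */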

/* Accumulate DECL into STACK_VARS.  */
add_stack_var (tree decl)
  if (stack_vars_num >= stack_vars_alloc)
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
	stack_vars_alloc = 32;
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);

  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

/* Compute the linear index of a lower-triangular coordinate (I, J).  */
triangular_index (size_t i, size_t j)
  return (i * (i + 1)) / 2 + j;
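
/* Illustrative sketch (not from the original source) of the packing this
   computes.  For i >= j the pairs map onto a dense array:

     (0,0) -> 0
     (1,0) -> 1   (1,1) -> 2
     (2,0) -> 3   (2,1) -> 4   (2,2) -> 5

   so an interference graph over N objects needs exactly
   triangular_index (N-1, N-1) + 1 == N * (N + 1) / 2 booleans, which is
   what resize_stack_vars_conflict allocates below.  */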

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */
resize_stack_vars_conflict (size_t n)
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
	  (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;

/* Make the decls associated with LUIDs X and Y conflict.  */
add_stack_var_conflict (size_t x, size_t y)
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;

/* Check whether the decls associated with LUIDs X and Y conflict.  */
stack_var_conflict_p (size_t x, size_t y)
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];

/* Returns true if TYPE is or contains a union type.  */
aggregate_contains_union_type (tree type)
  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Otherwise RTL alias analysis comes
   along and, due to type-based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   memory through different types may not alias, and happily reorders stores
   across life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */
add_alias_set_conflicts (void)
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
	  tree type_j = TREE_TYPE (stack_vars[j].decl);
	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
	  /* Either the objects conflict by means of type based
	     aliasing rules, or we need to add a conflict.  */
	      || !objects_must_conflict_p (type_i, type_j)
	      /* In case the types do not conflict, ensure that accesses
		 to elements will conflict.  In case of unions we have
		 to be careful, as type-based aliasing rules may say that
		 accesses to the same memory do not conflict.  So play
		 safe and add a conflict in this case.  */
	    add_stack_var_conflict (i, j);

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */
stack_var_size_cmp (const void *a, const void *b)
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);
  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
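
/* Illustrative sketch (not from the original source) of the list splice
   above.  Suppose partition A is the chain a -> a1 -> EOC and partition B
   is b -> b1 -> EOC, with B being placed at OFFSET within A.  After the
   loop, every element of B carries representative A and its offset is
   rebased by OFFSET; the two assignments then yield

     a -> b -> b1 -> a1 -> EOC

   i.e. B's chain is inserted right after the representative A, so a single
   walk from A visits every member of the merged partition.  */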

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	    Look for the largest non-conflicting object B with size <= S.

partition_stack_vars (void)
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)

  for (si = 0; si < n; ++si)
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
	  size_t j = stack_vars_sorted[sj];
	  HOST_WIDE_INT jsize = stack_vars[j].size;
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)

	  /* Ignore objects too large for the remaining space.  */

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))

	  /* Refine the remaining space check to include alignment.  */
	  if (offset & (jalign - 1))
	      HOST_WIDE_INT toff = offset;
	      toff &= -(HOST_WIDE_INT)jalign;
	      if (isize - (toff - offset) < jsize)

	      isize -= toff - offset;

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j, offset);

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */
dump_stack_var_partition (void)
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
		   stack_vars[j].offset);

/* Assign rtl to DECL at frame offset OFFSET.  */
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set the alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
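
/* Illustrative note (not from the original source) on the alignment
   computation above: offset & -offset isolates the lowest set bit of the
   (phase-corrected) offset, i.e. the largest power of two dividing it.
   For example, offset == 24 gives 24 & -24 == 8, so the slot is known to
   be 8-byte (64-bit) aligned; the result is then clamped to
   STACK_BOUNDARY, and offset == 0 (no bits set) is treated as maximally
   aligned for the same reason.  */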

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */
expand_stack_vars (bool (*pred) (tree))
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))

      offset = alloc_stack_frame_space (stack_vars[i].size,
					stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	expand_one_stack_var_at (stack_vars[j].decl,
				 stack_vars[j].offset + offset);

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */
expand_one_stack_var (tree var)
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */
expand_one_static_var (tree var)
  /* In unit-at-a-time mode all the static variables are expanded at the
     end of the compilation process.  */
  if (flag_unit_at_a_time)

  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))

  /* Give the front end a chance to do whatever.  In practice, this is
     resolving duplicate names for IMA in C.  */
  if (lang_hooks.expand_decl (var))

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */
expand_one_hard_reg_var (tree var)
  rest_of_decl_compilation (var, 0, 0);

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */
expand_one_register_var (tree var)
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))

  /* Trust user variables which have a pointer type to really
     be pointers.  Do not trust compiler generated temporaries
     as our type system is totally busted as it relates to
     pointer arithmetic, which translates into lots of compiler
     generated objects with pointer types, but which are not really
     pointers.  */
  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */
expand_one_error_var (tree var)
  enum machine_mode mode = DECL_MODE (var);

    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */
defer_stack_allocation (tree var, bool toplevel)
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.  */
expand_one_var (tree var, bool toplevel)
  if (TREE_CODE (var) != VAR_DECL)
    lang_hooks.expand_decl (var);
  else if (DECL_EXTERNAL (var))
  else if (DECL_HAS_VALUE_EXPR_P (var))
  else if (TREE_STATIC (var))
    expand_one_static_var (var);
  else if (DECL_RTL_SET_P (var))
  else if (TREE_TYPE (var) == error_mark_node)
    expand_one_error_var (var);
  else if (DECL_HARD_REGISTER (var))
    expand_one_hard_reg_var (var);
  else if (use_register_for_decl (var))
    expand_one_register_var (var);
  else if (defer_stack_allocation (var, toplevel))
    expand_one_stack_var (var);

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */
expand_used_vars_for_block (tree block, bool toplevel)
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* Force local static variables to be output when marked by
       used attribute.  For unit-at-a-time, cgraph code already takes
       care of this.  */
	|| (!flag_unit_at_a_time && TREE_STATIC (t)
	    && DECL_PRESERVE_P (t)))
      expand_one_var (t, toplevel);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
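
/* Illustrative example (not from the original source): for a function
   shaped like

     { int a;  { int b; }  { int c; } }

   the walk adds conflicts (a,b) and (a,c) but not (b,c): B and C live in
   sibling scopes, are never simultaneously live as far as the block tree
   can tell, and so remain candidates for sharing one stack slot in
   partition_stack_vars.  */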

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */
clear_tree_used (tree block)
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))

  SPCT_FLAG_DEFAULT = 1,

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
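
/* Illustrative example (not from the original source): with the default
   --param ssp-buffer-size=8, a local declared as char buf[16] classifies
   as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY (value 5), while
   char small[4] yields SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY
   (value 6) and only triggers -Wstack-protector bookkeeping; a struct
   containing either additionally sets SPCT_HAS_AGGREGATE.  */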

stack_protect_classify_type (tree type)
  unsigned int ret = 0;

  switch (TREE_CODE (type))
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	ret = SPCT_HAS_ARRAY;

    case QUAL_UNION_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */
stack_protect_decl_phase (tree decl)
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG)
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
      else if (bits & SPCT_HAS_ARRAY)
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

    has_protected_decls = true;

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */
stack_protect_decl_phase_1 (tree decl)
  return stack_protect_decl_phase (decl) == 1;

stack_protect_decl_phase_2 (tree decl)
  return stack_protect_decl_phase (decl) == 2;

/* Ensure that variables in different stack protection phases conflict,
   so that they are not merged and do not share the same stack slot.  */
add_stack_protection_conflicts (void)
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
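
/* Illustrative note (not from the original source): phase 1 (character
   arrays) is allocated closest to the guard value, phase 2 (other arrays)
   next, and phase 0 (everything else) furthest away, so that an overflow
   out of a string buffer runs into the guard rather than into the scalars
   stored beyond it.  The pairwise conflicts added above simply forbid the
   partitioner from coalescing variables across phases.  */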

/* Create a decl for the guard at the top of the stack frame.  */
create_stack_guard (void)
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  cfun->stack_protect_guard = guard;

/* Helper routine to check if a record or union contains an array field.  */
record_or_union_type_has_array_p (tree tree_type)
  tree fields = TYPE_FIELDS (tree_type);

  for (f = fields; f; f = TREE_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
	tree field_type = TREE_TYPE (f);
	if ((TREE_CODE (field_type) == RECORD_TYPE
	     || TREE_CODE (field_type) == UNION_TYPE
	     || TREE_CODE (field_type) == QUAL_UNION_TYPE)
	    && record_or_union_type_has_array_p (field_type))
	if (TREE_CODE (field_type) == ARRAY_TYPE)

/* Expand all variables used in the function.  */
expand_used_vars (void)
  tree t, outer_block = DECL_INITIAL (current_function_decl);
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;

  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (outer_block);

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
	tree var = TREE_VALUE (t);
	if (!is_global_var (var))
	    tree var_type = TREE_TYPE (var);
	    /* Examine local referenced variables that have their addresses
	       taken, contain an array, or are arrays.  */
	    if (TREE_CODE (var) == VAR_DECL
		&& (TREE_CODE (var_type) == ARRAY_TYPE
		    || TREE_ADDRESSABLE (var)
		    || ((TREE_CODE (var_type) == RECORD_TYPE
			 || TREE_CODE (var_type) == UNION_TYPE
			 || TREE_CODE (var_type) == QUAL_UNION_TYPE)
			&& record_or_union_type_has_array_p (var_type))))
	      gen_stack_protect_signal = true;

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))

      /* Any variable that could have been hoisted into an SSA_NAME
	 will have been propagated anywhere the optimizers chose,
	 i.e. not confined to their original block.  Allocate them
	 as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      expand_one_var (var, true);

  cfun->unexpanded_var_list = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
	dump_stack_var_partition ();

  switch (flag_stack_protect)
      create_stack_guard ();

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
	  || current_function_calls_alloca || has_protected_decls)
	create_stack_guard ();

    case SPCT_FLAG_DEFAULT:
      if (current_function_calls_alloca || has_protected_decls)
	create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == 2)
	    expand_stack_vars (stack_protect_decl_phase_2);

      expand_stack_vars (NULL);

      /* Free up stack variable graph data.  */
      XDELETEVEC (stack_vars);
      XDELETEVEC (stack_vars_sorted);
      XDELETEVEC (stack_vars_conflict);
      stack_vars_alloc = stack_vars_num = 0;
      stack_vars_conflict = NULL;
      stack_vars_conflict_alloc = 0;

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
  if (dump_file && (dump_flags & TDF_DETAILS))
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */
expand_gimple_cond_expr (basic_block bb, tree stmt)
  basic_block new_bb, dest;
  tree pred = COND_EXPR_COND (stmt);
  tree then_exp = COND_EXPR_THEN (stmt);
  tree else_exp = COND_EXPR_ELSE (stmt);

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
      emit_line_note (*(EXPR_LOCUS (stmt)));
      record_block_change (TREE_BLOCK (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
      jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (then_exp))
	emit_line_note (*(EXPR_LOCUS (then_exp)));
  if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
      jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (else_exp))
	emit_line_note (*(EXPR_LOCUS (else_exp)));

  gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
	      && TREE_CODE (else_exp) == GOTO_EXPR);

  jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  expand_expr (else_exp, const0_rtx, VOIDmode, 0);

  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (EXPR_LOCUS (else_exp))
    emit_line_note (*(EXPR_LOCUS (else_exp)));

/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;

  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	  if (e->dest != EXIT_BLOCK_PTR)
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	  probability += e->probability;

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
      /* For instance, the sqrt builtin expander expands an if with a
	 sibcall in the THEN arm and a label for the ELSE arm.  */
      if (LABEL_P (NEXT_INSN (last)))
	  *can_fallthru = true;
      delete_insn (NEXT_INSN (last));

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;

  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

/* Expand basic block BB from GIMPLE trees to RTL.  */
expand_gimple_basic_block (basic_block bb)
  block_stmt_iterator bsi = bsi_start (bb);
	     "\n;; Generating RTL for tree basic block %d\n",

  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  if (!bsi_end_p (bsi))
    stmt = bsi_stmt (bsi);

  if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
      last = get_last_insn ();

      expand_expr_stmt (stmt);

      /* Java emits line number notes at the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
	 It is safe to remove them here as find_many_sub_basic_blocks will
	 rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)

  for (; !bsi_end_p (bsi); bsi_next (&bsi))
      tree stmt = bsi_stmt (bsi);

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	  new_bb = expand_gimple_cond_expr (bb, stmt);
	  tree call = get_call_expr_in (stmt);
	  if (call && CALL_EXPR_TAILCALL (call))
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	  last = get_last_insn ();
	  expand_expr_stmt (stmt);
	  maybe_dump_rtl_for_tree_stmt (stmt, last);

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));

  update_bb_for_insn (bb);

/* Create a basic block for initialization code.  */
construct_init_block (void)
  basic_block init_block, first_block;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);

/* Create a block containing landing pads and similar stuff.  */
construct_exit_block (void)
  rtx head = get_last_insn ();
  basic_block exit_block;

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
#ifdef USE_MAPPED_LOCATION
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
  if (cfun->function_end_locus.file)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  record_block_change (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();

  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
  if (IS_TYPE_OR_DECL_P (t))
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t, 2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == NOP_EXPR
	     || TREE_CODE (t) == CONVERT_EXPR)
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	  t = get_base_address (t);
	  if (t && DECL_P (t))
	    TREE_ADDRESSABLE (t) = 1;

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */
discover_nonconstant_array_refs (void)
  block_stmt_iterator bsi;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */
tree_expand_cfg (void)
  basic_block bb, init_block;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  /* Prepare the rtl middle end to start recording block changes.  */
  reset_block_changes ();

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
      if (current_function_calls_alloca)
	warning (0, "not protecting local variables: variable length buffer");
      if (has_short_buffer && !cfun->stack_protect_guard)
	warning (0, "not protecting function: no buffer at least %d bytes long",
		 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (cfun->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  construct_exit_block ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
     EH regions.  */
  convert_from_eh_region_ranges ();

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

#ifdef ENABLE_CHECKING

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  finalize_block_changes ();
	     "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

struct tree_opt_pass pass_expand =
  "expand",				/* name */
  tree_expand_cfg,			/* execute */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,		/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_trees,				/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func,			/* todo_flags_finish */