1 /* Expands front end tree to back end RTL for GCC.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 /* This file handles the generation of rtl code from tree structure
26 at the level of the function as a whole.
27 It creates the rtl expressions for parameters and auto variables
28 and has full responsibility for allocating stack slots.
30 `expand_function_start' is called at the beginning of a function,
31 before the function body is parsed, and `expand_function_end' is
32 called after parsing the body.
34 Call `assign_stack_local' to allocate a stack slot for a local variable.
35 This is usually done during the RTL generation for the function body,
36 but it can also be done in the reload pass when a pseudo-register does
37 not get a hard register. */
41 #include "coretypes.h"
52 #include "hard-reg-set.h"
53 #include "insn-config.h"
56 #include "basic-block.h"
61 #include "integrate.h"
62 #include "langhooks.h"
64 #include "cfglayout.h"
65 #include "tree-gimple.h"
66 #include "tree-pass.h"
/* Target-overridable alignment and rounding helpers.
   NOTE(review): this listing is line-sampled; the matching #endif lines
   for the #ifndef guards below are elided -- verify against the full file.  */
70 #ifndef LOCAL_ALIGNMENT
71 #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
74 #ifndef STACK_ALIGNMENT_NEEDED
75 #define STACK_ALIGNMENT_NEEDED 1
/* Stack boundary expressed in bytes rather than bits.  */
78 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
80 /* Some systems use __main in a way incompatible with its use in gcc, in these
81 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
82 give the same symbol without quotes for an alternative entry point. You
83 must define both, or neither. */
85 #define NAME__MAIN "__main"
88 /* Round a value to the lowest integer less than it that is a multiple of
89 the required alignment. Avoid using division in case the value is
90 negative. Assume the alignment is a power of two. */
91 #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
93 /* Similar, but round to the next highest integer that meets the
/* CEIL_ROUND(v, a) == smallest multiple of A that is >= V; A must be a
   power of two (same no-division trick as FLOOR_ROUND above).  */
95 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
97 /* Nonzero if function being compiled doesn't contain any calls
98 (ignoring the prologue and epilogue). This is set prior to
99 local register allocation and is valid for the remaining
101 int current_function_is_leaf;
103 /* Nonzero if function being compiled doesn't modify the stack pointer
104 (ignoring the prologue and epilogue). This is only valid after
105 life_analysis has run. */
106 int current_function_sp_is_unchanging;
108 /* Nonzero if the function being compiled is a leaf function which only
109 uses leaf registers. This is valid after reload (specifically after
110 sched2) and is useful only if the port defines LEAF_REGISTERS. */
111 int current_function_uses_only_leaf_regs;
113 /* Nonzero once virtual register instantiation has been done.
114 assign_stack_local uses frame_pointer_rtx when this is nonzero.
115 calls.c:emit_library_call_value_1 uses it to set up
116 post-instantiation libcalls. */
117 int virtuals_instantiated;
119 /* APPLE LOCAL begin radar 5732232 - blocks */
/* Semantic info for the Blocks-language block currently being compiled,
   or NULL when not inside a block.  */
120 struct block_sema_info *cur_block;
121 /* APPLE LOCAL end radar 5732232 - blocks */
123 /* Assign unique numbers to labels generated for profiling, debugging, etc. */
124 static GTY(()) int funcdef_no;
126 /* These variables hold pointers to functions to create and destroy
127 target specific, per-function data structures. */
128 struct machine_function * (*init_machine_status) (void);
130 /* The currently compiled function. */
131 struct function *cfun = 0;
133 /* These arrays record the INSN_UIDs of the prologue and epilogue insns. */
134 static VEC(int,heap) *prologue;
135 static VEC(int,heap) *epilogue;
137 /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
139 static VEC(int,heap) *sibcall_epilogue;
141 /* In order to evaluate some expressions, such as function calls returning
142 structures in memory, we need to temporarily allocate stack locations.
143 We record each allocated temporary in the following structure.
145 Associated with each temporary slot is a nesting level. When we pop up
146 one level, all temporaries associated with the previous level are freed.
147 Normally, all temporaries are freed after the execution of the statement
148 in which they were created. However, if we are inside a ({...}) grouping,
149 the result may be in a temporary and hence must be preserved. If the
150 result could be in a temporary, we preserve it if we can determine which
151 one it is in. If we cannot determine which temporary may contain the
152 result, all temporaries are preserved. A temporary is preserved by
153 pretending it was allocated at the previous nesting level.
155 Automatic variables are also assigned temporary slots, at the nesting
156 level where they are defined. They are marked a "kept" so that
157 free_temp_slots will not free them. */
/* NOTE(review): this listing is line-sampled -- several field declarations
   (slot, address, align, size, type, in_use, addr_taken, level, keep)
   appear to be elided; only their leading comments survive.  Verify the
   struct layout against the full source.  */
159 struct temp_slot GTY(())
161 /* Points to next temporary slot. */
162 struct temp_slot *next;
163 /* Points to previous temporary slot. */
164 struct temp_slot *prev;
166 /* The rtx to used to reference the slot. */
168 /* The rtx used to represent the address if not the address of the
169 slot above. May be an EXPR_LIST if multiple addresses exist. */
171 /* The alignment (in bits) of the slot. */
173 /* The size, in units, of the slot. */
175 /* The type of the object in the slot, or zero if it doesn't correspond
176 to a type. We use this to determine whether a slot can be reused.
177 It can be reused if objects of the type of the new slot will always
178 conflict with objects of the type of the old slot. */
180 /* Nonzero if this temporary is currently in use. */
182 /* Nonzero if this temporary has its address taken. */
184 /* Nesting level at which this slot is being used. */
186 /* Nonzero if this should survive a call to free_temp_slots. */
188 /* The offset of the slot from the frame_pointer, including extra space
189 for alignment. This info is for combine_temp_slots. */
190 HOST_WIDE_INT base_offset;
191 /* The size of the slot, including extra space for alignment. This
192 info is for combine_temp_slots. */
193 HOST_WIDE_INT full_size;
196 /* Forward declarations. */
198 static rtx assign_stack_local_1 (enum machine_mode, HOST_WIDE_INT, int,
200 static struct temp_slot *find_temp_slot_from_address (rtx);
201 static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
202 static void pad_below (struct args_size *, enum machine_mode, tree);
203 static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
204 static int all_blocks (tree, tree *);
205 static tree *get_block_vector (tree, int *);
206 extern tree debug_find_var_in_block_tree (tree, tree);
207 /* We always define `record_insns' even if it's not used so that we
208 can always export `prologue_epilogue_contains'. */
209 static void record_insns (rtx, VEC(int,heap) **) ATTRIBUTE_UNUSED;
210 static int contains (rtx, VEC(int,heap) **);
212 static void emit_return_into_block (basic_block, rtx);
/* NOTE(review): the #endif matching this conditional is elided from this
   sampled listing.  */
214 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
215 static rtx keep_stack_depressed (rtx);
217 static void prepare_function_start (tree);
218 static void do_clobber_return_reg (rtx, void *);
219 static void do_use_return_reg (rtx, void *);
220 static void set_insn_locators (rtx, int) ATTRIBUTE_UNUSED;
221 /* APPLE LOCAL radar 6163705, Blocks prologues */
222 static rtx find_block_prologue_insns (void);
224 /* Pointer to chain of `struct function' for containing functions. */
225 struct function *outer_function_chain;
227 /* Given a function decl for a containing function,
228 return the `struct function' for it. */
/* Walk the chain of saved enclosing functions looking for the one whose
   decl is DECL.  NOTE(review): sampled listing -- the return type line,
   braces, and the match/return statements are elided here.  */
231 find_function_data (tree decl)
235 for (p = outer_function_chain; p; p = p->outer)
242 /* Save the current context for compilation of a nested function.
243 This is called from language-specific code. The caller should use
244 the enter_nested langhook to save any language-specific state,
245 since this function knows only about language-independent
/* Save the current function's compilation state and push it onto
   outer_function_chain so a nested function can be compiled.  The
   enter_nested langhook lets the front end save its own state.
   NOTE(review): sampled listing -- allocation of P and other setup
   lines are elided here.  */
249 push_function_context_to (tree context ATTRIBUTE_UNUSED)
254 init_dummy_function_start ();
257 p->outer = outer_function_chain;
258 outer_function_chain = p;
260 lang_hooks.function.enter_nested (p);
/* Convenience wrapper: push the context of the function currently
   being compiled (current_function_decl).  */
266 push_function_context (void)
268 push_function_context_to (current_function_decl);
271 /* Restore the last saved context, at the end of a nested function.
272 This function is called from language-specific code. */
/* Pop the most recently pushed function context off outer_function_chain,
   restore current_function_decl from it, notify the front end via the
   leave_nested langhook, and reset per-function RTL-generation globals.
   NOTE(review): sampled listing -- braces and some restore statements
   are elided here.  */
275 pop_function_context_from (tree context ATTRIBUTE_UNUSED)
277 struct function *p = outer_function_chain;
280 outer_function_chain = p->outer;
282 current_function_decl = p->decl;
284 lang_hooks.function.leave_nested (p);
286 /* Reset variables that have known state during rtx generation. */
287 virtuals_instantiated = 0;
288 generating_concat_p = 1;
/* Convenience wrapper: pop the context saved for the current function.  */
292 pop_function_context (void)
294 pop_function_context_from (current_function_decl);
297 /* Clear out all parts of the state in F that can safely be discarded
298 after the function has been parsed, but not compiled, to let
299 garbage collection reclaim the memory. */
/* Release the parts of F's state that are no longer needed once the
   function has been parsed (but not yet compiled), so the garbage
   collector can reclaim them.  The comments below record which fields
   must NOT be freed because code generation still uses them.  */
302 free_after_parsing (struct function *f)
304 /* f->expr->forced_labels is used by code generation. */
305 /* f->emit->regno_reg_rtx is used by code generation. */
306 /* f->varasm is used by code generation. */
307 /* f->eh->eh_return_stub_label is used by code generation. */
309 lang_hooks.function.final (f);
312 /* Clear out all parts of the state in F that can safely be discarded
313 after the function has been compiled, to let garbage collection
314 reclaim the memory. */
/* Release all state in F that is dead once the function has been fully
   compiled: the prologue/epilogue INSN_UID vectors and the per-function
   pointer fields, which are nulled so the GC can collect what they
   referenced.  NOTE(review): sampled listing -- additional field
   clears between lines 321 and 330 are elided here.  */
317 free_after_compilation (struct function *f)
319 VEC_free (int, heap, prologue);
320 VEC_free (int, heap, epilogue);
321 VEC_free (int, heap, sibcall_epilogue);
330 f->x_avail_temp_slots = NULL;
331 f->x_used_temp_slots = NULL;
332 f->arg_offset_rtx = NULL;
333 f->return_rtx = NULL;
334 f->internal_arg_pointer = NULL;
335 f->x_nonlocal_goto_handler_labels = NULL;
336 f->x_return_label = NULL;
337 f->x_naked_return_label = NULL;
338 f->x_stack_slot_list = NULL;
339 f->x_stack_check_probe_note = NULL;
340 f->x_arg_pointer_save_area = NULL;
341 f->x_parm_birth_insn = NULL;
342 f->epilogue_delay_list = NULL;
345 /* Allocate fixed slots in the stack frame of the current function. */
347 /* Return size needed for stack frame based on slots so far allocated in
349 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
350 the caller may have to do that. */
/* Return F's frame size as a count from zero: x_frame_offset is negated
   when the frame grows downward so the result is nonnegative either way.  */
353 get_func_frame_size (struct function *f)
355 if (FRAME_GROWS_DOWNWARD)
356 return -f->x_frame_offset;
358 return f->x_frame_offset;
361 /* Return size needed for stack frame based on slots so far allocated.
362 This size counts from zero. It is not rounded to PREFERRED_STACK_BOUNDARY;
363 the caller may have to do that. */
/* Convenience wrapper: frame size of the current function (cfun).  */
366 get_frame_size (void)
368 return get_func_frame_size (cfun);
371 /* Issue an error message and return TRUE if frame OFFSET overflows in
372 the signed target pointer arithmetics for function FUNC. Otherwise
/* Diagnose a frame OFFSET that overflows signed target-pointer
   arithmetic for FUNC.  SIZE is the magnitude of OFFSET; the limit is
   half the pointer range minus 64 words reserved for the fixed part of
   the frame.  NOTE(review): sampled listing -- the return statements
   are elided here.  */
376 frame_offset_overflow (HOST_WIDE_INT offset, tree func)
378 unsigned HOST_WIDE_INT size = FRAME_GROWS_DOWNWARD ? -offset : offset;
380 if (size > ((unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (Pmode) - 1))
381 /* Leave room for the fixed part of the frame. */
382 - 64 * UNITS_PER_WORD)
384 error ("%Jtotal size of local objects too large", func);
391 /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
392 with machine mode MODE.
394 ALIGN controls the amount of alignment for the address of the slot:
395 0 means according to MODE,
396 -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
397 -2 means use BITS_PER_UNIT,
398 positive specifies alignment boundary in bits.
400 We do not round to stack_boundary here.
402 FUNCTION specifies the function to allocate in. */
/* Core stack-slot allocator: carve SIZE bytes with alignment ALIGN out
   of FUNCTION's frame and return a MEM in machine mode MODE (see the
   block comment above for the ALIGN encoding: 0, -1, -2, or bits).
   Adjusts function->x_frame_offset in the direction the frame grows,
   records the new slot on x_stack_slot_list, and checks for frame
   overflow.  NOTE(review): sampled listing -- braces, some else arms,
   and the final return are elided; read alongside the full source.  */
405 assign_stack_local_1 (enum machine_mode mode, HOST_WIDE_INT size, int align,
406 struct function *function)
409 int bigend_correction = 0;
410 unsigned int alignment;
411 int frame_off, frame_alignment, frame_phase;
418 alignment = BIGGEST_ALIGNMENT;
420 alignment = GET_MODE_ALIGNMENT (mode);
422 /* Allow the target to (possibly) increase the alignment of this
424 type = lang_hooks.types.type_for_mode (mode, 0);
426 alignment = LOCAL_ALIGNMENT (type, alignment);
/* From here on, ALIGNMENT is measured in bytes, not bits.  */
428 alignment /= BITS_PER_UNIT;
430 else if (align == -1)
432 alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
433 size = CEIL_ROUND (size, alignment);
435 else if (align == -2)
436 alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
438 alignment = align / BITS_PER_UNIT;
440 if (FRAME_GROWS_DOWNWARD)
441 function->x_frame_offset -= size;
443 /* Ignore alignment we can't do with expected alignment of the boundary. */
444 if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
445 alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
/* Record the strictest alignment any slot has needed so far.  */
447 if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
448 function->stack_alignment_needed = alignment * BITS_PER_UNIT;
450 /* Calculate how many bytes the start of local variables is off from
452 frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
453 frame_off = STARTING_FRAME_OFFSET % frame_alignment;
454 frame_phase = frame_off ? frame_alignment - frame_off : 0;
456 /* Round the frame offset to the specified alignment. The default is
457 to always honor requests to align the stack but a port may choose to
458 do its own stack alignment by defining STACK_ALIGNMENT_NEEDED. */
459 if (STACK_ALIGNMENT_NEEDED
463 /* We must be careful here, since FRAME_OFFSET might be negative and
464 division with a negative dividend isn't as well defined as we might
465 like. So we instead assume that ALIGNMENT is a power of two and
466 use logical operations which are unambiguous. */
467 if (FRAME_GROWS_DOWNWARD)
468 function->x_frame_offset
469 = (FLOOR_ROUND (function->x_frame_offset - frame_phase,
470 (unsigned HOST_WIDE_INT) alignment)
473 function->x_frame_offset
474 = (CEIL_ROUND (function->x_frame_offset - frame_phase,
475 (unsigned HOST_WIDE_INT) alignment)
479 /* On a big-endian machine, if we are allocating more space than we will use,
480 use the least significant bytes of those that are allocated. */
481 if (BYTES_BIG_ENDIAN && mode != BLKmode && GET_MODE_SIZE (mode) < size)
482 bigend_correction = size - GET_MODE_SIZE (mode);
484 /* If we have already instantiated virtual registers, return the actual
485 address relative to the frame pointer. */
486 if (function == cfun && virtuals_instantiated)
487 addr = plus_constant (frame_pointer_rtx,
489 (frame_offset + bigend_correction
490 + STARTING_FRAME_OFFSET, Pmode));
/* Otherwise address the slot via the virtual stack-vars register;
   instantiation will rewrite it later.  */
492 addr = plus_constant (virtual_stack_vars_rtx,
494 (function->x_frame_offset + bigend_correction,
497 if (!FRAME_GROWS_DOWNWARD)
498 function->x_frame_offset += size;
500 x = gen_rtx_MEM (mode, addr);
/* Stack slots never trap.  */
501 MEM_NOTRAP_P (x) = 1;
503 function->x_stack_slot_list
504 = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
506 if (frame_offset_overflow (function->x_frame_offset, function->decl))
507 function->x_frame_offset = 0;
512 /* Wrapper around assign_stack_local_1; assign a local stack slot for the
/* Public entry point: allocate a stack slot in the current function.  */
516 assign_stack_local (enum machine_mode mode, HOST_WIDE_INT size, int align)
518 return assign_stack_local_1 (mode, size, align, cfun);
522 /* Removes temporary slot TEMP from LIST. */
/* Unlink TEMP from the doubly-linked list rooted at *LIST and clear its
   link pointers.  NOTE(review): sampled listing -- the guard conditions
   and the head-of-list update are elided here.  */
525 cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
528 temp->next->prev = temp->prev;
530 temp->prev->next = temp->next;
534 temp->prev = temp->next = NULL;
537 /* Inserts temporary slot TEMP to LIST. */
/* Link TEMP at the head of the doubly-linked list rooted at *LIST.
   NOTE(review): sampled listing -- the remaining link assignments are
   elided here.  */
540 insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
544 (*list)->prev = temp;
549 /* Returns the list of used temp slots at LEVEL. */
/* Return a pointer to the used-temp-slot list head for nesting LEVEL,
   growing (and zero-filling) the used_temp_slots vector on demand so
   the level always exists.  */
551 static struct temp_slot **
552 temp_slots_at_level (int level)
554 if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
556 size_t old_length = VEC_length (temp_slot_p, used_temp_slots);
559 VEC_safe_grow (temp_slot_p, gc, used_temp_slots, level + 1);
560 p = VEC_address (temp_slot_p, used_temp_slots);
/* Newly exposed entries must start out as empty lists.  */
561 memset (&p[old_length], 0,
562 sizeof (temp_slot_p) * (level + 1 - old_length));
565 return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
568 /* Returns the maximal temporary slot level. */
/* Return the highest temp-slot nesting level in use, or a sentinel when
   used_temp_slots has not been created (the elided branch returns it).  */
571 max_slot_level (void)
573 if (!used_temp_slots)
576 return VEC_length (temp_slot_p, used_temp_slots) - 1;
579 /* Moves temporary slot TEMP to LEVEL. */
/* Move TEMP from its current level's list to the list for LEVEL.
   NOTE(review): sampled listing -- the assignment updating temp->level
   is elided here.  */
582 move_slot_to_level (struct temp_slot *temp, int level)
584 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
585 insert_slot_to_list (temp, temp_slots_at_level (level));
589 /* Make temporary slot TEMP available. */
/* Retire TEMP from its in-use level onto the free list
   (avail_temp_slots) so it can be reused by later allocations.  */
592 make_slot_available (struct temp_slot *temp)
594 cut_slot_from_list (temp, temp_slots_at_level (temp->level));
595 insert_slot_to_list (temp, &avail_temp_slots);
600 /* Allocate a temporary stack slot and record it for possible later
603 MODE is the machine mode to be given to the returned rtx.
605 SIZE is the size in units of the space required. We do no rounding here
606 since assign_stack_local will do any required rounding.
608 KEEP is 1 if this slot is to be retained after a call to
609 free_temp_slots. Automatic variables for a block are allocated
610 with this flag. KEEP values of 2 or 3 were needed respectively
611 for variables whose lifetime is controlled by CLEANUP_POINT_EXPRs
612 or for SAVE_EXPRs, but they are now unused.
614 TYPE is the type that will be used for the stack slot. */
/* Allocate (or reuse) a temporary stack slot of SIZE bytes in mode MODE
   for an object of TYPE; KEEP nonzero means the slot survives
   free_temp_slots (see the block comment above).  Reuse is attempted
   first from avail_temp_slots; otherwise a fresh slot comes from
   assign_stack_local.  Returns a freshly built MEM.
   NOTE(review): sampled listing -- braces, several else arms, the
   best_p bookkeeping, and the final return are elided here.  */
617 assign_stack_temp_for_type (enum machine_mode mode, HOST_WIDE_INT size,
621 struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
624 /* If SIZE is -1 it means that somebody tried to allocate a temporary
625 of a variable size. */
626 gcc_assert (size != -1);
628 /* These are now unused. */
629 gcc_assert (keep <= 1);
632 align = BIGGEST_ALIGNMENT;
634 align = GET_MODE_ALIGNMENT (mode);
637 type = lang_hooks.types.type_for_mode (mode, 0);
640 align = LOCAL_ALIGNMENT (type, align);
642 /* Try to find an available, already-allocated temporary of the proper
643 mode which meets the size and alignment requirements. Choose the
644 smallest one with the closest alignment.
646 If assign_stack_temp is called outside of the tree->rtl expansion,
647 we cannot reuse the stack slots (that may still refer to
648 VIRTUAL_STACK_VARS_REGNUM). */
649 if (!virtuals_instantiated)
651 for (p = avail_temp_slots; p; p = p->next)
653 if (p->align >= align && p->size >= size
654 && GET_MODE (p->slot) == mode
655 && objects_must_conflict_p (p->type, type)
656 && (best_p == 0 || best_p->size > p->size
657 || (best_p->size == p->size && best_p->align > p->align)))
/* An exact match ends the search immediately.  */
659 if (p->align == align && p->size == size)
662 cut_slot_from_list (selected, &avail_temp_slots);
671 /* Make our best, if any, the one to use. */
675 cut_slot_from_list (selected, &avail_temp_slots);
677 /* If there are enough aligned bytes left over, make them into a new
678 temp_slot so that the extra bytes don't get wasted. Do this only
679 for BLKmode slots, so that we can be sure of the alignment. */
680 if (GET_MODE (best_p->slot) == BLKmode)
682 int alignment = best_p->align / BITS_PER_UNIT;
683 HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);
685 if (best_p->size - rounded_size >= alignment)
687 p = ggc_alloc (sizeof (struct temp_slot));
688 p->in_use = p->addr_taken = 0;
689 p->size = best_p->size - rounded_size;
690 p->base_offset = best_p->base_offset + rounded_size;
691 p->full_size = best_p->full_size - rounded_size;
692 p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
693 p->align = best_p->align;
695 p->type = best_p->type;
696 insert_slot_to_list (p, &avail_temp_slots);
698 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
/* Shrink the chosen slot to just the rounded request.  */
701 best_p->size = rounded_size;
702 best_p->full_size = rounded_size;
707 /* If we still didn't find one, make a new temporary. */
710 HOST_WIDE_INT frame_offset_old = frame_offset;
712 p = ggc_alloc (sizeof (struct temp_slot));
714 /* We are passing an explicit alignment request to assign_stack_local.
715 One side effect of that is assign_stack_local will not round SIZE
716 to ensure the frame offset remains suitably aligned.
718 So for requests which depended on the rounding of SIZE, we go ahead
719 and round it now. We also make sure ALIGNMENT is at least
720 BIGGEST_ALIGNMENT. */
721 gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
722 p->slot = assign_stack_local (mode,
724 ? CEIL_ROUND (size, (int) align / BITS_PER_UNIT)
730 /* The following slot size computation is necessary because we don't
731 know the actual size of the temporary slot until assign_stack_local
732 has performed all the frame alignment and size rounding for the
733 requested temporary. Note that extra space added for alignment
734 can be either above or below this stack slot depending on which
735 way the frame grows. We include the extra space if and only if it
736 is above this slot. */
737 if (FRAME_GROWS_DOWNWARD)
738 p->size = frame_offset_old - frame_offset;
742 /* Now define the fields used by combine_temp_slots. */
743 if (FRAME_GROWS_DOWNWARD)
745 p->base_offset = frame_offset;
746 p->full_size = frame_offset_old - frame_offset;
750 p->base_offset = frame_offset_old;
751 p->full_size = frame_offset - frame_offset_old;
762 p->level = temp_slot_level;
765 pp = temp_slots_at_level (p->level);
766 insert_slot_to_list (p, pp);
768 /* Create a new MEM rtx to avoid clobbering MEM flags of old slots. */
769 slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
770 stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);
772 /* If we know the alias set for the memory that will be used, use
773 it. If there's no TYPE, then we don't know anything about the
774 alias set for the memory. */
775 set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
776 set_mem_align (slot, align);
778 /* If a type is specified, set the relevant flags. */
781 MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
782 MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
784 MEM_NOTRAP_P (slot) = 1;
789 /* Allocate a temporary stack slot and record it for possible later
790 reuse. First three arguments are same as in preceding function. */
/* Convenience wrapper: allocate a temporary slot with no known type.  */
793 assign_stack_temp (enum machine_mode mode, HOST_WIDE_INT size, int keep)
795 return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
798 /* Assign a temporary.
799 If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
800 and so that should be used in error messages. In either case, we
801 allocate of the given type.
802 KEEP is as for assign_stack_temp.
803 MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
804 it is 0 if a register is OK.
805 DONT_PROMOTE is 1 if we should not promote values in register
/* Allocate a temporary for TYPE_OR_DECL (see block comment above for
   the parameter contracts).  Memory is used for BLKmode values or when
   MEMORY_REQUIRED; otherwise a (possibly promoted) pseudo register is
   returned.  NOTE(review): sampled listing -- braces, the zero-size
   fixup, the variable-size fallback branch, and the memory-path return
   are elided here.  */
809 assign_temp (tree type_or_decl, int keep, int memory_required,
810 int dont_promote ATTRIBUTE_UNUSED)
813 enum machine_mode mode;
818 if (DECL_P (type_or_decl))
819 decl = type_or_decl, type = TREE_TYPE (decl);
821 decl = NULL, type = type_or_decl;
823 mode = TYPE_MODE (type);
825 unsignedp = TYPE_UNSIGNED (type);
828 if (mode == BLKmode || memory_required)
830 HOST_WIDE_INT size = int_size_in_bytes (type);
833 /* Zero sized arrays are GNU C extension. Set size to 1 to avoid
834 problems with allocating the stack space. */
838 /* Unfortunately, we don't yet know how to allocate variable-sized
839 temporaries. However, sometimes we can find a fixed upper limit on
840 the size, so try that instead. */
842 size = max_int_size_in_bytes (type);
844 /* The size of the temporary may be too large to fit into an integer. */
845 /* ??? Not sure this should happen except for user silliness, so limit
846 this to things that aren't compiler-generated temporaries. The
847 rest of the time we'll die in assign_stack_temp_for_type. */
848 if (decl && size == -1
849 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
851 error ("size of variable %q+D is too large", decl);
855 tmp = assign_stack_temp_for_type (mode, size, keep, type);
/* Register path: promote the mode per target convention first.  */
861 mode = promote_mode (type, mode, &unsignedp, 0);
864 return gen_reg_rtx (mode);
867 /* Combine temporary stack slots which are adjacent on the stack.
869 This allows for better use of already allocated stack space. This is only
870 done for BLKmode slots because we can be sure that we won't have alignment
871 problems in this case. */
/* Merge free BLKmode temp slots that are adjacent in the frame into
   single larger slots, for better reuse of allocated stack space.
   Skipped entirely under strict aliasing (slot alias-set info would be
   lost) and, without expensive optimizations, when there are too many
   slots to scan cheaply.  NOTE(review): sampled listing -- braces,
   early returns, and the merge bookkeeping lines are elided here.  */
874 combine_temp_slots (void)
876 struct temp_slot *p, *q, *next, *next_q;
879 /* We can't combine slots, because the information about which slot
880 is in which alias set will be lost. */
881 if (flag_strict_aliasing)
884 /* If there are a lot of temp slots, don't do anything unless
885 high levels of optimization. */
886 if (! flag_expensive_optimizations)
887 for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
888 if (num_slots > 100 || (num_slots > 10 && optimize == 0))
891 for (p = avail_temp_slots; p; p = next)
897 if (GET_MODE (p->slot) != BLKmode)
900 for (q = p->next; q; q = next_q)
906 if (GET_MODE (q->slot) != BLKmode)
909 if (p->base_offset + p->full_size == q->base_offset)
911 /* Q comes after P; combine Q into P. */
913 p->full_size += q->full_size;
916 else if (q->base_offset + q->full_size == p->base_offset)
918 /* P comes after Q; combine P into Q. */
920 q->full_size += p->full_size;
925 cut_slot_from_list (q, &avail_temp_slots);
928 /* Either delete P or advance past it. */
930 cut_slot_from_list (p, &avail_temp_slots);
934 /* Find the temp slot corresponding to the object at address X. */
/* Find the in-use temp slot whose object lives at address X: match the
   slot's own address, a (virtual_stack_vars + const) address inside the
   slot's range, or one of the slot's recorded alias addresses; finally
   recurse through a register-plus-something sum.  Returns NULL-ish on
   no match (the elided return line).  NOTE(review): sampled listing --
   braces and several return statements are elided here.  */
936 static struct temp_slot *
937 find_temp_slot_from_address (rtx x)
943 for (i = max_slot_level (); i >= 0; i--)
944 for (p = *temp_slots_at_level (i); p; p = p->next)
946 if (XEXP (p->slot, 0) == x
948 || (GET_CODE (x) == PLUS
949 && XEXP (x, 0) == virtual_stack_vars_rtx
950 && GET_CODE (XEXP (x, 1)) == CONST_INT
951 && INTVAL (XEXP (x, 1)) >= p->base_offset
952 && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
955 else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
956 for (next = p->address; next; next = XEXP (next, 1))
957 if (XEXP (next, 0) == x)
961 /* If we have a sum involving a register, see if it points to a temp
963 if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
964 && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
966 else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
967 && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
973 /* Indicate that NEW is an alternate way of referring to the temp slot
974 that previously was known by OLD. */
/* Record that NEW is another way to address the temp slot previously
   addressed by OLD.  When no slot matches OLD directly, decompose PLUS
   expressions and recurse on matching operands; otherwise append NEW to
   the slot's address alias list.  NOTE(review): sampled listing --
   braces, early returns, and the p->address == 0 assignment are elided
   here.  */
977 update_temp_slot_address (rtx old, rtx new)
981 if (rtx_equal_p (old, new))
984 p = find_temp_slot_from_address (old);
986 /* If we didn't find one, see if both OLD is a PLUS. If so, and NEW
987 is a register, see if one operand of the PLUS is a temporary
988 location. If so, NEW points into it. Otherwise, if both OLD and
989 NEW are a PLUS and if there is a register in common between them.
990 If so, try a recursive call on those values. */
993 if (GET_CODE (old) != PLUS)
998 update_temp_slot_address (XEXP (old, 0), new);
999 update_temp_slot_address (XEXP (old, 1), new);
1002 else if (GET_CODE (new) != PLUS)
/* Both are PLUS: pair up the operand they share and recurse on the rest.  */
1005 if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
1006 update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
1007 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
1008 update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
1009 else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
1010 update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
1011 else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
1012 update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));
1017 /* Otherwise add an alias for the temp's address. */
1018 else if (p->address == 0)
1022 if (GET_CODE (p->address) != EXPR_LIST)
1023 p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);
1025 p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
1029 /* If X could be a reference to a temporary slot, mark the fact that its
1030 address was taken. */
/* If X might reference a temporary slot, flag that slot as having had
   its address taken.  NOTE(review): sampled listing -- the early
   returns and the p->addr_taken assignment are elided here.  */
1033 mark_temp_addr_taken (rtx x)
1035 struct temp_slot *p;
1040 /* If X is not in memory or is at a constant address, it cannot be in
1041 a temporary slot. */
1042 if (!MEM_P (x) || CONSTANT_P (XEXP (x, 0)))
1045 p = find_temp_slot_from_address (XEXP (x, 0));
1050 /* If X could be a reference to a temporary slot, mark that slot as
1051 belonging to the to one level higher than the current level. If X
1052 matched one of our slots, just mark that one. Otherwise, we can't
1053 easily predict which it is, so upgrade all of them. Kept slots
1054 need not be touched.
1056 This is called when an ({...}) construct occurs and a statement
1057 returns a value in memory. */
/* Preserve across free_temp_slots any temporary that could hold the
   value X by moving it one nesting level up (see block comment above:
   called for ({...}) statement expressions).  With no identifiable
   slot, every non-kept slot at the current level is preserved.
   NOTE(review): sampled listing -- braces, returns, and the p != 0
   tests around the levels are elided here.  */
1060 preserve_temp_slots (rtx x)
1062 struct temp_slot *p = 0, *next;
1064 /* If there is no result, we still might have some objects whose address
1065 were taken, so we need to make sure they stay around. */
1068 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1073 move_slot_to_level (p, temp_slot_level - 1);
1079 /* If X is a register that is being used as a pointer, see if we have
1080 a temporary slot we know it points to. To be consistent with
1081 the code below, we really should preserve all non-kept slots
1082 if we can't find a match, but that seems to be much too costly. */
1083 if (REG_P (x) && REG_POINTER (x))
1084 p = find_temp_slot_from_address (x);
1086 /* If X is not in memory or is at a constant address, it cannot be in
1087 a temporary slot, but it can contain something whose address was
1089 if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
1091 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1096 move_slot_to_level (p, temp_slot_level - 1);
1102 /* First see if we can find a match. */
1104 p = find_temp_slot_from_address (XEXP (x, 0));
1108 /* Move everything at our level whose address was taken to our new
1109 level in case we used its address. */
1110 struct temp_slot *q;
1112 if (p->level == temp_slot_level)
1114 for (q = *temp_slots_at_level (temp_slot_level); q; q = next)
1118 if (p != q && q->addr_taken)
1119 move_slot_to_level (q, temp_slot_level - 1);
1122 move_slot_to_level (p, temp_slot_level - 1);
1128 /* Otherwise, preserve all non-kept slots at this level. */
1129 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1134 move_slot_to_level (p, temp_slot_level - 1);
1138 /* Free all temporaries used so far. This is normally called at the
1139 end of generating code for a statement. */
/* Retire every non-kept temporary at the current level (normally called
   at the end of each statement), then coalesce adjacent free slots.
   NOTE(review): sampled listing -- the kept/in_use filter lines are
   elided here.  */
1142 free_temp_slots (void)
1144 struct temp_slot *p, *next;
1146 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1151 make_slot_available (p);
1154 combine_temp_slots ();
1157 /* Push deeper into the nesting level for stack temporaries. */
/* Enter a deeper temp-slot nesting level.  NOTE(review): sampled
   listing -- the body (incrementing temp_slot_level) is elided here.  */
1160 push_temp_slots (void)
1165 /* Pop a temporary nesting level. All slots in use in the current level
/* Leave a temp-slot nesting level, freeing all slots in use at the
   level being exited, then coalesce adjacent free slots.
   NOTE(review): sampled listing -- the temp_slot_level decrement is
   elided here.  */
1169 pop_temp_slots (void)
1171 struct temp_slot *p, *next;
1173 for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
1176 make_slot_available (p);
1179 combine_temp_slots ();
1184 /* Initialize temporary slots. */
/* Reset all temp-slot bookkeeping for a fresh function.  */
1187 init_temp_slots (void)
1189 /* We have not allocated any temporaries yet. */
1190 avail_temp_slots = 0;
1191 used_temp_slots = 0;
1192 temp_slot_level = 0;
1195 /* These routines are responsible for converting virtual register references
1196 to the actual hard register references once RTL generation is complete.
1198 The following four variables are used for communication between the
1199 routines. They contain the offsets of the virtual registers from their
1200 respective hard registers. */
1202 static int in_arg_offset;
1203 static int var_offset;
1204 static int dynamic_offset;
1205 static int out_arg_offset;
1206 static int cfa_offset;
1208 /* In most machines, the stack pointer register is equivalent to the bottom
/* NOTE(review): sampled listing -- #endif lines for the guards below
   are elided.  */
1211 #ifndef STACK_POINTER_OFFSET
1212 #define STACK_POINTER_OFFSET 0
1215 /* If not defined, pick an appropriate default for the offset of dynamically
1216 allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
1217 REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE. */
1219 #ifndef STACK_DYNAMIC_OFFSET
1221 /* The bottom of the stack points to the actual arguments. If
1222 REG_PARM_STACK_SPACE is defined, this includes the space for the register
1223 parameters. However, if OUTGOING_REG_PARM_STACK space is not defined,
1224 stack space for register parameters is not pushed by the caller, but
1225 rather part of the fixed stack areas and hence not included in
1226 `current_function_outgoing_args_size'. Nevertheless, we must allow
1227 for it when allocating stack dynamic objects. */
1229 #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
1230 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1231 ((ACCUMULATE_OUTGOING_ARGS \
1232 ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
1233 + (STACK_POINTER_OFFSET)) \
1236 #define STACK_DYNAMIC_OFFSET(FNDECL) \
1237 ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0) \
1238 + (STACK_POINTER_OFFSET))
1243 /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
1244 is a virtual register, return the equivalent hard register and set the
1245 offset indirectly through the pointer. Otherwise, return 0. */
/* Map virtual register X to its (hard register, offset) replacement.
   Returns the replacement register and stores the offset through POFFSET;
   the offsets come from the in_arg_offset/var_offset/... statics computed
   in instantiate_virtual_regs.  */
1248 instantiate_new_reg (rtx x, HOST_WIDE_INT *poffset)
1251 HOST_WIDE_INT offset;
/* One case per virtual register known to the middle end.  */
1253 if (x == virtual_incoming_args_rtx)
1254 new = arg_pointer_rtx, offset = in_arg_offset;
1255 else if (x == virtual_stack_vars_rtx)
1256 new = frame_pointer_rtx, offset = var_offset;
1257 else if (x == virtual_stack_dynamic_rtx)
1258 new = stack_pointer_rtx, offset = dynamic_offset;
1259 else if (x == virtual_outgoing_args_rtx)
1260 new = stack_pointer_rtx, offset = out_arg_offset;
1261 else if (x == virtual_cfa_rtx)
/* The CFA is expressed off the frame pointer on targets that define
   FRAME_POINTER_CFA_OFFSET, off the arg pointer otherwise.  */
1263 #ifdef FRAME_POINTER_CFA_OFFSET
1264 new = frame_pointer_rtx;
1266 new = arg_pointer_rtx;
1268 offset = cfa_offset;
1277 /* A subroutine of instantiate_virtual_regs, called via for_each_rtx.
1278 Instantiate any virtual registers present inside of *LOC. The expression
1279 is simplified, as much as possible, but is not to be considered "valid"
1280 in any sense implied by the target. If any change is made, set CHANGED
/* for_each_rtx callback: rewrite any virtual register inside *LOC into
   (plus hard-reg offset).  DATA, when non-null, points to a bool set
   when a change is made.  */
1284 instantiate_virtual_regs_in_rtx (rtx *loc, void *data)
1286 HOST_WIDE_INT offset;
1287 bool *changed = (bool *) data;
1294 switch (GET_CODE (x))
/* Bare virtual register: replace with hard register plus its offset.  */
1297 new = instantiate_new_reg (x, &offset);
1300 *loc = plus_constant (new, offset);
/* PLUS whose first operand is virtual: fold the offset into the sum.  */
1307 new = instantiate_new_reg (XEXP (x, 0), &offset);
1310 new = plus_constant (new, offset);
1311 *loc = simplify_gen_binary (PLUS, GET_MODE (x), new, XEXP (x, 1));
1317 /* FIXME -- from old code */
1318 /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
1319 we can commute the PLUS and SUBREG because pointers into the
1320 frame are well-behaved. */
1330 /* A subroutine of instantiate_virtual_regs_in_insn. Return true if X
1331 matches the predicate for insn CODE operand OPERAND. */
/* Return true if X satisfies the predicate for operand OPERAND of the
   insn pattern CODE.  A NULL predicate accepts anything.  */
1334 safe_insn_predicate (int code, int operand, rtx x)
1336 const struct insn_operand_data *op_data;
1341 op_data = &insn_data[code].operand[operand];
1342 if (op_data->predicate == NULL)
1345 return op_data->predicate (x, op_data->mode);
1348 /* A subroutine of instantiate_virtual_regs. Instantiate any virtual
1349 registers present inside of insn. The result will be a valid insn. */
/* Rewrite all virtual-register references inside INSN, emitting any fixup
   insns before INSN, and re-recognize the result.  Several special-cased
   single_set shapes are handled first to avoid creating extra pseudos.  */
1352 instantiate_virtual_regs_in_insn (rtx insn)
1354 HOST_WIDE_INT offset;
1356 bool any_change = false;
1357 rtx set, new, x, seq;
1359 /* There are some special cases to be handled first. */
1360 set = single_set (insn);
1363 /* We're allowed to assign to a virtual register. This is interpreted
1364 to mean that the underlying register gets assigned the inverse
1365 transformation. This is used, for example, in the handling of
1367 new = instantiate_new_reg (SET_DEST (set), &offset);
1372 for_each_rtx (&SET_SRC (set), instantiate_virtual_regs_in_rtx, NULL);
1373 x = simplify_gen_binary (PLUS, GET_MODE (new), SET_SRC (set),
1375 x = force_operand (x, new);
1377 emit_move_insn (new, x);
/* Fixup code built above replaces the original insn.  */
1382 emit_insn_before (seq, insn);
1387 /* Handle a straight copy from a virtual register by generating a
1388 new add insn. The difference between this and falling through
1389 to the generic case is avoiding a new pseudo and eliminating a
1390 move insn in the initial rtl stream. */
1391 new = instantiate_new_reg (SET_SRC (set), &offset);
1392 if (new && offset != 0
1393 && REG_P (SET_DEST (set))
1394 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1398 x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS,
1399 new, GEN_INT (offset), SET_DEST (set),
1400 1, OPTAB_LIB_WIDEN);
1401 if (x != SET_DEST (set))
1402 emit_move_insn (SET_DEST (set), x);
1407 emit_insn_before (seq, insn);
/* From here on we work through recog_data, so extract the operands.  */
1412 extract_insn (insn);
1413 insn_code = INSN_CODE (insn);
1415 /* Handle a plus involving a virtual register by determining if the
1416 operands remain valid if they're modified in place. */
1417 if (GET_CODE (SET_SRC (set)) == PLUS
1418 && recog_data.n_operands >= 3
1419 && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
1420 && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
1421 && GET_CODE (recog_data.operand[2]) == CONST_INT
1422 && (new = instantiate_new_reg (recog_data.operand[1], &offset)))
/* Fold the virtual register's offset into the existing constant.  */
1424 offset += INTVAL (recog_data.operand[2]);
1426 /* If the sum is zero, then replace with a plain move. */
1428 && REG_P (SET_DEST (set))
1429 && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
1432 emit_move_insn (SET_DEST (set), new);
1436 emit_insn_before (seq, insn);
1441 x = gen_int_mode (offset, recog_data.operand_mode[2]);
1443 /* Using validate_change and apply_change_group here leaves
1444 recog_data in an invalid state. Since we know exactly what
1445 we want to check, do those two by hand. */
1446 if (safe_insn_predicate (insn_code, 1, new)
1447 && safe_insn_predicate (insn_code, 2, x))
1449 *recog_data.operand_loc[1] = recog_data.operand[1] = new;
1450 *recog_data.operand_loc[2] = recog_data.operand[2] = x;
1453 /* Fall through into the regular operand fixup loop in
1454 order to take care of operands other than 1 and 2. */
1460 extract_insn (insn);
1461 insn_code = INSN_CODE (insn);
1464 /* In the general case, we expect virtual registers to appear only in
1465 operands, and then only as either bare registers or inside memories. */
1466 for (i = 0; i < recog_data.n_operands; ++i)
1468 x = recog_data.operand[i];
1469 switch (GET_CODE (x))
/* MEM operand: instantiate inside the address and rebuild the MEM.  */
1473 rtx addr = XEXP (x, 0);
1474 bool changed = false;
1476 for_each_rtx (&addr, instantiate_virtual_regs_in_rtx, &changed);
1481 x = replace_equiv_address (x, addr);
1485 emit_insn_before (seq, insn);
/* REG operand that is itself a virtual register.  */
1490 new = instantiate_new_reg (x, &offset);
1499 /* Careful, special mode predicates may have stuff in
1500 insn_data[insn_code].operand[i].mode that isn't useful
1501 to us for computing a new value. */
1502 /* ??? Recognize address_operand and/or "p" constraints
1503 to see if (plus new offset) is a valid before we put
1504 this through expand_simple_binop. */
1505 x = expand_simple_binop (GET_MODE (x), PLUS, new,
1506 GEN_INT (offset), NULL_RTX,
1507 1, OPTAB_LIB_WIDEN);
1510 emit_insn_before (seq, insn);
/* SUBREG of a virtual register: add the offset, then re-wrap.  */
1515 new = instantiate_new_reg (SUBREG_REG (x), &offset);
1521 new = expand_simple_binop (GET_MODE (new), PLUS, new,
1522 GEN_INT (offset), NULL_RTX,
1523 1, OPTAB_LIB_WIDEN);
1526 emit_insn_before (seq, insn);
1528 x = simplify_gen_subreg (recog_data.operand_mode[i], new,
1529 GET_MODE (new), SUBREG_BYTE (x));
1536 /* At this point, X contains the new value for the operand.
1537 Validate the new value vs the insn predicate. Note that
1538 asm insns will have insn_code -1 here. */
1539 if (!safe_insn_predicate (insn_code, i, x))
1542 x = force_reg (insn_data[insn_code].operand[i].mode, x);
1546 emit_insn_before (seq, insn);
1549 *recog_data.operand_loc[i] = recog_data.operand[i] = x;
1555 /* Propagate operand changes into the duplicates. */
1556 for (i = 0; i < recog_data.n_dups; ++i)
1557 *recog_data.dup_loc[i]
1558 = recog_data.operand[(unsigned)recog_data.dup_num[i]];
1560 /* Force re-recognition of the instruction for validation. */
1561 INSN_CODE (insn) = -1;
/* asm statements are validated via their constraints; everything else
   must be recognizable or we have produced invalid RTL.  */
1564 if (asm_noperands (PATTERN (insn)) >= 0)
1566 if (!check_asm_operands (PATTERN (insn)))
1568 error_for_asm (insn, "impossible constraint in %<asm%>");
1574 if (recog_memoized (insn) < 0)
1575 fatal_insn_not_found (insn);
1579 /* Subroutine of instantiate_decls. Given RTL representing a decl,
1580 do any instantiation required. */
/* Instantiate virtual registers in the RTL X of a declaration.  Only MEMs
   with non-constant, possibly-virtual addresses need work.  */
1583 instantiate_decl (rtx x)
1590 /* If this is a CONCAT, recurse for the pieces. */
1591 if (GET_CODE (x) == CONCAT)
1593 instantiate_decl (XEXP (x, 0));
1594 instantiate_decl (XEXP (x, 1));
1598 /* If this is not a MEM, no need to do anything. Similarly if the
1599 address is a constant or a register that is not a virtual register. */
1604 if (CONSTANT_P (addr)
1606 && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
1607 || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
/* Rewrite any virtual registers inside the MEM's address in place.  */
1610 for_each_rtx (&XEXP (x, 0), instantiate_virtual_regs_in_rtx, NULL);
1613 /* Helper for instantiate_decls called via walk_tree: Process all decls
1614 in the given DECL_VALUE_EXPR. */
/* walk_tree callback: instantiate the DECL_RTL of every decl found in a
   DECL_VALUE_EXPR tree.  */
1617 instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1623 if (DECL_P (t) && DECL_RTL_SET_P (t))
1624 instantiate_decl (DECL_RTL (t));
1629 /* Subroutine of instantiate_decls: Process all decls in the given
1630 BLOCK node and all its subblocks. */
/* Recursively instantiate the DECL_RTLs of all variables in BLOCK node LET
   and its subblocks.  */
1633 instantiate_decls_1 (tree let)
1637 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
1639 if (DECL_RTL_SET_P (t))
1640 instantiate_decl (DECL_RTL (t));
/* Variables with a DECL_VALUE_EXPR need their replacement expression
   walked too, since the decl itself may have no RTL of its own.  */
1641 if (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
1643 tree v = DECL_VALUE_EXPR (t);
1644 walk_tree (&v, instantiate_expr, NULL, NULL);
1648 /* Process all subblocks. */
1649 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
1650 instantiate_decls_1 (t);
1653 /* Scan all decls in FNDECL (both variables and parameters) and instantiate
1654 all virtual registers in their DECL_RTL's. */
/* Instantiate virtual registers in the DECL_RTLs of all parameters and
   local variables of FNDECL.  */
1657 instantiate_decls (tree fndecl)
1661 /* Process all parameters of the function. */
1662 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
/* Both the home and the incoming location of a parameter may contain
   virtual registers.  */
1664 instantiate_decl (DECL_RTL (decl));
1665 instantiate_decl (DECL_INCOMING_RTL (decl));
1666 if (DECL_HAS_VALUE_EXPR_P (decl))
1668 tree v = DECL_VALUE_EXPR (decl);
1669 walk_tree (&v, instantiate_expr, NULL, NULL);
1673 /* Now process all variables defined in the function or its subblocks. */
1674 instantiate_decls_1 (DECL_INITIAL (fndecl));
1677 /* Pass through the INSNS of function FNDECL and convert virtual register
1678 references to hard register references. */
/* Entry point of the pass: replace every virtual register in the current
   function's insns (and in its decls' RTL, for debug info) with the real
   hard register plus offset.  Also computes the per-function offsets used
   by instantiate_new_reg.  */
1681 instantiate_virtual_regs (void)
1685 /* Compute the offsets to use for this function. */
1686 in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
1687 var_offset = STARTING_FRAME_OFFSET;
1688 dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
1689 out_arg_offset = STACK_POINTER_OFFSET;
1690 #ifdef FRAME_POINTER_CFA_OFFSET
1691 cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
1693 cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
1696 /* Initialize recognition, indicating that volatile is OK. */
1699 /* Scan through all the insns, instantiating every virtual register still
1701 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1704 /* These patterns in the instruction stream can never be recognized.
1705 Fortunately, they shouldn't contain virtual registers either. */
1706 if (GET_CODE (PATTERN (insn)) == USE
1707 || GET_CODE (PATTERN (insn)) == CLOBBER
1708 || GET_CODE (PATTERN (insn)) == ADDR_VEC
1709 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
1710 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
1713 instantiate_virtual_regs_in_insn (insn);
/* The insn may have been deleted by the rewriting above.  */
1715 if (INSN_DELETED_P (insn))
/* Fixed: "&REG_NOTES" had been corrupted to "(r)_NOTES" by an HTML
   entity collapse ("&reg;"); restore the address-of expression.  */
1718 for_each_rtx (&REG_NOTES (insn), instantiate_virtual_regs_in_rtx, NULL);
1720 /* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE. */
1721 if (GET_CODE (insn) == CALL_INSN)
1722 for_each_rtx (&CALL_INSN_FUNCTION_USAGE (insn),
1723 instantiate_virtual_regs_in_rtx, NULL);
1726 /* Instantiate the virtual registers in the DECLs for debugging purposes. */
1727 instantiate_decls (current_function_decl);
1729 /* Indicate that, from now on, assign_stack_local should use
1730 frame_pointer_rtx. */
1731 virtuals_instantiated = 1;
/* Pass-manager descriptor wiring instantiate_virtual_regs into the
   compilation pipeline; dumps the function after running.  */
1735 struct tree_opt_pass pass_instantiate_virtual_regs =
1739 instantiate_virtual_regs, /* execute */
1742 0, /* static_pass_number */
1744 0, /* properties_required */
1745 0, /* properties_provided */
1746 0, /* properties_destroyed */
1747 0, /* todo_flags_start */
1748 TODO_dump_func, /* todo_flags_finish */
1753 /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
1754 This means a type for which function calls must pass an address to the
1755 function or get an address back from the function.
1756 EXP may be a type node or an expression (whose type is tested). */
/* Return nonzero if EXP's value must be passed/returned in memory rather
   than in registers.  EXP may be a type or an expression; FNTYPE, when
   given, is the function type / CALL_EXPR / function decl involved.  */
1759 aggregate_value_p (tree exp, tree fntype)
1761 int i, regno, nregs;
1764 tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
1766 /* DECL node associated with FNTYPE when relevant, which we might need to
1767 check for by-invisible-reference returns, typically for CALL_EXPR input
1769 tree fndecl = NULL_TREE;
/* Normalize FNTYPE: it may arrive as a CALL_EXPR, a FUNCTION_DECL, or an
   actual type node.  */
1772 switch (TREE_CODE (fntype))
1775 fndecl = get_callee_fndecl (fntype);
1776 fntype = fndecl ? TREE_TYPE (fndecl) : 0;
1780 fntype = TREE_TYPE (fndecl);
1785 case IDENTIFIER_NODE:
1789 /* We don't expect other rtl types here. */
1793 if (TREE_CODE (type) == VOID_TYPE)
1796 /* If the front end has decided that this needs to be passed by
1797 reference, do so. */
1798 if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
1799 && DECL_BY_REFERENCE (exp))
1802 /* If the EXPression is a CALL_EXPR, honor DECL_BY_REFERENCE set on the
1803 called function RESULT_DECL, meaning the function returns in memory by
1804 invisible reference. This check lets front-ends not set TREE_ADDRESSABLE
1805 on the function type, which used to be the way to request such a return
1806 mechanism but might now be causing troubles at gimplification time if
1807 temporaries with the function type need to be created. */
1808 if (TREE_CODE (exp) == CALL_EXPR && fndecl && DECL_RESULT (fndecl)
1809 && DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
/* Defer to the target's ABI hook for return-in-memory decisions.  */
1812 if (targetm.calls.return_in_memory (type, fntype))
1814 /* Types that are TREE_ADDRESSABLE must be constructed in memory,
1815 and thus can't be returned in registers. */
1816 if (TREE_ADDRESSABLE (type))
1818 if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
1820 /* Make sure we have suitable call-clobbered regs to return
1821 the value in; if not, we must return it in memory. */
1822 reg = hard_function_value (type, 0, fntype, 0);
1824 /* If we have something other than a REG (e.g. a PARALLEL), then assume
1829 regno = REGNO (reg);
1830 nregs = hard_regno_nregs[regno][TYPE_MODE (type)];
/* Every register in the return-value window must be call-clobbered.  */
1831 for (i = 0; i < nregs; i++)
1832 if (! call_used_regs[regno + i])
1837 /* Return true if we should assign DECL a pseudo register; false if it
1838 should live on the local stack. */
/* Decide whether DECL should live in a pseudo register (true) or in a
   stack slot (false).  A sequence of disqualifying checks, then the
   optimize/DECL_REGISTER default.  */
1841 use_register_for_decl (tree decl)
1843 /* Honor volatile. */
1844 if (TREE_SIDE_EFFECTS (decl))
1847 /* Honor addressability. */
1848 if (TREE_ADDRESSABLE (decl))
1851 /* Only register-like things go in registers. */
1852 if (DECL_MODE (decl) == BLKmode)
1855 /* If -ffloat-store specified, don't put explicit float variables
1857 /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
1858 propagates values across these stores, and it probably shouldn't. */
1859 if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
1862 /* If we're not interested in tracking debugging information for
1863 this decl, then we can certainly put it in a register. */
1864 if (DECL_IGNORED_P (decl))
/* Default: registers when optimizing, or when the user said "register".  */
1867 return (optimize || DECL_REGISTER (decl));
1870 /* Return true if TYPE should be passed by invisible reference. */
/* Return true if an argument of TYPE/MODE should be passed by invisible
   reference.  Middle-end rules are checked first; otherwise the target
   ABI hook decides.  */
1873 pass_by_reference (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1874 tree type, bool named_arg)
1878 /* If this type contains non-trivial constructors, then it is
1879 forbidden for the middle-end to create any new copies. */
1880 if (TREE_ADDRESSABLE (type))
1883 /* GCC post 3.4 passes *all* variable sized types by reference. */
1884 if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1888 return targetm.calls.pass_by_reference (ca, mode, type, named_arg);
1891 /* Return true if TYPE, which is passed by reference, should be callee
1892 copied instead of caller copied. */
/* For an argument passed by reference, return true if the callee (rather
   than the caller) is responsible for making the copy.  TREE_ADDRESSABLE
   types may never be copied at all, so they are handled first.  */
1895 reference_callee_copied (CUMULATIVE_ARGS *ca, enum machine_mode mode,
1896 tree type, bool named_arg)
1898 if (type && TREE_ADDRESSABLE (type))
1900 return targetm.calls.callee_copies (ca, mode, type, named_arg);
1903 /* Structures to communicate between the subroutines of assign_parms.
1904 The first holds data persistent across all parameters, the second
1905 is cleared out for each parameter. */
/* State shared by all parameters while assign_parms runs.  */
1907 struct assign_parm_data_all
1909 CUMULATIVE_ARGS args_so_far;
1910 struct args_size stack_args_size;
/* Synthetic PARM_DECL for the hidden struct-return pointer, if any.  */
1911 tree function_result_decl;
1913 rtx conversion_insns;
1914 HOST_WIDE_INT pretend_args_size;
1915 HOST_WIDE_INT extra_pretend_bytes;
1916 int reg_parm_stack_space;
/* Per-parameter state, cleared for each parameter in turn.  */
1919 struct assign_parm_data_one
/* nominal = type inside the function, passed = ABI argument type,
   promoted = passed mode after argument promotion.  */
1925 enum machine_mode nominal_mode;
1926 enum machine_mode passed_mode;
1927 enum machine_mode promoted_mode;
1928 struct locate_and_pad_arg_data locate;
1930 BOOL_BITFIELD named_arg : 1;
1931 BOOL_BITFIELD passed_pointer : 1;
1932 BOOL_BITFIELD on_stack : 1;
1933 BOOL_BITFIELD loaded_in_reg : 1;
1936 /* A subroutine of assign_parms. Initialize ALL. */
/* Zero ALL and initialize the cumulative-args state and the register-parm
   stack space for the current function.  */
1939 assign_parms_initialize_all (struct assign_parm_data_all *all)
1943 memset (all, 0, sizeof (*all));
1945 fntype = TREE_TYPE (current_function_decl);
/* Prefer the incoming-args initializer when the target provides one.  */
1947 #ifdef INIT_CUMULATIVE_INCOMING_ARGS
1948 INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far, fntype, NULL_RTX);
1950 INIT_CUMULATIVE_ARGS (all->args_so_far, fntype, NULL_RTX,
1951 current_function_decl, -1);
1954 #ifdef REG_PARM_STACK_SPACE
1955 all->reg_parm_stack_space = REG_PARM_STACK_SPACE (current_function_decl);
1959 /* If ARGS contains entries with complex types, split the entry into two
1960 entries of the component type. Return a new list of substitutions are
1961 needed, else the old list. */
/* Split each complex-typed PARM_DECL in ARGS into two decls of the
   component type (real part rewritten in place, imaginary part spliced in
   after it).  Returns the possibly-copied list; the original is returned
   untouched when no complex args are present.  */
1964 split_complex_args (tree args)
1968 /* Before allocating memory, check for the common case of no complex. */
1969 for (p = args; p; p = TREE_CHAIN (p))
1971 tree type = TREE_TYPE (p);
1972 if (TREE_CODE (type) == COMPLEX_TYPE
1973 && targetm.calls.split_complex_arg (type))
/* At least one split is needed: work on a copy of the chain.  */
1979 args = copy_list (args);
1981 for (p = args; p; p = TREE_CHAIN (p))
1983 tree type = TREE_TYPE (p);
1984 if (TREE_CODE (type) == COMPLEX_TYPE
1985 && targetm.calls.split_complex_arg (type))
1988 tree subtype = TREE_TYPE (type);
1989 bool addressable = TREE_ADDRESSABLE (p);
1991 /* Rewrite the PARM_DECL's type with its component. */
1992 TREE_TYPE (p) = subtype;
1993 DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
1994 DECL_MODE (p) = VOIDmode;
1995 DECL_SIZE (p) = NULL;
1996 DECL_SIZE_UNIT (p) = NULL;
1997 /* If this arg must go in memory, put it in a pseudo here.
1998 We can't allow it to go in memory as per normal parms,
1999 because the usual place might not have the imag part
2000 adjacent to the real part. */
2001 DECL_ARTIFICIAL (p) = addressable;
2002 DECL_IGNORED_P (p) = addressable;
2003 TREE_ADDRESSABLE (p) = 0;
2006 /* Build a second synthetic decl. */
2007 decl = build_decl (PARM_DECL, NULL_TREE, subtype);
2008 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
2009 DECL_ARTIFICIAL (decl) = addressable;
2010 DECL_IGNORED_P (decl) = addressable;
2011 layout_decl (decl, 0);
2013 /* Splice it in; skip the new decl. */
2014 TREE_CHAIN (decl) = TREE_CHAIN (p);
2015 TREE_CHAIN (p) = decl;
2023 /* A subroutine of assign_parms. Adjust the parameter list to incorporate
2024 the hidden struct return argument, and (abi willing) complex args.
2025 Return the new parameter list. */
/* Build the argument list assign_parms will actually walk: prepend a
   synthetic decl for the hidden struct-return pointer when needed, record
   the original list in ALL->orig_fnargs, and split complex args if the
   target asks for it.  Returns the augmented list.  */
2028 assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
2030 tree fndecl = current_function_decl;
2031 tree fntype = TREE_TYPE (fndecl);
2032 tree fnargs = DECL_ARGUMENTS (fndecl);
2034 /* If struct value address is treated as the first argument, make it so. */
2035 if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
2036 && ! current_function_returns_pcc_struct
2037 && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
2039 tree type = build_pointer_type (TREE_TYPE (fntype));
/* Synthetic, invisible-to-debug pointer parameter.  */
2042 decl = build_decl (PARM_DECL, NULL_TREE, type);
2043 DECL_ARG_TYPE (decl) = type;
2044 DECL_ARTIFICIAL (decl) = 1;
2045 DECL_IGNORED_P (decl) = 1;
2047 TREE_CHAIN (decl) = fnargs;
2049 all->function_result_decl = decl;
2052 all->orig_fnargs = fnargs;
2054 /* If the target wants to split complex arguments into scalars, do so. */
2055 if (targetm.calls.split_complex_arg)
2056 fnargs = split_complex_args (fnargs);
2061 /* A subroutine of assign_parms. Examine PARM and pull out type and mode
2062 data for the parameter. Incorporate ABI specifics such as pass-by-
2063 reference and type promotion. */
/* Fill DATA with the type/mode information for parameter PARM: nominal
   vs. passed type, pass-by-reference handling, and ABI mode promotion.  */
2066 assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
2067 struct assign_parm_data_one *data)
2069 tree nominal_type, passed_type;
2070 enum machine_mode nominal_mode, passed_mode, promoted_mode;
2072 memset (data, 0, sizeof (*data));
2074 /* NAMED_ARG is a mis-nomer. We really mean 'non-varadic'. */
2075 if (!current_function_stdarg)
2076 data->named_arg = 1; /* No varadic parms. */
2077 else if (TREE_CHAIN (parm))
2078 data->named_arg = 1; /* Not the last non-varadic parm. */
2079 else if (targetm.calls.strict_argument_naming (&all->args_so_far))
2080 data->named_arg = 1; /* Only varadic ones are unnamed. */
2082 data->named_arg = 0; /* Treat as varadic. */
2084 nominal_type = TREE_TYPE (parm);
2085 passed_type = DECL_ARG_TYPE (parm);
2087 /* Look out for errors propagating this far. Also, if the parameter's
2088 type is void then its value doesn't matter. */
2089 if (TREE_TYPE (parm) == error_mark_node
2090 /* This can happen after weird syntax errors
2091 or if an enum type is defined among the parms. */
2092 || TREE_CODE (parm) != PARM_DECL
2093 || passed_type == NULL
2094 || VOID_TYPE_P (nominal_type))
2096 nominal_type = passed_type = void_type_node;
2097 nominal_mode = passed_mode = promoted_mode = VOIDmode;
2101 /* Find mode of arg as it is passed, and mode of arg as it should be
2102 during execution of this function. */
2103 passed_mode = TYPE_MODE (passed_type);
2104 nominal_mode = TYPE_MODE (nominal_type);
2106 /* If the parm is to be passed as a transparent union, use the type of
2107 the first field for the tests below. We have already verified that
2108 the modes are the same. */
2109 if (TREE_CODE (passed_type) == UNION_TYPE
2110 && TYPE_TRANSPARENT_UNION (passed_type))
2111 passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
2113 /* See if this arg was passed by invisible reference. */
2114 if (pass_by_reference (&all->args_so_far, passed_mode,
2115 passed_type, data->named_arg))
/* By-reference args are rewritten as pointers to the original type.  */
2117 passed_type = nominal_type = build_pointer_type (passed_type);
2118 data->passed_pointer = true;
2119 passed_mode = nominal_mode = Pmode;
2122 /* Find mode as it is passed by the ABI. */
2123 promoted_mode = passed_mode;
2124 if (targetm.calls.promote_function_args (TREE_TYPE (current_function_decl)))
2126 int unsignedp = TYPE_UNSIGNED (passed_type);
2127 promoted_mode = promote_mode (passed_type, promoted_mode,
/* Record everything computed above into DATA.  */
2132 data->nominal_type = nominal_type;
2133 data->passed_type = passed_type;
2134 data->nominal_mode = nominal_mode;
2135 data->passed_mode = passed_mode;
2136 data->promoted_mode = promoted_mode;
2139 /* A subroutine of assign_parms. Invoke setup_incoming_varargs. */
/* Call the target's setup_incoming_varargs hook for this parameter and
   record any extra pretend-args stack space it requests.  */
2142 assign_parms_setup_varargs (struct assign_parm_data_all *all,
2143 struct assign_parm_data_one *data, bool no_rtl)
2145 int varargs_pretend_bytes = 0;
2147 targetm.calls.setup_incoming_varargs (&all->args_so_far,
2148 data->promoted_mode,
2150 &varargs_pretend_bytes, no_rtl);
2152 /* If the back-end has requested extra stack space, record how much is
2153 needed. Do not change pretend_args_size otherwise since it may be
2154 nonzero from an earlier partial argument. */
2155 if (varargs_pretend_bytes > 0)
2156 all->pretend_args_size = varargs_pretend_bytes;
2159 /* A subroutine of assign_parms. Set DATA->ENTRY_PARM corresponding to
2160 the incoming location of the current parameter. */
/* Determine where the current parameter arrives (register and/or stack):
   sets DATA->entry_parm, DATA->partial, and DATA->locate, and accounts for
   pretend-args space when an argument is split between regs and stack.  */
2163 assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
2164 struct assign_parm_data_one *data)
2166 HOST_WIDE_INT pretend_bytes = 0;
/* A void parameter has no location at all.  */
2170 if (data->promoted_mode == VOIDmode)
2172 data->entry_parm = data->stack_parm = const0_rtx;
2176 #ifdef FUNCTION_INCOMING_ARG
2177 entry_parm = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2178 data->passed_type, data->named_arg)
2180 entry_parm = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2181 data->passed_type, data->named_arg);
2184 if (entry_parm == 0)
2185 data->promoted_mode = data->passed_mode;
2187 /* Determine parm's home in the stack, in case it arrives in the stack
2188 or we should pretend it did. Compute the stack position and rtx where
2189 the argument arrives and its size.
2191 There is one complexity here: If this was a parameter that would
2192 have been passed in registers, but wasn't only because it is
2193 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
2194 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
2195 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
2196 as it was the previous time. */
2197 in_regs = entry_parm != 0;
2198 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2201 if (!in_regs && !data->named_arg)
2203 if (targetm.calls.pretend_outgoing_varargs_named (&all->args_so_far))
2206 #ifdef FUNCTION_INCOMING_ARG
2207 tem = FUNCTION_INCOMING_ARG (all->args_so_far, data->promoted_mode,
2208 data->passed_type, true);
2210 tem = FUNCTION_ARG (all->args_so_far, data->promoted_mode,
2211 data->passed_type, true);
2213 in_regs = tem != NULL;
2217 /* If this parameter was passed both in registers and in the stack, use
2218 the copy on the stack. */
2219 if (targetm.calls.must_pass_in_stack (data->promoted_mode,
/* Ask the target how many bytes (if any) arrive in registers when the
   argument is split between registers and stack.  */
2227 partial = targetm.calls.arg_partial_bytes (&all->args_so_far,
2228 data->promoted_mode,
2231 data->partial = partial;
2233 /* The caller might already have allocated stack space for the
2234 register parameters. */
2235 if (partial != 0 && all->reg_parm_stack_space == 0)
2237 /* Part of this argument is passed in registers and part
2238 is passed on the stack. Ask the prologue code to extend
2239 the stack part so that we can recreate the full value.
2241 PRETEND_BYTES is the size of the registers we need to store.
2242 CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
2243 stack space that the prologue should allocate.
2245 Internally, gcc assumes that the argument pointer is aligned
2246 to STACK_BOUNDARY bits. This is used both for alignment
2247 optimizations (see init_emit) and to locate arguments that are
2248 aligned to more than PARM_BOUNDARY bits. We must preserve this
2249 invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
2250 a stack boundary. */
2252 /* We assume at most one partial arg, and it must be the first
2253 argument on the stack. */
2254 gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);
2256 pretend_bytes = partial;
2257 all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);
2259 /* We want to align relative to the actual stack pointer, so
2260 don't include this in the stack size until later. */
2261 all->extra_pretend_bytes = all->pretend_args_size;
2265 locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
2266 entry_parm ? data->partial : 0, current_function_decl,
2267 &all->stack_args_size, &data->locate);
2269 /* Adjust offsets to include the pretend args. */
2270 pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
2271 data->locate.slot_offset.constant += pretend_bytes;
2272 data->locate.offset.constant += pretend_bytes;
2274 data->entry_parm = entry_parm;
2277 /* A subroutine of assign_parms. If there is actually space on the stack
2278 for this parm, count it in stack_args_size and return true. */
/* Return true if the parameter described by DATA has an ABI-defined stack
   slot, accumulating its size into ALL->stack_args_size when it does.  */
2281 assign_parm_is_stack_parm (struct assign_parm_data_all *all,
2282 struct assign_parm_data_one *data)
2284 /* Trivially true if we've no incoming register. */
2285 if (data->entry_parm == NULL)
2287 /* Also true if we're partially in registers and partially not,
2288 since we've arranged to drop the entire argument on the stack. */
2289 else if (data->partial != 0)
2291 /* Also true if the target says that it's passed in both registers
2292 and on the stack. */
2293 else if (GET_CODE (data->entry_parm) == PARALLEL
2294 && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
2296 /* Also true if the target says that there's stack allocated for
2297 all register parameters. */
2298 else if (all->reg_parm_stack_space > 0)
2300 /* Otherwise, no, this parameter has no ABI defined stack slot. */
/* Account for both the constant and the variable part of the size.  */
2304 all->stack_args_size.constant += data->locate.size.constant;
2305 if (data->locate.size.var)
2306 ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
2311 /* A subroutine of assign_parms. Given that this parameter is allocated
2312 stack space by the ABI, find it. */
/* Build the MEM for PARM's ABI-allocated stack slot (an offset from the
   internal arg pointer), set its attributes and best-known alignment, and
   store it in DATA->stack_parm.  */
2315 assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
2317 rtx offset_rtx, stack_parm;
2318 unsigned int align, boundary;
2320 /* If we're passing this arg using a reg, make its stack home the
2321 aligned stack slot. */
2322 if (data->entry_parm)
2323 offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
2325 offset_rtx = ARGS_SIZE_RTX (data->locate.offset);
2327 stack_parm = current_function_internal_arg_pointer;
2328 if (offset_rtx != const0_rtx)
2329 stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
2330 stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);
2332 set_mem_attributes (stack_parm, parm, 1);
2334 boundary = data->locate.boundary;
2335 align = BITS_PER_UNIT;
2337 /* If we're padding upward, we know that the alignment of the slot
2338 is FUNCTION_ARG_BOUNDARY. If we're using slot_offset, we're
2339 intentionally forcing upward padding. Otherwise we have to come
2340 up with a guess at the alignment based on OFFSET_RTX. */
2341 if (data->locate.where_pad != downward || data->entry_parm)
2343 else if (GET_CODE (offset_rtx) == CONST_INT)
/* Alignment implied by the offset: isolate its lowest set bit.  */
2345 align = INTVAL (offset_rtx) * BITS_PER_UNIT | boundary;
2346 align = align & -align;
2348 set_mem_align (stack_parm, align);
2350 if (data->entry_parm)
2351 set_reg_attrs_for_parm (data->entry_parm, stack_parm);
2353 data->stack_parm = stack_parm;
2356 /* A subroutine of assign_parms. Adjust DATA->ENTRY_RTL such that it's
2357 always valid and contiguous. */
/* Normalize DATA->entry_parm so the rest of assign_parms can treat the
   incoming value as a single contiguous location: push partial register
   parts to the stack, fall back to the stack slot when there is no
   register, and collapse single-entry PARALLELs to the bare register.  */
2360 assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
2362 rtx entry_parm = data->entry_parm;
2363 rtx stack_parm = data->stack_parm;
2365 /* If this parm was passed part in regs and part in memory, pretend it
2366 arrived entirely in memory by pushing the register-part onto the stack.
2367 In the special case of a DImode or DFmode that is split, we could put
2368 it together in a pseudoreg directly, but for now that's not worth
2370 if (data->partial != 0)
2372 /* Handle calls that pass values in multiple non-contiguous
2373 locations. The Irix 6 ABI has examples of this. */
2374 if (GET_CODE (entry_parm) == PARALLEL)
2375 emit_group_store (validize_mem (stack_parm), entry_parm,
2377 int_size_in_bytes (data->passed_type));
/* Simple case: a whole number of words arrived in registers.  */
2380 gcc_assert (data->partial % UNITS_PER_WORD == 0);
2381 move_block_from_reg (REGNO (entry_parm), validize_mem (stack_parm),
2382 data->partial / UNITS_PER_WORD);
2385 entry_parm = stack_parm;
2388 /* If we didn't decide this parm came in a register, by default it came
2390 else if (entry_parm == NULL)
2391 entry_parm = stack_parm;
2393 /* When an argument is passed in multiple locations, we can't make use
2394 of this information, but we can save some copying if the whole argument
2395 is passed in a single register. */
2396 else if (GET_CODE (entry_parm) == PARALLEL
2397 && data->nominal_mode != BLKmode
2398 && data->passed_mode != BLKmode)
2400 size_t i, len = XVECLEN (entry_parm, 0);
2402 for (i = 0; i < len; i++)
2403 if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
2404 && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
2405 && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
2406 == data->passed_mode)
2407 && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
2409 entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
2414 data->entry_parm = entry_parm;
2417 /* A subroutine of assign_parms. Adjust DATA->STACK_RTL such that it's
2418 always valid and properly aligned. */
/* NOTE(review): the bodies of the three "else if" arms below (which
   presumably clear STACK_PARM) are elided in this chunk — confirm
   against upstream GCC function.c. */
2421 assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
2423 rtx stack_parm = data->stack_parm;
2425 /* If we can't trust the parm stack slot to be aligned enough for its
2426 ultimate type, don't use that slot after entry. We'll make another
2427 stack slot, if we need one. */
2429 && ((STRICT_ALIGNMENT
2430 && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
2431 || (data->nominal_type
2432 && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
2433 && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
2436 /* If parm was passed in memory, and we need to convert it on entry,
2437 don't store it back in that same slot. */
2438 else if (data->entry_parm == stack_parm
2439 && data->nominal_mode != BLKmode
2440 && data->nominal_mode != data->passed_mode)
2443 /* If stack protection is in effect for this function, don't leave any
2444 pointers in their passed stack slots. */
2445 else if (cfun->stack_protect_guard
2446 && (flag_stack_protect == 2
2447 || data->passed_pointer
2448 || POINTER_TYPE_P (data->nominal_type)))
2451 data->stack_parm = stack_parm;
2454 /* A subroutine of assign_parms. Return true if the current parameter
2455 should be stored as a BLKmode in the current frame. */
/* NOTE(review): the return statements of this predicate are elided in
   this chunk; only the tested conditions are visible. */
2458 assign_parm_setup_block_p (struct assign_parm_data_one *data)
2460 if (data->nominal_mode == BLKmode)
2462 if (GET_CODE (data->entry_parm) == PARALLEL)
2465 #ifdef BLOCK_REG_PADDING
2466 /* Only assign_parm_setup_block knows how to deal with register arguments
2467 that are padded at the least significant end. */
2468 if (REG_P (data->entry_parm)
2469 && GET_MODE_SIZE (data->promoted_mode) < UNITS_PER_WORD
2470 && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
2471 == (BYTES_BIG_ENDIAN ? upward : downward)))
2478 /* A subroutine of assign_parms. Arrange for the parameter to be
2479 present and valid in DATA->STACK_RTL. */
/* NOTE(review): several lines (declarations of SIZE/MEM, closing braces,
   an early return after SET_DECL_RTL, and some macro-argument lines) are
   elided in this chunk — verify against upstream GCC function.c. */
2482 assign_parm_setup_block (struct assign_parm_data_all *all,
2483 tree parm, struct assign_parm_data_one *data)
2485 rtx entry_parm = data->entry_parm;
2486 rtx stack_parm = data->stack_parm;
2488 HOST_WIDE_INT size_stored;
2489 rtx orig_entry_parm = entry_parm;
2491 if (GET_CODE (entry_parm) == PARALLEL)
2492 entry_parm = emit_group_move_into_temps (entry_parm);
2494 /* If we've a non-block object that's nevertheless passed in parts,
2495 reconstitute it in register operations rather than on the stack. */
2496 if (GET_CODE (entry_parm) == PARALLEL
2497 && data->nominal_mode != BLKmode)
2499 rtx elt0 = XEXP (XVECEXP (orig_entry_parm, 0, 0), 0);
2501 if ((XVECLEN (entry_parm, 0) > 1
2502 || hard_regno_nregs[REGNO (elt0)][GET_MODE (elt0)] > 1)
2503 && use_register_for_decl (parm))
2505 rtx parmreg = gen_reg_rtx (data->nominal_mode);
2507 push_to_sequence (all->conversion_insns);
2509 /* For values returned in multiple registers, handle possible
2510 incompatible calls to emit_group_store.
2512 For example, the following would be invalid, and would have to
2513 be fixed by the conditional below:
2515 emit_group_store ((reg:SF), (parallel:DF))
2516 emit_group_store ((reg:SI), (parallel:DI))
2518 An example of this are doubles in e500 v2:
2519 (parallel:DF (expr_list (reg:SI) (const_int 0))
2520 (expr_list (reg:SI) (const_int 4))). */
2521 if (data->nominal_mode != data->passed_mode)
2523 rtx t = gen_reg_rtx (GET_MODE (entry_parm));
2524 emit_group_store (t, entry_parm, NULL_TREE,
2525 GET_MODE_SIZE (GET_MODE (entry_parm)));
2526 convert_move (parmreg, t, 0);
2529 emit_group_store (parmreg, entry_parm, data->nominal_type,
2530 int_size_in_bytes (data->nominal_type));
2532 all->conversion_insns = get_insns ();
2535 SET_DECL_RTL (parm, parmreg);
/* Fall-through path: store the BLKmode argument into a stack slot,
   rounded up to a whole number of words.  */
2540 size = int_size_in_bytes (data->passed_type);
2541 size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
2542 if (stack_parm == 0)
2544 DECL_ALIGN (parm) = MAX (DECL_ALIGN (parm), BITS_PER_WORD);
2545 stack_parm = assign_stack_local (BLKmode, size_stored,
2547 if (GET_MODE_SIZE (GET_MODE (entry_parm)) == size)
2548 PUT_MODE (stack_parm, GET_MODE (entry_parm));
2549 set_mem_attributes (stack_parm, parm, 1);
2552 /* If a BLKmode arrives in registers, copy it to a stack slot. Handle
2553 calls that pass values in multiple non-contiguous locations. */
2554 if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
2558 /* Note that we will be storing an integral number of words.
2559 So we have to be careful to ensure that we allocate an
2560 integral number of words. We do this above when we call
2561 assign_stack_local if space was not allocated in the argument
2562 list. If it was, this will not work if PARM_BOUNDARY is not
2563 a multiple of BITS_PER_WORD. It isn't clear how to fix this
2564 if it becomes a problem. Exception is when BLKmode arrives
2565 with arguments not conforming to word_mode. */
2567 if (data->stack_parm == 0)
2569 else if (GET_CODE (entry_parm) == PARALLEL)
2572 gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));
2574 mem = validize_mem (stack_parm);
2576 /* Handle values in multiple non-contiguous locations. */
2577 if (GET_CODE (entry_parm) == PARALLEL)
2579 push_to_sequence (all->conversion_insns);
2580 emit_group_store (mem, entry_parm, data->passed_type, size);
2581 all->conversion_insns = get_insns ();
2588 /* If SIZE is that of a mode no bigger than a word, just use
2589 that mode's store operation. */
2590 else if (size <= UNITS_PER_WORD)
2592 enum machine_mode mode
2593 = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0);
2596 #ifdef BLOCK_REG_PADDING
2597 && (size == UNITS_PER_WORD
2598 || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
2599 != (BYTES_BIG_ENDIAN ? upward : downward)))
2603 rtx reg = gen_rtx_REG (mode, REGNO (entry_parm));
2604 emit_move_insn (change_address (mem, mode, 0), reg);
2607 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
2608 machine must be aligned to the left before storing
2609 to memory. Note that the previous test doesn't
2610 handle all cases (e.g. SIZE == 3). */
2611 else if (size != UNITS_PER_WORD
2612 #ifdef BLOCK_REG_PADDING
2613 && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
/* Left-justify the sub-word value by shifting before the store.  */
2621 int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2622 rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
2624 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
2625 build_int_cst (NULL_TREE, by),
2627 tem = change_address (mem, word_mode, 0);
2628 emit_move_insn (tem, x);
2631 move_block_from_reg (REGNO (entry_parm), mem,
2632 size_stored / UNITS_PER_WORD);
2635 move_block_from_reg (REGNO (entry_parm), mem,
2636 size_stored / UNITS_PER_WORD);
2638 else if (data->stack_parm == 0)
2640 push_to_sequence (all->conversion_insns);
2641 emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
2643 all->conversion_insns = get_insns ();
2647 data->stack_parm = stack_parm;
2648 SET_DECL_RTL (parm, stack_parm);
2651 /* A subroutine of assign_parms. Allocate a pseudo to hold the current
2652 parameter. Get it there. Perform all ABI specified conversions. */
/* NOTE(review): this chunk elides a number of lines (the "else" before
   the SET_DECL_RTL at 2685, some declarations, closing braces, and parts
   of the REG_EQUIV-note condition at 2787+) — verify against upstream
   GCC function.c before modifying. */
2655 assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
2656 struct assign_parm_data_one *data)
2659 enum machine_mode promoted_nominal_mode;
2660 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
2661 bool did_conversion = false;
2663 /* Store the parm in a pseudoregister during the function, but we may
2664 need to do it in a wider mode. */
2666 /* This is not really promoting for a call. However we need to be
2667 consistent with assign_parm_find_data_types and expand_expr_real_1. */
2668 promoted_nominal_mode
2669 = promote_mode (data->nominal_type, data->nominal_mode, &unsignedp, 1);
2671 parmreg = gen_reg_rtx (promoted_nominal_mode);
2673 if (!DECL_ARTIFICIAL (parm))
2674 mark_user_reg (parmreg);
2676 /* If this was an item that we received a pointer to,
2677 set DECL_RTL appropriately. */
2678 if (data->passed_pointer)
2680 rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
2681 set_mem_attributes (x, parm, 1);
2682 SET_DECL_RTL (parm, x);
2685 SET_DECL_RTL (parm, parmreg);
2687 /* Copy the value into the register. */
2688 if (data->nominal_mode != data->passed_mode
2689 || promoted_nominal_mode != data->promoted_mode)
2693 /* ENTRY_PARM has been converted to PROMOTED_MODE, its
2694 mode, by the caller. We now have to convert it to
2695 NOMINAL_MODE, if different. However, PARMREG may be in
2696 a different mode than NOMINAL_MODE if it is being stored
2699 If ENTRY_PARM is a hard register, it might be in a register
2700 not valid for operating in its mode (e.g., an odd-numbered
2701 register for a DFmode). In that case, moves are the only
2702 thing valid, so we can't do a convert from there. This
2703 occurs when the calling sequence allow such misaligned
2706 In addition, the conversion may involve a call, which could
2707 clobber parameters which haven't been copied to pseudo
2708 registers yet. Therefore, we must first copy the parm to
2709 a pseudo reg here, and save the conversion until after all
2710 parameters have been moved. */
2712 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2714 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2716 push_to_sequence (all->conversion_insns);
2717 tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
2719 if (GET_CODE (tempreg) == SUBREG
2720 && GET_MODE (tempreg) == data->nominal_mode
2721 && REG_P (SUBREG_REG (tempreg))
2722 && data->nominal_mode == data->passed_mode
2723 && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm)
2724 && GET_MODE_SIZE (GET_MODE (tempreg))
2725 < GET_MODE_SIZE (GET_MODE (data->entry_parm)))
2727 /* The argument is already sign/zero extended, so note it
2729 SUBREG_PROMOTED_VAR_P (tempreg) = 1;
2730 SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
2733 /* TREE_USED gets set erroneously during expand_assignment. */
2734 save_tree_used = TREE_USED (parm);
2735 expand_assignment (parm, make_tree (data->nominal_type, tempreg));
2736 TREE_USED (parm) = save_tree_used;
2737 all->conversion_insns = get_insns ();
2740 did_conversion = true;
/* No conversion needed: a plain move suffices.  */
2743 emit_move_insn (parmreg, validize_mem (data->entry_parm));
2745 /* If we were passed a pointer but the actual value can safely live
2746 in a register, put it in one. */
2747 if (data->passed_pointer
2748 && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
2749 /* If by-reference argument was promoted, demote it. */
2750 && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
2751 || use_register_for_decl (parm)))
2753 /* We can't use nominal_mode, because it will have been set to
2754 Pmode above. We must use the actual mode of the parm. */
2755 parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
2756 mark_user_reg (parmreg);
2758 if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
2760 rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
2761 int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
2763 push_to_sequence (all->conversion_insns);
2764 emit_move_insn (tempreg, DECL_RTL (parm));
2765 tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
2766 emit_move_insn (parmreg, tempreg);
2767 all->conversion_insns = get_insns ();
2770 did_conversion = true;
2773 emit_move_insn (parmreg, DECL_RTL (parm));
2775 SET_DECL_RTL (parm, parmreg);
2777 /* STACK_PARM is the pointer, not the parm, and PARMREG is
2779 data->stack_parm = NULL;
2782 /* Mark the register as eliminable if we did no conversion and it was
2783 copied from memory at a fixed offset, and the arg pointer was not
2784 copied to a pseudo-reg. If the arg pointer is a pseudo reg or the
2785 offset formed an invalid address, such memory-equivalences as we
2786 make here would screw up life analysis for it. */
2787 if (data->nominal_mode == data->passed_mode
2789 && data->stack_parm != 0
2790 && MEM_P (data->stack_parm)
2791 && data->locate.offset.var == 0
2792 && reg_mentioned_p (virtual_incoming_args_rtx,
2793 XEXP (data->stack_parm, 0)))
2795 rtx linsn = get_last_insn ();
2798 /* Mark complex types separately. */
2799 if (GET_CODE (parmreg) == CONCAT)
2801 enum machine_mode submode
2802 = GET_MODE_INNER (GET_MODE (parmreg));
2803 int regnor = REGNO (XEXP (parmreg, 0));
2804 int regnoi = REGNO (XEXP (parmreg, 1));
2805 rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
2806 rtx stacki = adjust_address_nv (data->stack_parm, submode,
2807 GET_MODE_SIZE (submode));
2809 /* Scan backwards for the set of the real and
2811 for (sinsn = linsn; sinsn != 0;
2812 sinsn = prev_nonnote_insn (sinsn))
2814 set = single_set (sinsn);
2818 if (SET_DEST (set) == regno_reg_rtx [regnoi])
2820 = gen_rtx_EXPR_LIST (REG_EQUIV, stacki,
2822 else if (SET_DEST (set) == regno_reg_rtx [regnor])
2824 = gen_rtx_EXPR_LIST (REG_EQUIV, stackr,
2828 else if ((set = single_set (linsn)) != 0
2829 && SET_DEST (set) == parmreg)
2831 = gen_rtx_EXPR_LIST (REG_EQUIV,
2832 data->stack_parm, REG_NOTES (linsn));
2835 /* For pointer data type, suggest pointer register. */
2836 if (POINTER_TYPE_P (TREE_TYPE (parm)))
2837 mark_reg_pointer (parmreg,
2838 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
2841 /* A subroutine of assign_parms. Allocate stack space to hold the current
2842 parameter. Get it there. Perform all ABI specified conversions. */
/* NOTE(review): some lines (return type, a MEM_ALIGN/misalignment test
   before the block-move branch, closing braces) are elided in this
   chunk — confirm against upstream GCC function.c. */
2845 assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
2846 struct assign_parm_data_one *data)
2848 /* Value must be stored in the stack slot STACK_PARM during function
2850 bool to_conversion = false;
2852 if (data->promoted_mode != data->nominal_mode)
2854 /* Conversion is required. */
2855 rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
2857 emit_move_insn (tempreg, validize_mem (data->entry_parm));
2859 push_to_sequence (all->conversion_insns);
2860 to_conversion = true;
2862 data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
2863 TYPE_UNSIGNED (TREE_TYPE (parm)));
2865 if (data->stack_parm)
2866 /* ??? This may need a big-endian conversion on sparc64. */
2868 = adjust_address (data->stack_parm, data->nominal_mode, 0);
2871 if (data->entry_parm != data->stack_parm)
2875 if (data->stack_parm == 0)
2878 = assign_stack_local (GET_MODE (data->entry_parm),
2879 GET_MODE_SIZE (GET_MODE (data->entry_parm)),
2880 TYPE_ALIGN (data->passed_type));
2881 set_mem_attributes (data->stack_parm, parm, 1);
2884 dest = validize_mem (data->stack_parm);
2885 src = validize_mem (data->entry_parm);
2889 /* Use a block move to handle potentially misaligned entry_parm. */
2891 push_to_sequence (all->conversion_insns);
2892 to_conversion = true;
2894 emit_block_move (dest, src,
2895 GEN_INT (int_size_in_bytes (data->passed_type)),
2899 emit_move_insn (dest, src);
2904 all->conversion_insns = get_insns ();
2908 SET_DECL_RTL (parm, data->stack_parm);
2911 /* A subroutine of assign_parms. If the ABI splits complex arguments, then
2912 undo the frobbing that we did in assign_parms_augmented_arg_list. */
/* NOTE(review): closing braces and a trailing "fnargs = TREE_CHAIN"
   advance appear elided in this chunk — verify against upstream
   GCC function.c. */
2915 assign_parms_unsplit_complex (struct assign_parm_data_all *all, tree fnargs)
2918 tree orig_fnargs = all->orig_fnargs;
2920 for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm))
2922 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
2923 && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
2925 rtx tmp, real, imag;
2926 enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
/* FNARGS holds the split halves: real part first, imaginary next.  */
2928 real = DECL_RTL (fnargs);
2929 imag = DECL_RTL (TREE_CHAIN (fnargs));
2930 if (inner != GET_MODE (real))
2932 real = gen_lowpart_SUBREG (inner, real);
2933 imag = gen_lowpart_SUBREG (inner, imag);
2936 if (TREE_ADDRESSABLE (parm))
2939 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
2941 /* split_complex_arg put the real and imag parts in
2942 pseudos. Move them to memory. */
2943 tmp = assign_stack_local (DECL_MODE (parm), size,
2944 TYPE_ALIGN (TREE_TYPE (parm)));
2945 set_mem_attributes (tmp, parm, 1);
2946 rmem = adjust_address_nv (tmp, inner, 0);
2947 imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
2948 push_to_sequence (all->conversion_insns);
2949 emit_move_insn (rmem, real);
2950 emit_move_insn (imem, imag);
2951 all->conversion_insns = get_insns ();
2955 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2956 SET_DECL_RTL (parm, tmp);
2958 real = DECL_INCOMING_RTL (fnargs);
2959 imag = DECL_INCOMING_RTL (TREE_CHAIN (fnargs));
2960 if (inner != GET_MODE (real))
2962 real = gen_lowpart_SUBREG (inner, real);
2963 imag = gen_lowpart_SUBREG (inner, imag);
2965 tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
2966 set_decl_incoming_rtl (parm, tmp);
2967 fnargs = TREE_CHAIN (fnargs);
/* Non-split parameter: copy RTL straight across from the augmented
   list entry to the original PARM_DECL.  */
2971 SET_DECL_RTL (parm, DECL_RTL (fnargs));
2972 set_decl_incoming_rtl (parm, DECL_INCOMING_RTL (fnargs));
2974 /* Set MEM_EXPR to the original decl, i.e. to PARM,
2975 instead of the copy of decl, i.e. FNARGS. */
2976 if (DECL_INCOMING_RTL (parm) && MEM_P (DECL_INCOMING_RTL (parm)))
2977 set_mem_expr (DECL_INCOMING_RTL (parm), parm);
2980 fnargs = TREE_CHAIN (fnargs);
2984 /* Assign RTL expressions to the function's parameters. This may involve
2985 copying them into registers and using those registers as the DECL_RTL. */
/* NOTE(review): declarations (e.g. of PARM/FNARGS), some "else" branches,
   "continue" statements, and closing braces appear elided in this chunk —
   verify against upstream GCC function.c. */
2988 assign_parms (tree fndecl)
2990 struct assign_parm_data_all all;
2993 current_function_internal_arg_pointer
2994 = targetm.calls.internal_arg_pointer ();
2996 assign_parms_initialize_all (&all);
2997 fnargs = assign_parms_augmented_arg_list (&all);
2999 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3001 struct assign_parm_data_one data;
3003 /* Extract the type of PARM; adjust it according to ABI. */
3004 assign_parm_find_data_types (&all, parm, &data);
3006 /* Early out for errors and void parameters. */
3007 if (data.passed_mode == VOIDmode)
3009 SET_DECL_RTL (parm, const0_rtx);
3010 DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
3014 if (current_function_stdarg && !TREE_CHAIN (parm))
3015 assign_parms_setup_varargs (&all, &data, false);
3017 /* Find out where the parameter arrives in this function. */
3018 assign_parm_find_entry_rtl (&all, &data);
3020 /* Find out where stack space for this parameter might be. */
3021 if (assign_parm_is_stack_parm (&all, &data))
3023 assign_parm_find_stack_rtl (parm, &data);
3024 assign_parm_adjust_entry_rtl (&data);
3027 /* Record permanently how this parm was passed. */
3028 set_decl_incoming_rtl (parm, data.entry_parm);
3030 /* Update info on where next arg arrives in registers. */
3031 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3032 data.passed_type, data.named_arg);
3034 assign_parm_adjust_stack_rtl (&data);
3036 if (assign_parm_setup_block_p (&data))
3037 assign_parm_setup_block (&all, parm, &data);
3038 else if (data.passed_pointer || use_register_for_decl (parm))
3039 assign_parm_setup_reg (&all, parm, &data);
3041 assign_parm_setup_stack (&all, parm, &data);
3044 if (targetm.calls.split_complex_arg && fnargs != all.orig_fnargs)
3045 assign_parms_unsplit_complex (&all, fnargs);
3047 /* Output all parameter conversion instructions (possibly including calls)
3048 now that all parameters have been copied out of hard registers. */
3049 emit_insn (all.conversion_insns);
3051 /* If we are receiving a struct value address as the first argument, set up
3052 the RTL for the function result. As this might require code to convert
3053 the transmitted address to Pmode, we do this here to ensure that possible
3054 preliminary conversions of the address have been emitted already. */
3055 if (all.function_result_decl)
3057 tree result = DECL_RESULT (current_function_decl);
3058 rtx addr = DECL_RTL (all.function_result_decl);
3061 if (DECL_BY_REFERENCE (result))
3065 addr = convert_memory_address (Pmode, addr);
3066 x = gen_rtx_MEM (DECL_MODE (result), addr);
3067 set_mem_attributes (x, result, 1);
3069 SET_DECL_RTL (result, x);
3072 /* We have aligned all the args, so add space for the pretend args. */
3073 current_function_pretend_args_size = all.pretend_args_size;
3074 all.stack_args_size.constant += all.extra_pretend_bytes;
3075 current_function_args_size = all.stack_args_size.constant;
3077 /* Adjust function incoming argument size for alignment and
3080 #ifdef REG_PARM_STACK_SPACE
3081 current_function_args_size = MAX (current_function_args_size,
3082 REG_PARM_STACK_SPACE (fndecl));
3085 current_function_args_size = CEIL_ROUND (current_function_args_size,
3086 PARM_BOUNDARY / BITS_PER_UNIT);
3088 #ifdef ARGS_GROW_DOWNWARD
3089 current_function_arg_offset_rtx
3090 = (all.stack_args_size.var == 0 ? GEN_INT (-all.stack_args_size.constant)
3091 : expand_expr (size_diffop (all.stack_args_size.var,
3092 size_int (-all.stack_args_size.constant)),
3093 NULL_RTX, VOIDmode, 0));
3095 current_function_arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);
3098 /* See how many bytes, if any, of its args a function should try to pop
3101 current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
3102 current_function_args_size);
3104 /* For stdarg.h function, save info about
3105 regs and stack space used by the named args. */
3107 current_function_args_info = all.args_so_far;
3109 /* Set the rtx used for the function return value. Put this in its
3110 own variable so any optimizers that need this information don't have
3111 to include tree.h. Do this here so it gets done when an inlined
3112 function gets output. */
3114 current_function_return_rtx
3115 = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
3116 ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
3118 /* If scalar return value was computed in a pseudo-reg, or was a named
3119 return value that got dumped to the stack, copy that to the hard
3121 if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
3123 tree decl_result = DECL_RESULT (fndecl);
3124 rtx decl_rtl = DECL_RTL (decl_result);
3126 if (REG_P (decl_rtl)
3127 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
3128 : DECL_REGISTER (decl_result))
3132 real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
3134 REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
3135 /* The delay slot scheduler assumes that current_function_return_rtx
3136 holds the hard register containing the return value, not a
3137 temporary pseudo. */
3138 current_function_return_rtx = real_decl_rtl;
3143 /* A subroutine of gimplify_parameters, invoked via walk_tree.
3144 For all seen types, gimplify their sizes. */
/* NOTE(review): the function's opening declarations, the pointer-type
   branch body, and the return statements are elided in this chunk —
   verify against upstream GCC function.c. */
3147 gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
3154 if (POINTER_TYPE_P (t))
3156 else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
3157 && !TYPE_SIZES_GIMPLIFIED (t))
3159 gimplify_type_sizes (t, (tree *) data);
3167 /* Gimplify the parameter list for current_function_decl. This involves
3168 evaluating SAVE_EXPRs of variable sized parameters and generating code
3169 to implement callee-copies reference parameters. Returns a list of
3170 statements to add to the beginning of the function, or NULL if nothing
/* NOTE(review): several lines (a "continue", declarations of LOCAL/T,
   closing braces, and the final "return stmts") are elided in this
   chunk — verify against upstream GCC function.c. */
3174 gimplify_parameters (void)
3176 struct assign_parm_data_all all;
3177 tree fnargs, parm, stmts = NULL;
3179 assign_parms_initialize_all (&all);
3180 fnargs = assign_parms_augmented_arg_list (&all);
3182 for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
3184 struct assign_parm_data_one data;
3186 /* Extract the type of PARM; adjust it according to ABI. */
3187 assign_parm_find_data_types (&all, parm, &data);
3189 /* Early out for errors and void parameters. */
3190 if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
3193 /* Update info on where next arg arrives in registers. */
3194 FUNCTION_ARG_ADVANCE (all.args_so_far, data.promoted_mode,
3195 data.passed_type, data.named_arg);
3197 /* ??? Once upon a time variable_size stuffed parameter list
3198 SAVE_EXPRs (amongst others) onto a pending sizes list. This
3199 turned out to be less than manageable in the gimple world.
3200 Now we have to hunt them down ourselves. */
3201 walk_tree_without_duplicates (&data.passed_type,
3202 gimplify_parm_type, &stmts);
3204 if (!TREE_CONSTANT (DECL_SIZE (parm)))
3206 gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
3207 gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
3210 if (data.passed_pointer)
3212 tree type = TREE_TYPE (data.passed_type);
3213 if (reference_callee_copied (&all.args_so_far, TYPE_MODE (type),
3214 type, data.named_arg))
3218 /* For constant sized objects, this is trivial; for
3219 variable-sized objects, we have to play games. */
3220 if (TREE_CONSTANT (DECL_SIZE (parm)))
3222 local = create_tmp_var (type, get_name (parm));
3223 DECL_IGNORED_P (local) = 0;
3227 tree ptr_type, addr, args;
3229 ptr_type = build_pointer_type (type);
3230 addr = create_tmp_var (ptr_type, get_name (parm));
3231 DECL_IGNORED_P (addr) = 0;
3232 local = build_fold_indirect_ref (addr);
/* Variable-sized callee copy: allocate with BUILT_IN_ALLOCA
   and point ADDR at the allocation.  */
3234 args = tree_cons (NULL, DECL_SIZE_UNIT (parm), NULL);
3235 t = built_in_decls[BUILT_IN_ALLOCA];
3236 t = build_function_call_expr (t, args);
3237 t = fold_convert (ptr_type, t);
3238 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
3239 gimplify_and_add (t, &stmts);
3242 t = build2 (MODIFY_EXPR, void_type_node, local, parm);
3243 gimplify_and_add (t, &stmts);
3245 SET_DECL_VALUE_EXPR (parm, local);
3246 DECL_HAS_VALUE_EXPR_P (parm) = 1;
3254 /* Indicate whether REGNO is an incoming argument to the current function
3255 that was promoted to a wider mode. If so, return the RTX for the
3256 register (to get its mode). PMODE and PUNSIGNEDP are set to the mode
3257 that REGNO is promoted from and whether the promotion was signed or
/* NOTE(review): the return type line, the trailing "return 0" for the
   not-found case, and closing braces are elided in this chunk — verify
   against upstream GCC function.c. */
3261 promoted_input_arg (unsigned int regno, enum machine_mode *pmode, int *punsignedp)
3265 for (arg = DECL_ARGUMENTS (current_function_decl); arg;
3266 arg = TREE_CHAIN (arg))
3267 if (REG_P (DECL_INCOMING_RTL (arg))
3268 && REGNO (DECL_INCOMING_RTL (arg)) == regno
3269 && TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
3271 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
3272 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (arg));
/* Re-derive the promoted mode; a match against the incoming RTL mode
   that differs from DECL_MODE means promotion actually happened.  */
3274 mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
3275 if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
3276 && mode != DECL_MODE (arg))
3278 *pmode = DECL_MODE (arg);
3279 *punsignedp = unsignedp;
3280 return DECL_INCOMING_RTL (arg);
3288 /* Compute the size and offset from the start of the stacked arguments for a
3289 parm passed in mode PASSED_MODE and with type TYPE.
3291 INITIAL_OFFSET_PTR points to the current offset into the stacked
3294 The starting offset and size for this parm are returned in
3295 LOCATE->OFFSET and LOCATE->SIZE, respectively. When IN_REGS is
3296 nonzero, the offset is that of stack slot, which is returned in
3297 LOCATE->SLOT_OFFSET. LOCATE->ALIGNMENT_PAD is the amount of
3298 padding required from the initial offset ptr to the stack slot.
3300 IN_REGS is nonzero if the argument will be passed in registers. It will
3301 never be set if REG_PARM_STACK_SPACE is not defined.
3303 FNDECL is the function in which the argument was defined.
3305 There are two types of rounding that are done. The first, controlled by
3306 FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
3307 list to be aligned to the specific boundary (in bits). This rounding
3308 affects the initial and starting offsets, but not the argument size.
3310 The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
3311 optionally rounds the size of the parm to PARM_BOUNDARY. The
3312 initial offset is not affected by this rounding, while the size always
3313 is and the starting offset may be. */
3315 /* LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
3316 INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
3317 callers pass in the total size of args so far as
3318 INITIAL_OFFSET_PTR. LOCATE->SIZE is always positive. */
/* NOTE(review): some lines (the SIZETREE declaration, conditional guards
   for the pad_to_arg_alignment calls, #endif markers, a declaration of
   S2) are elided in this chunk — verify against upstream GCC function.c. */
3321 locate_and_pad_parm (enum machine_mode passed_mode, tree type, int in_regs,
3322 int partial, tree fndecl ATTRIBUTE_UNUSED,
3323 struct args_size *initial_offset_ptr,
3324 struct locate_and_pad_arg_data *locate)
3327 enum direction where_pad;
3328 unsigned int boundary;
3329 int reg_parm_stack_space = 0;
3330 int part_size_in_regs;
3332 #ifdef REG_PARM_STACK_SPACE
3333 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
3335 /* If we have found a stack parm before we reach the end of the
3336 area reserved for registers, skip that area. */
3339 if (reg_parm_stack_space > 0)
3341 if (initial_offset_ptr->var)
3343 initial_offset_ptr->var
3344 = size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
3345 ssize_int (reg_parm_stack_space));
3346 initial_offset_ptr->constant = 0;
3348 else if (initial_offset_ptr->constant < reg_parm_stack_space)
3349 initial_offset_ptr->constant = reg_parm_stack_space;
3352 #endif /* REG_PARM_STACK_SPACE */
3354 part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);
3357 = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
3358 where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
3359 boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
3360 locate->where_pad = where_pad;
3361 locate->boundary = boundary;
3363 /* Remember if the outgoing parameter requires extra alignment on the
3364 calling function side. */
3365 if (boundary > PREFERRED_STACK_BOUNDARY)
3366 boundary = PREFERRED_STACK_BOUNDARY;
3367 if (cfun->stack_alignment_needed < boundary)
3368 cfun->stack_alignment_needed = boundary;
3370 #ifdef ARGS_GROW_DOWNWARD
3371 locate->slot_offset.constant = -initial_offset_ptr->constant;
3372 if (initial_offset_ptr->var)
3373 locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
3374 initial_offset_ptr->var)
3378 if (where_pad != none
3379 && (!host_integerp (sizetree, 1)
3380 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3381 s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
3382 SUB_PARM_SIZE (locate->slot_offset, s2);
3385 locate->slot_offset.constant += part_size_in_regs;
3388 #ifdef REG_PARM_STACK_SPACE
3389 || REG_PARM_STACK_SPACE (fndecl) > 0
3392 pad_to_arg_alignment (&locate->slot_offset, boundary,
3393 &locate->alignment_pad);
3395 locate->size.constant = (-initial_offset_ptr->constant
3396 - locate->slot_offset.constant);
3397 if (initial_offset_ptr->var)
3398 locate->size.var = size_binop (MINUS_EXPR,
3399 size_binop (MINUS_EXPR,
3401 initial_offset_ptr->var),
3402 locate->slot_offset.var);
3404 /* Pad_below needs the pre-rounded size to know how much to pad
3406 locate->offset = locate->slot_offset;
3407 if (where_pad == downward)
3408 pad_below (&locate->offset, passed_mode, sizetree);
3410 #else /* !ARGS_GROW_DOWNWARD */
3412 #ifdef REG_PARM_STACK_SPACE
3413 || REG_PARM_STACK_SPACE (fndecl) > 0
3416 pad_to_arg_alignment (initial_offset_ptr, boundary,
3417 &locate->alignment_pad);
3418 locate->slot_offset = *initial_offset_ptr;
3420 #ifdef PUSH_ROUNDING
3421 if (passed_mode != BLKmode)
3422 sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
3425 /* Pad_below needs the pre-rounded size to know how much to pad below
3426 so this must be done before rounding up. */
3427 locate->offset = locate->slot_offset;
3428 if (where_pad == downward)
3429 pad_below (&locate->offset, passed_mode, sizetree);
3431 if (where_pad != none
3432 && (!host_integerp (sizetree, 1)
3433 || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
3434 sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
3436 ADD_PARM_SIZE (locate->size, sizetree);
3438 locate->size.constant -= part_size_in_regs;
3439 #endif /* ARGS_GROW_DOWNWARD */
3442 /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
3443 BOUNDARY is measured in bits, but must be a multiple of a storage unit. */
/* ALIGNMENT_PAD is cleared here and receives the padding amount only when
   BOUNDARY exceeds both PARM_BOUNDARY and STACK_BOUNDARY.  */
3446 pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
3447 struct args_size *alignment_pad)
3449 tree save_var = NULL_TREE;
3450 HOST_WIDE_INT save_constant = 0;
3451 int boundary_in_bytes = boundary / BITS_PER_UNIT;
/* Alignment is computed relative to the hard stack pointer, hence the
   STACK_POINTER_OFFSET bias applied before rounding and removed after.  */
3452 HOST_WIDE_INT sp_offset = STACK_POINTER_OFFSET;
3454 #ifdef SPARC_STACK_BOUNDARY_HACK
3455 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
3456 the real alignment of %sp. However, when it does this, the
3457 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
3458 if (SPARC_STACK_BOUNDARY_HACK)
/* Remember the unrounded offset so the padding added can be computed.  */
3462 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3464 save_var = offset_ptr->var;
3465 save_constant = offset_ptr->constant;
3468 alignment_pad->var = NULL_TREE;
3469 alignment_pad->constant = 0;
3471 if (boundary > BITS_PER_UNIT)
/* Variable-sized offset: do the rounding with size trees.  */
3473 if (offset_ptr->var)
3475 tree sp_offset_tree = ssize_int (sp_offset);
3476 tree offset = size_binop (PLUS_EXPR,
3477 ARGS_SIZE_TREE (*offset_ptr),
/* When arguments grow downward, round toward more negative offsets.  */
3479 #ifdef ARGS_GROW_DOWNWARD
3480 tree rounded = round_down (offset, boundary / BITS_PER_UNIT);
3482 tree rounded = round_up (offset, boundary / BITS_PER_UNIT);
3485 offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
3486 /* ARGS_SIZE_TREE includes constant term. */
3487 offset_ptr->constant = 0;
3488 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3489 alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
/* Constant offset: integer rounding, biased by sp_offset.  */
3494 offset_ptr->constant = -sp_offset +
3495 #ifdef ARGS_GROW_DOWNWARD
3496 FLOOR_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3498 CEIL_ROUND (offset_ptr->constant + sp_offset, boundary_in_bytes);
3500 if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
3501 alignment_pad->constant = offset_ptr->constant - save_constant;
/* Add to *OFFSET_PTR the padding needed below a downward-padded argument
   of mode PASSED_MODE and size SIZETREE, i.e. the difference between the
   size rounded up to PARM_BOUNDARY and the actual size.  */
3507 pad_below (struct args_size *offset_ptr, enum machine_mode passed_mode, tree sizetree)
3509 if (passed_mode != BLKmode)
3511 if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
3512 offset_ptr->constant
3513 += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
3514 / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
3515 - GET_MODE_SIZE (passed_mode));
/* BLKmode (or variable-sized) case: compare the tree size against its
   PARM_BOUNDARY-rounded value.  */
3519 if (TREE_CODE (sizetree) != INTEGER_CST
3520 || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
3522 /* Round the size up to multiple of PARM_BOUNDARY bits. */
3523 tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
/* Add the rounded size and subtract the true size: net effect is to
   add just the padding.  */
3525 ADD_PARM_SIZE (*offset_ptr, s2);
3526 SUB_PARM_SIZE (*offset_ptr, sizetree);
3531 /* Walk the tree of blocks describing the binding levels within a function
3532 and warn about variables that might be killed by setjmp or vfork.
3533 This is done after calling flow_analysis and before global_alloc
3534 clobbers the pseudo-regs to hard regs. */
3537 setjmp_vars_warning (tree block)
/* Warn for each register-allocated variable of this block whose register
   is clobbered across a setjmp.  */
3541 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
3543 if (TREE_CODE (decl) == VAR_DECL
3544 && DECL_RTL_SET_P (decl)
3545 && REG_P (DECL_RTL (decl))
3546 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3547 warning (0, "variable %q+D might be clobbered by %<longjmp%>"
/* Recurse into nested binding levels.  */
3552 for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
3553 setjmp_vars_warning (sub);
3556 /* Do the appropriate part of setjmp_vars_warning
3557 but for arguments instead of local variables. */
3560 setjmp_args_warning (void)
/* Walk the current function's parameters and warn for any whose register
   is clobbered across a setjmp.  */
3563 for (decl = DECL_ARGUMENTS (current_function_decl);
3564 decl; decl = TREE_CHAIN (decl))
3565 if (DECL_RTL (decl) != 0
3566 && REG_P (DECL_RTL (decl))
3567 && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
3568 warning (0, "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
3573 /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
3574 and create duplicate blocks. */
3575 /* ??? Need an option to either create block fragments or to create
3576 abstract origin duplicates of a source block. It really depends
3577 on what optimization has been performed. */
3580 reorder_blocks (void)
3582 tree block = DECL_INITIAL (current_function_decl);
3583 VEC(tree,heap) *block_stack;
/* Nothing to do for functions with no outermost block.  */
3585 if (block == NULL_TREE)
3588 block_stack = VEC_alloc (tree, heap, 10);
3590 /* Reset the TREE_ASM_WRITTEN bit for all blocks. */
3591 clear_block_marks (block);
3593 /* Prune the old trees away, so that they don't get in the way. */
3594 BLOCK_SUBBLOCKS (block) = NULL_TREE;
3595 BLOCK_CHAIN (block) = NULL_TREE;
3597 /* Recreate the block tree from the note nesting. */
3598 reorder_blocks_1 (get_insns (), block, &block_stack);
/* reorder_blocks_1 builds the subblock chain in reverse; restore order.  */
3599 BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));
3601 VEC_free (tree, heap, block_stack);
3604 /* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
3607 clear_block_marks (tree block)
/* Recurse into subblocks; iterate along the sibling chain.  */
3611 TREE_ASM_WRITTEN (block) = 0;
3612 clear_block_marks (BLOCK_SUBBLOCKS (block));
3613 block = BLOCK_CHAIN (block);
/* Rebuild the BLOCK tree rooted at CURRENT_BLOCK from the nesting of
   NOTE_INSN_BLOCK_BEG/END notes in INSNS, using *P_BLOCK_STACK to match
   BEG notes with their END notes.  */
3618 reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
3622 for (insn = insns; insn; insn = NEXT_INSN (insn))
3626 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
3628 tree block = NOTE_BLOCK (insn);
/* Use the fragment origin when this block is itself a fragment.  */
3631 origin = (BLOCK_FRAGMENT_ORIGIN (block)
3632 ? BLOCK_FRAGMENT_ORIGIN (block)
3635 /* If we have seen this block before, that means it now
3636 spans multiple address regions. Create a new fragment. */
3637 if (TREE_ASM_WRITTEN (block))
3639 tree new_block = copy_node (block);
/* Link the new fragment at the head of the origin's fragment chain and
   make the note refer to it instead.  */
3641 BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
3642 BLOCK_FRAGMENT_CHAIN (new_block)
3643 = BLOCK_FRAGMENT_CHAIN (origin);
3644 BLOCK_FRAGMENT_CHAIN (origin) = new_block;
3646 NOTE_BLOCK (insn) = new_block;
3650 BLOCK_SUBBLOCKS (block) = 0;
3651 TREE_ASM_WRITTEN (block) = 1;
3652 /* When there's only one block for the entire function,
3653 current_block == block and we mustn't do this, it
3654 will cause infinite recursion. */
3655 if (block != current_block)
3657 if (block != origin)
3658 gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block);
/* Splice BLOCK in as the newest subblock of CURRENT_BLOCK, then descend.  */
3660 BLOCK_SUPERCONTEXT (block) = current_block;
3661 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
3662 BLOCK_SUBBLOCKS (current_block) = block;
3663 current_block = origin;
3665 VEC_safe_push (tree, heap, *p_block_stack, block);
3667 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
/* Pop the matching BEG block, fix the reversed subblock chain, and
   return to the enclosing level.  */
3669 NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
3670 BLOCK_SUBBLOCKS (current_block)
3671 = blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
3672 current_block = BLOCK_SUPERCONTEXT (current_block);
3678 /* Reverse the order of elements in the chain T of blocks,
3679 and return the new head of the chain (old last element). */
3682 blocks_nreverse (tree t)
3684 tree prev = 0, decl, next;
/* Standard in-place singly-linked-list reversal over BLOCK_CHAIN.  */
3685 for (decl = t; decl; decl = next)
3687 next = BLOCK_CHAIN (decl);
3688 BLOCK_CHAIN (decl) = prev;
3694 /* Count the subblocks of the list starting with BLOCK. If VECTOR is
3695 non-NULL, list them all into VECTOR, in a depth-first preorder
3696 traversal of the block tree. Also clear TREE_ASM_WRITTEN in all
   blocks.  */
3700 all_blocks (tree block, tree *vector)
3706 TREE_ASM_WRITTEN (block) = 0;
3708 /* Record this block. */
3710 vector[n_blocks] = block;
3714 /* Record the subblocks, and their subblocks... */
3715 n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
3716 vector ? vector + n_blocks : 0);
3717 block = BLOCK_CHAIN (block);
3723 /* Return a vector containing all the blocks rooted at BLOCK. The
3724 number of elements in the vector is stored in N_BLOCKS_P. The
3725 vector is dynamically allocated; it is the caller's responsibility
3726 to call `free' on the pointer returned. */
3729 get_block_vector (tree block, int *n_blocks_p)
/* First pass counts the blocks, second pass fills the vector.  */
3733 *n_blocks_p = all_blocks (block, NULL);
3734 block_vector = XNEWVEC (tree, *n_blocks_p);
3735 all_blocks (block, block_vector);
3737 return block_vector;
/* Next BLOCK_NUMBER to hand out; persists across functions except for
   SDB/XCOFF, which restart at 1 per function (see below).  */
3740 static GTY(()) int next_block_index = 2;
3742 /* Set BLOCK_NUMBER for all the blocks in FN. */
3745 number_blocks (tree fn)
3751 /* For SDB and XCOFF debugging output, we start numbering the blocks
3752 from 1 within each function, rather than keeping a running
   count.  */
3754 #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
3755 if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
3756 next_block_index = 1;
3759 block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
3761 /* The top-level BLOCK isn't numbered at all. */
3762 for (i = 1; i < n_blocks; ++i)
3763 /* We number the blocks from two. */
3764 BLOCK_NUMBER (block_vector[i]) = next_block_index++;
3766 free (block_vector);
3771 /* If VAR is present in a subblock of BLOCK, return the subblock. */
3774 debug_find_var_in_block_tree (tree var, tree block)
/* Check this block's own variables first, then recurse into subblocks.  */
3778 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
3782 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
3784 tree ret = debug_find_var_in_block_tree (var, t);
3792 /* Allocate a function structure for FNDECL and set its contents
   to the defaults.  FNDECL may be NULL_TREE (dummy function context).  */
3796 allocate_struct_function (tree fndecl)
3799 tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
/* Fresh, zeroed struct function becomes the current one.  */
3801 cfun = ggc_alloc_cleared (sizeof (struct function));
3803 cfun->stack_alignment_needed = STACK_BOUNDARY;
3804 cfun->preferred_stack_boundary = STACK_BOUNDARY;
3806 current_function_funcdef_no = funcdef_no++;
3808 cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
3810 init_eh_for_function ();
/* Give the language front end and the target a chance to initialize
   their per-function state.  */
3812 lang_hooks.function.init (cfun);
3813 if (init_machine_status)
3814 cfun->machine = (*init_machine_status) ();
3819 DECL_STRUCT_FUNCTION (fndecl) = cfun;
3820 cfun->decl = fndecl;
3822 /* APPLE LOCAL begin radar 5732232 - blocks */
3823 /* We cannot support blocks which return aggregates because at this
3824 point we do not have info on the return type. */
3827 result = DECL_RESULT (fndecl);
3828 if (aggregate_value_p (result, fndecl))
3830 #ifdef PCC_STATIC_STRUCT_RETURN
3831 current_function_returns_pcc_struct = 1;
3833 current_function_returns_struct = 1;
3835 /* This code is not used anywhere ! */
3836 current_function_returns_pointer = POINTER_TYPE_P (TREE_TYPE (result));
3838 /* APPLE LOCAL end radar 5732232 - blocks */
/* A function is stdarg if its last declared argument type is not void.  */
3839 current_function_stdarg
3841 && TYPE_ARG_TYPES (fntype) != 0
3842 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3843 != void_type_node));
3845 /* Assume all registers in stdarg functions need to be saved. */
3846 cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
3847 cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
3850 /* Reset cfun, and other non-struct-function variables to defaults as
3851 appropriate for emitting rtl at the start of a function. */
3854 prepare_function_start (tree fndecl)
/* Reuse an existing struct function if FNDECL already has one; otherwise
   allocate a fresh one.  */
3856 if (fndecl && DECL_STRUCT_FUNCTION (fndecl))
3857 cfun = DECL_STRUCT_FUNCTION (fndecl)
3859 allocate_struct_function (fndecl);
3861 init_varasm_status (cfun);
/* CSE is expected only when optimizing.  */
3864 cse_not_expected = ! optimize;
3866 /* Caller save not needed yet. */
3867 caller_save_needed = 0;
3869 /* We haven't done register allocation yet. */
3872 /* Indicate that we have not instantiated virtual registers yet. */
3873 virtuals_instantiated = 0;
3875 /* Indicate that we want CONCATs now. */
3876 generating_concat_p = 1;
3878 /* Indicate we have no need of a frame pointer yet. */
3879 frame_pointer_needed = 0;
3882 /* Initialize the rtl expansion mechanism so that we can do simple things
3883 like generate sequences. This is used to provide a context during global
3884 initialization of some passes. */
3886 init_dummy_function_start (void)
/* A NULL fndecl gives a throwaway function context.  */
3888 prepare_function_start (NULL);
3891 /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
3892 and initialize static variables for generating RTL for the statements
   of the function.  */
3896 init_function_start (tree subr)
3898 prepare_function_start (subr);
3900 /* Prevent ever trying to delete the first instruction of a
3901 function. Also tell final how to output a linenum before the
3902 function prologue. Note linenums could be missing, e.g. when
3903 compiling a Java .class file. */
3904 if (! DECL_IS_BUILTIN (subr))
3905 emit_line_note (DECL_SOURCE_LOCATION (subr));
3907 /* Make sure first insn is a note even if we don't want linenums.
3908 This makes sure the first insn will never be deleted.
3909 Also, final expects a note to appear there. */
3910 emit_note (NOTE_INSN_DELETED);
3912 /* Warn if this value is an aggregate type,
3913 regardless of which calling convention we are using for it. */
3914 if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
3915 warning (OPT_Waggregate_return, "function returns an aggregate");
3918 /* Make sure all values used by the optimization passes have sane
   defaults.  */
3921 init_function_for_compilation (void)
3925 /* No prologue/epilogue insns yet. Make sure that these vectors are
   empty.  */
3927 gcc_assert (VEC_length (int, prologue) == 0);
3928 gcc_assert (VEC_length (int, epilogue) == 0);
3929 gcc_assert (VEC_length (int, sibcall_epilogue) == 0);
/* Pass descriptor hooking init_function_for_compilation into the pass
   manager.  */
3933 struct tree_opt_pass pass_init_function =
3937 init_function_for_compilation, /* execute */
3940 0, /* static_pass_number */
3942 0, /* properties_required */
3943 0, /* properties_provided */
3944 0, /* properties_destroyed */
3945 0, /* todo_flags_start */
3946 0, /* todo_flags_finish */
/* Emit the call to __main (if this target needs one) at the start of
   `main', to run global constructors/initialization.  */
3952 expand_main_function (void)
3954 #if (defined(INVOKE__main) \
3955 || (!defined(HAS_INIT_SECTION) \
3956 && !defined(INIT_SECTION_ASM_OP) \
3957 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
3958 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
3962 /* Expand code to initialize the stack_protect_guard. This is invoked at
3963 the beginning of a function to be protected. */
3965 #ifndef HAVE_stack_protect_set
3966 # define HAVE_stack_protect_set 0
3967 # define gen_stack_protect_set(x,y) (gcc_unreachable (), NULL_RTX)
3971 stack_protect_prologue (void)
3973 tree guard_decl = targetm.stack_protect_guard ();
3976 /* Avoid expand_expr here, because we don't want guard_decl pulled
3977 into registers unless absolutely necessary. And we know that
3978 cfun->stack_protect_guard is a local stack slot, so this skips
   the extra work.  */
3980 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
3981 y = validize_mem (DECL_RTL (guard_decl));
3983 /* Allow the target to copy from Y to X without leaking Y into a
   register.  */
3985 if (HAVE_stack_protect_set)
3987 rtx insn = gen_stack_protect_set (x, y);
3995 /* Otherwise do a straight move. */
3996 emit_move_insn (x, y);
3999 /* Expand code to verify the stack_protect_guard. This is invoked at
4000 the end of a function to be protected. */
4002 #ifndef HAVE_stack_protect_test
4003 # define HAVE_stack_protect_test 0
4004 # define gen_stack_protect_test(x, y, z) (gcc_unreachable (), NULL_RTX)
4008 stack_protect_epilogue (void)
4010 tree guard_decl = targetm.stack_protect_guard ();
4011 rtx label = gen_label_rtx ();
4014 /* Avoid expand_expr here, because we don't want guard_decl pulled
4015 into registers unless absolutely necessary. And we know that
4016 cfun->stack_protect_guard is a local stack slot, so this skips
   the extra work.  */
4018 x = validize_mem (DECL_RTL (cfun->stack_protect_guard));
4019 y = validize_mem (DECL_RTL (guard_decl));
4021 /* Allow the target to compare Y with X without leaking either into
   a register.  */
4023 if (HAVE_stack_protect_test != 0)
4025 tmp = gen_stack_protect_test (x, y, label);
/* Fallback: plain compare-and-jump to LABEL when the guard matches.  */
4033 emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
4036 /* The noreturn predictor has been moved to the tree level. The rtl-level
4037 predictors estimate this branch about 20%, which isn't enough to get
4038 things moved out of line. Since this is the only extant case of adding
4039 a noreturn function at the rtl level, it doesn't seem worth doing ought
4040 except adding the prediction by hand. */
4041 tmp = get_last_insn ();
4043 predict_insn_def (tmp, PRED_NORETURN, TAKEN);
/* Mismatch path: call the target's stack_protect_fail routine.  */
4045 expand_expr_stmt (targetm.stack_protect_fail ());
4049 /* Start the RTL for a new function, and set variables used for
4051 SUBR is the FUNCTION_DECL node.
4052 PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
4053 the function's parameters, which must be run at any return statement. */
4056 expand_function_start (tree subr)
4058 /* Make sure volatile mem refs aren't considered
4059 valid operands of arithmetic insns. */
4060 init_recog_no_volatile ();
4062 current_function_profile
4064 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
4066 current_function_limit_stack
4067 = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
4069 /* Make the label for return statements to jump to. Do not special
4070 case machines with special return instructions -- they will be
4071 handled later during jump, ifcvt, or epilogue creation. */
4072 return_label = gen_label_rtx ();
4074 /* Initialize rtx used to return the value. */
4075 /* Do this before assign_parms so that we copy the struct value address
4076 before any library calls that assign parms might generate. */
4078 /* Decide whether to return the value in memory or in a register. */
4079 if (aggregate_value_p (DECL_RESULT (subr), subr))
4081 /* Returning something that won't go in a register. */
4082 rtx value_address = 0;
4084 #ifdef PCC_STATIC_STRUCT_RETURN
4085 if (current_function_returns_pcc_struct)
4087 int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
4088 value_address = assemble_static_space (size);
4093 rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
4094 /* Expect to be passed the address of a place to store the value.
4095 If it is passed as an argument, assign_parms will take care of
   it.  */
4099 value_address = gen_reg_rtx (Pmode);
4100 emit_move_insn (value_address, sv);
4105 rtx x = value_address;
4106 if (!DECL_BY_REFERENCE (DECL_RESULT (subr)))
4108 x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), x);
4109 set_mem_attributes (x, DECL_RESULT (subr), 1);
4111 SET_DECL_RTL (DECL_RESULT (subr), x);
4114 else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
4115 /* If return mode is void, this decl rtl should not be used. */
4116 SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
4119 /* Compute the return values into a pseudo reg, which we will copy
4120 into the true return register after the cleanups are done. */
4121 tree return_type = TREE_TYPE (DECL_RESULT (subr));
4122 if (TYPE_MODE (return_type) != BLKmode
4123 && targetm.calls.return_in_msb (return_type))
4124 /* expand_function_end will insert the appropriate padding in
4125 this case. Use the return value's natural (unpadded) mode
4126 within the function proper. */
4127 SET_DECL_RTL (DECL_RESULT (subr),
4128 gen_reg_rtx (TYPE_MODE (return_type)));
4131 /* In order to figure out what mode to use for the pseudo, we
4132 figure out what the mode of the eventual return register will
4133 actually be, and use that. */
4134 rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
4136 /* Structures that are returned in registers are not
4137 aggregate_value_p, so we may see a PARALLEL or a REG. */
4138 if (REG_P (hard_reg))
4139 SET_DECL_RTL (DECL_RESULT (subr),
4140 gen_reg_rtx (GET_MODE (hard_reg)));
4143 gcc_assert (GET_CODE (hard_reg) == PARALLEL);
4144 SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
4148 /* Set DECL_REGISTER flag so that expand_function_end will copy the
4149 result to the real return register(s). */
4150 DECL_REGISTER (DECL_RESULT (subr)) = 1;
4153 /* Initialize rtx for parameters and local variables.
4154 In some cases this requires emitting insns. */
4155 assign_parms (subr);
4157 /* If function gets a static chain arg, store it. */
4158 if (cfun->static_chain_decl)
4160 tree parm = cfun->static_chain_decl;
4161 rtx local = gen_reg_rtx (Pmode);
4163 set_decl_incoming_rtl (parm, static_chain_incoming_rtx);
4164 SET_DECL_RTL (parm, local);
4165 mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
4167 emit_move_insn (local, static_chain_incoming_rtx);
4170 /* If the function receives a non-local goto, then store the
4171 bits we need to restore the frame pointer. */
4172 if (cfun->nonlocal_goto_save_area)
4177 /* ??? We need to do this save early. Unfortunately here is
4178 before the frame variable gets declared. Help out... */
4179 expand_var (TREE_OPERAND (cfun->nonlocal_goto_save_area, 0));
/* Store the frame pointer into slot 0 of the save area.  */
4181 t_save = build4 (ARRAY_REF, ptr_type_node,
4182 cfun->nonlocal_goto_save_area,
4183 integer_zero_node, NULL_TREE, NULL_TREE);
4184 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
4185 r_save = convert_memory_address (Pmode, r_save);
4187 emit_move_insn (r_save, virtual_stack_vars_rtx);
4188 update_nonlocal_goto_save_area ();
4191 /* The following was moved from init_function_start.
4192 The move is supposed to make sdb output more accurate. */
4193 /* Indicate the beginning of the function body,
4194 as opposed to parm setup. */
4195 emit_note (NOTE_INSN_FUNCTION_BEG);
4197 gcc_assert (NOTE_P (get_last_insn ()));
4199 parm_birth_insn = get_last_insn ();
4201 if (current_function_profile)
4204 PROFILE_HOOK (current_function_funcdef_no);
4208 /* After the display initializations is where the stack checking
   probe should go, marked by this placeholder note.  */
4210 if(flag_stack_check)
4211 stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
4213 /* Make sure there is a line number after the function entry setup code. */
4214 force_next_line_note ();
4217 /* Undo the effects of init_dummy_function_start. */
4219 expand_dummy_function_end (void)
4221 /* End any sequences that failed to be closed due to syntax errors. */
4222 while (in_sequence_p ())
4225 /* Outside function body, can't compute type's actual size
4226 until next function's body starts. */
/* Release the dummy function context entirely.  */
4228 free_after_parsing (cfun);
4229 free_after_compilation (cfun);
4233 /* Call DOIT for each hard register used as a return value from
4234 the current function. */
4237 diddle_return_value (void (*doit) (rtx, void *), void *arg)
4239 rtx outgoing = current_function_return_rtx;
/* The return rtx is either a single REG or a PARALLEL of REGs.  */
4244 if (REG_P (outgoing))
4245 (*doit) (outgoing, arg);
4246 else if (GET_CODE (outgoing) == PARALLEL)
4250 for (i = 0; i < XVECLEN (outgoing, 0); i++)
4252 rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
/* Only hard registers are of interest here.  */
4254 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
/* Emit a CLOBBER of REG; callback for diddle_return_value.  */
4261 do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4263 emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
/* Clobber the return register(s) so they are not considered live past
   this point.  */
4267 clobber_return_register (void)
4269 diddle_return_value (do_clobber_return_reg, NULL);
4271 /* In case we do use pseudo to return value, clobber it too. */
4272 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4274 tree decl_result = DECL_RESULT (current_function_decl);
4275 rtx decl_rtl = DECL_RTL (decl_result);
4276 if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
4278 do_clobber_return_reg (decl_rtl, NULL);
/* Emit a USE of REG; callback for diddle_return_value.  */
4284 do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
4286 emit_insn (gen_rtx_USE (VOIDmode, reg));
/* Mark the return register(s) as used at the end of the function.  */
4290 use_return_register (void)
4292 diddle_return_value (do_use_return_reg, NULL);
4295 /* Possibly warn about unused parameters. */
4297 do_warn_unused_parameter (tree fn)
/* Warn only for named, user-written parameters that were never used.  */
4301 for (decl = DECL_ARGUMENTS (fn);
4302 decl; decl = TREE_CHAIN (decl))
4303 if (!TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
4304 && DECL_NAME (decl) && !DECL_ARTIFICIAL (decl))
4305 warning (OPT_Wunused_parameter, "unused parameter %q+D", decl);
/* Cached template trampoline rtx, GC-rooted.  */
4308 static GTY(()) rtx initial_trampoline;
4310 /* Generate RTL for the end of the current function. */
4313 expand_function_end (void)
4317 /* If arg_pointer_save_area was referenced only from a nested
4318 function, we will not have initialized it yet. Do that now. */
4319 if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
4320 get_arg_pointer_save_area (cfun);
4322 /* If we are doing stack checking and this function makes calls,
4323 do a stack probe at the start of the function to ensure we have enough
4324 space for another stack frame. */
4325 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
4329 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4333 probe_stack_range (STACK_CHECK_PROTECT,
4334 GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
/* Insert the probe sequence at the placeholder note emitted in
   expand_function_start.  */
4337 emit_insn_before (seq, stack_check_probe_note);
4342 /* Possibly warn about unused parameters.
4343 When frontend does unit-at-a-time, the warning is already
4344 issued at finalization time. */
4345 if (warn_unused_parameter
4346 && !lang_hooks.callgraph.expand_function)
4347 do_warn_unused_parameter (current_function_decl);
4349 /* End any sequences that failed to be closed due to syntax errors. */
4350 while (in_sequence_p ())
4353 clear_pending_stack_adjust ();
4354 do_pending_stack_adjust ();
4356 /* Mark the end of the function body.
4357 If control reaches this insn, the function can drop through
4358 without returning a value. */
4359 emit_note (NOTE_INSN_FUNCTION_END);
4361 /* Must mark the last line number note in the function, so that the test
4362 coverage code can avoid counting the last line twice. This just tells
4363 the code to ignore the immediately following line note, since there
4364 already exists a copy of this note somewhere above. This line number
4365 note is still needed for debugging though, so we can't delete it. */
4366 if (flag_test_coverage)
4367 emit_note (NOTE_INSN_REPEATED_LINE_NUMBER);
4369 /* Output a linenumber for the end of the function.
4370 SDB depends on this. */
4371 force_next_line_note ();
4372 emit_line_note (input_location);
4374 /* Before the return label (if any), clobber the return
4375 registers so that they are not propagated live to the rest of
4376 the function. This can only happen with functions that drop
4377 through; if there had been a return statement, there would
4378 have either been a return rtx, or a jump to the return label.
4380 We delay actual code generation after the current_function_value_rtx
   is computed.  */
4382 clobber_after = get_last_insn ();
4384 /* Output the label for the actual return from the function. */
4385 emit_label (return_label);
4387 #ifdef TARGET_PROFILER_EPILOGUE
4388 if (current_function_profile && TARGET_PROFILER_EPILOGUE)
/* Lazily initialize the .mexitcount libfunc on first use.  */
4390 static rtx mexitcount_libfunc;
4391 static int initialized;
4395 mexitcount_libfunc = init_one_libfunc (".mexitcount");
4398 emit_library_call (mexitcount_libfunc, LCT_NORMAL, VOIDmode, 0);
4402 if (USING_SJLJ_EXCEPTIONS)
4404 /* Let except.c know where it should emit the call to unregister
4405 the function context for sjlj exceptions. */
4406 if (flag_exceptions)
4407 sjlj_emit_function_exit_after (get_last_insn ());
4411 /* @@@ This is a kludge. We want to ensure that instructions that
4412 may trap are not moved into the epilogue by scheduling, because
4413 we don't always emit unwind information for the epilogue.
4414 However, not all machine descriptions define a blockage insn, so
4415 emit an ASM_INPUT to act as one. */
4416 if (flag_non_call_exceptions)
4417 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
4420 /* If this is an implementation of throw, do what's necessary to
4421 communicate between __builtin_eh_return and the epilogue. */
4422 expand_eh_return ();
4424 /* If scalar return value was computed in a pseudo-reg, or was a named
4425 return value that got dumped to the stack, copy that to the hard
   return register.  */
4427 if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
4429 tree decl_result = DECL_RESULT (current_function_decl);
4430 rtx decl_rtl = DECL_RTL (decl_result);
4432 if (REG_P (decl_rtl)
4433 ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
4434 : DECL_REGISTER (decl_result))
4436 rtx real_decl_rtl = current_function_return_rtx;
4438 /* This should be set in assign_parms. */
4439 gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));
4441 /* If this is a BLKmode structure being returned in registers,
4442 then use the mode computed in expand_return. Note that if
4443 decl_rtl is memory, then its mode may have been changed,
4444 but that current_function_return_rtx has not. */
4445 if (GET_MODE (real_decl_rtl) == BLKmode)
4446 PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
4448 /* If a non-BLKmode return value should be padded at the least
4449 significant end of the register, shift it left by the appropriate
4450 amount. BLKmode results are handled using the group load/store
   machinery.  */
4452 if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
4453 && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
4455 emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
4456 REGNO (real_decl_rtl)),
4458 shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
4460 /* If a named return value dumped decl_return to memory, then
4461 we may need to re-do the PROMOTE_MODE signed/unsigned
   extension.  */
4463 else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
4465 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
4467 if (targetm.calls.promote_function_return (TREE_TYPE (current_function_decl)))
4468 promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
4471 convert_move (real_decl_rtl, decl_rtl, unsignedp);
4473 else if (GET_CODE (real_decl_rtl) == PARALLEL)
4475 /* If expand_function_start has created a PARALLEL for decl_rtl,
4476 move the result to the real return registers. Otherwise, do
4477 a group load from decl_rtl for a named return. */
4478 if (GET_CODE (decl_rtl) == PARALLEL)
4479 emit_group_move (real_decl_rtl, decl_rtl);
4481 emit_group_load (real_decl_rtl, decl_rtl,
4482 TREE_TYPE (decl_result),
4483 int_size_in_bytes (TREE_TYPE (decl_result)));
4485 /* In the case of complex integer modes smaller than a word, we'll
4486 need to generate some non-trivial bitfield insertions. Do that
4487 on a pseudo and not the hard register. */
4488 else if (GET_CODE (decl_rtl) == CONCAT
4489 && GET_MODE_CLASS (GET_MODE (decl_rtl)) == MODE_COMPLEX_INT
4490 && GET_MODE_BITSIZE (GET_MODE (decl_rtl)) <= BITS_PER_WORD)
4492 int old_generating_concat_p;
/* Temporarily disable CONCAT generation so the pseudo is a plain reg.  */
4495 old_generating_concat_p = generating_concat_p;
4496 generating_concat_p = 0;
4497 tmp = gen_reg_rtx (GET_MODE (decl_rtl));
4498 generating_concat_p = old_generating_concat_p;
4500 emit_move_insn (tmp, decl_rtl);
4501 emit_move_insn (real_decl_rtl, tmp);
/* Simple case: a plain move into the hard return register.  */
4504 emit_move_insn (real_decl_rtl, decl_rtl);
4508 /* If returning a structure, arrange to return the address of the value
4509 in a place where debuggers expect to find it.
4511 If returning a structure PCC style,
4512 the caller also depends on this value.
4513 And current_function_returns_pcc_struct is not necessarily set. */
4514 if (current_function_returns_struct
4515 || current_function_returns_pcc_struct)
4517 rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
4518 tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
4521 if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
4522 type = TREE_TYPE (type);
4524 value_address = XEXP (value_address, 0);
4526 outgoing = targetm.calls.function_value (build_pointer_type (type),
4527 current_function_decl, true);
4529 /* Mark this as a function return value so integrate will delete the
4530 assignment and USE below when inlining this function. */
4531 REG_FUNCTION_VALUE_P (outgoing) = 1;
4533 /* The address may be ptr_mode and OUTGOING may be Pmode. */
4534 value_address = convert_memory_address (GET_MODE (outgoing),
4537 emit_move_insn (outgoing, value_address);
4539 /* Show return register used to hold result (in this case the address
   of the value).  */
4541 current_function_return_rtx = outgoing;
4544 /* Emit the actual code to clobber return register. */
4549 clobber_return_register ();
4550 expand_naked_return ();
/* The clobbers were generated in a sequence; place them before the
   return label, at the point recorded earlier.  */
4554 emit_insn_after (seq, clobber_after);
4557 /* Output the label for the naked return from the function. */
4558 emit_label (naked_return_label);
4560 /* If stack protection is enabled for this function, check the guard. */
4561 if (cfun->stack_protect_guard)
4562 stack_protect_epilogue ();
4564 /* If we had calls to alloca, and this machine needs
4565 an accurate stack pointer to exit the function,
4566 insert some code to save and restore the stack pointer. */
4567 if (! EXIT_IGNORE_STACK
4568 && current_function_calls_alloca)
4572 emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
4573 emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
4576 /* ??? This should no longer be necessary since stupid is no longer with
4577 us, but there are some parts of the compiler (eg reload_combine, and
4578 sh mach_dep_reorg) that still try and compute their own lifetime info
4579 instead of using the general framework. */
4580 use_return_register ();
/* Return the stack slot used to save the incoming arg pointer for
   function F, allocating and initializing it on first use.
   NOTE(review): this extract is line-sampled; declarations, braces and
   some statements between the visible lines are elided.  Comments below
   describe only what the visible code shows.  */
4584 get_arg_pointer_save_area (struct function *f)
4586 rtx ret = f->x_arg_pointer_save_area;
/* Lazily allocate the Pmode-sized save slot in F's frame and cache it.  */
4590 ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
4591 f->x_arg_pointer_save_area = ret;
/* Emit the initializing store only for the current function, and only
   once (guarded by arg_pointer_save_area_init).  */
4594 if (f == cfun && ! f->arg_pointer_save_area_init)
4598 /* Save the arg pointer at the beginning of the function. The
4599 generated stack slot may not be a valid memory address, so we
4600 have to check it and fix it if necessary. */
4602 emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
/* Splice the save sequence in at the very start of the function's
   topmost insn chain.  */
4606 push_topmost_sequence ();
4607 emit_insn_after (seq, entry_of_function ());
4608 pop_topmost_sequence ();
4614 /* Extend a vector that records the INSN_UIDs of INSNS
4615 (a list of one or more insns). */
4618 record_insns (rtx insns, VEC(int,heap) **vecp)
/* Walk the chain starting at INSNS and append each insn's UID to *VECP;
   used to remember which insns belong to the prologue/epilogue.  */
4622 for (tmp = insns; tmp != NULL_RTX; tmp = NEXT_INSN (tmp))
4623 VEC_safe_push (int, heap, *vecp, INSN_UID (tmp));
4626 /* Set the locator of the insn chain starting at INSN to LOC. */
4628 set_insn_locators (rtx insn, int loc)
/* Overwrite INSN_LOCATOR on every insn from INSN to the end of the chain.  */
4630 while (insn != NULL_RTX)
4633 INSN_LOCATOR (insn) = loc;
4634 insn = NEXT_INSN (insn);
4638 /* Determine how many INSN_UIDs in VEC are part of INSN. Because we can
4639 be running after reorg, SEQUENCE rtl is possible. */
4642 contains (rtx insn, VEC(int,heap) **vec)
/* A SEQUENCE pattern wraps several insns in a single insn (e.g. after
   delay-slot filling); compare every element of the sequence against
   every recorded UID.  */
4646 if (NONJUMP_INSN_P (insn)
4647 && GET_CODE (PATTERN (insn)) == SEQUENCE)
4650 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
4651 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4652 if (INSN_UID (XVECEXP (PATTERN (insn), 0, i))
4653 == VEC_index (int, *vec, j))
/* Ordinary insn: a single UID membership test over VEC suffices.  */
4659 for (j = VEC_length (int, *vec) - 1; j >= 0; --j)
4660 if (INSN_UID (insn) == VEC_index (int, *vec, j))
/* Return nonzero if INSN belongs to either the recorded prologue or
   the recorded epilogue insns (see the `prologue' and `epilogue'
   UID vectors maintained by record_insns).  */
4667 prologue_epilogue_contains (rtx insn)
4669 if (contains (insn, &prologue))
4671 if (contains (insn, &epilogue))
/* Return nonzero if INSN is part of a recorded sibcall epilogue.
   When no sibcall epilogues were recorded the (elided) fallthrough
   presumably returns 0 -- TODO confirm against full source.  */
4677 sibcall_epilogue_contains (rtx insn)
4679 if (sibcall_epilogue)
4680 return contains (insn, &sibcall_epilogue);
4685 /* Insert gen_return at the end of block BB. This also means updating
4686 block_for_insn appropriately. */
4689 emit_return_into_block (basic_block bb, rtx line_note)
4691 emit_jump_insn_after (gen_return (), BB_END (bb));
/* If a source line note was supplied, copy it just before the new
   return insn (the elided guard presumably tests LINE_NOTE -- confirm).  */
4693 emit_note_copy_after (line_note, PREV_INSN (BB_END (bb)));
4695 #endif /* HAVE_return */
4697 #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
4699 /* These functions convert the epilogue into a variant that does not
4700 modify the stack pointer. This is used in cases where a function
4701 returns an object whose size is not known until it is computed.
4702 The called function leaves the object on the stack, leaves the
4703 stack depressed, and returns a pointer to the object.
4705 What we need to do is track all modifications and references to the
4706 stack pointer, deleting the modifications and changing the
4707 references to point to the location the stack pointer would have
4708 pointed to had the modifications taken place.
4710 These functions need to be portable so we need to make as few
4711 assumptions about the epilogue as we can. However, the epilogue
4712 basically contains three things: instructions to reset the stack
4713 pointer, instructions to reload registers, possibly including the
4714 frame pointer, and an instruction to return to the caller.
4716 We must be sure of what a relevant epilogue insn is doing. We also
4717 make no attempt to validate the insns we make since if they are
4718 invalid, we probably can't do anything valid. The intent is that
4719 these routines get "smarter" as more and more machines start to use
4720 them and they try operating on different epilogues.
4722 We use the following structure to track what the part of the
4723 epilogue that we've already processed has done. We keep two copies
4724 of the SP equivalence, one for use during the insn we are
4725 processing and one for use in the next insn. The difference is
4726 because one part of a PARALLEL may adjust SP and the other may use
/* State tracked while rewriting a stack-depressed epilogue.  Two copies
   of the SP equivalence are kept: the `sp_*' pair is valid during the
   insn being processed, the `new_sp_*' pair takes effect afterwards
   (a PARALLEL may both adjust and use SP in one insn).  NOTE(review):
   the struct header and closing lines are elided in this extract.  */
4731 rtx sp_equiv_reg; /* REG that SP is set from, perhaps SP. */
4732 HOST_WIDE_INT sp_offset; /* Offset from SP_EQUIV_REG of present SP. */
4733 rtx new_sp_equiv_reg; /* REG to be used at end of insn. */
4734 HOST_WIDE_INT new_sp_offset; /* Offset to be used at end of insn. */
4735 rtx equiv_reg_src; /* If nonzero, the value that SP_EQUIV_REG
4736 should be set to once we no longer need
4738 rtx const_equiv[FIRST_PSEUDO_REGISTER]; /* Any known constant equivalences
4742 static void handle_epilogue_set (rtx, struct epi_info *);
4743 static void update_epilogue_consts (rtx, rtx, void *);
4744 static void emit_equiv_load (struct epi_info *);
4746 /* Modify INSN, a list of one or more insns that is part of the epilogue, to
4747 no modifications to the stack pointer. Return the new list of insns. */
/* NOTE(review): this extract is line-sampled; braces, declarations and
   several statements are elided, so comments below annotate only the
   visible logic.  */
4750 keep_stack_depressed (rtx insns)
4753 struct epi_info info;
4756 /* If the epilogue is just a single instruction, it must be OK as is. */
4757 if (NEXT_INSN (insns) == NULL_RTX)
4760 /* Otherwise, start a sequence, initialize the information we have, and
4761 process all the insns we were given. */
/* Initially SP is equivalent to itself with no pending equivalent load.  */
4764 info.sp_equiv_reg = stack_pointer_rtx;
4766 info.equiv_reg_src = 0;
4768 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
4769 info.const_equiv[j] = 0;
/* Main loop: examine each epilogue insn in turn.  */
4773 while (insn != NULL_RTX)
4775 next = NEXT_INSN (insn);
4784 /* If this insn references the register that SP is equivalent to and
4785 we have a pending load to that register, we must force out the load
4786 first and then indicate we no longer know what SP's equivalent is. */
4787 if (info.equiv_reg_src != 0
4788 && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
4790 emit_equiv_load (&info);
4791 info.sp_equiv_reg = 0;
/* By default the equivalence carries over unchanged to the next insn.  */
4794 info.new_sp_equiv_reg = info.sp_equiv_reg;
4795 info.new_sp_offset = info.sp_offset;
4797 /* If this is a (RETURN) and the return address is on the stack,
4798 update the address and change to an indirect jump. */
4799 if (GET_CODE (PATTERN (insn)) == RETURN
4800 || (GET_CODE (PATTERN (insn)) == PARALLEL
4801 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
4803 rtx retaddr = INCOMING_RETURN_ADDR_RTX;
4805 HOST_WIDE_INT offset = 0;
4806 rtx jump_insn, jump_set;
4808 /* If the return address is in a register, we can emit the insn
4809 unchanged. Otherwise, it must be a MEM and we see what the
4810 base register and offset are. In any case, we have to emit any
4811 pending load to the equivalent reg of SP, if any. */
4812 if (REG_P (retaddr))
4814 emit_equiv_load (&info);
4822 gcc_assert (MEM_P (retaddr));
4824 ret_ptr = XEXP (retaddr, 0);
4826 if (REG_P (ret_ptr))
4828 base = gen_rtx_REG (Pmode, REGNO (ret_ptr));
/* Otherwise the address must be (plus REG CONST_INT).  */
4833 gcc_assert (GET_CODE (ret_ptr) == PLUS
4834 && REG_P (XEXP (ret_ptr, 0))
4835 && GET_CODE (XEXP (ret_ptr, 1)) == CONST_INT);
4836 base = gen_rtx_REG (Pmode, REGNO (XEXP (ret_ptr, 0)));
4837 offset = INTVAL (XEXP (ret_ptr, 1));
4841 /* If the base of the location containing the return pointer
4842 is SP, we must update it with the replacement address. Otherwise,
4843 just build the necessary MEM. */
4844 retaddr = plus_constant (base, offset);
4845 if (base == stack_pointer_rtx)
4846 retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
4847 plus_constant (info.sp_equiv_reg,
4850 retaddr = gen_rtx_MEM (Pmode, retaddr);
4851 MEM_NOTRAP_P (retaddr) = 1;
4853 /* If there is a pending load to the equivalent register for SP
4854 and we reference that register, we must load our address into
4855 a scratch register and then do that load. */
4856 if (info.equiv_reg_src
4857 && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
/* Search for a call-clobbered, non-fixed hard reg that is dead at
   function exit, not used by the pending load, and holds no tracked
   constant, to serve as the scratch register.  */
4862 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
4863 if (HARD_REGNO_MODE_OK (regno, Pmode)
4864 && !fixed_regs[regno]
4865 && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
4867 (EXIT_BLOCK_PTR->il.rtl->global_live_at_start, regno)
4868 && !refers_to_regno_p (regno,
4869 regno + hard_regno_nregs[regno]
4871 info.equiv_reg_src, NULL)
4872 && info.const_equiv[regno] == 0)
4875 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
4877 reg = gen_rtx_REG (Pmode, regno);
4878 emit_move_insn (reg, retaddr);
/* Flush any pending equivalent-register load, then replace the RETURN
   with an indirect jump through the computed return address.  */
4882 emit_equiv_load (&info);
4883 jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));
4885 /* Show the SET in the above insn is a RETURN. */
4886 jump_set = single_set (jump_insn);
4887 gcc_assert (jump_set);
4888 SET_IS_RETURN_P (jump_set) = 1;
4891 /* If SP is not mentioned in the pattern and its equivalent register, if
4892 any, is not modified, just emit it. Otherwise, if neither is set,
4893 replace the reference to SP and emit the insn. If none of those are
4894 true, handle each SET individually. */
4895 else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
4896 && (info.sp_equiv_reg == stack_pointer_rtx
4897 || !reg_set_p (info.sp_equiv_reg, insn)))
4899 else if (! reg_set_p (stack_pointer_rtx, insn)
4900 && (info.sp_equiv_reg == stack_pointer_rtx
4901 || !reg_set_p (info.sp_equiv_reg, insn)))
/* SP is only read here: rewrite the reference in place and verify the
   replacement was accepted.  */
4905 changed = validate_replace_rtx (stack_pointer_rtx,
4906 plus_constant (info.sp_equiv_reg,
4909 gcc_assert (changed);
4913 else if (GET_CODE (PATTERN (insn)) == SET)
4914 handle_epilogue_set (PATTERN (insn), &info);
4915 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
4917 for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
4918 if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
4919 handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
/* Commit the per-insn SP equivalence for the next iteration.  */
4924 info.sp_equiv_reg = info.new_sp_equiv_reg;
4925 info.sp_offset = info.new_sp_offset;
4927 /* Now update any constants this insn sets. */
4928 note_stores (PATTERN (insn), update_epilogue_consts, &info);
4932 insns = get_insns ();
4937 /* SET is a SET from an insn in the epilogue. P is a pointer to the epi_info
4938 structure that contains information about what we've seen so far. We
4939 process this SET by either updating that data or by emitting one or
/* NOTE(review): several lines of this function are elided in this
   extract; comments annotate only the visible logic.  */
4943 handle_epilogue_set (rtx set, struct epi_info *p)
4945 /* First handle the case where we are setting SP. Record what it is being
4946 set from, which we must be able to determine */
4947 if (reg_set_p (stack_pointer_rtx, set))
4949 gcc_assert (SET_DEST (set) == stack_pointer_rtx);
/* SP = (plus REG X): X must be a constant, or a register whose
   constant value we have been tracking in const_equiv.  */
4951 if (GET_CODE (SET_SRC (set)) == PLUS)
4953 p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
4954 if (GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
4955 p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
4958 gcc_assert (REG_P (XEXP (SET_SRC (set), 1))
4959 && (REGNO (XEXP (SET_SRC (set), 1))
4960 < FIRST_PSEUDO_REGISTER)
4961 && p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
4963 = INTVAL (p->const_equiv[REGNO (XEXP (SET_SRC (set), 1))]);
/* Otherwise SP is being copied from SET_SRC directly, offset 0.  */
4967 p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;
4969 /* If we are adjusting SP, we adjust from the old data. */
4970 if (p->new_sp_equiv_reg == stack_pointer_rtx)
4972 p->new_sp_equiv_reg = p->sp_equiv_reg;
4973 p->new_sp_offset += p->sp_offset;
4976 gcc_assert (p->new_sp_equiv_reg && REG_P (p->new_sp_equiv_reg));
4981 /* Next handle the case where we are setting SP's equivalent
4982 register. We must not already have a value to set it to. We
4983 could update, but there seems little point in handling that case.
4984 Note that we have to allow for the case where we are setting the
4985 register set in the previous part of a PARALLEL inside a single
4986 insn. But use the old offset for any updates within this insn.
4987 We must allow for the case where the register is being set in a
4988 different (usually wider) mode than Pmode). */
4989 else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
4991 gcc_assert (!p->equiv_reg_src
4992 && REG_P (p->new_sp_equiv_reg)
4993 && REG_P (SET_DEST (set))
4994 && (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set)))
4996 && REGNO (p->new_sp_equiv_reg) == REGNO (SET_DEST (set)));
/* Record (do not emit yet) the value the equivalent register should
   eventually receive, with SP references rewritten.  */
4998 = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
4999 plus_constant (p->sp_equiv_reg,
5003 /* Otherwise, replace any references to SP in the insn to its new value
5004 and emit the insn. */
5007 SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
5008 plus_constant (p->sp_equiv_reg,
5010 SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
5011 plus_constant (p->sp_equiv_reg,
5017 /* Update the tracking information for registers set to constants. */
/* note_stores callback: DEST is the store destination, X the SET or
   CLOBBER rtx, DATA the epi_info being updated.  */
5020 update_epilogue_consts (rtx dest, rtx x, void *data)
5022 struct epi_info *p = (struct epi_info *) data;
/* Only hard registers are tracked in const_equiv.  */
5025 if (!REG_P (dest) || REGNO (dest) >= FIRST_PSEUDO_REGISTER)
5028 /* If we are either clobbering a register or doing a partial set,
5029 show we don't know the value. */
5030 else if (GET_CODE (x) == CLOBBER || ! rtx_equal_p (dest, SET_DEST (x)))
5031 p->const_equiv[REGNO (dest)] = 0;
5033 /* If we are setting it to a constant, record that constant. */
5034 else if (GET_CODE (SET_SRC (x)) == CONST_INT)
5035 p->const_equiv[REGNO (dest)] = SET_SRC (x);
5037 /* If this is a binary operation between a register we have been tracking
5038 and a constant, see if we can compute a new constant value. */
5039 else if (ARITHMETIC_P (SET_SRC (x))
5040 && REG_P (XEXP (SET_SRC (x), 0))
5041 && REGNO (XEXP (SET_SRC (x), 0)) < FIRST_PSEUDO_REGISTER
5042 && p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))] != 0
5043 && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
5044 && 0 != (new = simplify_binary_operation
5045 (GET_CODE (SET_SRC (x)), GET_MODE (dest),
5046 p->const_equiv[REGNO (XEXP (SET_SRC (x), 0))],
5047 XEXP (SET_SRC (x), 1)))
5048 && GET_CODE (new) == CONST_INT)
5049 p->const_equiv[REGNO (dest)] = new;
5051 /* Otherwise, we can't do anything with this value. */
5053 p->const_equiv[REGNO (dest)] = 0;
5056 /* Emit an insn to do the load shown in p->equiv_reg_src, if needed. */
5059 emit_equiv_load (struct epi_info *p)
/* Nothing to do unless a load is actually pending.  */
5061 if (p->equiv_reg_src != 0)
5063 rtx dest = p->sp_equiv_reg;
/* If the pending value's mode differs from the destination register's,
   rebuild the destination REG in the source's mode (same hard regno).  */
5065 if (GET_MODE (p->equiv_reg_src) != GET_MODE (dest))
5066 dest = gen_rtx_REG (GET_MODE (p->equiv_reg_src),
5067 REGNO (p->sp_equiv_reg));
/* Emit the load and clear the pending-load marker.  */
5069 emit_move_insn (dest, p->equiv_reg_src);
5070 p->equiv_reg_src = 0;
5075 /* APPLE LOCAL begin radar 6163705, Blocks prologues */
5077 /* The function should only be called for Blocks functions.
5079 On being called, the main instruction list for the Blocks function
5080 may contain instructions for setting up the ref_decl and byref_decl
5081 variables in the Block. Those isns really need to go before the
5082 function prologue note rather than after. If such instructions are
5083 present, they are identifiable by their source line number, which
5084 will be one line preceding the declaration of the function. If
5085 they are present, there will also be a source line note instruction
5088 This function does a set of things:
5089 - It finds the first such prologue insn.
5090 - It finds the last such prologue insn.
5091 - It changes the insn locator of all such prologue insns to
5092 the prologue locator.
5093 - It finds the source line note for the bogus location and
5095 - It decides if it is safe to place the prologue end note
5096 after the last prologue insn it finds, and if so, returns
5097 the last prologue insn (otherwise it returns NULL).
5099 This function makes the following checks to determine if it is
5100 safe to move the prologue end note to just below the last
5101 prologue insn it finds. If ALL of the checks succeed then it
5102 is safe. If any check fails, this function returns NULL. The
5103 checks it makes are:
5105 - There were no INSN_P instructions that occurred before the
5106 first prologue insn.
5107 - If there are any non-prologue insns between the first & last
5108 prologue insn, the non-prologue insns do not outnumber the
5110 - The first prologue insn & the last prologue insn are in the
/* APPLE-local helper for Blocks functions: identify the insns that set
   up ref_decl/byref_decl variables (recognizable by a source line one
   before the function's declaration), retag them with the prologue
   locator, and decide whether the prologue-end note can safely follow
   the last of them.  Returns the last such insn, or NULL when the move
   is unsafe or no prologue insns exist.  NOTE(review): some lines are
   elided in this extract.  */
5115 find_block_prologue_insns (void)
5117 rtx first_prologue_insn = NULL;
5118 rtx last_prologue_insn = NULL;
5119 rtx line_number_note = NULL;
5121 int num_prologue_insns = 0;
5122 int total_insns = 0;
/* Blocks setup insns carry the source line immediately preceding the
   function declaration.  */
5123 int prologue_line = DECL_SOURCE_LINE (cfun->decl) - 1;
5124 bool other_insns_before_prologue = false;
5125 bool start_of_fnbody_found = false;
5127 /* Go through all the insns and find the first prologue insn, the
5128 last prologue insn, the source line location note, and whether or
5129 not there are any "real" insns that occur before the first
5130 prologue insn. Re-set the insn locator for prologue insns to the
5131 prologue locator. */
5133 for (tmp_insn = get_insns(); tmp_insn; tmp_insn = NEXT_INSN (tmp_insn))
5135 if (INSN_P (tmp_insn))
5137 if (insn_line (tmp_insn) == prologue_line)
5139 if (!first_prologue_insn)
5140 first_prologue_insn = tmp_insn;
5141 num_prologue_insns++;
5142 last_prologue_insn = tmp_insn;
5143 INSN_LOCATOR (tmp_insn) = prologue_locator;
5145 else if (!first_prologue_insn
5146 && start_of_fnbody_found)
5147 other_insns_before_prologue = true;
5149 else if (NOTE_P (tmp_insn)
5150 && NOTE_LINE_NUMBER (tmp_insn) == NOTE_INSN_FUNCTION_BEG)
5151 start_of_fnbody_found = true;
5152 else if (NOTE_P (tmp_insn)
5153 && (XINT (tmp_insn, 5) == prologue_line))
5154 line_number_note = tmp_insn;
5157 /* If there were no prologue insns, return now. */
5159 if (!first_prologue_insn)
5162 /* If the source location note for the line before the beginning of the
5163 function was found, remove it. */
5165 if (line_number_note)
5166 remove_insn (line_number_note);
5168 /* If other real insns got moved above the prologue insns, we can't
5169 pull out the prologue insns, so return now. */
5171 if (other_insns_before_prologue && (optimize > 0))
5174 /* Count the number of insns between the first prologue insn and the
5175 last prologue insn; also count the number of non-prologue insns
5176 between the first prologue insn and the last prologue insn. */
5178 tmp_insn = first_prologue_insn;
5179 while (tmp_insn != last_prologue_insn)
5182 tmp_insn = NEXT_INSN (tmp_insn);
5186 /* If more than half of the insns between the first & last prologue
5187 insns are not prologue insns, then there is too much code that
5188 got moved in between prologue insns (by optimizations), so we
5189 will not try to pull it out. */
5191 if ((num_prologue_insns * 2) <= total_insns)
5194 /* Make sure all the prologue insns are within one basic block.
5195 If the insns cross a basic block boundary, then there is a chance
5196 that moving them will cause incorrect code, so don't do it. */
5198 gcc_assert (first_prologue_insn != NULL);
5199 gcc_assert (last_prologue_insn != NULL);
5201 if (BLOCK_FOR_INSN (first_prologue_insn) !=
5202 BLOCK_FOR_INSN (last_prologue_insn))
5205 return last_prologue_insn;
5207 /* APPLE LOCAL end radar 6163705, Blocks prologues */
5209 /* Generate the prologue and epilogue RTL if the machine supports it. Thread
5210 this into place with notes indicating where the prologue ends and where
5211 the epilogue begins. Update the basic block information when possible. */
/* NOTE(review): this extract is line-sampled; many statements, braces
   and declarations are elided.  Section comments below annotate only
   the visible logic.  */
5214 thread_prologue_and_epilogue_insns (rtx f ATTRIBUTE_UNUSED)
5218 #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
5221 #ifdef HAVE_prologue
5222 rtx prologue_end = NULL_RTX;
5224 #if defined (HAVE_epilogue) || defined(HAVE_return)
5225 rtx epilogue_end = NULL_RTX;
/* --- Prologue generation --- */
5229 #ifdef HAVE_prologue
5232 /* APPLE LOCAL begin radar 6163705, Blocks prologues */
5233 rtx last_prologue_insn = NULL;
5235 if (BLOCK_SYNTHESIZED_FUNC (cfun->decl))
5236 last_prologue_insn = find_block_prologue_insns();
5237 /* APPLE LOCAL end radar 6163705, Blocks prologues */
5240 seq = gen_prologue ();
5243 /* Retain a map of the prologue insns. */
5244 record_insns (seq, &prologue);
5245 /* APPLE LOCAL begin radar 6163705, Blocks prologues */
5246 if (!last_prologue_insn)
5247 prologue_end = emit_note (NOTE_INSN_PROLOGUE_END);
5248 /* APPLE LOCAL end radar 6163705, Blocks prologues */
5250 #ifndef PROFILE_BEFORE_PROLOGUE
5251 /* Ensure that instructions are not moved into the prologue when
5252 profiling is on. The call to the profiling routine can be
5253 emitted within the live range of a call-clobbered register. */
5254 if (current_function_profile)
5255 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""))
5260 set_insn_locators (seq, prologue_locator);
5262 /* Can't deal with multiple successors of the entry block
5263 at the moment. Function should always have at least one
5265 gcc_assert (single_succ_p (ENTRY_BLOCK_PTR));
5267 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
5270 /* APPLE LOCAL begin radar 6163705, Blocks prologues */
5271 if (last_prologue_insn)
5272 emit_note_after (NOTE_INSN_PROLOGUE_END, last_prologue_insn);
5273 /* APPLE LOCAL end radar 6163705, Blocks prologues */ }
/* --- Simple-return optimization (HAVE_return) --- */
5276 /* If the exit block has no non-fake predecessors, we don't need
5278 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5279 if ((e->flags & EDGE_FAKE) == 0)
5285 if (optimize && HAVE_return)
5287 /* If we're allowed to generate a simple return instruction,
5288 then by definition we don't need a full epilogue. Examine
5289 the block that falls through to EXIT. If it does not
5290 contain any code, examine its predecessors and try to
5291 emit (conditional) return instructions. */
5296 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5297 if (e->flags & EDGE_FALLTHRU)
5303 /* Verify that there are no active instructions in the last block. */
5304 label = BB_END (last);
5305 while (label && !LABEL_P (label))
5307 if (active_insn_p (label))
5309 label = PREV_INSN (label);
5312 if (BB_HEAD (last) == label && LABEL_P (label))
5315 rtx epilogue_line_note = NULL_RTX;
5317 /* Locate the line number associated with the closing brace,
5318 if we can find one. */
5319 for (seq = get_last_insn ();
5320 seq && ! active_insn_p (seq);
5321 seq = PREV_INSN (seq))
5322 if (NOTE_P (seq) && NOTE_LINE_NUMBER (seq) > 0)
5324 epilogue_line_note = seq;
5328 for (ei2 = ei_start (last->preds); (e = ei_safe_edge (ei2)); )
5330 basic_block bb = e->src;
5333 if (bb == ENTRY_BLOCK_PTR)
5340 if (!JUMP_P (jump) || JUMP_LABEL (jump) != label)
5346 /* If we have an unconditional jump, we can replace that
5347 with a simple return instruction. */
5348 if (simplejump_p (jump))
5350 emit_return_into_block (bb, epilogue_line_note);
5354 /* If we have a conditional jump, we can try to replace
5355 that with a conditional return instruction. */
5356 else if (condjump_p (jump))
5358 if (! redirect_jump (jump, 0, 0))
5364 /* If this block has only one successor, it both jumps
5365 and falls through to the fallthru block, so we can't
5367 if (single_succ_p (bb))
5379 /* Fix up the CFG for the successful change we just made. */
5380 redirect_edge_succ (e, EXIT_BLOCK_PTR);
5383 /* Emit a return insn for the exit fallthru block. Whether
5384 this is still reachable will be determined later. */
5386 emit_barrier_after (BB_END (last));
5387 emit_return_into_block (last, epilogue_line_note);
5388 epilogue_end = BB_END (last);
5389 single_succ_edge (last)->flags &= ~EDGE_FALLTHRU;
/* --- Full epilogue generation (HAVE_epilogue) --- */
5394 /* Find the edge that falls through to EXIT. Other edges may exist
5395 due to RETURN instructions, but those don't need epilogues.
5396 There really shouldn't be a mixture -- either all should have
5397 been converted or none, however... */
5399 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
5400 if (e->flags & EDGE_FALLTHRU)
5405 #ifdef HAVE_epilogue
5409 epilogue_end = emit_note (NOTE_INSN_EPILOGUE_BEG);
5411 seq = gen_epilogue ();
5413 #ifdef INCOMING_RETURN_ADDR_RTX
5414 /* If this function returns with the stack depressed and we can support
5415 it, massage the epilogue to actually do that. */
5416 if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
5417 && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
5418 seq = keep_stack_depressed (seq);
5421 emit_jump_insn (seq);
5423 /* Retain a map of the epilogue insns. */
5424 record_insns (seq, &epilogue);
5425 set_insn_locators (seq, epilogue_locator);
5430 insert_insn_on_edge (seq, e);
/* Fall-through to EXIT but no epilogue emitted on that edge: force a
   clean block ordering via cfglayout instead of a hand-made jump.  */
5438 if (! next_active_insn (BB_END (e->src)))
5440 /* We have a fall-through edge to the exit block, the source is not
5441 at the end of the function, and there will be an assembler epilogue
5442 at the end of the function.
5443 We can't use force_nonfallthru here, because that would try to
5444 use return. Inserting a jump 'by hand' is extremely messy, so
5445 we take advantage of cfg_layout_finalize using
5446 fixup_fallthru_exit_predecessor. */
5447 cfg_layout_initialize (0);
5448 FOR_EACH_BB (cur_bb)
5449 if (cur_bb->index >= NUM_FIXED_BLOCKS
5450 && cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
5451 cur_bb->aux = cur_bb->next_bb;
5452 cfg_layout_finalize ();
5457 commit_edge_insertions ();
/* --- Sibling-call epilogues --- */
5459 #ifdef HAVE_sibcall_epilogue
5460 /* Emit sibling epilogues before any sibling call sites. */
5461 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
5463 basic_block bb = e->src;
5464 rtx insn = BB_END (bb);
5467 || ! SIBLING_CALL_P (insn))
5474 emit_insn (gen_sibcall_epilogue ());
5478 /* Retain a map of the epilogue insns. Used in life analysis to
5479 avoid getting rid of sibcall epilogue insns. Do this before we
5480 actually emit the sequence. */
5481 record_insns (seq, &sibcall_epilogue);
5482 set_insn_locators (seq, epilogue_locator);
5484 emit_insn_before (seq, insn);
/* --- Line-note fixups around the prologue/epilogue notes --- */
5489 #ifdef HAVE_prologue
5490 /* This is probably all useless now that we use locators. */
5495 /* GDB handles `break f' by setting a breakpoint on the first
5496 line note after the prologue. Which means (1) that if
5497 there are line number notes before where we inserted the
5498 prologue we should move them, and (2) we should generate a
5499 note before the end of the first basic block, if there isn't
5502 ??? This behavior is completely broken when dealing with
5503 multiple entry functions. We simply place the note always
5504 into first basic block and let alternate entry points
5508 for (insn = prologue_end; insn; insn = prev)
5510 prev = PREV_INSN (insn);
5511 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5513 /* Note that we cannot reorder the first insn in the
5514 chain, since rest_of_compilation relies on that
5515 remaining constant. */
5518 reorder_insns (insn, insn, prologue_end);
5522 /* Find the last line number note in the first block. */
5523 for (insn = BB_END (ENTRY_BLOCK_PTR->next_bb);
5524 insn != prologue_end && insn;
5525 insn = PREV_INSN (insn))
5526 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5529 /* If we didn't find one, make a copy of the first line number
5533 for (insn = next_active_insn (prologue_end);
5535 insn = PREV_INSN (insn))
5536 if (NOTE_P (insn) && NOTE_LINE_NUMBER (insn) > 0)
5538 emit_note_copy_after (insn, prologue_end);
5544 #ifdef HAVE_epilogue
5549 /* Similarly, move any line notes that appear after the epilogue.
5550 There is no need, however, to be quite so anal about the existence
5551 of such a note. Also move the NOTE_INSN_FUNCTION_END and (possibly)
5552 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
5554 for (insn = epilogue_end; insn; insn = next)
5556 next = NEXT_INSN (insn);
5558 && (NOTE_LINE_NUMBER (insn) > 0
5559 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG
5560 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_END))
5561 reorder_insns (insn, insn, PREV_INSN (epilogue_end));
5567 /* Reposition the prologue-end and epilogue-begin notes after instruction
5568 scheduling and delayed branch scheduling. */
/* NOTE(review): line-sampled extract; some statements are elided.  */
5571 reposition_prologue_and_epilogue_notes (rtx f ATTRIBUTE_UNUSED)
5573 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
5574 rtx insn, last, note;
/* Handle the prologue-end note only if prologue insns were recorded.  */
5577 if ((len = VEC_length (int, prologue)) > 0)
5581 /* Scan from the beginning until we reach the last prologue insn.
5582 We apparently can't depend on basic_block_{head,end} after
5584 for (insn = f; insn; insn = NEXT_INSN (insn))
5588 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
5591 else if (contains (insn, &prologue))
5601 /* Find the prologue-end note if we haven't already, and
5602 move it to just after the last prologue insn. */
5605 for (note = last; (note = NEXT_INSN (note));)
5607 && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
5611 /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note. */
5613 last = NEXT_INSN (last);
5614 reorder_insns (note, note, last);
/* Symmetric handling for the epilogue-begin note, scanning backwards.  */
5618 if ((len = VEC_length (int, epilogue)) > 0)
5622 /* Scan from the end until we reach the first epilogue insn.
5623 We apparently can't depend on basic_block_{head,end} after
5625 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
5629 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
5632 else if (contains (insn, &epilogue))
5642 /* Find the epilogue-begin note if we haven't already, and
5643 move it to just before the first epilogue insn. */
5646 for (note = insn; (note = PREV_INSN (note));)
5648 && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
5652 if (PREV_INSN (last) != note)
5653 reorder_insns (note, note, PREV_INSN (last));
5656 #endif /* HAVE_prologue or HAVE_epilogue */
5659 /* Resets insn_block_boundaries array. */
5662 reset_block_changes (void)
/* Allocate a fresh GC'd vector and seed it with a NULL_TREE sentinel
   for UID 0.  */
5664 cfun->ib_boundaries_block = VEC_alloc (tree, gc, 100);
5665 VEC_quick_push (tree, cfun->ib_boundaries_block, NULL_TREE);
5668 /* Record the boundary for BLOCK. */
5670 record_block_change (tree block)
/* No-op if reset_block_changes was never called for this function.  */
5678 if(!cfun->ib_boundaries_block)
/* Replicate the previous block entry up to the current insn UID, then
   record BLOCK as the entry for the newest UID.  */
5681 last_block = VEC_pop (tree, cfun->ib_boundaries_block);
5683 for (i = VEC_length (tree, cfun->ib_boundaries_block); i < n; i++)
5684 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, last_block);
5686 VEC_safe_push (tree, gc, cfun->ib_boundaries_block, block);
5689 /* Finishes record of boundaries. */
5691 finalize_block_changes (void)
/* Close out the map with the function's outermost (DECL_INITIAL) block.  */
5693 record_block_change (DECL_INITIAL (current_function_decl));
5696 /* For INSN return the BLOCK it belongs to. */
5698 check_block_change (rtx insn, tree *block)
5700 unsigned uid = INSN_UID (insn);
/* UIDs past the recorded range have no block entry; leave *BLOCK alone.  */
5702 if (uid >= VEC_length (tree, cfun->ib_boundaries_block))
5705 *block = VEC_index (tree, cfun->ib_boundaries_block, uid);
5708 /* Releases the ib_boundaries_block records. */
5710 free_block_changes (void)
5712 VEC_free (tree, gc, cfun->ib_boundaries_block);
5715 /* Returns the name of the current function. */
5717 current_function_name (void)
/* Verbosity 2 asks the language hook for a moderately decorated name.  */
5719 return lang_hooks.decl_printable_name (cfun->decl, 2);
/* Pass body: on targets defining LEAF_REGISTERS, record whether the
   current function is an optimized leaf function using only leaf regs.  */
5724 rest_of_handle_check_leaf_regs (void)
5726 #ifdef LEAF_REGISTERS
5727 current_function_uses_only_leaf_regs
5728 = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
5733 /* Insert a TYPE into the used types hash table of CFUN. */
5735 used_types_insert_helper (tree type, struct function *func)
/* Both arguments must be non-NULL; otherwise silently do nothing.  */
5737 if (type != NULL && func != NULL)
/* Create the per-function hash table (pointer hashing) on first use.  */
5741 if (func->used_types_hash == NULL)
5742 func->used_types_hash = htab_create_ggc (37, htab_hash_pointer,
5743 htab_eq_pointer, NULL);
5744 slot = htab_find_slot (func->used_types_hash, type, INSERT);
5750 /* Given a type, insert it into the used hash table in cfun. */
5752 used_types_insert (tree t)
/* Strip pointer/array layers (the elided loop body presumably steps to
   TREE_TYPE (t) -- confirm against full source), then canonicalize to
   the main variant before recording.  */
5754 while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
5756 t = TYPE_MAIN_VARIANT (t);
/* Only worth recording when debug info will actually be emitted.  */
5757 if (debug_info_level > DINFO_LEVEL_NONE)
5758 used_types_insert_helper (t, cfun);
/* Pass descriptor wiring rest_of_handle_check_leaf_regs into the pass
   manager; all optional fields are zero.  NOTE(review): some descriptor
   fields are elided in this extract.  */
5761 struct tree_opt_pass pass_leaf_regs =
5765 rest_of_handle_check_leaf_regs, /* execute */
5768 0, /* static_pass_number */
5770 0, /* properties_required */
5771 0, /* properties_provided */
5772 0, /* properties_destroyed */
5773 0, /* todo_flags_start */
5774 0, /* todo_flags_finish */
5779 #include "gt-function.h"