1 /* Convert function calls to rtl insns, for GNU C compiler.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
25 #include "coretypes.h"
40 #include "langhooks.h"
45 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits. */
46 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
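/* For example, on a target whose preferred stack boundary is 128 bits and
   whose BITS_PER_UNIT is 8 (illustrative values, not taken from any particular
   target), STACK_BYTES evaluates to 128 / 8 == 16 bytes.  */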
48 /* Data structure and subroutines used within expand_call. */
52 /* Tree node for this argument. */
54 /* Mode for value; TYPE_MODE unless promoted. */
55 enum machine_mode mode;
56 /* Current RTL value for argument, or 0 if it isn't precomputed. */
58 /* Initially-computed RTL value for argument; only for const functions. */
60 /* Register to pass this argument in, 0 if passed on stack, or a
61 PARALLEL if the arg is to be copied into multiple non-contiguous
64 /* Register to pass this argument in when generating tail call sequence.
65 This is not the same register as for normal calls on machines with
68 /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
69 form for emit_group_move. */
71 /* If REG was promoted from the actual mode of the argument expression,
72 indicates whether the promotion is sign- or zero-extended. */
74 /* Number of bytes to put in registers. 0 means put the whole arg
75 in registers. Also 0 if not passed in registers. */
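/* Illustrative example (values assumed, not from the original source): on a
   target with 4-byte words, a partial value of 8 would mean the first two
   words of the argument go in registers and the remainder on the stack.  */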
77 /* Nonzero if argument must be passed on stack.
78 Note that some arguments may be passed on the stack
79 even though pass_on_stack is zero, just because FUNCTION_ARG says so.
80 pass_on_stack identifies arguments that *cannot* go in registers. */
82 /* Some fields packaged up for locate_and_pad_parm. */
83 struct locate_and_pad_arg_data locate;
84 /* Location on the stack at which parameter should be stored. The store
85 has already been done if STACK == VALUE. */
87 /* Location on the stack of the start of this argument slot. This can
88 differ from STACK if this arg pads downward. This location is known
89 to be aligned to FUNCTION_ARG_BOUNDARY. */
91 /* Place that this stack area has been saved, if needed. */
93 /* If an argument's alignment does not permit direct copying into registers,
94 copy in smaller-sized pieces into pseudos. These are stored in a
95 block pointed to by this field. The next field says how many
96 word-sized pseudos we made. */
101 /* A vector of one char per byte of stack space. A byte is nonzero if
102 the corresponding stack location has been used.
103 This vector is used to prevent a function call within an argument from
104 clobbering any stack already set up. */
105 static char *stack_usage_map;
107 /* Size of STACK_USAGE_MAP. */
108 static int highest_outgoing_arg_in_use;
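/* Rough sketch of how these two are used together (illustrative offsets): if
   an argument occupies bytes 8..11 of the outgoing argument area, the entries
   stack_usage_map[8] through stack_usage_map[11] are made nonzero and
   highest_outgoing_arg_in_use is at least 12, so a nested call made while
   computing a later argument knows those bytes are already in use.  */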
110 /* A bitmap of virtual-incoming stack space. A bit is set if the corresponding
111 stack location's tail call argument has already been stored into the stack.
112 This bitmap is used to prevent sibling call optimization if the function tries
113 to use the parent's incoming argument slots when they have already been
114 overwritten with tail call arguments. */
115 static sbitmap stored_args_map;
117 /* stack_arg_under_construction is nonzero when an argument may be
118 initialized with a constructor call (including a C function that
119 returns a BLKmode struct) and expand_call must take special action
120 to make sure the object being constructed does not overlap the
121 argument list for the constructor call. */
122 static int stack_arg_under_construction;
124 static void emit_call_1 (rtx, tree, tree, tree, HOST_WIDE_INT, HOST_WIDE_INT,
125 HOST_WIDE_INT, rtx, rtx, int, rtx, int,
127 static void precompute_register_parameters (int, struct arg_data *, int *);
128 static int store_one_arg (struct arg_data *, rtx, int, int, int);
129 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
130 static int finalize_must_preallocate (int, int, struct arg_data *,
132 static void precompute_arguments (int, int, struct arg_data *);
133 static int compute_argument_block_size (int, struct args_size *, int);
134 static void initialize_argument_information (int, struct arg_data *,
135 struct args_size *, int, tree,
136 tree, CUMULATIVE_ARGS *, int,
137 rtx *, int *, int *, int *,
139 static void compute_argument_addresses (struct arg_data *, rtx, int);
140 static rtx rtx_for_function_call (tree, tree);
141 static void load_register_parameters (struct arg_data *, int, rtx *, int,
143 static rtx emit_library_call_value_1 (int, rtx, rtx, enum libcall_type,
144 enum machine_mode, int, va_list);
145 static int special_function_p (tree, int);
146 static int check_sibcall_argument_overlap_1 (rtx);
147 static int check_sibcall_argument_overlap (rtx, struct arg_data *, int);
149 static int combine_pending_stack_adjustment_and_call (int, struct args_size *,
151 static tree split_complex_values (tree);
152 static tree split_complex_types (tree);
154 #ifdef REG_PARM_STACK_SPACE
155 static rtx save_fixed_argument_area (int, rtx, int *, int *);
156 static void restore_fixed_argument_area (rtx, rtx, int, int);
159 /* Force FUNEXP into a form suitable for the address of a CALL,
160 and return that as an rtx. Also load the static chain register
161 if FNDECL is a nested function.
163 CALL_FUSAGE points to a variable holding the prospective
164 CALL_INSN_FUNCTION_USAGE information. */
167 prepare_call_address (rtx funexp, rtx static_chain_value,
168 rtx *call_fusage, int reg_parm_seen, int sibcallp)
170 /* Make a valid memory address and copy constants through pseudo-regs,
171 but not for a constant address if -fno-function-cse. */
172 if (GET_CODE (funexp) != SYMBOL_REF)
173 /* If we are using registers for parameters, force the
174 function address into a register now. */
175 funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
176 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
177 : memory_address (FUNCTION_MODE, funexp));
180 #ifndef NO_FUNCTION_CSE
181 if (optimize && ! flag_no_function_cse)
182 funexp = force_reg (Pmode, funexp);
186 if (static_chain_value != 0)
188 static_chain_value = convert_memory_address (Pmode, static_chain_value);
189 emit_move_insn (static_chain_rtx, static_chain_value);
191 if (REG_P (static_chain_rtx))
192 use_reg (call_fusage, static_chain_rtx);
198 /* Generate instructions to call function FUNEXP,
199 and optionally pop the results.
200 The CALL_INSN is the first insn generated.
202 FNDECL is the declaration node of the function. This is given to the
203 macro RETURN_POPS_ARGS to determine whether this function pops its own args.
205 FUNTYPE is the data type of the function. This is given to the macro
206 RETURN_POPS_ARGS to determine whether this function pops its own args.
207 We used to allow an identifier for library functions, but that doesn't
208 work when the return type is an aggregate type and the calling convention
209 says that the pointer to this aggregate is to be popped by the callee.
211 STACK_SIZE is the number of bytes of arguments on the stack,
212 ROUNDED_STACK_SIZE is that number rounded up to
213 PREFERRED_STACK_BOUNDARY; zero if the size is variable. This is
214 both to put into the call insn and to generate explicit popping
217 STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
218 It is zero if this call doesn't want a structure value.
220 NEXT_ARG_REG is the rtx that results from executing
221 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
222 just after all the args have had their registers assigned.
223 This could be whatever you like, but normally it is the first
224 arg-register beyond those used for args in this call,
225 or 0 if all the arg-registers are used in this call.
226 It is passed on to `gen_call' so you can put this info in the call insn.
228 VALREG is a hard register in which a value is returned,
229 or 0 if the call does not return a value.
231 OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
232 the args to this call were processed.
233 We restore `inhibit_defer_pop' to that value.
235 CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
236 denote registers used by the called function. */
239 emit_call_1 (rtx funexp, tree fntree, tree fndecl ATTRIBUTE_UNUSED,
240 tree funtype ATTRIBUTE_UNUSED,
241 HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED,
242 HOST_WIDE_INT rounded_stack_size,
243 HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED,
244 rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
245 int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
246 CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED)
248 rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
250 int already_popped = 0;
251 HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
252 #if defined (HAVE_call) && defined (HAVE_call_value)
253 rtx struct_value_size_rtx;
254 struct_value_size_rtx = GEN_INT (struct_value_size);
257 #ifdef CALL_POPS_ARGS
258 n_popped += CALL_POPS_ARGS (* args_so_far);
261 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
262 and we don't want to load it into a register as an optimization,
263 because prepare_call_address already did it if it should be done. */
264 if (GET_CODE (funexp) != SYMBOL_REF)
265 funexp = memory_address (FUNCTION_MODE, funexp);
267 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
268 if ((ecf_flags & ECF_SIBCALL)
269 && HAVE_sibcall_pop && HAVE_sibcall_value_pop
270 && (n_popped > 0 || stack_size == 0))
272 rtx n_pop = GEN_INT (n_popped);
275 /* If this subroutine pops its own args, record that in the call insn
276 if possible, for the sake of frame pointer elimination. */
279 pat = GEN_SIBCALL_VALUE_POP (valreg,
280 gen_rtx_MEM (FUNCTION_MODE, funexp),
281 rounded_stack_size_rtx, next_arg_reg,
284 pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
285 rounded_stack_size_rtx, next_arg_reg, n_pop);
287 emit_call_insn (pat);
293 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
294 /* If the target has "call" or "call_value" insns, then prefer them
295 if no arguments are actually popped. If the target does not have
296 "call" or "call_value" insns, then we must use the popping versions
297 even if the call has no arguments to pop. */
298 #if defined (HAVE_call) && defined (HAVE_call_value)
299 if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
300 && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
302 if (HAVE_call_pop && HAVE_call_value_pop)
305 rtx n_pop = GEN_INT (n_popped);
308 /* If this subroutine pops its own args, record that in the call insn
309 if possible, for the sake of frame pointer elimination. */
312 pat = GEN_CALL_VALUE_POP (valreg,
313 gen_rtx_MEM (FUNCTION_MODE, funexp),
314 rounded_stack_size_rtx, next_arg_reg, n_pop);
316 pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
317 rounded_stack_size_rtx, next_arg_reg, n_pop);
319 emit_call_insn (pat);
325 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
326 if ((ecf_flags & ECF_SIBCALL)
327 && HAVE_sibcall && HAVE_sibcall_value)
330 emit_call_insn (GEN_SIBCALL_VALUE (valreg,
331 gen_rtx_MEM (FUNCTION_MODE, funexp),
332 rounded_stack_size_rtx,
333 next_arg_reg, NULL_RTX));
335 emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
336 rounded_stack_size_rtx, next_arg_reg,
337 struct_value_size_rtx));
342 #if defined (HAVE_call) && defined (HAVE_call_value)
343 if (HAVE_call && HAVE_call_value)
346 emit_call_insn (GEN_CALL_VALUE (valreg,
347 gen_rtx_MEM (FUNCTION_MODE, funexp),
348 rounded_stack_size_rtx, next_arg_reg,
351 emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
352 rounded_stack_size_rtx, next_arg_reg,
353 struct_value_size_rtx));
359 /* Find the call we just emitted. */
360 call_insn = last_call_insn ();
362 /* Mark memory as used for "pure" function call. */
363 if (ecf_flags & ECF_PURE)
367 gen_rtx_USE (VOIDmode,
368 gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
371 /* Put the register usage information there. */
372 add_function_usage_to (call_insn, call_fusage);
374 /* If this is a const call, then set the insn's unchanging bit. */
375 if (ecf_flags & (ECF_CONST | ECF_PURE))
376 CONST_OR_PURE_CALL_P (call_insn) = 1;
378 /* If this call can't throw, attach a REG_EH_REGION reg note to that
380 if (ecf_flags & ECF_NOTHROW)
381 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
382 REG_NOTES (call_insn));
385 int rn = lookup_stmt_eh_region (fntree);
387 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't
388 throw, which we already took care of. */
390 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
391 REG_NOTES (call_insn));
392 note_current_region_may_contain_throw ();
395 if (ecf_flags & ECF_NORETURN)
396 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
397 REG_NOTES (call_insn));
399 if (ecf_flags & ECF_RETURNS_TWICE)
401 REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
402 REG_NOTES (call_insn));
403 current_function_calls_setjmp = 1;
406 SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
408 /* Restore this now, so that we do defer pops for this call's args
409 if the context of the call as a whole permits. */
410 inhibit_defer_pop = old_inhibit_defer_pop;
415 CALL_INSN_FUNCTION_USAGE (call_insn)
416 = gen_rtx_EXPR_LIST (VOIDmode,
417 gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
418 CALL_INSN_FUNCTION_USAGE (call_insn));
419 rounded_stack_size -= n_popped;
420 rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
421 stack_pointer_delta -= n_popped;
424 if (!ACCUMULATE_OUTGOING_ARGS)
426 /* If returning from the subroutine does not automatically pop the args,
427 we need an instruction to pop them sooner or later.
428 Perhaps do it now; perhaps just record how much space to pop later.
430 If returning from the subroutine does pop the args, indicate that the
431 stack pointer will be changed. */
433 if (rounded_stack_size != 0)
435 if (ecf_flags & (ECF_SP_DEPRESSED | ECF_NORETURN))
436 /* Just pretend we did the pop. */
437 stack_pointer_delta -= rounded_stack_size;
438 else if (flag_defer_pop && inhibit_defer_pop == 0
439 && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
440 pending_stack_adjust += rounded_stack_size;
442 adjust_stack (rounded_stack_size_rtx);
445 /* When we accumulate outgoing args, we must avoid any stack manipulations.
446 Restore the stack pointer to its original value now. Usually
447 ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
448 On i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
449 popping variants of functions exist as well.
451 ??? We may optimize similar to defer_pop above, but it is
452 probably not worthwhile.
454 ??? It will be worthwhile to enable combine_stack_adjustments even for
457 anti_adjust_stack (GEN_INT (n_popped));
460 /* Determine if the function identified by FNDECL is one with
461 special properties we wish to know about.
463 For example, if the function might return more than one time (setjmp), then
464 set RETURNS_TWICE to a nonzero value.
466 Similarly set NORETURN if the function is in the longjmp family.
468 Set MAY_BE_ALLOCA for any memory allocation function that might allocate
469 space from the stack such as alloca. */
472 special_function_p (tree fndecl, int flags)
474 if (fndecl && DECL_NAME (fndecl)
475 && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
476 /* Exclude functions not at the file scope, or not `extern',
477 since they are not the magic functions we would otherwise
479 FIXME: this should be handled with attributes, not with this
480 hacky imitation of DECL_ASSEMBLER_NAME. It's (also) wrong
481 because you can declare fork() inside a function if you
483 && (DECL_CONTEXT (fndecl) == NULL_TREE
484 || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
485 && TREE_PUBLIC (fndecl))
487 const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
488 const char *tname = name;
490 /* We assume that alloca will always be called by name. It
491 makes no sense to pass it as a pointer-to-function to
492 anything that does not understand its behavior. */
493 if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
495 && ! strcmp (name, "alloca"))
496 || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
498 && ! strcmp (name, "__builtin_alloca"))))
499 flags |= ECF_MAY_BE_ALLOCA;
501 /* Disregard prefix _, __ or __x. */
504 if (name[1] == '_' && name[2] == 'x')
506 else if (name[1] == '_')
515 && (! strcmp (tname, "setjmp")
516 || ! strcmp (tname, "setjmp_syscall")))
518 && ! strcmp (tname, "sigsetjmp"))
520 && ! strcmp (tname, "savectx")))
521 flags |= ECF_RETURNS_TWICE;
524 && ! strcmp (tname, "siglongjmp"))
525 flags |= ECF_NORETURN;
527 else if ((tname[0] == 'q' && tname[1] == 's'
528 && ! strcmp (tname, "qsetjmp"))
529 || (tname[0] == 'v' && tname[1] == 'f'
530 && ! strcmp (tname, "vfork"))
531 || (tname[0] == 'g' && tname[1] == 'e'
532 && !strcmp (tname, "getcontext")))
533 flags |= ECF_RETURNS_TWICE;
535 else if (tname[0] == 'l' && tname[1] == 'o'
536 && ! strcmp (tname, "longjmp"))
537 flags |= ECF_NORETURN;
543 /* Return nonzero when FNDECL represents a call to setjmp. */
546 setjmp_call_p (tree fndecl)
548 return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
551 /* Return true when EXP contains an alloca call. */
553 alloca_call_p (tree exp)
555 if (TREE_CODE (exp) == CALL_EXPR
556 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
557 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
559 && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
560 0) & ECF_MAY_BE_ALLOCA))
565 /* Detect flags (function attributes) from the function decl or type node. */
568 flags_from_decl_or_type (tree exp)
575 type = TREE_TYPE (exp);
577 /* The function exp may have the `malloc' attribute. */
578 if (DECL_IS_MALLOC (exp))
581 /* The function exp may have the `returns_twice' attribute. */
582 if (DECL_IS_RETURNS_TWICE (exp))
583 flags |= ECF_RETURNS_TWICE;
585 /* The function exp may have the `pure' attribute. */
586 if (DECL_IS_PURE (exp))
589 if (DECL_IS_NOVOPS (exp))
592 if (TREE_NOTHROW (exp))
593 flags |= ECF_NOTHROW;
595 if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
598 flags = special_function_p (exp, flags);
600 else if (TYPE_P (exp) && TYPE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
603 if (TREE_THIS_VOLATILE (exp))
604 flags |= ECF_NORETURN;
606 /* Mark if the function returns with the stack pointer depressed. We
607 cannot consider it pure or constant in that case. */
608 if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
610 flags |= ECF_SP_DEPRESSED;
611 flags &= ~(ECF_PURE | ECF_CONST);
617 /* Detect flags from a CALL_EXPR. */
620 call_expr_flags (tree t)
623 tree decl = get_callee_fndecl (t);
626 flags = flags_from_decl_or_type (decl);
629 t = TREE_TYPE (TREE_OPERAND (t, 0));
630 if (t && TREE_CODE (t) == POINTER_TYPE)
631 flags = flags_from_decl_or_type (TREE_TYPE (t));
639 /* Precompute all register parameters as described by ARGS, storing values
640 into fields within the ARGS array.
642 NUM_ACTUALS indicates the total number of elements in the ARGS array.
644 Set REG_PARM_SEEN if we encounter a register parameter. */
647 precompute_register_parameters (int num_actuals, struct arg_data *args,
654 for (i = 0; i < num_actuals; i++)
655 if (args[i].reg != 0 && ! args[i].pass_on_stack)
659 if (args[i].value == 0)
662 args[i].value = expand_normal (args[i].tree_value);
663 preserve_temp_slots (args[i].value);
667 /* If the value is a non-legitimate constant, force it into a
668 pseudo now. TLS symbols sometimes need a call to resolve. */
669 if (CONSTANT_P (args[i].value)
670 && !LEGITIMATE_CONSTANT_P (args[i].value))
671 args[i].value = force_reg (args[i].mode, args[i].value);
673 /* If we are to promote the function arg to a wider mode,
676 if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
678 = convert_modes (args[i].mode,
679 TYPE_MODE (TREE_TYPE (args[i].tree_value)),
680 args[i].value, args[i].unsignedp);
682 /* If we're going to have to load the value by parts, pull the
683 parts into pseudos. The part extraction process can involve
684 non-trivial computation. */
685 if (GET_CODE (args[i].reg) == PARALLEL)
687 tree type = TREE_TYPE (args[i].tree_value);
688 args[i].parallel_value
689 = emit_group_load_into_temps (args[i].reg, args[i].value,
690 type, int_size_in_bytes (type));
693 /* If the value is expensive, and we are inside an appropriately
694 short loop, put the value into a pseudo and then put the pseudo
697 For small register classes, also do this if this call uses
698 register parameters. This is to avoid reload conflicts while
699 loading the parameter registers. */
701 else if ((! (REG_P (args[i].value)
702 || (GET_CODE (args[i].value) == SUBREG
703 && REG_P (SUBREG_REG (args[i].value)))))
704 && args[i].mode != BLKmode
705 && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
706 && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
708 args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
712 #ifdef REG_PARM_STACK_SPACE
714 /* The argument list is the property of the called routine and it
715 may clobber it. If the fixed area has been used for previous
716 parameters, we must save and restore it. */
719 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
724 /* Compute the boundary of the area that needs to be saved, if any. */
725 high = reg_parm_stack_space;
726 #ifdef ARGS_GROW_DOWNWARD
729 if (high > highest_outgoing_arg_in_use)
730 high = highest_outgoing_arg_in_use;
732 for (low = 0; low < high; low++)
733 if (stack_usage_map[low] != 0)
736 enum machine_mode save_mode;
741 while (stack_usage_map[--high] == 0)
745 *high_to_save = high;
747 num_to_save = high - low + 1;
748 save_mode = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
750 /* If we don't have the required alignment, we must do this
752 if ((low & (MIN (GET_MODE_SIZE (save_mode),
753 BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
756 #ifdef ARGS_GROW_DOWNWARD
761 stack_area = gen_rtx_MEM (save_mode,
762 memory_address (save_mode,
763 plus_constant (argblock,
766 set_mem_align (stack_area, PARM_BOUNDARY);
767 if (save_mode == BLKmode)
769 save_area = assign_stack_temp (BLKmode, num_to_save, 0);
770 emit_block_move (validize_mem (save_area), stack_area,
771 GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
775 save_area = gen_reg_rtx (save_mode);
776 emit_move_insn (save_area, stack_area);
786 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
788 enum machine_mode save_mode = GET_MODE (save_area);
792 #ifdef ARGS_GROW_DOWNWARD
793 delta = -high_to_save;
797 stack_area = gen_rtx_MEM (save_mode,
798 memory_address (save_mode,
799 plus_constant (argblock, delta)));
800 set_mem_align (stack_area, PARM_BOUNDARY);
802 if (save_mode != BLKmode)
803 emit_move_insn (stack_area, save_area);
805 emit_block_move (stack_area, validize_mem (save_area),
806 GEN_INT (high_to_save - low_to_save + 1),
809 #endif /* REG_PARM_STACK_SPACE */
811 /* If any elements in ARGS refer to parameters that are to be passed in
812 registers, but not in memory, and whose alignment does not permit a
813 direct copy into registers, copy the values into a group of pseudos
814 which we will later copy into the appropriate hard registers.
816 Pseudos for each unaligned argument will be stored into the array
817 args[argnum].aligned_regs. The caller is responsible for deallocating
818 the aligned_regs array if it is nonzero. */
821 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
825 for (i = 0; i < num_actuals; i++)
826 if (args[i].reg != 0 && ! args[i].pass_on_stack
827 && args[i].mode == BLKmode
828 && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
829 < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
831 int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
832 int endian_correction = 0;
836 gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
837 args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
841 args[i].n_aligned_regs
842 = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
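/* E.g. a 10-byte BLKmode argument on a target with 4-byte words (illustrative
   values) needs (10 + 3) / 4 == 3 word-sized pseudos.  */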
845 args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
847 /* Structures smaller than a word are normally aligned to the
848 least significant byte. On a BYTES_BIG_ENDIAN machine,
849 this means we must skip the empty high order bytes when
850 calculating the bit offset. */
851 if (bytes < UNITS_PER_WORD
852 #ifdef BLOCK_REG_PADDING
853 && (BLOCK_REG_PADDING (args[i].mode,
854 TREE_TYPE (args[i].tree_value), 1)
860 endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
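/* Worked example with illustrative values: a 3-byte structure on a big-endian
   target with 32-bit words gives endian_correction == 32 - 24 == 8, so the
   bits are stored starting at bit offset 8, skipping the unused high-order
   byte.  */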
862 for (j = 0; j < args[i].n_aligned_regs; j++)
864 rtx reg = gen_reg_rtx (word_mode);
865 rtx word = operand_subword_force (args[i].value, j, BLKmode);
866 int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
868 args[i].aligned_regs[j] = reg;
869 word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
870 word_mode, word_mode);
872 /* There is no need to restrict this code to loading items
873 in TYPE_ALIGN sized hunks. The bitfield instructions can
874 load up entire word sized registers efficiently.
876 ??? This may not be needed anymore.
877 We used to emit a clobber here, but that doesn't let later
878 passes optimize the instructions we emit. By storing 0 into
879 the register, later passes know that the first AND to zero out the
880 bitfield being set in the register is unnecessary. The store
881 of 0 will be deleted as will at least the first AND. */
883 emit_move_insn (reg, const0_rtx);
885 bytes -= bitsize / BITS_PER_UNIT;
886 store_bit_field (reg, bitsize, endian_correction, word_mode,
892 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
895 NUM_ACTUALS is the total number of parameters.
897 N_NAMED_ARGS is the total number of named arguments.
899 FNDECL is the tree code for the target of this call (if known)
901 ARGS_SO_FAR holds state needed by the target to know where to place
904 REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
905 for arguments which are passed in registers.
907 OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
908 and may be modified by this routine.
910 OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
911 flags which may be modified by this routine.
913 MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
914 that requires allocation of stack space.
916 CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
917 the thunked-to function. */
920 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
921 struct arg_data *args,
922 struct args_size *args_size,
923 int n_named_args ATTRIBUTE_UNUSED,
924 tree actparms, tree fndecl,
925 CUMULATIVE_ARGS *args_so_far,
926 int reg_parm_stack_space,
927 rtx *old_stack_level, int *old_pending_adj,
928 int *must_preallocate, int *ecf_flags,
929 bool *may_tailcall, bool call_from_thunk_p)
931 /* 1 if scanning parms front to back, -1 if scanning back to front. */
934 /* Count arg position in order args appear. */
940 args_size->constant = 0;
943 /* In this loop, we consider args in the order they are written.
944 We fill up ARGS from the front or from the back if necessary
945 so that in any case the first arg to be pushed ends up at the front. */
947 if (PUSH_ARGS_REVERSED)
949 i = num_actuals - 1, inc = -1;
950 /* In this case, must reverse order of args
951 so that we compute and push the last arg first. */
958 /* I counts args in order (to be) pushed; ARGPOS counts in order written. */
959 for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
961 tree type = TREE_TYPE (TREE_VALUE (p));
963 enum machine_mode mode;
965 args[i].tree_value = TREE_VALUE (p);
967 /* Replace erroneous argument with constant zero. */
968 if (type == error_mark_node || !COMPLETE_TYPE_P (type))
969 args[i].tree_value = integer_zero_node, type = integer_type_node;
971 /* If TYPE is a transparent union, pass things the way we would
972 pass the first field of the union. We have already verified that
973 the modes are the same. */
974 if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
975 type = TREE_TYPE (TYPE_FIELDS (type));
977 /* Decide where to pass this arg.
979 args[i].reg is nonzero if all or part is passed in registers.
981 args[i].partial is nonzero if part but not all is passed in registers,
982 and the exact value says how many bytes are passed in registers.
984 args[i].pass_on_stack is nonzero if the argument must at least be
985 computed on the stack. It may then be loaded back into registers
986 if args[i].reg is nonzero.
988 These decisions are driven by the FUNCTION_... macros and must agree
989 with those made by function.c. */
991 /* See if this argument should be passed by invisible reference. */
992 if (pass_by_reference (args_so_far, TYPE_MODE (type),
993 type, argpos < n_named_args))
999 = reference_callee_copied (args_so_far, TYPE_MODE (type),
1000 type, argpos < n_named_args);
1002 /* If we're compiling a thunk, pass through invisible references
1003 instead of making a copy. */
1004 if (call_from_thunk_p
1006 && !TREE_ADDRESSABLE (type)
1007 && (base = get_base_address (args[i].tree_value))
1008 && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1010 /* We can't use sibcalls if a callee-copied argument is
1011 stored in the current function's frame. */
1012 if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1013 *may_tailcall = false;
1015 args[i].tree_value = build_fold_addr_expr (args[i].tree_value);
1016 type = TREE_TYPE (args[i].tree_value);
1018 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1022 /* We make a copy of the object and pass the address to the
1023 function being called. */
1026 if (!COMPLETE_TYPE_P (type)
1027 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1028 || (flag_stack_check && ! STACK_CHECK_BUILTIN
1029 && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1030 STACK_CHECK_MAX_VAR_SIZE))))
1032 /* This is a variable-sized object. Make space on the stack
1034 rtx size_rtx = expr_size (TREE_VALUE (p));
1036 if (*old_stack_level == 0)
1038 emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1039 *old_pending_adj = pending_stack_adjust;
1040 pending_stack_adjust = 0;
1043 copy = gen_rtx_MEM (BLKmode,
1044 allocate_dynamic_stack_space
1045 (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1046 set_mem_attributes (copy, type, 1);
1049 copy = assign_temp (type, 0, 1, 0);
1051 store_expr (args[i].tree_value, copy, 0);
1054 *ecf_flags &= ~(ECF_CONST | ECF_LIBCALL_BLOCK);
1056 *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1059 = build_fold_addr_expr (make_tree (type, copy));
1060 type = TREE_TYPE (args[i].tree_value);
1061 *may_tailcall = false;
1065 mode = TYPE_MODE (type);
1066 unsignedp = TYPE_UNSIGNED (type);
1068 if (targetm.calls.promote_function_args (fndecl ? TREE_TYPE (fndecl) : 0))
1069 mode = promote_mode (type, mode, &unsignedp, 1);
1071 args[i].unsignedp = unsignedp;
1072 args[i].mode = mode;
1074 args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1075 argpos < n_named_args);
1076 #ifdef FUNCTION_INCOMING_ARG
1077 /* If this is a sibling call and the machine has register windows, the
1078 register window has to be unwound before calling the routine, so
1079 arguments have to go into the incoming registers. */
1080 args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1081 argpos < n_named_args);
1083 args[i].tail_call_reg = args[i].reg;
1088 = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
1089 argpos < n_named_args);
1091 args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
1093 /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1094 it means that we are to pass this arg in the register(s) designated
1095 by the PARALLEL, but also to pass it in the stack. */
1096 if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1097 && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1098 args[i].pass_on_stack = 1;
1100 /* If this is an addressable type, we must preallocate the stack
1101 since we must evaluate the object into its final location.
1103 If this is to be passed in both registers and the stack, it is simpler
1105 if (TREE_ADDRESSABLE (type)
1106 || (args[i].pass_on_stack && args[i].reg != 0))
1107 *must_preallocate = 1;
1109 /* If this is an addressable type, we cannot pre-evaluate it. Thus,
1110 we cannot consider this function call constant. */
1111 if (TREE_ADDRESSABLE (type))
1112 *ecf_flags &= ~ECF_LIBCALL_BLOCK;
1114 /* Compute the stack-size of this argument. */
1115 if (args[i].reg == 0 || args[i].partial != 0
1116 || reg_parm_stack_space > 0
1117 || args[i].pass_on_stack)
1118 locate_and_pad_parm (mode, type,
1119 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1124 args[i].pass_on_stack ? 0 : args[i].partial,
1125 fndecl, args_size, &args[i].locate);
1126 #ifdef BLOCK_REG_PADDING
1128 /* The argument is passed entirely in registers. See at which
1129 end it should be padded. */
1130 args[i].locate.where_pad =
1131 BLOCK_REG_PADDING (mode, type,
1132 int_size_in_bytes (type) <= UNITS_PER_WORD);
1135 /* Update ARGS_SIZE, the total stack space for args so far. */
1137 args_size->constant += args[i].locate.size.constant;
1138 if (args[i].locate.size.var)
1139 ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
1141 /* Increment ARGS_SO_FAR, which has info about which arg-registers
1142 have been used, etc. */
1144 FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1145 argpos < n_named_args);
1149 /* Update ARGS_SIZE to contain the total size for the argument block.
1150 Return the original constant component of the argument block's size.
1152 REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1153 for arguments passed in registers. */
1156 compute_argument_block_size (int reg_parm_stack_space,
1157 struct args_size *args_size,
1158 int preferred_stack_boundary ATTRIBUTE_UNUSED)
1160 int unadjusted_args_size = args_size->constant;
1162 /* For accumulate outgoing args mode we don't need to align, since the frame
1163 will already be aligned. Align to STACK_BOUNDARY in order to prevent
1164 backends from generating misaligned frame sizes. */
1165 if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1166 preferred_stack_boundary = STACK_BOUNDARY;
1168 /* Compute the actual size of the argument block required. The variable
1169 and constant sizes must be combined, the size may have to be rounded,
1170 and there may be a minimum required size. */
1174 args_size->var = ARGS_SIZE_TREE (*args_size);
1175 args_size->constant = 0;
1177 preferred_stack_boundary /= BITS_PER_UNIT;
1178 if (preferred_stack_boundary > 1)
1180 /* We don't handle this case yet. To handle it correctly we have
1181 to add the delta, round and subtract the delta.
1182 Currently no machine description requires this support. */
1183 gcc_assert (!(stack_pointer_delta & (preferred_stack_boundary - 1)));
1184 args_size->var = round_up (args_size->var, preferred_stack_boundary);
1187 if (reg_parm_stack_space > 0)
1190 = size_binop (MAX_EXPR, args_size->var,
1191 ssize_int (reg_parm_stack_space));
1193 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1194 /* The area corresponding to register parameters is not to count in
1195 the size of the block we need. So make the adjustment. */
1197 = size_binop (MINUS_EXPR, args_size->var,
1198 ssize_int (reg_parm_stack_space));
1204 preferred_stack_boundary /= BITS_PER_UNIT;
1205 if (preferred_stack_boundary < 1)
1206 preferred_stack_boundary = 1;
1207 args_size->constant = (((args_size->constant
1208 + stack_pointer_delta
1209 + preferred_stack_boundary - 1)
1210 / preferred_stack_boundary
1211 * preferred_stack_boundary)
1212 - stack_pointer_delta);
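/* Worked example with illustrative numbers: args_size->constant == 20,
   stack_pointer_delta == 4 and a 16-byte preferred boundary give
   ((20 + 4 + 15) / 16) * 16 - 4 == 32 - 4 == 28, so after pushing 28 bytes
   the combined adjustment 28 + 4 is again a multiple of the boundary.  */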
1214 args_size->constant = MAX (args_size->constant,
1215 reg_parm_stack_space);
1217 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1218 args_size->constant -= reg_parm_stack_space;
1221 return unadjusted_args_size;
1224 /* Precompute parameters as needed for a function call.
1226 FLAGS is mask of ECF_* constants.
1228 NUM_ACTUALS is the number of arguments.
1230 ARGS is an array containing information for each argument; this
1231 routine fills in the INITIAL_VALUE and VALUE fields for each
1232 precomputed argument. */
1235 precompute_arguments (int flags, int num_actuals, struct arg_data *args)
1239 /* If this is a libcall, then precompute all arguments so that we do not
1240 get extraneous instructions emitted as part of the libcall sequence. */
1242 /* If we preallocated the stack space, and some arguments must be passed
1243 on the stack, then we must precompute any parameter which contains a
1244 function call which will store arguments on the stack.
1245 Otherwise, evaluating the parameter may clobber previous parameters
1246 which have already been stored into the stack. (we have code to avoid
1247 such a case by saving the outgoing stack arguments, but it results in
1249 if ((flags & ECF_LIBCALL_BLOCK) == 0 && !ACCUMULATE_OUTGOING_ARGS)
1252 for (i = 0; i < num_actuals; i++)
1254 enum machine_mode mode;
1256 if ((flags & ECF_LIBCALL_BLOCK) == 0
1257 && TREE_CODE (args[i].tree_value) != CALL_EXPR)
1260 /* If this is an addressable type, we cannot pre-evaluate it. */
1261 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)));
1263 args[i].initial_value = args[i].value
1264 = expand_normal (args[i].tree_value);
1266 mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1267 if (mode != args[i].mode)
1270 = convert_modes (args[i].mode, mode,
1271 args[i].value, args[i].unsignedp);
1272 #if defined(PROMOTE_FUNCTION_MODE) && !defined(PROMOTE_MODE)
1273 /* CSE will replace this only if it contains args[i].value
1274 pseudo, so convert it down to the declared mode using
1276 if (REG_P (args[i].value)
1277 && GET_MODE_CLASS (args[i].mode) == MODE_INT)
1279 args[i].initial_value
1280 = gen_lowpart_SUBREG (mode, args[i].value);
1281 SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1282 SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1290 /* Given the current state of MUST_PREALLOCATE and information about
1291 arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1292 compute and return the final value for MUST_PREALLOCATE. */
1295 finalize_must_preallocate (int must_preallocate, int num_actuals, struct arg_data *args, struct args_size *args_size)
1297 /* See if we have or want to preallocate stack space.
1299 If we would have to push a partially-in-regs parm
1300 before other stack parms, preallocate stack space instead.
1302 If the size of some parm is not a multiple of the required stack
1303 alignment, we must preallocate.
1305 If the total size of arguments that would otherwise create a copy in
1306 a temporary (such as a CALL) is more than half the total argument list
1307 size, preallocation is faster.
1309 Another reason to preallocate is if we have a machine (like the m88k)
1310 where stack alignment is required to be maintained between every
1311 pair of insns, not just when the call is made. However, we assume here
1312 that such machines either do not have push insns (and hence preallocation
1313 would occur anyway) or the problem is taken care of with
1316 if (! must_preallocate)
1318 int partial_seen = 0;
1319 int copy_to_evaluate_size = 0;
1322 for (i = 0; i < num_actuals && ! must_preallocate; i++)
1324 if (args[i].partial > 0 && ! args[i].pass_on_stack)
1326 else if (partial_seen && args[i].reg == 0)
1327 must_preallocate = 1;
1329 if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1330 && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1331 || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1332 || TREE_CODE (args[i].tree_value) == COND_EXPR
1333 || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1334 copy_to_evaluate_size
1335 += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1338 if (copy_to_evaluate_size * 2 >= args_size->constant
1339 && args_size->constant > 0)
1340 must_preallocate = 1;
1342 return must_preallocate;
1345 /* If we preallocated stack space, compute the address of each argument
1346 and store it into the ARGS array.
1348 We need not ensure it is a valid memory address here; it will be
1349 validized when it is used.
1351 ARGBLOCK is an rtx for the address of the outgoing arguments. */
1354 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
1358 rtx arg_reg = argblock;
1359 int i, arg_offset = 0;
1361 if (GET_CODE (argblock) == PLUS)
1362 arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1364 for (i = 0; i < num_actuals; i++)
1366 rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
1367 rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
1369 unsigned int align, boundary;
1370 unsigned int units_on_stack = 0;
1371 enum machine_mode partial_mode = VOIDmode;
1373 /* Skip this parm if it will not be passed on the stack. */
1374 if (! args[i].pass_on_stack
1376 && args[i].partial == 0)
1379 if (GET_CODE (offset) == CONST_INT)
1380 addr = plus_constant (arg_reg, INTVAL (offset));
1382 addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1384 addr = plus_constant (addr, arg_offset);
1386 if (args[i].partial != 0)
1388 /* Only part of the parameter is being passed on the stack.
1389 Generate a simple memory reference of the correct size. */
1390 units_on_stack = args[i].locate.size.constant;
1391 partial_mode = mode_for_size (units_on_stack * BITS_PER_UNIT,
1393 args[i].stack = gen_rtx_MEM (partial_mode, addr);
1394 set_mem_size (args[i].stack, GEN_INT (units_on_stack));
1398 args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1399 set_mem_attributes (args[i].stack,
1400 TREE_TYPE (args[i].tree_value), 1);
1402 align = BITS_PER_UNIT;
1403 boundary = args[i].locate.boundary;
1404 if (args[i].locate.where_pad != downward)
1406 else if (GET_CODE (offset) == CONST_INT)
1408 align = INTVAL (offset) * BITS_PER_UNIT | boundary;
1409 align = align & -align;
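/* The expression above isolates the lowest set bit, i.e. the largest power of
   two dividing both the offset (in bits) and BOUNDARY.  Illustrative example:
   a 4-byte offset with a 64-bit boundary gives (32 | 64) & -(32 | 64)
   == 96 & -96 == 32, so the slot is known to be 32-bit aligned.  */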
1411 set_mem_align (args[i].stack, align);
1413 if (GET_CODE (slot_offset) == CONST_INT)
1414 addr = plus_constant (arg_reg, INTVAL (slot_offset));
1416 addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1418 addr = plus_constant (addr, arg_offset);
1420 if (args[i].partial != 0)
1422 /* Only part of the parameter is being passed on the stack.
1423 Generate a simple memory reference of the correct size. */
1424 args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
1425 set_mem_size (args[i].stack_slot, GEN_INT (units_on_stack));
1429 args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1430 set_mem_attributes (args[i].stack_slot,
1431 TREE_TYPE (args[i].tree_value), 1);
1433 set_mem_align (args[i].stack_slot, args[i].locate.boundary);
1435 /* Function incoming arguments may overlap with sibling call
1436 outgoing arguments and we cannot allow reordering of reads
1437 from function arguments with stores to outgoing arguments
1438 of sibling calls. */
1439 set_mem_alias_set (args[i].stack, 0);
1440 set_mem_alias_set (args[i].stack_slot, 0);
1445 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1446 in a call instruction.
1448 FNDECL is the tree node for the target function. For an indirect call
1449 FNDECL will be NULL_TREE.
1451 ADDR is the operand 0 of CALL_EXPR for this call. */
1454 rtx_for_function_call (tree fndecl, tree addr)
1458 /* Get the function to call, in the form of RTL. */
1461 /* If this is the first use of the function, see if we need to
1462 make an external definition for it. */
1463 if (! TREE_USED (fndecl))
1465 assemble_external (fndecl);
1466 TREE_USED (fndecl) = 1;
1469 /* Get a SYMBOL_REF rtx for the function address. */
1470 funexp = XEXP (DECL_RTL (fndecl), 0);
1473 /* Generate an rtx (probably a pseudo-register) for the address. */
1476 funexp = expand_normal (addr);
1477 pop_temp_slots (); /* FUNEXP can't be BLKmode. */
1482 /* Return true if and only if SIZE storage units (usually bytes)
1483 starting from address ADDR overlap with already clobbered argument
1484 area. This function is used to determine if we should give up a
1488 mem_overlaps_already_clobbered_arg_p (rtx addr, unsigned HOST_WIDE_INT size)
1492 if (addr == current_function_internal_arg_pointer)
1494 else if (GET_CODE (addr) == PLUS
1495 && XEXP (addr, 0) == current_function_internal_arg_pointer
1496 && GET_CODE (XEXP (addr, 1)) == CONST_INT)
1497 i = INTVAL (XEXP (addr, 1));
1498 /* Return true for arg pointer based indexed addressing. */
1499 else if (GET_CODE (addr) == PLUS
1500 && (XEXP (addr, 0) == current_function_internal_arg_pointer
1501 || XEXP (addr, 1) == current_function_internal_arg_pointer))
1506 #ifdef ARGS_GROW_DOWNWARD
1511 unsigned HOST_WIDE_INT k;
1513 for (k = 0; k < size; k++)
1514 if (i + k < stored_args_map->n_bits
1515 && TEST_BIT (stored_args_map, i + k))
1522 /* Do the register loads required for any wholly-register parms or any
1523 parms which are passed both on the stack and in a register. Their
1524 expressions were already evaluated.
1526 Mark all register-parms as living through the call, putting these USE
1527 insns in the CALL_INSN_FUNCTION_USAGE field.
1529 When IS_SIBCALL, perform the check_sibcall_argument_overlap
1530 checking, setting *SIBCALL_FAILURE if appropriate. */
1533 load_register_parameters (struct arg_data *args, int num_actuals,
1534 rtx *call_fusage, int flags, int is_sibcall,
1535 int *sibcall_failure)
1539 for (i = 0; i < num_actuals; i++)
1541 rtx reg = ((flags & ECF_SIBCALL)
1542 ? args[i].tail_call_reg : args[i].reg);
1545 int partial = args[i].partial;
1548 rtx before_arg = get_last_insn ();
1549 /* Set non-negative if we must move a word at a time, even if
1550 just one word (e.g., partial == 4 && mode == DFmode). Set
1551 to -1 if we just use a normal move insn. This value can be
1552 zero if the argument is a zero size structure. */
1554 if (GET_CODE (reg) == PARALLEL)
1558 gcc_assert (partial % UNITS_PER_WORD == 0);
1559 nregs = partial / UNITS_PER_WORD;
1561 else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
1563 size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1564 nregs = (size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1567 size = GET_MODE_SIZE (args[i].mode);
1569 /* Handle calls that pass values in multiple non-contiguous
1570 locations. The Irix 6 ABI has examples of this. */
1572 if (GET_CODE (reg) == PARALLEL)
1573 emit_group_move (reg, args[i].parallel_value);
1575 /* If simple case, just do move. If normal partial, store_one_arg
1576 has already loaded the register for us. In all other cases,
1577 load the register(s) from memory. */
1579 else if (nregs == -1)
1581 emit_move_insn (reg, args[i].value);
1582 #ifdef BLOCK_REG_PADDING
1583 /* Handle case where we have a value that needs shifting
1584 up to the msb, e.g. a QImode value and we're padding
1585 upward on a BYTES_BIG_ENDIAN machine. */
1586 if (size < UNITS_PER_WORD
1587 && (args[i].locate.where_pad
1588 == (BYTES_BIG_ENDIAN ? upward : downward)))
1591 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
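/* E.g. a QImode (one-byte) value on a big-endian target with 4-byte words
   (illustrative values) gives shift == (4 - 1) * 8 == 24, moving the byte up
   to the most significant end of the word register.  */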
1593 /* Assigning REG here rather than a temp makes CALL_FUSAGE
1594 report the whole reg as used. Strictly speaking, the
1595 call only uses SIZE bytes at the msb end, but it doesn't
1596 seem worth generating rtl to say that. */
1597 reg = gen_rtx_REG (word_mode, REGNO (reg));
1598 x = expand_shift (LSHIFT_EXPR, word_mode, reg,
1599 build_int_cst (NULL_TREE, shift),
1602 emit_move_insn (reg, x);
1607 /* If we have pre-computed the values to put in the registers in
1608 the case of non-aligned structures, copy them in now. */
1610 else if (args[i].n_aligned_regs != 0)
1611 for (j = 0; j < args[i].n_aligned_regs; j++)
1612 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1613 args[i].aligned_regs[j]);
1615 else if (partial == 0 || args[i].pass_on_stack)
1617 rtx mem = validize_mem (args[i].value);
1619 /* Check for overlap with already clobbered argument area. */
1621 && mem_overlaps_already_clobbered_arg_p (XEXP (args[i].value, 0),
1623 *sibcall_failure = 1;
1625 /* Handle a BLKmode that needs shifting. */
1626 if (nregs == 1 && size < UNITS_PER_WORD
1627 #ifdef BLOCK_REG_PADDING
1628 && args[i].locate.where_pad == downward
1634 rtx tem = operand_subword_force (mem, 0, args[i].mode);
1635 rtx ri = gen_rtx_REG (word_mode, REGNO (reg));
1636 rtx x = gen_reg_rtx (word_mode);
1637 int shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
1638 enum tree_code dir = BYTES_BIG_ENDIAN ? RSHIFT_EXPR
1641 emit_move_insn (x, tem);
1642 x = expand_shift (dir, word_mode, x,
1643 build_int_cst (NULL_TREE, shift),
1646 emit_move_insn (ri, x);
1649 move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
1652 /* When a parameter is a block, and perhaps in other cases, it is
1653 possible that it did a load from an argument slot that was
1654 already clobbered. */
1656 && check_sibcall_argument_overlap (before_arg, &args[i], 0))
1657 *sibcall_failure = 1;
1659 /* Handle calls that pass values in multiple non-contiguous
1660 locations. The Irix 6 ABI has examples of this. */
1661 if (GET_CODE (reg) == PARALLEL)
1662 use_group_regs (call_fusage, reg);
1663 else if (nregs == -1)
1664 use_reg (call_fusage, reg);
1666 use_regs (call_fusage, REGNO (reg), nregs);
1671 /* We need to pop PENDING_STACK_ADJUST bytes. But, if the arguments
1672 wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1673 bytes, then we would need to push some additional bytes to pad the
1674 arguments. So, we compute an adjustment to the stack pointer for an
1675 amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1676 bytes. Then, when the arguments are pushed the stack will be perfectly
1677 aligned. ARGS_SIZE->CONSTANT is set to the number of bytes that should
1678 be popped after the call. Returns the adjustment. */
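/* Illustrative trace of the computation below (all values assumed): with a
   16-byte preferred unit boundary, stack_pointer_delta == 0,
   unadjusted_args_size == 4 and pending_stack_adjust == 20, the unadjusted
   alignment is (0 + 4) % 16 == 4, which becomes 4 - (20 % 16) == 0; the full
   20 bytes are therefore popped now (adjustment == 20) and ARGS_SIZE->CONSTANT
   ends up as 20 - 20 + 4 == 4, the amount still to pop after the call.  */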
1681 combine_pending_stack_adjustment_and_call (int unadjusted_args_size,
1682 struct args_size *args_size,
1683 unsigned int preferred_unit_stack_boundary)
1685 /* The number of bytes to pop so that the stack will be
1686 under-aligned by UNADJUSTED_ARGS_SIZE bytes. */
1687 HOST_WIDE_INT adjustment;
1688 /* The alignment of the stack after the arguments are pushed, if we
1689 just pushed the arguments without adjusting the stack here. */
1690 unsigned HOST_WIDE_INT unadjusted_alignment;
1692 unadjusted_alignment
1693 = ((stack_pointer_delta + unadjusted_args_size)
1694 % preferred_unit_stack_boundary);
1696 /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1697 as possible -- leaving just enough left to cancel out the
1698 UNADJUSTED_ALIGNMENT. In other words, we want to ensure that the
1699 PENDING_STACK_ADJUST is non-negative, and congruent to
1700 -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY. */
1702 /* Begin by trying to pop all the bytes. */
1703 unadjusted_alignment
1704 = (unadjusted_alignment
1705 - (pending_stack_adjust % preferred_unit_stack_boundary));
1706 adjustment = pending_stack_adjust;
1707 /* Push enough additional bytes that the stack will be aligned
1708 after the arguments are pushed. */
1709 if (preferred_unit_stack_boundary > 1)
1711 if (unadjusted_alignment > 0)
1712 adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1714 adjustment += unadjusted_alignment;
1717 /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
1718 bytes after the call. The right number is the entire
1719 PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1720 by the arguments in the first place. */
1722 = pending_stack_adjust - adjustment + unadjusted_args_size;
1727 /* Scan expression X for dereferences of argument slots
1728 we already clobbered by tail call arguments (as noted in stored_args_map
1730 Return nonzero if X expression dereferences such argument slots,
1734 check_sibcall_argument_overlap_1 (rtx x)
1743 code = GET_CODE (x);
1746 return mem_overlaps_already_clobbered_arg_p (XEXP (x, 0),
1747 GET_MODE_SIZE (GET_MODE (x)));
1749 /* Scan all subexpressions. */
1750 fmt = GET_RTX_FORMAT (code);
1751 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
1755 if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
1758 else if (*fmt == 'E')
1760 for (j = 0; j < XVECLEN (x, i); j++)
1761 if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
1768 /* Scan the sequence after INSN for dereferences of argument slots
1769 we already clobbered by tail call arguments (as noted in the stored_args_map
1770 bitmap). If MARK_STORED_ARGS_MAP, add stack slots for ARG to the
1771 stored_args_map bitmap afterwards (when ARG is a register, MARK_STORED_ARGS_MAP
1772 should be 0). Return nonzero if the sequence after INSN dereferences such argument
1773 slots, zero otherwise. */
1776 check_sibcall_argument_overlap (rtx insn, struct arg_data *arg, int mark_stored_args_map)
1780 if (insn == NULL_RTX)
1781 insn = get_insns ();
1783 insn = NEXT_INSN (insn);
1785 for (; insn; insn = NEXT_INSN (insn))
1787 && check_sibcall_argument_overlap_1 (PATTERN (insn)))
1790 if (mark_stored_args_map)
1792 #ifdef ARGS_GROW_DOWNWARD
1793 low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
1795 low = arg->locate.slot_offset.constant;
1798 for (high = low + arg->locate.size.constant; low < high; low++)
1799 SET_BIT (stored_args_map, low);
1801 return insn != NULL_RTX;
1804 /* Given that a function returns a value of mode MODE at the most
1805 significant end of hard register VALUE, shift VALUE left or right
1806 as specified by LEFT_P. Return true if some action was needed. */
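/* For example (illustrative modes), an SImode result held at the most
   significant end of a 64-bit register gives shift == 64 - 32 == 32; with
   LEFT_P false the value is arithmetic-shifted right by 32 bits into the low
   half of the register.  */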
1809 shift_return_value (enum machine_mode mode, bool left_p, rtx value)
1811 HOST_WIDE_INT shift;
1813 gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
1814 shift = GET_MODE_BITSIZE (GET_MODE (value)) - GET_MODE_BITSIZE (mode);
1818 /* Use ashr rather than lshr for right shifts. This is for the benefit
1819 of the MIPS port, which requires SImode values to be sign-extended
1820 when stored in 64-bit registers. */
1821 if (!force_expand_binop (GET_MODE (value), left_p ? ashl_optab : ashr_optab,
1822 value, GEN_INT (shift), value, 1, OPTAB_WIDEN))
1827 /* Generate all the code for a function call
1828 and return an rtx for its value.
1829 Store the value in TARGET (specified as an rtx) if convenient.
1830 If the value is stored in TARGET then TARGET is returned.
1831 If IGNORE is nonzero, then we ignore the value of the function call. */
1834 expand_call (tree exp, rtx target, int ignore)
1836 /* Nonzero if we are currently expanding a call. */
1837 static int currently_expanding_call = 0;
1839 /* List of actual parameters. */
1840 tree actparms = TREE_OPERAND (exp, 1);
1841 /* RTX for the function to be called. */
1843 /* Sequence of insns to perform a normal "call". */
1844 rtx normal_call_insns = NULL_RTX;
1845 /* Sequence of insns to perform a tail "call". */
1846 rtx tail_call_insns = NULL_RTX;
1847 /* Data type of the function. */
1849 tree type_arg_types;
1850 /* Declaration of the function being called,
1851 or 0 if the function is computed (not known by name). */
1853 /* The type of the function being called. */
1855 bool try_tail_call = CALL_EXPR_TAILCALL (exp);
1858 /* Register in which non-BLKmode value will be returned,
1859 or 0 if no value or if value is BLKmode. */
1861 /* Address where we should return a BLKmode value;
1862 0 if value not BLKmode. */
1863 rtx structure_value_addr = 0;
1864 /* Nonzero if that address is being passed by treating it as
1865 an extra, implicit first parameter. Otherwise,
1866 it is passed by being copied directly into struct_value_rtx. */
1867 int structure_value_addr_parm = 0;
1868 /* Size of aggregate value wanted, or zero if none wanted
1869 or if we are using the non-reentrant PCC calling convention
1870 or expecting the value in registers. */
1871 HOST_WIDE_INT struct_value_size = 0;
1872 /* Nonzero if called function returns an aggregate in memory PCC style,
1873 by returning the address of where to find it. */
1874 int pcc_struct_value = 0;
1875 rtx struct_value = 0;
1877 /* Number of actual parameters in this call, including struct value addr. */
1879 /* Number of named args. Args after this are anonymous ones
1880 and they must all go on the stack. */
1883 /* Vector of information about each argument.
1884 Arguments are numbered in the order they will be pushed,
1885 not the order they are written. */
1886 struct arg_data *args;
1888 /* Total size in bytes of all the stack-parms scanned so far. */
1889 struct args_size args_size;
1890 struct args_size adjusted_args_size;
1891 /* Size of arguments before any adjustments (such as rounding). */
1892 int unadjusted_args_size;
1893 /* Data on reg parms scanned so far. */
1894 CUMULATIVE_ARGS args_so_far;
1895 /* Nonzero if a reg parm has been scanned. */
1897 /* Nonzero if this is an indirect function call. */
1899 /* Nonzero if we must avoid push-insns in the args for this call.
1900 If stack space is allocated for register parameters, but not by the
1901 caller, then it is preallocated in the fixed part of the stack frame.
1902 So the entire argument block must then be preallocated (i.e., we
1903 ignore PUSH_ROUNDING in that case). */
1905 int must_preallocate = !PUSH_ARGS;
1907 /* Size of the stack reserved for parameter registers. */
1908 int reg_parm_stack_space = 0;
1910 /* Address of space preallocated for stack parms
1911 (on machines that lack push insns), or 0 if space not preallocated. */
1914 /* Mask of ECF_ flags. */
1916 #ifdef REG_PARM_STACK_SPACE
1917 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
1919 int low_to_save, high_to_save;
1920 rtx save_area = 0; /* Place that it is saved */
1923 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
1924 char *initial_stack_usage_map = stack_usage_map;
1925 char *stack_usage_map_buf = NULL;
1927 int old_stack_allocated;
1929 /* State variables to track stack modifications. */
1930 rtx old_stack_level = 0;
1931 int old_stack_arg_under_construction = 0;
1932 int old_pending_adj = 0;
1933 int old_inhibit_defer_pop = inhibit_defer_pop;
1935 /* Some stack pointer alterations we make are performed via
1936 allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
1937 which we then also need to save/restore along the way. */
1938 int old_stack_pointer_delta = 0;
1941 tree p = TREE_OPERAND (exp, 0);
1942 tree addr = TREE_OPERAND (exp, 0);
1944 /* The alignment of the stack, in bits. */
1945 unsigned HOST_WIDE_INT preferred_stack_boundary;
1946 /* The alignment of the stack, in bytes. */
1947 unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
1948 /* The static chain value to use for this call. */
1949 rtx static_chain_value;
1950 /* See if this is "nothrow" function call. */
1951 if (TREE_NOTHROW (exp))
1952 flags |= ECF_NOTHROW;
1954 /* See if we can find a DECL-node for the actual function, and get the
1955 function attributes (flags) from the function decl or type node. */
1956 fndecl = get_callee_fndecl (exp);
1959 fntype = TREE_TYPE (fndecl);
1960 flags |= flags_from_decl_or_type (fndecl);
1964 fntype = TREE_TYPE (TREE_TYPE (p));
1965 flags |= flags_from_decl_or_type (fntype);
1968 struct_value = targetm.calls.struct_value_rtx (fntype, 0);
1970 /* Warn if this value is an aggregate type,
1971 regardless of which calling convention we are using for it. */
1972 if (AGGREGATE_TYPE_P (TREE_TYPE (exp)))
1973 warning (OPT_Waggregate_return, "function call has aggregate value");
1975 /* If the result of a pure or const function call is ignored (or void),
1976 and none of its arguments are volatile, we can avoid expanding the
1977 call and just evaluate the arguments for side-effects. */
1978 if ((flags & (ECF_CONST | ECF_PURE))
1979 && (ignore || target == const0_rtx
1980 || TYPE_MODE (TREE_TYPE (exp)) == VOIDmode))
1982 bool volatilep = false;
1985 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1986 if (TREE_THIS_VOLATILE (TREE_VALUE (arg)))
1994 for (arg = actparms; arg; arg = TREE_CHAIN (arg))
1995 expand_expr (TREE_VALUE (arg), const0_rtx,
1996 VOIDmode, EXPAND_NORMAL);
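/* A minimal source-level sketch (hypothetical) of the case handled above:

       extern int pure_fn (int) __attribute__ ((pure));
       ...
       pure_fn (i++);

   The result is unused and the callee has no side effects, so only the
   increment of i is expanded; the call itself is dropped.  */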
2001 #ifdef REG_PARM_STACK_SPACE
2002 reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2005 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2006 if (reg_parm_stack_space > 0 && PUSH_ARGS)
2007 must_preallocate = 1;
2010 /* Set up a place to return a structure. */
2012 /* Cater to broken compilers. */
2013 if (aggregate_value_p (exp, fndecl))
2015 /* This call returns a big structure. */
2016 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2018 #ifdef PCC_STATIC_STRUCT_RETURN
2020 pcc_struct_value = 1;
2022 #else /* not PCC_STATIC_STRUCT_RETURN */
2024 struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2026 if (target && MEM_P (target) && CALL_EXPR_RETURN_SLOT_OPT (exp))
2027 structure_value_addr = XEXP (target, 0);
2030 /* For variable-sized objects, we must be called with a target
2031 specified. If we were to allocate space on the stack here,
2032 we would have no way of knowing when to free it. */
2033 rtx d = assign_temp (TREE_TYPE (exp), 0, 1, 1);
2035 mark_temp_addr_taken (d);
2036 structure_value_addr = XEXP (d, 0);
2040 #endif /* not PCC_STATIC_STRUCT_RETURN */
2043 /* Figure out the amount to which the stack should be aligned. */
2044 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2047 struct cgraph_rtl_info *i = cgraph_rtl_info (fndecl);
2048 if (i && i->preferred_incoming_stack_boundary)
2049 preferred_stack_boundary = i->preferred_incoming_stack_boundary;
2052 /* Operand 0 is a pointer-to-function; get the type of the function. */
2053 funtype = TREE_TYPE (addr);
2054 /* APPLE LOCAL blocks */
2055 gcc_assert (POINTER_TYPE_P (funtype) || TREE_CODE (funtype) == BLOCK_POINTER_TYPE);
2056 funtype = TREE_TYPE (funtype);
2058 /* Munge the tree to split complex arguments into their imaginary and real parts. */
2060 if (targetm.calls.split_complex_arg)
2062 type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
2063 actparms = split_complex_values (actparms);
2066 type_arg_types = TYPE_ARG_TYPES (funtype);
2068 if (flags & ECF_MAY_BE_ALLOCA)
2069 current_function_calls_alloca = 1;
2071 /* If struct_value_rtx is 0, it means pass the address
2072 as if it were an extra parameter. */
2073 if (structure_value_addr && struct_value == 0)
2075 /* If structure_value_addr is a REG other than
2076 virtual_outgoing_args_rtx, we can always use it. If it
2077 is not a REG, we must always copy it into a register.
2078 If it is virtual_outgoing_args_rtx, we must copy it to another
2079 register in some cases. */
2080 rtx temp = (!REG_P (structure_value_addr)
2081 || (ACCUMULATE_OUTGOING_ARGS
2082 && stack_arg_under_construction
2083 && structure_value_addr == virtual_outgoing_args_rtx)
2084 ? copy_addr_to_reg (convert_memory_address
2085 (Pmode, structure_value_addr))
2086 : structure_value_addr);
2089 = tree_cons (error_mark_node,
2090 make_tree (build_pointer_type (TREE_TYPE (funtype)),
2093 structure_value_addr_parm = 1;
2096 /* Count the arguments and set NUM_ACTUALS. */
2097 for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2100 /* Compute number of named args.
2101 First, do a raw count of the args for INIT_CUMULATIVE_ARGS. */
2103 if (type_arg_types != 0)
2105 = (list_length (type_arg_types)
2106 /* Count the struct value address, if it is passed as a parm. */
2107 + structure_value_addr_parm);
2109 /* If we know nothing, treat all args as named. */
2110 n_named_args = num_actuals;
2112 /* Start updating where the next arg would go.
2114 On some machines (such as the PA) indirect calls have a different
2115 calling convention than normal calls. The fourth argument in
2116 INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call or not. */
2118 INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, fndecl, n_named_args);
2120 /* Now possibly adjust the number of named args.
2121 Normally, don't include the last named arg if anonymous args follow.
2122 We do include the last named arg if
2123 targetm.calls.strict_argument_naming() returns nonzero.
2124 (If no anonymous args follow, the result of list_length is actually
2125 one too large. This is harmless.)
2127 If targetm.calls.pretend_outgoing_varargs_named() returns
2128 nonzero, and targetm.calls.strict_argument_naming() returns zero,
2129 this machine will be able to place unnamed args that were passed
2130 in registers into the stack. So treat all args as named. This
2131 allows the insns emitted for a specific argument list to be
2132 independent of the function declaration.
2134 If targetm.calls.pretend_outgoing_varargs_named() returns zero,
2135 we do not have any reliable way to pass unnamed args in
2136 registers, so we must force them into memory. */
2138 if (type_arg_types != 0
2139 && targetm.calls.strict_argument_naming (&args_so_far))
2141 else if (type_arg_types != 0
2142 && ! targetm.calls.pretend_outgoing_varargs_named (&args_so_far))
2143 /* Don't include the last named arg. */
2146 /* Treat all args as named. */
2147 n_named_args = num_actuals;
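/* For instance (illustrative), with a prototyped variadic callee

       int f (const char *fmt, ...);

   a call f (fmt, a, b) has num_actuals == 3 while type_arg_types lists only
   the named parameter, so n_named_args starts out as 1 from list_length and
   is then possibly adjusted by the two target hooks queried above.  */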
2149 /* Make a vector to hold all the information about each arg. */
2150 args = alloca (num_actuals * sizeof (struct arg_data));
2151 memset (args, 0, num_actuals * sizeof (struct arg_data));
2153 /* Build up entries in the ARGS array, compute the size of the
2154 arguments into ARGS_SIZE, etc. */
2155 initialize_argument_information (num_actuals, args, &args_size,
2156 n_named_args, actparms, fndecl,
2157 &args_so_far, reg_parm_stack_space,
2158 &old_stack_level, &old_pending_adj,
2159 &must_preallocate, &flags,
2160 &try_tail_call, CALL_FROM_THUNK_P (exp));
2164 /* If this function requires a variable-sized argument list, don't
2165 try to make a cse'able block for this call. We may be able to
2166 do this eventually, but it is too complicated to keep track of
2167 what insns go in the cse'able block and which don't. */
2169 flags &= ~ECF_LIBCALL_BLOCK;
2170 must_preallocate = 1;
2173 /* Now make final decision about preallocating stack space. */
2174 must_preallocate = finalize_must_preallocate (must_preallocate,
2178 /* If the structure value address will reference the stack pointer, we
2179 must stabilize it. We don't need to do this if we know that we are
2180 not going to adjust the stack pointer in processing this call. */
2182 if (structure_value_addr
2183 && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2184 || reg_mentioned_p (virtual_outgoing_args_rtx,
2185 structure_value_addr))
2187 || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2188 structure_value_addr = copy_to_reg (structure_value_addr);
2190 /* Tail calls can make things harder to debug, and we've traditionally
2191 pushed these optimizations into -O2. Don't try if we're already
2192 expanding a call, as that means we're an argument. Don't try if
2193 there are cleanups, as we know there's code to follow the call. */
2195 if (currently_expanding_call++ != 0
2196 || !flag_optimize_sibling_calls
2198 || lookup_stmt_eh_region (exp) >= 0)
2201 /* Remaining reasons for the tail call optimization to fail. */
2203 #ifdef HAVE_sibcall_epilogue
2204 !HAVE_sibcall_epilogue
2209 /* Doing sibling call optimization needs some work, since
2210 structure_value_addr can be allocated on the stack.
2211 It does not seem worth the effort since few optimizable
2212 sibling calls will return a structure. */
2213 || structure_value_addr != NULL_RTX
2214 /* Check whether the target is able to optimize the call into a sibcall. */
2216 || !targetm.function_ok_for_sibcall (fndecl, exp)
2217 /* Functions that do not return exactly once may not be sibcall optimized. */
2219 || (flags & (ECF_RETURNS_TWICE | ECF_NORETURN))
2220 || TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr)))
2221 /* If the called function is nested in the current one, it might access
2222 some of the caller's arguments, but could clobber them beforehand if
2223 the argument areas are shared. */
2224 || (fndecl && decl_function_context (fndecl) == current_function_decl)
2225 /* If this function requires more stack slots than the current
2226 function, we cannot change it into a sibling call.
2227 current_function_pretend_args_size is not part of the
2228 stack allocated by our caller. */
2229 || args_size.constant > (current_function_args_size
2230 - current_function_pretend_args_size)
2231 /* If the callee pops its own arguments, then it must pop exactly
2232 the same number of arguments as the current function. */
2233 || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2234 != RETURN_POPS_ARGS (current_function_decl,
2235 TREE_TYPE (current_function_decl),
2236 current_function_args_size))
2237 || !lang_hooks.decls.ok_for_sibcall (fndecl))
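/* Two concrete (hypothetical) examples of calls rejected by the test above:
   a call whose aggregate return value comes back through a hidden address
   (structure_value_addr != NULL_RTX), and a callee whose outgoing arguments
   need more bytes than the caller's own incoming argument area provides, so
   they could not fit in the area a sibling call would reuse.  */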
2240 /* Ensure current function's preferred stack boundary is at least
2241 what we need. We don't have to increase alignment for recursive calls. */
2243 if (cfun->preferred_stack_boundary < preferred_stack_boundary
2244 && fndecl != current_function_decl)
2245 cfun->preferred_stack_boundary = preferred_stack_boundary;
2246 if (fndecl == current_function_decl)
2247 cfun->recursive_call_emit = true;
2249 preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
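/* E.g. (illustrative) with a preferred_stack_boundary of 128 bits and
   BITS_PER_UNIT of 8, preferred_unit_stack_boundary is 16 bytes, the unit
   used for the argument-block rounding and alignment checks below.  */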
2251 /* We want to make two insn chains; one for a sibling call, the other
2252 for a normal call. We will select one of the two chains after
2253 initial RTL generation is complete. */
2254 for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
2256 int sibcall_failure = 0;
2257 /* We want to emit any pending stack adjustments before the tail
2258 recursion "call". That way we know any adjustment after the tail
2259 recursion call can be ignored if we indeed use the tail recursion call. */
2261 int save_pending_stack_adjust = 0;
2262 int save_stack_pointer_delta = 0;
2264 rtx before_call, next_arg_reg;
2268 /* State variables we need to save and restore between the sibcall and normal call passes. */
2270 save_pending_stack_adjust = pending_stack_adjust;
2271 save_stack_pointer_delta = stack_pointer_delta;
2274 flags &= ~ECF_SIBCALL;
2276 flags |= ECF_SIBCALL;
2278 /* Other state variables that we must reinitialize each time
2279 through the loop (that are not initialized by the loop itself). */
2283 /* Start a new sequence for the normal call case.
2285 From this point on, if the sibling call fails, we want to set
2286 sibcall_failure instead of continuing the loop. */
2289 /* Don't let pending stack adjusts add up to too much.
2290 Also, do all pending adjustments now if there is any chance
2291 this might be a call to alloca or if we are expanding a sibling
2292 call sequence or if we are calling a function that is to return
2293 with stack pointer depressed.
2294 Also do the adjustments before a throwing call, otherwise
2295 exception handling can fail; PR 19225. */
2296 if (pending_stack_adjust >= 32
2297 || (pending_stack_adjust > 0
2298 && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2299 || (pending_stack_adjust > 0
2300 && flag_exceptions && !(flags & ECF_NOTHROW))
2302 do_pending_stack_adjust ();
2304 /* When calling a const function, we must pop the stack args right away,
2305 so that the pop is deleted or moved with the call. */
2306 if (pass && (flags & ECF_LIBCALL_BLOCK))
2309 /* Precompute any arguments as needed. */
2311 precompute_arguments (flags, num_actuals, args);
2313 /* Now we are about to start emitting insns that can be deleted
2314 if a libcall is deleted. */
2315 if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2318 if (pass == 0 && cfun->stack_protect_guard)
2319 stack_protect_epilogue ();
2321 adjusted_args_size = args_size;
2322 /* Compute the actual size of the argument block required. The variable
2323 and constant sizes must be combined, the size may have to be rounded,
2324 and there may be a minimum required size. When generating a sibcall
2325 pattern, do not round up, since we'll be re-using whatever space our caller provided. */
2327 unadjusted_args_size
2328 = compute_argument_block_size (reg_parm_stack_space,
2329 &adjusted_args_size,
2331 : preferred_stack_boundary));
2333 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2335 /* The argument block when performing a sibling call is the
2336 incoming argument block. */
2339 argblock = virtual_incoming_args_rtx;
2341 #ifdef STACK_GROWS_DOWNWARD
2342 = plus_constant (argblock, current_function_pretend_args_size);
2344 = plus_constant (argblock, -current_function_pretend_args_size);
2346 stored_args_map = sbitmap_alloc (args_size.constant);
2347 sbitmap_zero (stored_args_map);
2350 /* If we have no actual push instructions, or shouldn't use them,
2351 make space for all args right now. */
2352 else if (adjusted_args_size.var != 0)
2354 if (old_stack_level == 0)
2356 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2357 old_stack_pointer_delta = stack_pointer_delta;
2358 old_pending_adj = pending_stack_adjust;
2359 pending_stack_adjust = 0;
2360 /* stack_arg_under_construction says whether a stack arg is
2361 being constructed at the old stack level. Pushing the stack
2362 gets a clean outgoing argument block. */
2363 old_stack_arg_under_construction = stack_arg_under_construction;
2364 stack_arg_under_construction = 0;
2366 argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2370 /* Note that we must go through the motions of allocating an argument
2371 block even if the size is zero because we may be storing args
2372 in the area reserved for register arguments, which may be part of the stack frame. */
2375 int needed = adjusted_args_size.constant;
2377 /* Store the maximum argument space used. It will be pushed by
2378 the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow checking). */
2381 if (needed > current_function_outgoing_args_size)
2382 current_function_outgoing_args_size = needed;
2384 if (must_preallocate)
2386 if (ACCUMULATE_OUTGOING_ARGS)
2388 /* Since the stack pointer will never be pushed, it is
2389 possible for the evaluation of a parm to clobber
2390 something we have already written to the stack.
2391 Since most function calls on RISC machines do not use
2392 the stack, this is uncommon, but must work correctly.
2394 Therefore, we save any area of the stack that was already
2395 written and that we are using. Here we set up to do this
2396 by making a new stack usage map from the old one. The
2397 actual save will be done by store_one_arg.
2399 Another approach might be to try to reorder the argument
2400 evaluations to avoid this conflicting stack usage. */
2402 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2403 /* Since we will be writing into the entire argument area,
2404 the map must be allocated for its entire size, not just
2405 the part that is the responsibility of the caller. */
2406 needed += reg_parm_stack_space;
2409 #ifdef ARGS_GROW_DOWNWARD
2410 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2413 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2416 if (stack_usage_map_buf)
2417 free (stack_usage_map_buf);
2418 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2419 stack_usage_map = stack_usage_map_buf;
2421 if (initial_highest_arg_in_use)
2422 memcpy (stack_usage_map, initial_stack_usage_map,
2423 initial_highest_arg_in_use);
2425 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2426 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2427 (highest_outgoing_arg_in_use
2428 - initial_highest_arg_in_use));
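/* A small worked example (illustrative): if an enclosing call has already
   used bytes 0..15 of the outgoing area (initial_highest_arg_in_use == 16)
   and this call needs 32, the first 16 map entries are copied from the old
   map and entries 16..31 are cleared, so store_one_arg can later spot and
   save any slot that both calls touch.  */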
2431 /* The address of the outgoing argument list must not be
2432 copied to a register here, because argblock would be left
2433 pointing to the wrong place after the call to
2434 allocate_dynamic_stack_space below. */
2436 argblock = virtual_outgoing_args_rtx;
2440 if (inhibit_defer_pop == 0)
2442 /* Try to reuse some or all of the pending_stack_adjust
2443 to get this space. */
2445 = (combine_pending_stack_adjustment_and_call
2446 (unadjusted_args_size,
2447 &adjusted_args_size,
2448 preferred_unit_stack_boundary));
2450 /* combine_pending_stack_adjustment_and_call computes
2451 an adjustment before the arguments are allocated.
2452 Account for them and see whether or not the stack
2453 needs to go up or down. */
2454 needed = unadjusted_args_size - needed;
2458 /* We're releasing stack space. */
2459 /* ??? We can avoid any adjustment at all if we're
2460 already aligned. FIXME. */
2461 pending_stack_adjust = -needed;
2462 do_pending_stack_adjust ();
2466 /* We need to allocate space. We'll do that in
2467 push_block below. */
2468 pending_stack_adjust = 0;
2471 /* Special case this because overhead of `push_block' in
2472 this case is non-trivial. */
2474 argblock = virtual_outgoing_args_rtx;
2477 argblock = push_block (GEN_INT (needed), 0, 0);
2478 #ifdef ARGS_GROW_DOWNWARD
2479 argblock = plus_constant (argblock, needed);
2483 /* We only really need to call `copy_to_reg' in the case
2484 where push insns are going to be used to pass ARGBLOCK
2485 to a function call in ARGS. In that case, the stack
2486 pointer changes value from the allocation point to the
2487 call point, and hence the value of
2488 VIRTUAL_OUTGOING_ARGS_RTX changes as well. But might
2489 as well always do it. */
2490 argblock = copy_to_reg (argblock);
2495 if (ACCUMULATE_OUTGOING_ARGS)
2497 /* The save/restore code in store_one_arg handles all
2498 cases except one: a constructor call (including a C
2499 function returning a BLKmode struct) to initialize an argument. */
2501 if (stack_arg_under_construction)
2503 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2504 rtx push_size = GEN_INT (reg_parm_stack_space
2505 + adjusted_args_size.constant);
2507 rtx push_size = GEN_INT (adjusted_args_size.constant);
2509 if (old_stack_level == 0)
2511 emit_stack_save (SAVE_BLOCK, &old_stack_level,
2513 old_stack_pointer_delta = stack_pointer_delta;
2514 old_pending_adj = pending_stack_adjust;
2515 pending_stack_adjust = 0;
2516 /* stack_arg_under_construction says whether a stack
2517 arg is being constructed at the old stack level.
2518 Pushing the stack gets a clean outgoing argument block. */
2520 old_stack_arg_under_construction
2521 = stack_arg_under_construction;
2522 stack_arg_under_construction = 0;
2523 /* Make a new map for the new argument list. */
2524 if (stack_usage_map_buf)
2525 free (stack_usage_map_buf);
2526 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
2527 stack_usage_map = stack_usage_map_buf;
2528 memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2529 highest_outgoing_arg_in_use = 0;
2531 allocate_dynamic_stack_space (push_size, NULL_RTX,
2535 /* If argument evaluation might modify the stack pointer,
2536 copy the address of the argument list to a register. */
2537 for (i = 0; i < num_actuals; i++)
2538 if (args[i].pass_on_stack)
2540 argblock = copy_addr_to_reg (argblock);
2545 compute_argument_addresses (args, argblock, num_actuals);
2547 /* If we push args individually in reverse order, perform stack alignment
2548 before the first push (the last arg). */
2549 if (PUSH_ARGS_REVERSED && argblock == 0
2550 && adjusted_args_size.constant != unadjusted_args_size)
2552 /* When the stack adjustment is pending, we get better code
2553 by combining the adjustments. */
2554 if (pending_stack_adjust
2555 && ! (flags & ECF_LIBCALL_BLOCK)
2556 && ! inhibit_defer_pop)
2558 pending_stack_adjust
2559 = (combine_pending_stack_adjustment_and_call
2560 (unadjusted_args_size,
2561 &adjusted_args_size,
2562 preferred_unit_stack_boundary));
2563 do_pending_stack_adjust ();
2565 else if (argblock == 0)
2566 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2567 - unadjusted_args_size));
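/* For example (illustrative): with 20 bytes of arguments and a 16-byte
   preferred_unit_stack_boundary, compute_argument_block_size rounds
   adjusted_args_size.constant up to 32, so the stack is dropped by the
   extra 12 bytes here before the individual pushes start.  */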
2569 /* Now that the stack is properly aligned, pops can't safely
2570 be deferred during the evaluation of the arguments. */
2573 funexp = rtx_for_function_call (fndecl, addr);
2575 /* Figure out the register where the value, if any, will come back. */
2577 if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2578 && ! structure_value_addr)
2580 if (pcc_struct_value)
2581 valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2582 fndecl, NULL, (pass == 0));
2584 valreg = hard_function_value (TREE_TYPE (exp), fndecl, fntype,
2588 /* Precompute all register parameters. It isn't safe to compute anything
2589 once we have started filling any specific hard regs. */
2590 precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2592 if (TREE_OPERAND (exp, 2))
2593 static_chain_value = expand_normal (TREE_OPERAND (exp, 2));
2595 static_chain_value = 0;
2597 #ifdef REG_PARM_STACK_SPACE
2598 /* Save the fixed argument area if it's part of the caller's frame and
2599 is clobbered by argument setup for this call. */
2600 if (ACCUMULATE_OUTGOING_ARGS && pass)
2601 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2602 &low_to_save, &high_to_save);
2605 /* Now store (and compute if necessary) all non-register parms.
2606 These come before register parms, since they can require block-moves,
2607 which could clobber the registers used for register parms.
2608 Parms which have partial registers are not stored here,
2609 but we do preallocate space here if they want that. */
2611 for (i = 0; i < num_actuals; i++)
2612 if (args[i].reg == 0 || args[i].pass_on_stack)
2614 rtx before_arg = get_last_insn ();
2616 if (store_one_arg (&args[i], argblock, flags,
2617 adjusted_args_size.var != 0,
2618 reg_parm_stack_space)
2620 && check_sibcall_argument_overlap (before_arg,
2622 sibcall_failure = 1;
2624 if (flags & ECF_CONST
2626 && args[i].value == args[i].stack)
2627 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
2628 gen_rtx_USE (VOIDmode,
2633 /* If we have a parm that is passed in registers but not in memory
2634 and whose alignment does not permit a direct copy into registers,
2635 make a group of pseudos that correspond to each register that we will later fill. */
2637 if (STRICT_ALIGNMENT)
2638 store_unaligned_arguments_into_pseudos (args, num_actuals);
2640 /* Now store any partially-in-registers parm.
2641 This is the last place a block-move can happen. */
2643 for (i = 0; i < num_actuals; i++)
2644 if (args[i].partial != 0 && ! args[i].pass_on_stack)
2646 rtx before_arg = get_last_insn ();
2648 if (store_one_arg (&args[i], argblock, flags,
2649 adjusted_args_size.var != 0,
2650 reg_parm_stack_space)
2652 && check_sibcall_argument_overlap (before_arg,
2654 sibcall_failure = 1;
2657 /* If we pushed args in forward order, perform stack alignment
2658 after pushing the last arg. */
2659 if (!PUSH_ARGS_REVERSED && argblock == 0)
2660 anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2661 - unadjusted_args_size));
2663 /* If register arguments require space on the stack and stack space
2664 was not preallocated, allocate stack space here for arguments
2665 passed in registers. */
2666 #ifdef OUTGOING_REG_PARM_STACK_SPACE
2667 if (!ACCUMULATE_OUTGOING_ARGS
2668 && must_preallocate == 0 && reg_parm_stack_space > 0)
2669 anti_adjust_stack (GEN_INT (reg_parm_stack_space));
2672 /* Pass the function the address in which to return a structure value. */
2674 if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
2676 structure_value_addr
2677 = convert_memory_address (Pmode, structure_value_addr);
2678 emit_move_insn (struct_value,
2680 force_operand (structure_value_addr,
2683 if (REG_P (struct_value))
2684 use_reg (&call_fusage, struct_value);
2687 funexp = prepare_call_address (funexp, static_chain_value,
2688 &call_fusage, reg_parm_seen, pass == 0);
2690 load_register_parameters (args, num_actuals, &call_fusage, flags,
2691 pass == 0, &sibcall_failure);
2693 /* Save a pointer to the last insn before the call, so that we can
2694 later safely search backwards to find the CALL_INSN. */
2695 before_call = get_last_insn ();
2697 /* Set up next argument register. For sibling calls on machines
2698 with register windows this should be the incoming register. */
2699 #ifdef FUNCTION_INCOMING_ARG
2701 next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
2705 next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
2708 /* All arguments and registers used for the call must be set up by now! */
2711 /* Stack must be properly aligned now. */
2713 || !(stack_pointer_delta % preferred_unit_stack_boundary));
2715 /* Generate the actual call instruction. */
2716 emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
2717 adjusted_args_size.constant, struct_value_size,
2718 next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
2719 flags, & args_so_far);
2721 /* If a non-BLKmode value is returned at the most significant end
2722 of a register, shift the register right by the appropriate amount
2723 and update VALREG accordingly. BLKmode values are handled by the
2724 group load/store machinery below. */
2725 if (!structure_value_addr
2726 && !pcc_struct_value
2727 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2728 && targetm.calls.return_in_msb (TREE_TYPE (exp)))
2730 if (shift_return_value (TYPE_MODE (TREE_TYPE (exp)), false, valreg))
2731 sibcall_failure = 1;
2732 valreg = gen_rtx_REG (TYPE_MODE (TREE_TYPE (exp)), REGNO (valreg));
2735 /* If call is cse'able, make appropriate pair of reg-notes around it.
2736 Test valreg so we don't crash; may safely ignore `const'
2737 if return type is void. Disable for PARALLEL return values, because
2738 we have no way to move such values into a pseudo register. */
2739 if (pass && (flags & ECF_LIBCALL_BLOCK))
2743 bool failed = valreg == 0 || GET_CODE (valreg) == PARALLEL;
2745 insns = get_insns ();
2747 /* Expansion of block moves possibly introduced a loop that may
2748 not appear inside libcall block. */
2749 for (insn = insns; insn; insn = NEXT_INSN (insn))
2761 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2763 /* Mark the return value as a pointer if needed. */
2764 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2765 mark_reg_pointer (temp,
2766 TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
2769 if (flag_unsafe_math_optimizations
2771 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2772 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRT
2773 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTF
2774 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SQRTL))
2775 note = gen_rtx_fmt_e (SQRT,
2777 args[0].initial_value);
2780 /* Construct an "equal form" for the value which
2781 mentions all the arguments in order as well as
2782 the function name. */
2783 for (i = 0; i < num_actuals; i++)
2784 note = gen_rtx_EXPR_LIST (VOIDmode,
2785 args[i].initial_value, note);
2786 note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
2788 if (flags & ECF_PURE)
2789 note = gen_rtx_EXPR_LIST (VOIDmode,
2790 gen_rtx_USE (VOIDmode,
2791 gen_rtx_MEM (BLKmode,
2792 gen_rtx_SCRATCH (VOIDmode))),
2795 emit_libcall_block (insns, temp, valreg, note);
2800 else if (pass && (flags & ECF_MALLOC))
2802 rtx temp = gen_reg_rtx (GET_MODE (valreg));
2805 /* The return value from a malloc-like function is a pointer. */
2806 if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
2807 mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
2809 emit_move_insn (temp, valreg);
2811 /* The return value from a malloc-like function cannot alias anything else. */
2813 last = get_last_insn ();
2815 gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
2817 /* Write out the sequence. */
2818 insns = get_insns ();
2824 /* For calls to `setjmp', etc., inform flow.c it should complain
2825 if nonvolatile values are live. For functions that cannot return,
2826 inform flow that control does not fall through. */
2828 if ((flags & ECF_NORETURN) || pass == 0)
2830 /* The barrier must be emitted
2831 immediately after the CALL_INSN. Some ports emit more
2832 than just a CALL_INSN above, so we must search for it here. */
2834 rtx last = get_last_insn ();
2835 while (!CALL_P (last))
2837 last = PREV_INSN (last);
2838 /* There was no CALL_INSN? */
2839 gcc_assert (last != before_call);
2842 emit_barrier_after (last);
2844 /* Stack adjustments after a noreturn call are dead code.
2845 However when NO_DEFER_POP is in effect, we must preserve
2846 stack_pointer_delta. */
2847 if (inhibit_defer_pop == 0)
2849 stack_pointer_delta = old_stack_allocated;
2850 pending_stack_adjust = 0;
2854 /* If value type not void, return an rtx for the value. */
2856 if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
2858 target = const0_rtx;
2859 else if (structure_value_addr)
2861 if (target == 0 || !MEM_P (target))
2864 = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2865 memory_address (TYPE_MODE (TREE_TYPE (exp)),
2866 structure_value_addr));
2867 set_mem_attributes (target, exp, 1);
2870 else if (pcc_struct_value)
2872 /* This is the special C++ case where we need to
2873 know what the true target was. We take care to
2874 never use this value more than once in one expression. */
2875 target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
2876 copy_to_reg (valreg));
2877 set_mem_attributes (target, exp, 1);
2879 /* Handle calls that return values in multiple non-contiguous locations.
2880 The Irix 6 ABI has examples of this. */
2881 else if (GET_CODE (valreg) == PARALLEL)
2885 /* This will only be assigned once, so it can be readonly. */
2886 tree nt = build_qualified_type (TREE_TYPE (exp),
2887 (TYPE_QUALS (TREE_TYPE (exp))
2888 | TYPE_QUAL_CONST));
2890 target = assign_temp (nt, 0, 1, 1);
2893 if (! rtx_equal_p (target, valreg))
2894 emit_group_store (target, valreg, TREE_TYPE (exp),
2895 int_size_in_bytes (TREE_TYPE (exp)));
2897 /* We can not support sibling calls for this case. */
2898 sibcall_failure = 1;
2901 && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
2902 && GET_MODE (target) == GET_MODE (valreg))
2904 bool may_overlap = false;
2906 /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
2907 reg to a plain register. */
2909 && HARD_REGISTER_P (valreg)
2910 && CLASS_LIKELY_SPILLED_P (REGNO_REG_CLASS (REGNO (valreg)))
2911 && !(REG_P (target) && !HARD_REGISTER_P (target)))
2912 valreg = copy_to_reg (valreg);
2914 /* If TARGET is a MEM in the argument area, and we have
2915 saved part of the argument area, then we can't store
2916 directly into TARGET as it may get overwritten when we
2917 restore the argument save area below. Don't work too
2918 hard though and simply force TARGET to a register if it
2919 is a MEM; the optimizer is quite likely to sort it out. */
2920 if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
2921 for (i = 0; i < num_actuals; i++)
2922 if (args[i].save_area)
2929 target = copy_to_reg (valreg);
2932 /* TARGET and VALREG cannot be equal at this point
2933 because the latter would not have
2934 REG_FUNCTION_VALUE_P true, while the former would if
2935 it were referring to the same register.
2937 If they refer to the same register, this move will be
2938 a no-op, except when function inlining is being done. */
2940 emit_move_insn (target, valreg);
2942 /* If we are setting a MEM, this code must be executed.
2943 Since it is emitted after the call insn, sibcall
2944 optimization cannot be performed in that case. */
2946 sibcall_failure = 1;
2949 else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
2951 target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
2953 /* We can not support sibling calls for this case. */
2954 sibcall_failure = 1;
2957 target = copy_to_reg (valreg);
2959 if (targetm.calls.promote_function_return(funtype))
2961 /* If we promoted this return value, make the proper SUBREG.
2962 TARGET might be const0_rtx here, so be careful. */
2964 && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
2965 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
2967 tree type = TREE_TYPE (exp);
2968 int unsignedp = TYPE_UNSIGNED (type);
2970 enum machine_mode pmode;
2972 pmode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1);
2973 /* If we don't promote as expected, something is wrong. */
2974 gcc_assert (GET_MODE (target) == pmode);
2976 if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
2977 && (GET_MODE_SIZE (GET_MODE (target))
2978 > GET_MODE_SIZE (TYPE_MODE (type))))
2980 offset = GET_MODE_SIZE (GET_MODE (target))
2981 - GET_MODE_SIZE (TYPE_MODE (type));
2982 if (! BYTES_BIG_ENDIAN)
2983 offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
2984 else if (! WORDS_BIG_ENDIAN)
2985 offset %= UNITS_PER_WORD;
2987 target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
2988 SUBREG_PROMOTED_VAR_P (target) = 1;
2989 SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
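/* As a concrete case (illustrative): if the call really returns SImode but
   TARGET is a DImode register on a target where both WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN are set, OFFSET is 8 - 4 = 4 and neither adjustment
   above changes it, so the SUBREG picks out the least significant four
   bytes of TARGET, where the promoted value lives.  */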
2993 /* If size of args is variable or this was a constructor call for a stack
2994 argument, restore saved stack-pointer value. */
2996 if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
2998 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
2999 stack_pointer_delta = old_stack_pointer_delta;
3000 pending_stack_adjust = old_pending_adj;
3001 old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3002 stack_arg_under_construction = old_stack_arg_under_construction;
3003 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3004 stack_usage_map = initial_stack_usage_map;
3005 sibcall_failure = 1;
3007 else if (ACCUMULATE_OUTGOING_ARGS && pass)
3009 #ifdef REG_PARM_STACK_SPACE
3011 restore_fixed_argument_area (save_area, argblock,
3012 high_to_save, low_to_save);
3015 /* If we saved any argument areas, restore them. */
3016 for (i = 0; i < num_actuals; i++)
3017 if (args[i].save_area)
3019 enum machine_mode save_mode = GET_MODE (args[i].save_area);
3021 = gen_rtx_MEM (save_mode,
3022 memory_address (save_mode,
3023 XEXP (args[i].stack_slot, 0)));
3025 if (save_mode != BLKmode)
3026 emit_move_insn (stack_area, args[i].save_area);
3028 emit_block_move (stack_area, args[i].save_area,
3029 GEN_INT (args[i].locate.size.constant),
3030 BLOCK_OP_CALL_PARM);
3033 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3034 stack_usage_map = initial_stack_usage_map;
3037 /* If this was alloca, record the new stack level for nonlocal gotos.
3038 Check for the handler slots since we might not have a save area
3039 for non-local gotos. */
3041 if ((flags & ECF_MAY_BE_ALLOCA) && cfun->nonlocal_goto_save_area != 0)
3042 update_nonlocal_goto_save_area ();
3044 /* Free up storage we no longer need. */
3045 for (i = 0; i < num_actuals; ++i)
3046 if (args[i].aligned_regs)
3047 free (args[i].aligned_regs);
3049 insns = get_insns ();
3054 tail_call_insns = insns;
3056 /* Restore the pending stack adjustment now that we have
3057 finished generating the sibling call sequence. */
3059 pending_stack_adjust = save_pending_stack_adjust;
3060 stack_pointer_delta = save_stack_pointer_delta;
3062 /* Prepare arg structure for next iteration. */
3063 for (i = 0; i < num_actuals; i++)
3066 args[i].aligned_regs = 0;
3070 sbitmap_free (stored_args_map);
3074 normal_call_insns = insns;
3076 /* Verify that we've deallocated all the stack we used. */
3077 gcc_assert ((flags & ECF_NORETURN)
3078 || (old_stack_allocated
3079 == stack_pointer_delta - pending_stack_adjust));
3082 /* If something prevents making this a sibling call,
3083 zero out the sequence. */
3084 if (sibcall_failure)
3085 tail_call_insns = NULL_RTX;
3090 /* If tail call production succeeded, we need to remove REG_EQUIV notes on
3091 the arguments too, as the argument area is now clobbered by the call. */
3092 if (tail_call_insns)
3094 emit_insn (tail_call_insns);
3095 cfun->tail_call_emit = true;
3098 emit_insn (normal_call_insns);
3100 currently_expanding_call--;
3102 /* If this function returns with the stack pointer depressed, ensure
3103 this block saves and restores the stack pointer, show it was
3104 changed, and adjust for any outgoing arg space. */
3105 if (flags & ECF_SP_DEPRESSED)
3107 clear_pending_stack_adjust ();
3108 emit_insn (gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx));
3109 emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3112 if (stack_usage_map_buf)
3113 free (stack_usage_map_buf);
3118 /* A sibling call sequence invalidates any REG_EQUIV notes made for
3119 this function's incoming arguments.
3121 At the start of RTL generation we know the only REG_EQUIV notes
3122 in the rtl chain are those for incoming arguments, so we can look
3123 for REG_EQUIV notes between the start of the function and the
3124 NOTE_INSN_FUNCTION_BEG.
3126 This is (slight) overkill. We could keep track of the highest
3127 argument we clobber and be more selective in removing notes, but it
3128 does not seem to be worth the effort. */
3131 fixup_tail_calls (void)
3135 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3137 /* There are never REG_EQUIV notes for the incoming arguments
3138 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it. */
3140 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
3145 rtx note = find_reg_note (insn, REG_EQUIV, 0);
3148 /* Remove the note and keep looking at the notes for this insn. */
3150 remove_note (insn, note);
3158 /* Traverse an argument list in VALUES and expand all complex
3159 arguments into their components. */
3161 split_complex_values (tree values)
3165 /* Before allocating memory, check for the common case of no complex. */
3166 for (p = values; p; p = TREE_CHAIN (p))
3168 tree type = TREE_TYPE (TREE_VALUE (p));
3169 if (type && TREE_CODE (type) == COMPLEX_TYPE
3170 && targetm.calls.split_complex_arg (type))
3176 values = copy_list (values);
3178 for (p = values; p; p = TREE_CHAIN (p))
3180 tree complex_value = TREE_VALUE (p);
3183 complex_type = TREE_TYPE (complex_value);
3187 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3188 && targetm.calls.split_complex_arg (complex_type))
3191 tree real, imag, next;
3193 subtype = TREE_TYPE (complex_type);
3194 complex_value = save_expr (complex_value);
3195 real = build1 (REALPART_EXPR, subtype, complex_value);
3196 imag = build1 (IMAGPART_EXPR, subtype, complex_value);
3198 TREE_VALUE (p) = real;
3199 next = TREE_CHAIN (p);
3200 imag = build_tree_list (NULL_TREE, imag);
3201 TREE_CHAIN (p) = imag;
3202 TREE_CHAIN (imag) = next;
3204 /* Skip the newly created node. */
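/* For instance (illustrative): an argument list (a, z, b) in which z has
   _Complex double type and the target splits complex arguments becomes
   (a, REALPART_EXPR <z>, IMAGPART_EXPR <z>, b); the two new entries are
   then processed like ordinary scalar arguments.  */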
3212 /* Traverse a list of TYPES and expand all complex types into their components. */
3215 split_complex_types (tree types)
3219 /* Before allocating memory, check for the common case of no complex. */
3220 for (p = types; p; p = TREE_CHAIN (p))
3222 tree type = TREE_VALUE (p);
3223 if (TREE_CODE (type) == COMPLEX_TYPE
3224 && targetm.calls.split_complex_arg (type))
3230 types = copy_list (types);
3232 for (p = types; p; p = TREE_CHAIN (p))
3234 tree complex_type = TREE_VALUE (p);
3236 if (TREE_CODE (complex_type) == COMPLEX_TYPE
3237 && targetm.calls.split_complex_arg (complex_type))
3241 /* Rewrite complex type with component type. */
3242 TREE_VALUE (p) = TREE_TYPE (complex_type);
3243 next = TREE_CHAIN (p);
3245 /* Add another component type for the imaginary part. */
3246 imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
3247 TREE_CHAIN (p) = imag;
3248 TREE_CHAIN (imag) = next;
3250 /* Skip the newly created node. */
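/* E.g. (illustrative): a type list (int, _Complex double, char) becomes
   (int, double, double, char) when the target splits complex arguments,
   mirroring the value-list rewrite done by split_complex_values above.  */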
3258 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3259 The RETVAL parameter specifies whether return value needs to be saved, other
3260 parameters are documented in the emit_library_call function below. */
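/* A typical caller (hypothetical operands, interface as documented above)
   passes the libcall arguments as (rtx, machine mode) pairs through the
   variadic wrappers, e.g.

       emit_library_call (libfunc, LCT_NORMAL, VOIDmode, 2,
                          op0, SImode, op1, SImode);

   which reaches this function with nargs == 2 and P pointing at those
   pairs.  */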
3263 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
3264 enum libcall_type fn_type,
3265 enum machine_mode outmode, int nargs, va_list p)
3267 /* Total size in bytes of all the stack-parms scanned so far. */
3268 struct args_size args_size;
3269 /* Size of arguments before any adjustments (such as rounding). */
3270 struct args_size original_args_size;
3276 CUMULATIVE_ARGS args_so_far;
3280 enum machine_mode mode;
3283 struct locate_and_pad_arg_data locate;
3287 int old_inhibit_defer_pop = inhibit_defer_pop;
3288 rtx call_fusage = 0;
3291 int pcc_struct_value = 0;
3292 int struct_value_size = 0;
3294 int reg_parm_stack_space = 0;
3297 tree tfom; /* type_for_mode (outmode, 0) */
3299 #ifdef REG_PARM_STACK_SPACE
3300 /* Define the boundary of the register parm stack space that needs to be saved, if any. */
3302 int low_to_save, high_to_save;
3303 rtx save_area = 0; /* Place that it is saved. */
3306 /* Size of the stack reserved for parameter registers. */
3307 int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3308 char *initial_stack_usage_map = stack_usage_map;
3309 char *stack_usage_map_buf = NULL;
3311 rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
3313 #ifdef REG_PARM_STACK_SPACE
3314 reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3317 /* By default, library functions can not throw. */
3318 flags = ECF_NOTHROW;
3330 case LCT_CONST_MAKE_BLOCK:
3331 flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3333 case LCT_PURE_MAKE_BLOCK:
3334 flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3337 flags |= ECF_NORETURN;
3340 flags = ECF_NORETURN;
3342 case LCT_RETURNS_TWICE:
3343 flags = ECF_RETURNS_TWICE;
3348 /* Ensure current function's preferred stack boundary is at least what we need. */
3350 if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3351 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3353 /* If this kind of value comes back in memory,
3354 decide where in memory it should come back. */
3355 if (outmode != VOIDmode)
3357 tfom = lang_hooks.types.type_for_mode (outmode, 0);
3358 if (aggregate_value_p (tfom, 0))
3360 #ifdef PCC_STATIC_STRUCT_RETURN
3362 = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
3363 mem_value = gen_rtx_MEM (outmode, pointer_reg);
3364 pcc_struct_value = 1;
3366 value = gen_reg_rtx (outmode);
3367 #else /* not PCC_STATIC_STRUCT_RETURN */
3368 struct_value_size = GET_MODE_SIZE (outmode);
3369 if (value != 0 && MEM_P (value))
3372 mem_value = assign_temp (tfom, 0, 1, 1);
3374 /* This call returns a big structure. */
3375 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3379 tfom = void_type_node;
3381 /* ??? Unfinished: must pass the memory address as an argument. */
3383 /* Copy all the libcall-arguments out of the varargs data
3384 and into a vector ARGVEC.
3386 Compute how to pass each argument. We only support a very small subset
3387 of the full argument passing conventions to limit complexity here since
3388 library functions shouldn't have many args. */
3390 argvec = alloca ((nargs + 1) * sizeof (struct arg));
3391 memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
3393 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3394 INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3396 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0, nargs);
3399 args_size.constant = 0;
3404 /* Now we are about to start emitting insns that can be deleted
3405 if a libcall is deleted. */
3406 if (flags & ECF_LIBCALL_BLOCK)
3411 /* If there's a structure value address to be passed,
3412 either pass it in the special place, or pass it as an extra argument. */
3413 if (mem_value && struct_value == 0 && ! pcc_struct_value)
3415 rtx addr = XEXP (mem_value, 0);
3419 /* Make sure it is a reasonable operand for a move or push insn. */
3420 if (!REG_P (addr) && !MEM_P (addr)
3421 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3422 addr = force_operand (addr, NULL_RTX);
3424 argvec[count].value = addr;
3425 argvec[count].mode = Pmode;
3426 argvec[count].partial = 0;
3428 argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3429 gcc_assert (targetm.calls.arg_partial_bytes (&args_so_far, Pmode,
3430 NULL_TREE, 1) == 0);
3432 locate_and_pad_parm (Pmode, NULL_TREE,
3433 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3436 argvec[count].reg != 0,
3438 0, NULL_TREE, &args_size, &argvec[count].locate);
3440 if (argvec[count].reg == 0 || argvec[count].partial != 0
3441 || reg_parm_stack_space > 0)
3442 args_size.constant += argvec[count].locate.size.constant;
3444 FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3449 for (; count < nargs; count++)
3451 rtx val = va_arg (p, rtx);
3452 enum machine_mode mode = va_arg (p, enum machine_mode);
3454 /* We cannot convert the arg value to the mode the library wants here;
3455 must do it earlier where we know the signedness of the arg. */
3456 gcc_assert (mode != BLKmode
3457 && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
3459 /* Make sure it is a reasonable operand for a move or push insn. */
3460 if (!REG_P (val) && !MEM_P (val)
3461 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3462 val = force_operand (val, NULL_RTX);
3464 if (pass_by_reference (&args_so_far, mode, NULL_TREE, 1))
3468 = !reference_callee_copied (&args_so_far, mode, NULL_TREE, 1);
3470 /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3471 functions, so we have to pretend this isn't such a function. */
3472 if (flags & ECF_LIBCALL_BLOCK)
3474 rtx insns = get_insns ();
3478 flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3480 /* If this was a CONST function, it is now PURE since
3481 it now reads memory. */
3482 if (flags & ECF_CONST)
3484 flags &= ~ECF_CONST;
3488 if (MEM_P (val) && !must_copy)
3492 slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
3494 emit_move_insn (slot, val);
3497 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3498 gen_rtx_USE (VOIDmode, slot),
3501 call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3502 gen_rtx_CLOBBER (VOIDmode,
3507 val = force_operand (XEXP (slot, 0), NULL_RTX);
3510 argvec[count].value = val;
3511 argvec[count].mode = mode;
3513 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3515 argvec[count].partial
3516 = targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL_TREE, 1);
3518 locate_and_pad_parm (mode, NULL_TREE,
3519 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3522 argvec[count].reg != 0,
3524 argvec[count].partial,
3525 NULL_TREE, &args_size, &argvec[count].locate);
3527 gcc_assert (!argvec[count].locate.size.var);
3529 if (argvec[count].reg == 0 || argvec[count].partial != 0
3530 || reg_parm_stack_space > 0)
3531 args_size.constant += argvec[count].locate.size.constant;
3533 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3536 /* If this machine requires an external definition for library
3537 functions, write one out. */
3538 assemble_external_libcall (fun);
3540 original_args_size = args_size;
3541 args_size.constant = (((args_size.constant
3542 + stack_pointer_delta
3546 - stack_pointer_delta);
3548 args_size.constant = MAX (args_size.constant,
3549 reg_parm_stack_space);
3551 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3552 args_size.constant -= reg_parm_stack_space;
3555 if (args_size.constant > current_function_outgoing_args_size)
3556 current_function_outgoing_args_size = args_size.constant;
3558 if (ACCUMULATE_OUTGOING_ARGS)
3560 /* Since the stack pointer will never be pushed, it is possible for
3561 the evaluation of a parm to clobber something we have already
3562 written to the stack. Since most function calls on RISC machines
3563 do not use the stack, this is uncommon, but must work correctly.
3565 Therefore, we save any area of the stack that was already written
3566 and that we are using. Here we set up to do this by making a new
3567 stack usage map from the old one.
3569 Another approach might be to try to reorder the argument
3570 evaluations to avoid this conflicting stack usage. */
3572 needed = args_size.constant;
3574 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3575 /* Since we will be writing into the entire argument area, the
3576 map must be allocated for its entire size, not just the part that
3577 is the responsibility of the caller. */
3578 needed += reg_parm_stack_space;
3581 #ifdef ARGS_GROW_DOWNWARD
3582 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3585 highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3588 stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3589 stack_usage_map = stack_usage_map_buf;
3591 if (initial_highest_arg_in_use)
3592 memcpy (stack_usage_map, initial_stack_usage_map,
3593 initial_highest_arg_in_use);
3595 if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3596 memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3597 highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3600 /* We must be careful to use virtual regs before they're instantiated,
3601 and real regs afterwards. Loop optimization, for example, can create
3602 new libcalls after we've instantiated the virtual regs, and if we
3603 use virtuals anyway, they won't match the rtl patterns. */
3605 if (virtuals_instantiated)
3606 argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3608 argblock = virtual_outgoing_args_rtx;
3613 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3616 /* If we push args individually in reverse order, perform stack alignment
3617 before the first push (the last arg). */
3618 if (argblock == 0 && PUSH_ARGS_REVERSED)
3619 anti_adjust_stack (GEN_INT (args_size.constant
3620 - original_args_size.constant));
3622 if (PUSH_ARGS_REVERSED)
3633 #ifdef REG_PARM_STACK_SPACE
3634 if (ACCUMULATE_OUTGOING_ARGS)
3636 /* The argument list is the property of the called routine and it
3637 may clobber it. If the fixed area has been used for previous
3638 parameters, we must save and restore it. */
3639 save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
3640 &low_to_save, &high_to_save);
3644 /* Push the args that need to be pushed. */
3646 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3647 are to be pushed. */
3648 for (count = 0; count < nargs; count++, argnum += inc)
3650 enum machine_mode mode = argvec[argnum].mode;
3651 rtx val = argvec[argnum].value;
3652 rtx reg = argvec[argnum].reg;
3653 int partial = argvec[argnum].partial;
3654 int lower_bound = 0, upper_bound = 0, i;
3656 if (! (reg != 0 && partial == 0))
3658 if (ACCUMULATE_OUTGOING_ARGS)
3660 /* If this is being stored into a pre-allocated, fixed-size,
3661 stack area, save any previous data at that location. */
3663 #ifdef ARGS_GROW_DOWNWARD
3664 /* stack_slot is negative, but we want to index stack_usage_map
3665 with positive values. */
3666 upper_bound = -argvec[argnum].locate.offset.constant + 1;
3667 lower_bound = upper_bound - argvec[argnum].locate.size.constant;
3669 lower_bound = argvec[argnum].locate.offset.constant;
3670 upper_bound = lower_bound + argvec[argnum].locate.size.constant;
3674 /* Don't worry about things in the fixed argument area;
3675 it has already been saved. */
3676 if (i < reg_parm_stack_space)
3677 i = reg_parm_stack_space;
3678 while (i < upper_bound && stack_usage_map[i] == 0)
3681 if (i < upper_bound)
3683 /* We need to make a save area. */
3685 = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
3686 enum machine_mode save_mode
3687 = mode_for_size (size, MODE_INT, 1);
3689 = plus_constant (argblock,
3690 argvec[argnum].locate.offset.constant);
3692 = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
3694 if (save_mode == BLKmode)
3696 argvec[argnum].save_area
3697 = assign_stack_temp (BLKmode,
3698 argvec[argnum].locate.size.constant,
3701 emit_block_move (validize_mem (argvec[argnum].save_area),
3703 GEN_INT (argvec[argnum].locate.size.constant),
3704 BLOCK_OP_CALL_PARM);
3708 argvec[argnum].save_area = gen_reg_rtx (save_mode);
3710 emit_move_insn (argvec[argnum].save_area, stack_area);
3715 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
3716 partial, reg, 0, argblock,
3717 GEN_INT (argvec[argnum].locate.offset.constant),
3718 reg_parm_stack_space,
3719 ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad));
3721 /* Now mark the segment we just used. */
3722 if (ACCUMULATE_OUTGOING_ARGS)
3723 for (i = lower_bound; i < upper_bound; i++)
3724 stack_usage_map[i] = 1;
3728 if (flags & ECF_CONST)
3732 /* Indicate argument access so that alias.c knows that these values are live. */
3735 use = plus_constant (argblock,
3736 argvec[argnum].locate.offset.constant);
3738 /* When arguments are pushed, trying to tell alias.c where
3739 exactly this argument is won't work, because the
3740 auto-increment causes confusion. So we merely indicate
3741 that we access something with a known mode somewhere on the stack. */
3743 use = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3744 gen_rtx_SCRATCH (Pmode));
3745 use = gen_rtx_MEM (argvec[argnum].mode, use);
3746 use = gen_rtx_USE (VOIDmode, use);
3747 call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
3752 /* If we pushed args in forward order, perform stack alignment
3753 after pushing the last arg. */
3754 if (argblock == 0 && !PUSH_ARGS_REVERSED)
3755 anti_adjust_stack (GEN_INT (args_size.constant
3756 - original_args_size.constant));
3758 if (PUSH_ARGS_REVERSED)
3763 fun = prepare_call_address (fun, NULL, &call_fusage, 0, 0);
3765 /* Now load any reg parms into their regs. */
3767 /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3768 are to be pushed. */
3769 for (count = 0; count < nargs; count++, argnum += inc)
3771 enum machine_mode mode = argvec[argnum].mode;
3772 rtx val = argvec[argnum].value;
3773 rtx reg = argvec[argnum].reg;
3774 int partial = argvec[argnum].partial;
3776 /* Handle calls that pass values in multiple non-contiguous
3777 locations. The PA64 has examples of this for library calls. */
3778 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3779 emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
3780 else if (reg != 0 && partial == 0)
3781 emit_move_insn (reg, val);
3786 /* Any regs containing parms remain in use through the call. */
3787 for (count = 0; count < nargs; count++)
3789 rtx reg = argvec[count].reg;
3790 if (reg != 0 && GET_CODE (reg) == PARALLEL)
3791 use_group_regs (&call_fusage, reg);
3793 use_reg (&call_fusage, reg);
3796 /* Pass the function the address in which to return a structure value. */
3797 if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
3799 emit_move_insn (struct_value,
3801 force_operand (XEXP (mem_value, 0),
3803 if (REG_P (struct_value))
3804 use_reg (&call_fusage, struct_value);
3807 /* Don't allow popping to be deferred, since then
3808 cse'ing of library calls could delete a call and leave the pop. */
3809 NO_DEFER_POP;
3810 valreg = (mem_value == 0 && outmode != VOIDmode
3811 ? hard_libcall_value (outmode) : NULL_RTX);
3813 /* Stack must be properly aligned now. */
3814 gcc_assert (!(stack_pointer_delta
3815 & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)));
3817 before_call = get_last_insn ();
3819 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
3820 will set inhibit_defer_pop to that value. */
3821 /* The return type is needed to decide how many bytes the function pops.
3822 Signedness plays no role in that, so for simplicity, we pretend it's
3823 always signed. We also assume that the list of arguments passed has
3824 no impact, so we pretend it is unknown. */
3826 emit_call_1 (fun, NULL,
3827 get_identifier (XSTR (orgfun, 0)),
3828 build_function_type (tfom, NULL_TREE),
3829 original_args_size.constant, args_size.constant,
3830 struct_value_size,
3831 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
3832 valreg,
3833 old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
3835 /* For calls to `setjmp', etc., inform flow.c it should complain
3836 if nonvolatile values are live. For functions that cannot return,
3837 inform flow that control does not fall through. */
3839 if (flags & ECF_NORETURN)
3840 {
3841 /* The barrier note must be emitted
3842 immediately after the CALL_INSN. Some ports emit more than
3843 just a CALL_INSN above, so we must search for it here. */
3845 rtx last = get_last_insn ();
3846 while (!CALL_P (last))
3847 {
3848 last = PREV_INSN (last);
3849 /* There was no CALL_INSN? */
3850 gcc_assert (last != before_call);
3851 }
3853 emit_barrier_after (last);
3854 }
3856 /* Now restore inhibit_defer_pop to its actual original value. */
3857 OK_DEFER_POP;
3859 /* If call is cse'able, make appropriate pair of reg-notes around it.
3860 Test valreg so we don't crash; may safely ignore `const'
3861 if return type is void. Disable for PARALLEL return values, because
3862 we have no way to move such values into a pseudo register. */
3863 if (flags & ECF_LIBCALL_BLOCK)
3864 {
3865 rtx insns;
3867 if (valreg == 0)
3868 {
3869 insns = get_insns ();
3870 end_sequence ();
3871 emit_insn (insns);
3872 }
3873 else
3874 {
3875 rtx note = 0;
3876 rtx temp;
3877 int i;
3879 if (GET_CODE (valreg) == PARALLEL)
3880 {
3881 temp = gen_reg_rtx (outmode);
3882 emit_group_store (temp, valreg, NULL_TREE,
3883 GET_MODE_SIZE (outmode));
3884 valreg = temp;
3885 }
3887 temp = gen_reg_rtx (GET_MODE (valreg));
3889 /* Construct an "equal form" for the value which mentions all the
3890 arguments in order as well as the function name. */
3891 for (i = 0; i < nargs; i++)
3892 note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
3893 note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
3895 insns = get_insns ();
3896 end_sequence ();
3898 if (flags & ECF_PURE)
3899 note = gen_rtx_EXPR_LIST (VOIDmode,
3900 gen_rtx_USE (VOIDmode,
3901 gen_rtx_MEM (BLKmode,
3902 gen_rtx_SCRATCH (VOIDmode))),
3903 note);
3905 emit_libcall_block (insns, temp, valreg, note);
3907 valreg = temp;
3908 }
3909 }
3912 /* Copy the value to the right place. */
3913 if (outmode != VOIDmode && retval)
3914 {
3915 if (mem_value)
3916 {
3917 if (value == 0)
3918 value = mem_value;
3919 if (value != mem_value)
3920 emit_move_insn (value, mem_value);
3921 }
3922 else if (GET_CODE (valreg) == PARALLEL)
3923 {
3924 if (value == 0)
3925 value = gen_reg_rtx (outmode);
3926 emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
3927 }
3928 else if (value != 0)
3929 emit_move_insn (value, valreg);
3930 else
3931 value = valreg;
3932 }
3934 if (ACCUMULATE_OUTGOING_ARGS)
3935 {
3936 #ifdef REG_PARM_STACK_SPACE
3937 if (save_area)
3938 restore_fixed_argument_area (save_area, argblock,
3939 high_to_save, low_to_save);
3940 #endif
3942 /* If we saved any argument areas, restore them. */
3943 for (count = 0; count < nargs; count++)
3944 if (argvec[count].save_area)
3945 {
3946 enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
3947 rtx adr = plus_constant (argblock,
3948 argvec[count].locate.offset.constant);
3949 rtx stack_area = gen_rtx_MEM (save_mode,
3950 memory_address (save_mode, adr));
3952 if (save_mode == BLKmode)
3953 emit_block_move (stack_area,
3954 validize_mem (argvec[count].save_area),
3955 GEN_INT (argvec[count].locate.size.constant),
3956 BLOCK_OP_CALL_PARM);
3957 else
3958 emit_move_insn (stack_area, argvec[count].save_area);
3959 }
3961 highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3962 stack_usage_map = initial_stack_usage_map;
3963 }
3965 if (stack_usage_map_buf)
3966 free (stack_usage_map_buf);
3968 return value;
3969 }
3972 /* Output a library call to function FUN (a SYMBOL_REF rtx)
3973 (emitting the queue unless NO_QUEUE is nonzero),
3974 for a value of mode OUTMODE,
3975 with NARGS different arguments, passed as alternating rtx values
3976 and machine_modes to convert them to.
3978 FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
3979 calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
3980 which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
3981 LCT_PURE_MAKE_BLOCK for `pure' calls which should be enclosed in
3982 REG_LIBCALL/REG_RETVAL notes with an extra (use (memory (scratch))),
3983 or other LCT_ value for other types of library calls. */
3985 void
3986 emit_library_call (rtx orgfun, enum libcall_type fn_type,
3987 enum machine_mode outmode, int nargs, ...)
3988 {
3989 va_list p;
3991 va_start (p, nargs);
3992 emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
3993 va_end (p);
3994 }
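/* Editor's sketch (illustrative only, not part of the original file):
   operands are passed after NARGS as alternating (rtx value, mode) pairs.
   Assuming OP0 and OP1 are SImode rtx operands already in hand and
   "__example_helper" names a hypothetical support routine, a caller
   interested only in the side effects could write:

       emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__example_helper"),
                          LCT_NORMAL, VOIDmode, 2,
                          op0, SImode, op1, SImode);

   When the returned value is needed, use emit_library_call_value below.  */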
3996 /* Like emit_library_call except that an extra argument, VALUE,
3997 comes second and says where to store the result.
3998 (If VALUE is zero, this function chooses a convenient way
3999 to return the value.)
4001 This function returns an rtx for where the value is to be found.
4002 If VALUE is nonzero, VALUE is returned. */
4004 rtx
4005 emit_library_call_value (rtx orgfun, rtx value,
4006 enum libcall_type fn_type,
4007 enum machine_mode outmode, int nargs, ...)
4008 {
4009 rtx result;
4010 va_list p;
4012 va_start (p, nargs);
4013 result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4014 nargs, p);
4015 va_end (p);
4017 return result;
4018 }
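/* Editor's sketch (illustrative only): the same hypothetical call as in
   the note above, this time capturing the result.  Passing NULL_RTX for
   VALUE lets the function pick a convenient place, typically
   hard_libcall_value (SImode):

       rtx res
         = emit_library_call_value (gen_rtx_SYMBOL_REF (Pmode, "__example_helper"),
                                    NULL_RTX, LCT_NORMAL, SImode, 2,
                                    op0, SImode, op1, SImode);  */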
4020 /* Store a single argument for a function call
4021 into the register or memory area where it must be passed.
4022 *ARG describes the argument value and where to pass it.
4024 ARGBLOCK is the address of the stack-block for all the arguments,
4025 or 0 on a machine where arguments are pushed individually.
4027 MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4028 so must be careful about how the stack is used.
4030 VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4031 argument stack. This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4032 that we need not worry about saving and restoring the stack.
4034 FNDECL is the declaration of the function we are calling.
4036 Return nonzero if this arg should cause sibcall failure,
4037 zero otherwise. */
4039 static int
4040 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
4041 int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
4042 {
4043 tree pval = arg->tree_value;
4044 rtx reg = 0;
4045 int partial = 0;
4046 int used = 0;
4047 int i, lower_bound = 0, upper_bound = 0;
4048 int sibcall_failure = 0;
4050 if (TREE_CODE (pval) == ERROR_MARK)
4051 return 1;
4053 /* Push a new temporary level for any temporaries we make for
4054 this argument. */
4055 push_temp_slots ();
4057 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4058 {
4059 /* If this is being stored into a pre-allocated, fixed-size, stack area,
4060 save any previous data at that location. */
4061 if (argblock && ! variable_size && arg->stack)
4062 {
4063 #ifdef ARGS_GROW_DOWNWARD
4064 /* stack_slot is negative, but we want to index stack_usage_map
4065 with positive values. */
4066 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4067 upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4068 else
4069 upper_bound = 0;
4071 lower_bound = upper_bound - arg->locate.size.constant;
4072 #else
4073 if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4074 lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4075 else
4076 lower_bound = 0;
4078 upper_bound = lower_bound + arg->locate.size.constant;
4079 #endif
4081 i = lower_bound;
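/* Editor's worked example (not in the original): for a 4-byte argument
   whose slot address is (plus argblock (const_int 8)) and without
   ARGS_GROW_DOWNWARD, lower_bound is 8 and upper_bound is 12, so
   stack_usage_map[8..11] is inspected below and marked after the store.
   With ARGS_GROW_DOWNWARD and a slot at (plus argblock (const_int -12)),
   upper_bound is 12 + 1 = 13 and lower_bound is 13 - 4 = 9.  */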
4082 /* Don't worry about things in the fixed argument area;
4083 it has already been saved. */
4084 if (i < reg_parm_stack_space)
4085 i = reg_parm_stack_space;
4086 while (i < upper_bound && stack_usage_map[i] == 0)
4087 i++;
4089 if (i < upper_bound)
4090 {
4091 /* We need to make a save area. */
4092 unsigned int size = arg->locate.size.constant * BITS_PER_UNIT;
4093 enum machine_mode save_mode = mode_for_size (size, MODE_INT, 1);
4094 rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
4095 rtx stack_area = gen_rtx_MEM (save_mode, adr);
4097 if (save_mode == BLKmode)
4098 {
4099 tree ot = TREE_TYPE (arg->tree_value);
4100 tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4101 | TYPE_QUAL_CONST));
4103 arg->save_area = assign_temp (nt, 0, 1, 1);
4104 preserve_temp_slots (arg->save_area);
4105 emit_block_move (validize_mem (arg->save_area), stack_area,
4106 GEN_INT (arg->locate.size.constant),
4107 BLOCK_OP_CALL_PARM);
4108 }
4109 else
4110 {
4111 arg->save_area = gen_reg_rtx (save_mode);
4112 emit_move_insn (arg->save_area, stack_area);
4113 }
4114 }
4115 }
4116 }
4118 /* If this isn't going to be placed on both the stack and in registers,
4119 set up the register and number of words. */
4120 if (! arg->pass_on_stack)
4121 {
4122 if (flags & ECF_SIBCALL)
4123 reg = arg->tail_call_reg;
4124 else
4125 reg = arg->reg;
4126 partial = arg->partial;
4127 }
4129 /* Being passed entirely in a register. We shouldn't be called in
4130 this case. */
4131 gcc_assert (reg == 0 || partial != 0);
4133 /* If this arg needs special alignment, don't load the registers
4134 here. */
4135 if (arg->n_aligned_regs != 0)
4136 reg = 0;
4138 /* If this is being passed partially in a register, we can't evaluate
4139 it directly into its stack slot. Otherwise, we can. */
4140 if (arg->value == 0)
4141 {
4142 /* stack_arg_under_construction is nonzero if a function argument is
4143 being evaluated directly into the outgoing argument list and
4144 expand_call must take special action to preserve the argument list
4145 if it is called recursively.
4147 For scalar function arguments stack_usage_map is sufficient to
4148 determine which stack slots must be saved and restored. Scalar
4149 arguments in general have pass_on_stack == 0.
4151 If this argument is initialized by a function which takes the
4152 address of the argument (a C++ constructor or a C function
4153 returning a BLKmode structure), then stack_usage_map is
4154 insufficient and expand_call must push the stack around the
4155 function call. Such arguments have pass_on_stack == 1.
4157 Note that it is always safe to set stack_arg_under_construction,
4158 but this generates suboptimal code if set when not needed. */
4160 if (arg->pass_on_stack)
4161 stack_arg_under_construction++;
4163 arg->value = expand_expr (pval,
4164 (partial
4165 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4166 ? NULL_RTX : arg->stack,
4167 VOIDmode, EXPAND_STACK_PARM);
4169 /* If we are promoting the object, or if for any other reason the mode
4170 doesn't agree, convert it now. */
4172 if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4173 arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4174 arg->value, arg->unsignedp);
4176 if (arg->pass_on_stack)
4177 stack_arg_under_construction--;
4178 }
4180 /* Check for overlap with already clobbered argument area. */
4181 if ((flags & ECF_SIBCALL)
4182 && MEM_P (arg->value)
4183 && mem_overlaps_already_clobbered_arg_p (XEXP (arg->value, 0),
4184 arg->locate.size.constant))
4185 sibcall_failure = 1;
4187 /* Don't allow anything left on stack from computation
4188 of argument to alloca. */
4189 if (flags & ECF_MAY_BE_ALLOCA)
4190 do_pending_stack_adjust ();
4192 if (arg->value == arg->stack)
4193 /* If the value is already in the stack slot, we are done. */
4194 ;
4195 else if (arg->mode != BLKmode)
4196 {
4197 int size;
4199 /* Argument is a scalar, not entirely passed in registers.
4200 (If part is passed in registers, arg->partial says how much
4201 and emit_push_insn will take care of putting it there.)
4203 Push it, and if its size is less than the
4204 amount of space allocated to it,
4205 also bump stack pointer by the additional space.
4206 Note that in C the default argument promotions
4207 will prevent such mismatches. */
4209 size = GET_MODE_SIZE (arg->mode);
4210 /* Compute how much space the push instruction will push.
4211 On many machines, pushing a byte will advance the stack
4212 pointer by a halfword. */
4213 #ifdef PUSH_ROUNDING
4214 size = PUSH_ROUNDING (size);
4215 #endif
4216 used = size;
4218 /* Compute how much space the argument should get:
4219 round up to a multiple of the alignment for arguments. */
4220 if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4221 used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4222 / (PARM_BOUNDARY / BITS_PER_UNIT))
4223 * (PARM_BOUNDARY / BITS_PER_UNIT));
4225 /* This isn't already where we want it on the stack, so put it there.
4226 This can either be done with push or copy insns. */
4227 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4228 PARM_BOUNDARY, partial, reg, used - size, argblock,
4229 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4230 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4232 /* Unless this is a partially-in-register argument, the argument is now
4233 in the stack. */
4234 if (partial == 0)
4235 arg->value = arg->stack;
4236 }
4237 else
4239 /* BLKmode, at least partly to be pushed. */
4240 {
4241 unsigned int parm_align;
4242 int excess;
4243 rtx size_rtx;
4245 /* Pushing a nonscalar.
4246 If part is passed in registers, PARTIAL says how much
4247 and emit_push_insn will take care of putting it there. */
4249 /* Round its size up to a multiple
4250 of the allocation unit for arguments. */
4252 if (arg->locate.size.var != 0)
4253 {
4254 excess = 0;
4255 size_rtx = ARGS_SIZE_RTX (arg->locate.size);
4256 }
4257 else
4258 {
4259 /* PUSH_ROUNDING has no effect on us, because emit_push_insn
4260 for BLKmode is careful to avoid it. */
4261 excess = (arg->locate.size.constant
4262 - int_size_in_bytes (TREE_TYPE (pval))
4263 + partial);
4264 size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4265 NULL_RTX, TYPE_MODE (sizetype), 0);
4266 }
4268 parm_align = arg->locate.boundary;
4270 /* When an argument is padded down, the block is aligned to
4271 PARM_BOUNDARY, but the actual argument isn't. */
4272 if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4273 {
4274 if (arg->locate.size.var)
4275 parm_align = BITS_PER_UNIT;
4276 else if (excess)
4277 {
4278 unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4279 parm_align = MIN (parm_align, excess_align);
4280 }
4281 }
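/* Editor's worked example (not in the original): EXCESS & -EXCESS isolates
   the lowest set bit of EXCESS, i.e. the largest power of two that divides
   it.  For EXCESS == 6 that is 2 bytes, so excess_align is 16 bits and
   parm_align is capped at 16.  */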
4283 if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
4284 {
4285 /* emit_push_insn might not work properly if arg->value and
4286 argblock + arg->locate.offset areas overlap. */
4287 rtx x = arg->value;
4288 int i = 0;
4290 if (XEXP (x, 0) == current_function_internal_arg_pointer
4291 || (GET_CODE (XEXP (x, 0)) == PLUS
4292 && XEXP (XEXP (x, 0), 0) ==
4293 current_function_internal_arg_pointer
4294 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4295 {
4296 if (XEXP (x, 0) != current_function_internal_arg_pointer)
4297 i = INTVAL (XEXP (XEXP (x, 0), 1));
4299 /* expand_call should ensure this. */
4300 gcc_assert (!arg->locate.offset.var
4301 && arg->locate.size.var == 0
4302 && GET_CODE (size_rtx) == CONST_INT);
4304 if (arg->locate.offset.constant > i)
4305 {
4306 if (arg->locate.offset.constant < i + INTVAL (size_rtx))
4307 sibcall_failure = 1;
4308 }
4309 else if (arg->locate.offset.constant < i)
4310 {
4311 /* Use arg->locate.size.constant instead of size_rtx
4312 because we only care about the part of the argument
4313 on the stack. */
4314 if (i < (arg->locate.offset.constant
4315 + arg->locate.size.constant))
4316 sibcall_failure = 1;
4317 }
4318 else
4319 {
4320 /* Even though they appear to be at the same location,
4321 if part of the outgoing argument is in registers,
4322 they aren't really at the same location. Check for
4323 this by making sure that the incoming size is the
4324 same as the outgoing size. */
4325 if (arg->locate.size.constant != INTVAL (size_rtx))
4326 sibcall_failure = 1;
4327 }
4328 }
4329 }
4331 emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4332 parm_align, partial, reg, excess, argblock,
4333 ARGS_SIZE_RTX (arg->locate.offset), reg_parm_stack_space,
4334 ARGS_SIZE_RTX (arg->locate.alignment_pad));
4336 /* Unless this is a partially-in-register argument, the argument is now
4337 in the stack.
4339 ??? Unlike the case above, in which we want the actual
4340 address of the data, so that we can load it directly into a
4341 register, here we want the address of the stack slot, so that
4342 it's properly aligned for word-by-word copying or something
4343 like that. It's not clear that this is always correct. */
4344 if (partial == 0)
4345 arg->value = arg->stack_slot;
4346 }
4348 if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
4349 {
4350 tree type = TREE_TYPE (arg->tree_value);
4351 arg->parallel_value
4352 = emit_group_load_into_temps (arg->reg, arg->value, type,
4353 int_size_in_bytes (type));
4354 }
4356 /* Mark all slots this store used. */
4357 if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4358 && argblock && ! variable_size && arg->stack)
4359 for (i = lower_bound; i < upper_bound; i++)
4360 stack_usage_map[i] = 1;
4362 /* Once we have pushed something, pops can't safely
4363 be deferred during the rest of the arguments. */
4364 NO_DEFER_POP;
4366 /* Free any temporary slots made in processing this argument. Show
4367 that we might have taken the address of something and pushed that
4368 as an argument. */
4369 preserve_temp_slots (NULL_RTX);
4370 free_temp_slots ();
4371 pop_temp_slots ();
4373 return sibcall_failure;
4374 }
4376 /* Nonzero if we do not know how to pass TYPE solely in registers. */
4378 bool
4379 must_pass_in_stack_var_size (enum machine_mode mode ATTRIBUTE_UNUSED,
4380 tree type)
4381 {
4382 if (!type)
4383 return false;
4385 /* If the type has variable size... */
4386 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4387 return true;
4389 /* If the type is marked as addressable (it is required
4390 to be constructed into the stack)... */
4391 if (TREE_ADDRESSABLE (type))
4392 return true;
4394 return false;
4395 }
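/* Editor's note (illustrative, not in the original): the variable-size test
   fires for types whose TYPE_SIZE is not an INTEGER_CST, e.g. a GNU C
   structure ending in an array with a non-constant bound; the
   TREE_ADDRESSABLE test fires for types the front end requires to be
   constructed in memory, such as C++ classes with nontrivial copy
   constructors or destructors.  */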
4397 /* Another version of the TARGET_MUST_PASS_IN_STACK hook. This one
4398 takes trailing padding of a structure into account. */
4399 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING. */
4401 bool
4402 must_pass_in_stack_var_size_or_pad (enum machine_mode mode, tree type)
4403 {
4404 if (!type)
4405 return false;
4407 /* If the type has variable size... */
4408 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4409 return true;
4411 /* If the type is marked as addressable (it is required
4412 to be constructed into the stack)... */
4413 if (TREE_ADDRESSABLE (type))
4414 return true;
4416 /* If the padding and mode of the type is such that a copy into
4417 a register would put it into the wrong part of the register. */
4418 if (mode == BLKmode
4419 && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4420 && (FUNCTION_ARG_PADDING (mode, type)
4421 == (BYTES_BIG_ENDIAN ? upward : downward)))
4422 return true;
4424 return false;
4425 }