1 /* Procedure integration for GNU CC.
2 Copyright (C) 1988, 1991, 1993, 1994, 1995 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
29 #include "insn-config.h"
30 #include "insn-flags.h"
33 #include "integrate.h"
39 #define obstack_chunk_alloc xmalloc
40 #define obstack_chunk_free free
42 extern struct obstack *function_maybepermanent_obstack;
44 extern tree pushdecl ();
45 extern tree poplevel ();
47 /* Similar, but round to the next highest integer that meets the
   required alignment: round VALUE up to the next multiple of ALIGN.
   ALIGN must be a power of two for the mask trick below to work.  */
49 #define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
51 /* Default max number of insns a function can have and still be inline.
52 This is overridden on RISC machines. */
53 #ifndef INTEGRATE_THRESHOLD
54 #define INTEGRATE_THRESHOLD(DECL) \
55 (8 * (8 + list_length (DECL_ARGUMENTS (DECL))))
/* Forward declarations for the helper routines defined in this file.
   PROTO expands to a real prototype only when the compiler supports
   prototypes (pre-ANSI compatibility macro).  */
58 static rtx initialize_for_inline PROTO((tree, int, int, int, int));
59 static void finish_inline PROTO((tree, rtx));
60 static void adjust_copied_decl_tree PROTO((tree));
61 static tree copy_decl_list PROTO((tree));
62 static tree copy_decl_tree PROTO((tree));
63 static void copy_decl_rtls PROTO((tree));
64 static void save_constants PROTO((rtx *));
65 static void note_modified_parmregs PROTO((rtx, rtx));
66 static rtx copy_for_inline PROTO((rtx));
67 static void integrate_parm_decls PROTO((tree, struct inline_remap *, rtvec));
68 static void integrate_decl_tree PROTO((tree, int, struct inline_remap *));
69 static void subst_constants PROTO((rtx *, rtx, struct inline_remap *));
70 static void restore_constants PROTO((rtx *));
71 static void set_block_origin_self PROTO((tree));
72 static void set_decl_origin_self PROTO((tree));
73 static void set_block_abstract_flags PROTO((tree, int));
/* Non-static: also used outside this file.  */
75 void set_decl_abstract_flags PROTO((tree, int));
77 /* Zero if the current function (whose FUNCTION_DECL is FNDECL)
78 is safe and reasonable to integrate into other functions.
79 Nonzero means value is a warning message with a single %s
80 for the function's name. */
83 function_cannot_inline_p (fndecl)
87 tree last = tree_last (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
88 int max_insns = INTEGRATE_THRESHOLD (fndecl);
89 register int ninsns = 0;
92 /* No inlines with varargs. `grokdeclarator' gives a warning
93 message about that if `inline' is specified. This code
94 is put in to catch the volunteers. */
95 if ((last && TREE_VALUE (last) != void_type_node)
96 || current_function_varargs)
97 return "varargs function cannot be inline";
99 if (current_function_calls_alloca)
100 return "function using alloca cannot be inline";
102 if (current_function_contains_functions)
103 return "function with nested functions cannot be inline";
105 /* If it's not even close, don't even look. */
106 if (!DECL_INLINE (fndecl) && get_max_uid () > 3 * max_insns)
107 return "function too large to be inline";
110 /* Large stacks are OK now that inlined functions can share them. */
111 /* Don't inline functions with large stack usage,
112 since they can make other recursive functions burn up stack. */
113 if (!DECL_INLINE (fndecl) && get_frame_size () > 100)
114 return "function stack frame for inlining";
118 /* Don't inline functions which do not specify a function prototype and
119 have BLKmode argument or take the address of a parameter. */
120 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
122 if (TYPE_MODE (TREE_TYPE (parms)) == BLKmode)
123 TREE_ADDRESSABLE (parms) = 1;
124 if (last == NULL_TREE && TREE_ADDRESSABLE (parms))
125 return "no prototype, and parameter address used; cannot be inline";
129 /* We can't inline functions that return structures
130 the old-fashioned PCC way, copying into a static block. */
131 if (current_function_returns_pcc_struct)
132 return "inline functions not supported for this return value type";
134 /* We can't inline functions that return BLKmode structures in registers. */
135 if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == BLKmode
136 && ! aggregate_value_p (TREE_TYPE (TREE_TYPE (fndecl))))
137 return "inline functions not supported for this return value type";
139 /* We can't inline functions that return structures of varying size. */
140 if (int_size_in_bytes (TREE_TYPE (TREE_TYPE (fndecl))) < 0)
141 return "function with varying-size return value cannot be inline";
143 /* Cannot inline a function with a varying size argument or one that
144 receives a transparent union. */
145 for (parms = DECL_ARGUMENTS (fndecl); parms; parms = TREE_CHAIN (parms))
147 if (int_size_in_bytes (TREE_TYPE (parms)) < 0)
148 return "function with varying-size parameter cannot be inline";
/* NOTE(review): "unit" in the message below looks like a typo for
   "union"; confirm before changing this user-visible string.  */
149 else if (TYPE_TRANSPARENT_UNION (TREE_TYPE (parms)))
150 return "function with transparent unit parameter cannot be inline";
/* Count the real insns ('i'-class rtx), giving up as soon as the
   inlining threshold is exceeded.  */
153 if (!DECL_INLINE (fndecl) && get_max_uid () > max_insns)
155 for (ninsns = 0, insn = get_first_nonparm_insn (); insn && ninsns < max_insns;
156 insn = NEXT_INSN (insn))
158 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
162 if (ninsns >= max_insns)
163 return "function too large to be inline";
166 /* We cannot inline this function if forced_labels is non-zero. This
167 implies that a label in this function was used as an initializer.
168 Because labels can not be duplicated, all labels in the function
169 will be renamed when it is inlined. However, there is no way to find
170 and fix all variables initialized with addresses of labels in this
171 function, hence inlining is impossible. */
/* NOTE(review): the guarding condition for the return below
   (presumably a test of forced_labels, per the comment above) is
   not visible in this chunk -- confirm against the full source.  */
174 return "function with label addresses used in initializers cannot inline";
176 /* We cannot inline a nested function that jumps to a nonlocal label. */
177 if (current_function_has_nonlocal_goto)
178 return "function with nonlocal goto cannot be inline";
183 /* Variables used within save_for_inline. */
185 /* Mapping from old pseudo-register to new pseudo-registers.
186 The first element of this map is reg_map[FIRST_PSEUDO_REGISTER].
187 It is allocated in `save_for_inline' and `expand_inline_function',
188 and deallocated on exit from each of those routines. */
/* NOTE(review): the `reg_map' declaration described above is not
   visible in this chunk.  */
191 /* Mapping from old code-labels to new code-labels.
192 The first element of this map is label_map[min_labelno].
193 It is allocated in `save_for_inline' and `expand_inline_function',
194 and deallocated on exit from each of those routines. */
195 static rtx *label_map;
197 /* Mapping from old insn uid's to copied insns.
198 It is allocated in `save_for_inline' and `expand_inline_function',
199 and deallocated on exit from each of those routines. */
200 static rtx *insn_map;
202 /* Map pseudo reg number into the PARM_DECL for the parm living in the reg.
203 Zero for a reg that isn't a parm's home.
204 Only reg numbers less than max_parm_reg are mapped here. */
205 static tree *parmdecl_map;
207 /* Keep track of first pseudo-register beyond those that are parms. */
208 static int max_parm_reg;
210 /* When an insn is being copied by copy_for_inline,
211 this is nonzero if we have copied an ASM_OPERANDS.
212 In that case, it is the original input-operand vector. */
213 static rtvec orig_asm_operands_vector;
215 /* When an insn is being copied by copy_for_inline,
216 this is nonzero if we have copied an ASM_OPERANDS.
217 In that case, it is the copied input-operand vector. */
218 static rtvec copy_asm_operands_vector;
220 /* Likewise, this is the copied constraints vector. */
221 static rtvec copy_asm_constraints_vector;
223 /* In save_for_inline, nonzero if past the parm-initialization insns. */
224 static int in_nonparm_insns;
226 /* Subroutine for `save_for_inline{copying,nocopy}'. Performs initialization
227 needed to save FNDECL's insns and info for future inline expansion. */
/* MIN_LABELNO and MAX_LABELNO bound the code-label numbers used by
   FNDECL; MAX_REG is the pseudo-register bound.  COPY nonzero means
   copy each parameter's MEM DECL_RTL so that later address changes
   cannot corrupt the saved arg_vector.  Returns the inline-header
   rtx recording all the information listed in the comment above the
   gen_inline_header_rtx call below.  */
230 initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, copy)
237 int function_flags, i;
241 /* Compute the values of any flags we must restore when inlining this. */
244 = (current_function_calls_alloca * FUNCTION_FLAGS_CALLS_ALLOCA
245 + current_function_calls_setjmp * FUNCTION_FLAGS_CALLS_SETJMP
246 + current_function_calls_longjmp * FUNCTION_FLAGS_CALLS_LONGJMP
247 + current_function_returns_struct * FUNCTION_FLAGS_RETURNS_STRUCT
248 + current_function_returns_pcc_struct * FUNCTION_FLAGS_RETURNS_PCC_STRUCT
249 + current_function_needs_context * FUNCTION_FLAGS_NEEDS_CONTEXT
250 + current_function_has_nonlocal_label * FUNCTION_FLAGS_HAS_NONLOCAL_LABEL
251 + current_function_returns_pointer * FUNCTION_FLAGS_RETURNS_POINTER
252 + current_function_uses_const_pool * FUNCTION_FLAGS_USES_CONST_POOL
253 + current_function_uses_pic_offset_table * FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE);
255 /* Clear out PARMDECL_MAP. It was allocated in the caller's frame. */
256 bzero ((char *) parmdecl_map, max_parm_reg * sizeof (tree));
257 arg_vector = rtvec_alloc (list_length (DECL_ARGUMENTS (fndecl)));
/* Walk the parameters, saving each one's rtl in ARG_VECTOR and
   recording in PARMDECL_MAP which PARM_DECL lives in which pseudo.  */
259 for (parms = DECL_ARGUMENTS (fndecl), i = 0;
261 parms = TREE_CHAIN (parms), i++)
263 rtx p = DECL_RTL (parms);
265 if (GET_CODE (p) == MEM && copy)
267 /* Copy the rtl so that modifications of the addresses
268 later in compilation won't affect this arg_vector.
269 Virtual register instantiation can screw the address
271 rtx new = copy_rtx (p);
273 /* Don't leave the old copy anywhere in this decl. */
274 if (DECL_RTL (parms) == DECL_INCOMING_RTL (parms)
275 || (GET_CODE (DECL_RTL (parms)) == MEM
276 && GET_CODE (DECL_INCOMING_RTL (parms)) == MEM
277 && (XEXP (DECL_RTL (parms), 0)
278 == XEXP (DECL_INCOMING_RTL (parms), 0))))
279 DECL_INCOMING_RTL (parms) = new;
280 DECL_RTL (parms) = new;
283 RTVEC_ELT (arg_vector, i) = p;
/* A CONCAT is a complex value; map both its real and imaginary
   parts back to this PARM_DECL.  */
285 if (GET_CODE (p) == REG)
286 parmdecl_map[REGNO (p)] = parms;
287 else if (GET_CODE (p) == CONCAT)
289 rtx preal = gen_realpart (GET_MODE (XEXP (p, 0)), p);
290 rtx pimag = gen_imagpart (GET_MODE (preal), p);
292 if (GET_CODE (preal) == REG)
293 parmdecl_map[REGNO (preal)] = parms;
294 if (GET_CODE (pimag) == REG)
295 parmdecl_map[REGNO (pimag)] = parms;
298 /* This flag is cleared later
299 if the function ever modifies the value of the parm. */
300 TREE_READONLY (parms) = 1;
303 /* Assume we start out in the insns that set up the parameters. */
304 in_nonparm_insns = 0;
306 /* The list of DECL_SAVED_INSNS, starts off with a header which
307 contains the following information:
309 the first insn of the function (not including the insns that copy
310 parameters into registers).
311 the first parameter insn of the function,
312 the first label used by that function,
313 the last label used by that function,
314 the highest register number used for parameters,
315 the total number of registers used,
316 the size of the incoming stack area for parameters,
317 the number of bytes popped on return,
319 some flags that are used to restore compiler globals,
320 the value of current_function_outgoing_args_size,
321 the original argument vector,
322 and the original DECL_INITIAL. */
324 return gen_inline_header_rtx (NULL_RTX, NULL_RTX, min_labelno, max_labelno,
325 max_parm_reg, max_reg,
326 current_function_args_size,
327 current_function_pops_args,
328 stack_slot_list, forced_labels, function_flags,
329 current_function_outgoing_args_size,
330 arg_vector, (rtx) DECL_INITIAL (fndecl));
333 /* Subroutine for `save_for_inline{copying,nocopy}'. Finishes up the
334 things that must be done to make FNDECL expandable as an inline function.
335 HEAD contains the chain of insns to which FNDECL will expand. */
338 finish_inline (fndecl, head)
/* Link the saved insn chain onto HEAD, then record the chain and the
   current frame size on FNDECL for later inline expansion.  */
342 NEXT_INSN (head) = get_first_nonparm_insn ();
343 FIRST_PARM_INSN (head) = get_insns ();
344 DECL_SAVED_INSNS (fndecl) = head;
345 DECL_FRAME_SIZE (fndecl) = get_frame_size ();
348 /* Adjust the BLOCK_END_NOTE pointers in a given copied DECL tree so that
349 they all point to the new (copied) rtxs. */
352 adjust_copied_decl_tree (block)
355 register tree subblock;
356 register rtx original_end;
358 original_end = BLOCK_END_NOTE (block);
/* save_for_inline_copying stashed the copied note in the
   NOTE_SOURCE_FILE field of the original; retrieve it here and
   clear that field of the original note.  */
361 BLOCK_END_NOTE (block) = (rtx) NOTE_SOURCE_FILE (original_end);
362 NOTE_SOURCE_FILE (original_end) = 0;
365 /* Process all subblocks (recursively). */
366 for (subblock = BLOCK_SUBBLOCKS (block);
368 subblock = TREE_CHAIN (subblock))
369 adjust_copied_decl_tree (subblock);
372 /* Make the insns and PARM_DECLs of the current function permanent
373 and record other information in DECL_SAVED_INSNS to allow inlining
374 of this function in subsequent calls.
376 This function is called when we are going to immediately compile
377 the insns for FNDECL. The insns in maybepermanent_obstack cannot be
378 modified by the compilation process, so we copy all of them to
379 new storage and consider the new insns to be the insn chain to be
380 compiled. Our caller (rest_of_compilation) saves the original
381 DECL_INITIAL and DECL_ARGUMENTS; here we copy them. */
383 /* ??? The nonlocal_label list should be adjusted also. However, since
384 a function that contains a nested function never gets inlined currently,
385 the nonlocal_label list will always be empty, so we don't worry about
389 save_for_inline_copying (fndecl)
392 rtx first_insn, last_insn, insn;
394 int max_labelno, min_labelno, i, len;
397 rtx first_nonparm_insn;
399 /* Make and emit a return-label if we have not already done so.
400 Do this before recording the bounds on label numbers. */
402 if (return_label == 0)
404 return_label = gen_label_rtx ();
405 emit_label (return_label);
408 /* Get some bounds on the labels and registers used. */
410 max_labelno = max_label_num ();
411 min_labelno = get_first_label_num ();
412 max_reg = max_reg_num ();
414 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
415 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
416 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
417 for the parms, prior to elimination of virtual registers.
418 These values are needed for substituting parms properly. */
420 max_parm_reg = max_parm_reg_num ();
421 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
423 head = initialize_for_inline (fndecl, min_labelno, max_labelno, max_reg, 1);
425 if (current_function_uses_const_pool)
427 /* Replace any constant pool references with the actual constant. We
428 will put the constants back in the copy made below. */
429 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
430 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
432 save_constants (&PATTERN (insn));
433 if (REG_NOTES (insn))
434 save_constants (®_NOTES (insn));
437 /* Clear out the constant pool so that we can recreate it with the
438 copied constants below. */
439 init_const_rtx_hash_table ();
440 clear_const_double_mem ();
443 max_uid = INSN_UID (head);
445 /* We have now allocated all that needs to be allocated permanently
446 on the rtx obstack. Set our high-water mark, so that we
447 can free the rest of this when the time comes. */
451 /* Copy the chain insns of this function.
452 Install the copied chain as the insns of this function,
453 for continued compilation;
454 the original chain is recorded as the DECL_SAVED_INSNS
455 for inlining future calls. */
457 /* If there are insns that copy parms from the stack into pseudo registers,
458 those insns are not copied. `expand_inline_function' must
459 emit the correct code to handle such things. */
/* Begin the copied chain with a NOTE mirroring the first insn.  */
462 if (GET_CODE (insn) != NOTE)
464 first_insn = rtx_alloc (NOTE);
465 NOTE_SOURCE_FILE (first_insn) = NOTE_SOURCE_FILE (insn);
466 NOTE_LINE_NUMBER (first_insn) = NOTE_LINE_NUMBER (insn);
467 INSN_UID (first_insn) = INSN_UID (insn);
468 PREV_INSN (first_insn) = NULL;
469 NEXT_INSN (first_insn) = NULL;
470 last_insn = first_insn;
472 /* Each pseudo-reg in the old insn chain must have a unique rtx in the copy.
473 Make these new rtx's now, and install them in regno_reg_rtx, so they
474 will be the official pseudo-reg rtx's for the rest of compilation. */
476 reg_map = (rtx *) alloca ((max_reg + 1) * sizeof (rtx));
478 len = sizeof (struct rtx_def) + (GET_RTX_LENGTH (REG) - 1) * sizeof (rtunion);
479 for (i = max_reg - 1; i > LAST_VIRTUAL_REGISTER; i--)
480 reg_map[i] = (rtx)obstack_copy (function_maybepermanent_obstack,
481 regno_reg_rtx[i], len);
483 bcopy ((char *) (reg_map + LAST_VIRTUAL_REGISTER + 1),
484 (char *) (regno_reg_rtx + LAST_VIRTUAL_REGISTER + 1),
485 (max_reg - (LAST_VIRTUAL_REGISTER + 1)) * sizeof (rtx));
487 /* Likewise each label rtx must have a unique rtx as its copy. */
489 label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
/* Bias label_map so it can be indexed directly by label number.  */
490 label_map -= min_labelno;
492 for (i = min_labelno; i < max_labelno; i++)
493 label_map[i] = gen_label_rtx ();
495 /* Record the mapping of old insns to copied insns. */
497 insn_map = (rtx *) alloca (max_uid * sizeof (rtx));
498 bzero ((char *) insn_map, max_uid * sizeof (rtx));
500 /* Get the insn which signals the end of parameter setup code. */
501 first_nonparm_insn = get_first_nonparm_insn ();
503 /* Copy any entries in regno_reg_rtx or DECL_RTLs that reference MEM
504 (the former occurs when a variable has its address taken)
505 since these may be shared and can be changed by virtual
506 register instantiation. DECL_RTL values for our arguments
507 have already been copied by initialize_for_inline. */
508 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_reg; i++)
509 if (GET_CODE (regno_reg_rtx[i]) == MEM)
510 XEXP (regno_reg_rtx[i], 0)
511 = copy_for_inline (XEXP (regno_reg_rtx[i], 0));
513 /* Copy the tree of subblocks of the function, and the decls in them.
514 We will use the copy for compiling this function, then restore the original
515 subblocks and decls for use when inlining this function.
517 Several parts of the compiler modify BLOCK trees. In particular,
518 instantiate_virtual_regs will instantiate any virtual regs
519 mentioned in the DECL_RTLs of the decls, and loop
520 unrolling will replicate any BLOCK trees inside an unrolled loop.
522 The modified subblocks or DECL_RTLs would be incorrect for the original rtl
523 which we will use for inlining. The rtl might even contain pseudoregs
524 whose space has been freed. */
526 DECL_INITIAL (fndecl) = copy_decl_tree (DECL_INITIAL (fndecl));
527 DECL_ARGUMENTS (fndecl) = copy_decl_list (DECL_ARGUMENTS (fndecl));
529 /* Now copy each DECL_RTL which is a MEM,
530 so it is safe to modify their addresses. */
531 copy_decl_rtls (DECL_INITIAL (fndecl));
533 /* The fndecl node acts as its own progenitor, so mark it as such. */
534 DECL_ABSTRACT_ORIGIN (fndecl) = fndecl;
536 /* Now copy the chain of insns. Do this twice. The first copy the insn
537 itself and its body. The second time copy of REG_NOTES. This is because
538 a REG_NOTE may have a forward pointer to another insn. */
540 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
542 orig_asm_operands_vector = 0;
544 if (insn == first_nonparm_insn)
545 in_nonparm_insns = 1;
/* Copy one insn according to its code.  */
547 switch (GET_CODE (insn))
550 /* No need to keep these. */
551 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_DELETED)
554 copy = rtx_alloc (NOTE);
555 NOTE_LINE_NUMBER (copy) = NOTE_LINE_NUMBER (insn);
556 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END)
557 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
/* For a BLOCK_END note, stash the copy in the original's
   NOTE_SOURCE_FILE field so adjust_copied_decl_tree can
   find it later.  */
560 NOTE_SOURCE_FILE (insn) = (char *) copy;
561 NOTE_SOURCE_FILE (copy) = 0;
568 copy = rtx_alloc (GET_CODE (insn));
570 if (GET_CODE (insn) == CALL_INSN)
571 CALL_INSN_FUNCTION_USAGE (copy) =
572 copy_for_inline (CALL_INSN_FUNCTION_USAGE (insn));
574 PATTERN (copy) = copy_for_inline (PATTERN (insn));
575 INSN_CODE (copy) = -1;
576 LOG_LINKS (copy) = NULL_RTX;
577 RTX_INTEGRATED_P (copy) = RTX_INTEGRATED_P (insn);
/* Labels were pre-copied into label_map above.  */
581 copy = label_map[CODE_LABEL_NUMBER (insn)];
582 LABEL_NAME (copy) = LABEL_NAME (insn);
586 copy = rtx_alloc (BARRIER);
/* Link the copy onto the new chain and record the mapping.  */
592 INSN_UID (copy) = INSN_UID (insn);
593 insn_map[INSN_UID (insn)] = copy;
594 NEXT_INSN (last_insn) = copy;
595 PREV_INSN (copy) = last_insn;
599 adjust_copied_decl_tree (DECL_INITIAL (fndecl));
601 /* Now copy the REG_NOTES. */
602 for (insn = NEXT_INSN (get_insns ()); insn; insn = NEXT_INSN (insn))
603 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
604 && insn_map[INSN_UID(insn)])
605 REG_NOTES (insn_map[INSN_UID (insn)])
606 = copy_for_inline (REG_NOTES (insn));
608 NEXT_INSN (last_insn) = NULL;
610 finish_inline (fndecl, head);
/* Make the copied chain the one compiled from here on.  */
612 set_new_first_and_last_insn (first_insn, last_insn);
615 /* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
616 For example, this can copy a list made of TREE_LIST nodes. While copying,
617 for each node copied which doesn't already have is DECL_ABSTRACT_ORIGIN
618 set to some non-zero value, set the DECL_ABSTRACT_ORIGIN of the copy to
619 point to the corresponding (abstract) original node. */
622 copy_decl_list (list)
626 register tree prev, next;
/* Copy the head node first, then each successor in turn.  */
631 head = prev = copy_node (list);
632 if (DECL_ABSTRACT_ORIGIN (head) == NULL_TREE)
633 DECL_ABSTRACT_ORIGIN (head) = list;
634 next = TREE_CHAIN (list);
639 copy = copy_node (next);
640 if (DECL_ABSTRACT_ORIGIN (copy) == NULL_TREE)
641 DECL_ABSTRACT_ORIGIN (copy) = next;
642 TREE_CHAIN (prev) = copy;
644 next = TREE_CHAIN (next);
649 /* Make a copy of the entire tree of blocks BLOCK, and return it. */
652 copy_decl_tree (block)
655 tree t, vars, subblocks;
657 vars = copy_decl_list (BLOCK_VARS (block));
660 /* Process all subblocks (recursively); the copies are accumulated
661 in reverse and put back in order with nreverse below. */
661 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
663 tree copy = copy_decl_tree (t);
664 TREE_CHAIN (copy) = subblocks;
668 t = copy_node (block);
669 BLOCK_VARS (t) = vars;
670 BLOCK_SUBBLOCKS (t) = nreverse (subblocks);
671 /* If the BLOCK being cloned is already marked as having been instantiated
672 from something else, then leave that `origin' marking alone. Otherwise,
673 mark the clone as having originated from the BLOCK we are cloning. */
674 if (BLOCK_ABSTRACT_ORIGIN (t) == NULL_TREE)
675 BLOCK_ABSTRACT_ORIGIN (t) = block;
679 /* Copy DECL_RTLs in all decls in the given BLOCK node. */
682 copy_decl_rtls (block)
/* Only MEM rtls need copying; their addresses may later be modified
   (e.g. by virtual register instantiation).  */
687 for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
688 if (DECL_RTL (t) && GET_CODE (DECL_RTL (t)) == MEM)
689 DECL_RTL (t) = copy_for_inline (DECL_RTL (t));
691 /* Process all subblocks (recursively). */
692 for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
696 /* Make the insns and PARM_DECLs of the current function permanent
697 and record other information in DECL_SAVED_INSNS to allow inlining
698 of this function in subsequent calls.
700 This routine need not copy any insns because we are not going
701 to immediately compile the insns in the insn chain. There
702 are two cases when we would compile the insns for FNDECL:
703 (1) when FNDECL is expanded inline, and (2) when FNDECL needs to
704 be output at the end of other compilation, because somebody took
705 its address. In the first case, the insns of FNDECL are copied
706 as it is expanded inline, so FNDECL's saved insns are not
707 modified. In the second case, FNDECL is used for the last time,
708 so modifying the rtl is not a problem.
710 We don't have to worry about FNDECL being inline expanded by
711 other functions which are written at the end of compilation
712 because flag_no_inline is turned on when we begin writing
713 functions at the end of compilation. */
716 save_for_inline_nocopy (fndecl)
721 rtx first_nonparm_insn;
723 /* Set up PARMDECL_MAP which maps pseudo-reg number to its PARM_DECL.
724 Later we set TREE_READONLY to 0 if the parm is modified inside the fn.
725 Also set up ARG_VECTOR, which holds the unmodified DECL_RTX values
726 for the parms, prior to elimination of virtual registers.
727 These values are needed for substituting parms properly. */
729 max_parm_reg = max_parm_reg_num ();
730 parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
732 /* Make and emit a return-label if we have not already done so. */
734 if (return_label == 0)
736 return_label = gen_label_rtx ();
737 emit_label (return_label);
740 head = initialize_for_inline (fndecl, get_first_label_num (),
741 max_label_num (), max_reg_num (), 0);
743 /* If there are insns that copy parms from the stack into pseudo registers,
744 those insns are not copied. `expand_inline_function' must
745 emit the correct code to handle such things. */
748 if (GET_CODE (insn) != NOTE)
751 /* Get the insn which signals the end of parameter setup code. */
752 first_nonparm_insn = get_first_nonparm_insn ();
754 /* Now just scan the chain of insns to see what happens to our
755 PARM_DECLs. If a PARM_DECL is used but never modified, we
756 can substitute its rtl directly when expanding inline (and
757 perform constant folding when its incoming value is constant).
758 Otherwise, we have to copy its value into a new register and track
759 the new register's life. */
761 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
763 if (insn == first_nonparm_insn)
764 in_nonparm_insns = 1;
766 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
768 if (current_function_uses_const_pool)
770 /* Replace any constant pool references with the actual constant.
771 We will put the constant back if we need to write the
772 function out after all. */
773 save_constants (&PATTERN (insn));
774 if (REG_NOTES (insn))
775 save_constants (®_NOTES (insn));
778 /* Record what interesting things happen to our parameters.
779 note_modified_parmregs clears TREE_READONLY on modified parms. */
779 note_stores (PATTERN (insn), note_modified_parmregs);
783 /* We have now allocated all that needs to be allocated permanently
784 on the rtx obstack. Set our high-water mark, so that we
785 can free the rest of this when the time comes. */
789 finish_inline (fndecl, head);
792 /* Given PX, a pointer into an insn, search for references to the constant
793 pool. Replace each with a CONST that has the mode of the original
794 constant, contains the constant, and has RTX_INTEGRATED_P set.
795 Similarly, constant pool addresses not enclosed in a MEM are replaced
796 with an ADDRESS rtx which also gives the constant, mode, and has
797 RTX_INTEGRATED_P set. */
809 /* If this is a CONST_DOUBLE, don't try to fix things up in
810 CONST_DOUBLE_MEM, because this is an infinite recursion. */
811 if (GET_CODE (x) == CONST_DOUBLE)
/* A MEM whose address is a constant-pool SYMBOL_REF: replace the
   pool reference with the pool entry's actual constant.  */
813 else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
814 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
816 enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
817 rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
818 RTX_INTEGRATED_P (new) = 1;
820 /* If the MEM was in a different mode than the constant (perhaps we
821 were only looking at the low-order part), surround it with a
822 SUBREG so we can save both modes. */
824 if (GET_MODE (x) != const_mode)
826 new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
827 RTX_INTEGRATED_P (new) = 1;
/* The replaced constant may itself contain pool references.  */
831 save_constants (&XEXP (*px, 0));
833 else if (GET_CODE (x) == SYMBOL_REF
834 && CONSTANT_POOL_ADDRESS_P (x))
836 *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
837 save_constants (&XEXP (*px, 0));
838 RTX_INTEGRATED_P (*px) = 1;
/* Otherwise recurse over X's operands, guided by its rtx format.  */
843 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
844 int len = GET_RTX_LENGTH (GET_CODE (x));
846 for (i = len-1; i >= 0; i--)
851 for (j = 0; j < XVECLEN (x, i); j++)
852 save_constants (&XVECEXP (x, i, j));
856 if (XEXP (x, i) == 0)
860 /* Hack tail-recursion here. */
864 save_constants (&XEXP (x, i));
871 /* Note whether a parameter is modified or not. */
/* Called (via note_stores) for each store in an insn.  If REG is a
   pseudo that holds a parameter and we are past the parm-setup insns,
   clear the parm's TREE_READONLY so the inliner will not substitute
   its rtl directly.  */
874 note_modified_parmregs (reg, x)
878 if (GET_CODE (reg) == REG && in_nonparm_insns
879 && REGNO (reg) < max_parm_reg
880 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
881 && parmdecl_map[REGNO (reg)] != 0)
882 TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
885 /* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
886 according to `reg_map' and `label_map'. The original rtl insns
887 will be saved for inlining; this is used to make a copy
888 which is used to finish compiling the inline function itself.
890 If we find a "saved" constant pool entry, one which was replaced with
891 the value of the constant, convert it back to a constant pool entry.
892 Since the pool wasn't touched, this should simply restore the old
895 All other kinds of rtx are copied except those that can never be
896 changed during compilation. */
899 copy_for_inline (orig)
902 register rtx x = orig;
904 register enum rtx_code code;
905 register char *format_ptr;
912 /* These types may be freely shared. */
924 /* We have to make a new CONST_DOUBLE to ensure that we account for
925 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
926 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
930 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
931 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
934 return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
938 /* Get constant pool entry for constant in the pool. */
939 if (RTX_INTEGRATED_P (x))
940 return validize_mem (force_const_mem (GET_MODE (x),
941 copy_for_inline (XEXP (x, 0))));
945 /* Get constant pool entry, but access in different mode. */
946 if (RTX_INTEGRATED_P (x))
949 = force_const_mem (GET_MODE (SUBREG_REG (x)),
950 copy_for_inline (XEXP (SUBREG_REG (x), 0)));
952 PUT_MODE (new, GET_MODE (x));
953 return validize_mem (new);
958 /* If not special for constant pool error. Else get constant pool
960 if (! RTX_INTEGRATED_P (x))
963 return XEXP (force_const_mem (GET_MODE (x),
964 copy_for_inline (XEXP (x, 0))), 0);
967 /* If a single asm insn contains multiple output operands
968 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
969 We must make sure that the copied insn continues to share it. */
970 if (orig_asm_operands_vector == XVEC (orig, 3))
972 x = rtx_alloc (ASM_OPERANDS);
973 x->volatil = orig->volatil;
974 XSTR (x, 0) = XSTR (orig, 0);
975 XSTR (x, 1) = XSTR (orig, 1);
976 XINT (x, 2) = XINT (orig, 2);
977 XVEC (x, 3) = copy_asm_operands_vector;
978 XVEC (x, 4) = copy_asm_constraints_vector;
979 XSTR (x, 5) = XSTR (orig, 5);
980 XINT (x, 6) = XINT (orig, 6);
986 /* A MEM is usually allowed to be shared if its address is constant
987 or is a constant plus one of the special registers.
989 We do not allow sharing of addresses that are either a special
990 register or the sum of a constant and a special register because
991 it is possible for unshare_all_rtl to copy the address, into memory
992 that won't be saved. Although the MEM can safely be shared, and
993 won't be copied there, the address itself cannot be shared, and may
996 There are also two exceptions with constants: The first is if the
997 constant is a LABEL_REF or the sum of the LABEL_REF
998 and an integer. This case can happen if we have an inline
999 function that supplies a constant operand to the call of another
1000 inline function that uses it in a switch statement. In this case,
1001 we will be replacing the LABEL_REF, so we have to replace this MEM
1004 The second case is if we have a (const (plus (address ..) ...)).
1005 In that case we need to put back the address of the constant pool
1008 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
1009 && GET_CODE (XEXP (x, 0)) != LABEL_REF
1010 && ! (GET_CODE (XEXP (x, 0)) == CONST
1011 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
1012 && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1014 || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
1020 /* If this is a non-local label, just make a new LABEL_REF.
1021 Otherwise, use the new label as well. */
1022 x = gen_rtx (LABEL_REF, GET_MODE (orig),
1023 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
1024 : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
1025 LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
1026 LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
1030 if (REGNO (x) > LAST_VIRTUAL_REGISTER)
1031 return reg_map [REGNO (x)];
1036 /* If a parm that gets modified lives in a pseudo-reg,
1037 clear its TREE_READONLY to prevent certain optimizations. */
1039 rtx dest = SET_DEST (x);
1041 while (GET_CODE (dest) == STRICT_LOW_PART
1042 || GET_CODE (dest) == ZERO_EXTRACT
1043 || GET_CODE (dest) == SUBREG)
1044 dest = XEXP (dest, 0);
1046 if (GET_CODE (dest) == REG
1047 && REGNO (dest) < max_parm_reg
1048 && REGNO (dest) >= FIRST_PSEUDO_REGISTER
1049 && parmdecl_map[REGNO (dest)] != 0
1050 /* The insn to load an arg pseudo from a stack slot
1051 does not count as modifying it. */
1052 && in_nonparm_insns)
1053 TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
1057 #if 0 /* This is a good idea, but here is the wrong place for it. */
1058 /* Arrange that CONST_INTs always appear as the second operand
1059 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
1060 always appear as the first. */
1062 if (GET_CODE (XEXP (x, 0)) == CONST_INT
1063 || (XEXP (x, 1) == frame_pointer_rtx
1064 || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1065 && XEXP (x, 1) == arg_pointer_rtx)))
1067 rtx t = XEXP (x, 0);
1068 XEXP (x, 0) = XEXP (x, 1);
1075 /* Replace this rtx with a copy of itself. */
1077 x = rtx_alloc (code);
1078 bcopy ((char *) orig, (char *) x,
1079 (sizeof (*x) - sizeof (x->fld)
1080 + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));
1082 /* Now scan the subexpressions recursively.
1083 We can store any replaced subexpressions directly into X
1084 since we know X is not shared! Any vectors in X
1085 must be copied if X was copied. */
1087 format_ptr = GET_RTX_FORMAT (code);
1089 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1091 switch (*format_ptr++)
1094 XEXP (x, i) = copy_for_inline (XEXP (x, i));
1098 /* Change any references to old-insns to point to the
1099 corresponding copied insns. */
1100 XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
1104 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
1108 XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
1109 for (j = 0; j < XVECLEN (x, i); j++)
1111 = copy_for_inline (XVECEXP (x, i, j));
1117 if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
1119 orig_asm_operands_vector = XVEC (orig, 3);
1120 copy_asm_operands_vector = XVEC (x, 3);
1121 copy_asm_constraints_vector = XVEC (x, 4);
1127 /* Unfortunately, we need a global copy of const_equiv map for communication
1128 with a function called from note_stores. Be *very* careful that this
1129 is used properly in the presence of recursion. */
1131 rtx *global_const_equiv_map;
1132 int global_const_equiv_map_size;
/* Nonzero if X is (plus (reg VIRTUAL) (const_int N)) -- a constant offset
   from one of the virtual registers.  Such values are safe to record in
   const_equiv_map as constant equivalences for a pseudo (see the parameter
   setup in expand_inline_function below, which uses this as a guard).  */
1134 #define FIXED_BASE_PLUS_P(X) \
1135 (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
1136 && GET_CODE (XEXP (X, 0)) == REG \
1137 && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
1138 && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)
1140 /* Integrate the procedure defined by FNDECL. Note that this function
1141 may wind up calling itself. Since the static variables are not
1142 reentrant, we do not assign them until after the possibility
1143 of recursion is eliminated.
1145 If IGNORE is nonzero, do not produce a value.
1146 Otherwise store the value in TARGET if it is nonzero and that is convenient.
1149 (rtx)-1 if we could not substitute the function
1150 0 if we substituted it and it does not produce a value
1151 else an rtx for where the value is stored. */
1154 expand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr)
1159 rtx structure_value_addr;
1161 tree formal, actual, block;
1162 rtx header = DECL_SAVED_INSNS (fndecl);
1163 rtx insns = FIRST_FUNCTION_INSN (header);
1164 rtx parm_insns = FIRST_PARM_INSN (header);
1170 int min_labelno = FIRST_LABELNO (header);
1171 int max_labelno = LAST_LABELNO (header);
1173 rtx local_return_label = 0;
1177 struct inline_remap *map;
1179 rtvec arg_vector = ORIGINAL_ARG_VECTOR (header);
1180 rtx static_chain_value = 0;
/* HEADER is the insn chain saved for FNDECL (DECL_SAVED_INSNS); INSNS and
   PARM_INSNS point into it.  MAP will carry every register, label and insn
   remapping used while copying the saved body into the current function.  */
1182 /* Allow for equivalences of the pseudos we make for virtual fp and ap. */
1183 max_regno = MAX_REGNUM (header) + 3;
1184 if (max_regno < FIRST_PSEUDO_REGISTER)
1187 nargs = list_length (DECL_ARGUMENTS (fndecl));
1189 /* Check that the parms type match and that sufficient arguments were
1190 passed. Since the appropriate conversions or default promotions have
1191 already been applied, the machine modes should match exactly. */
1193 for (formal = DECL_ARGUMENTS (fndecl),
1196 formal = TREE_CHAIN (formal),
1197 actual = TREE_CHAIN (actual))
1200 enum machine_mode mode;
1203 return (rtx) (HOST_WIDE_INT) -1;
1205 arg = TREE_VALUE (actual);
1206 mode= TYPE_MODE (DECL_ARG_TYPE (formal));
1208 if (mode != TYPE_MODE (TREE_TYPE (arg))
1209 /* If they are block mode, the types should match exactly.
1210 They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE,
1211 which could happen if the parameter has incomplete type. */
1212 || (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)))
1213 return (rtx) (HOST_WIDE_INT) -1;
1216 /* Extra arguments are valid, but will be ignored below, so we must
1217 evaluate them here for side-effects. */
1218 for (; actual; actual = TREE_CHAIN (actual))
1219 expand_expr (TREE_VALUE (actual), const0_rtx,
1220 TYPE_MODE (TREE_TYPE (TREE_VALUE (actual))), 0);
1222 /* Make a binding contour to keep inline cleanups called at
1223 outer function-scope level from looking like they are shadowing
1224 parameter declarations. */
1227 /* Make a fresh binding contour that we can easily remove. */
1229 expand_start_bindings (0);
1231 /* Expand the function arguments. Do this first so that any
1232 new registers get created before we allocate the maps. */
1234 arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
1235 arg_trees = (tree *) alloca (nargs * sizeof (tree));
1237 for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
1239 formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual), i++)
1241 /* Actual parameter, converted to the type of the argument within the
1243 tree arg = convert (TREE_TYPE (formal), TREE_VALUE (actual));
1244 /* Mode of the variable used within the function. */
1245 enum machine_mode mode = TYPE_MODE (TREE_TYPE (formal));
1249 loc = RTVEC_ELT (arg_vector, i);
1251 /* If this is an object passed by invisible reference, we copy the
1252 object into a stack slot and save its address. If this will go
1253 into memory, we do nothing now. Otherwise, we just expand the
1255 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1256 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1259 = assign_stack_temp (TYPE_MODE (TREE_TYPE (arg)),
1260 int_size_in_bytes (TREE_TYPE (arg)), 1);
1261 MEM_IN_STRUCT_P (stack_slot) = AGGREGATE_TYPE_P (TREE_TYPE (arg));
1263 store_expr (arg, stack_slot, 0);
1265 arg_vals[i] = XEXP (stack_slot, 0);
1268 else if (GET_CODE (loc) != MEM)
1270 if (GET_MODE (loc) != TYPE_MODE (TREE_TYPE (arg)))
1271 /* The mode of LOC and ARG can differ if LOC was a variable
1272 that had its mode promoted via PROMOTED_MODE. */
1273 arg_vals[i] = convert_modes (GET_MODE (loc),
1274 TYPE_MODE (TREE_TYPE (arg)),
1275 expand_expr (arg, NULL_RTX, mode,
1277 TREE_UNSIGNED (TREE_TYPE (formal)));
1279 arg_vals[i] = expand_expr (arg, NULL_RTX, mode, EXPAND_SUM);
1284 if (arg_vals[i] != 0
1285 && (! TREE_READONLY (formal)
1286 /* If the parameter is not read-only, copy our argument through
1287 a register. Also, we cannot use ARG_VALS[I] if it overlaps
1288 TARGET in any way. In the inline function, they will likely
1289 be two different pseudos, and `safe_from_p' will make all
1290 sorts of smart assumptions about their not conflicting.
1291 But if ARG_VALS[I] overlaps TARGET, these assumptions are
1292 wrong, so put ARG_VALS[I] into a fresh register.
1293 Don't worry about invisible references, since their stack
1294 temps will never overlap the target. */
1297 && (GET_CODE (arg_vals[i]) == REG
1298 || GET_CODE (arg_vals[i]) == SUBREG
1299 || GET_CODE (arg_vals[i]) == MEM)
1300 && reg_overlap_mentioned_p (arg_vals[i], target))
1301 /* ??? We must always copy a SUBREG into a REG, because it might
1302 get substituted into an address, and not all ports correctly
1303 handle SUBREGs in addresses. */
1304 || (GET_CODE (arg_vals[i]) == SUBREG)))
1305 arg_vals[i] = copy_to_mode_reg (GET_MODE (loc), arg_vals[i]);
1308 /* Allocate the structures we use to remap things. */
1310 map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
1311 map->fndecl = fndecl;
1313 map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
1314 bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
1316 map->label_map = (rtx *)alloca ((max_labelno - min_labelno) * sizeof (rtx));
1317 map->label_map -= min_labelno;
/* Biasing label_map by -min_labelno lets it be indexed directly by
   CODE_LABEL_NUMBER values in [min_labelno, max_labelno).  */
1319 map->insn_map = (rtx *) alloca (INSN_UID (header) * sizeof (rtx));
1320 bzero ((char *) map->insn_map, INSN_UID (header) * sizeof (rtx));
1321 map->min_insnno = 0;
1322 map->max_insnno = INSN_UID (header);
1324 map->integrating = 1;
1326 /* const_equiv_map maps pseudos in our routine to constants, so it needs to
1327 be large enough for all our pseudos. This is the number we are currently
1328 using plus the number in the called routine, plus 15 for each arg,
1329 five to compute the virtual frame pointer, and five for the return value.
1330 This should be enough for most cases. We do not reference entries
1331 outside the range of the map.
1333 ??? These numbers are quite arbitrary and were obtained by
1334 experimentation. At some point, we should try to allocate the
1335 table after all the parameters are set up so we can more accurately
1336 estimate the number of pseudos we will need. */
1338 map->const_equiv_map_size
1339 = max_reg_num () + (max_regno - FIRST_PSEUDO_REGISTER) + 15 * nargs + 10;
1341 map->const_equiv_map
1342 = (rtx *)alloca (map->const_equiv_map_size * sizeof (rtx));
1343 bzero ((char *) map->const_equiv_map,
1344 map->const_equiv_map_size * sizeof (rtx));
1347 = (unsigned *)alloca (map->const_equiv_map_size * sizeof (unsigned));
1348 bzero ((char *) map->const_age_map,
1349 map->const_equiv_map_size * sizeof (unsigned));
1352 /* Record the current insn in case we have to set up pointers to frame
1353 and argument memory blocks. */
1354 map->insns_at_start = get_last_insn ();
1356 /* Update the outgoing argument size to allow for those in the inlined
1358 if (OUTGOING_ARGS_SIZE (header) > current_function_outgoing_args_size)
1359 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (header);
1361 /* If the inline function needs to make PIC references, that means
1362 that this function's PIC offset table must be used. */
1363 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
1364 current_function_uses_pic_offset_table = 1;
1366 /* If this function needs a context, set it up. */
1367 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_NEEDS_CONTEXT)
1368 static_chain_value = lookup_static_chain (fndecl);
/* Emit a line-number note for the start of the inlined parameters, marked
   RTX_INTEGRATED_P so later passes know it came from an inlined body.  */
1370 if (GET_CODE (parm_insns) == NOTE
1371 && NOTE_LINE_NUMBER (parm_insns) > 0)
1373 rtx note = emit_note (NOTE_SOURCE_FILE (parm_insns),
1374 NOTE_LINE_NUMBER (parm_insns));
1376 RTX_INTEGRATED_P (note) = 1;
1379 /* Process each argument. For each, set up things so that the function's
1380 reference to the argument will refer to the argument being passed.
1381 We only replace REG with REG here. Any simplifications are done
1382 via const_equiv_map.
1384 We make two passes: In the first, we deal with parameters that will
1385 be placed into registers, since we need to ensure that the allocated
1386 register number fits in const_equiv_map. Then we store all non-register
1387 parameters into their memory location. */
1389 /* Don't try to free temp stack slots here, because we may put one of the
1390 parameters into a temp stack slot. */
1392 for (i = 0; i < nargs; i++)
1394 rtx copy = arg_vals[i];
1396 loc = RTVEC_ELT (arg_vector, i);
1398 /* There are three cases, each handled separately. */
1399 if (GET_CODE (loc) == MEM && GET_CODE (XEXP (loc, 0)) == REG
1400 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER)
1402 /* This must be an object passed by invisible reference (it could
1403 also be a variable-sized object, but we forbid inlining functions
1404 with variable-sized arguments). COPY is the address of the
1405 actual value (this computation will cause it to be copied). We
1406 map that address for the register, noting the actual address as
1407 an equivalent in case it can be substituted into the insns. */
1409 if (GET_CODE (copy) != REG)
1411 temp = copy_addr_to_reg (copy);
1412 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1413 && REGNO (temp) < map->const_equiv_map_size)
1415 map->const_equiv_map[REGNO (temp)] = copy;
1416 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1420 map->reg_map[REGNO (XEXP (loc, 0))] = copy;
1422 else if (GET_CODE (loc) == MEM)
1424 /* This is the case of a parameter that lives in memory.
1425 It will live in the block we allocate in the called routine's
1426 frame that simulates the incoming argument area. Do nothing
1427 now; we will call store_expr later. */
1430 else if (GET_CODE (loc) == REG)
1432 /* This is the good case where the parameter is in a register.
1433 If it is read-only and our argument is a constant, set up the
1434 constant equivalence.
1436 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1437 that flag set if it is a register.
1439 Also, don't allow hard registers here; they might not be valid
1440 when substituted into insns. */
1442 if ((GET_CODE (copy) != REG && GET_CODE (copy) != SUBREG)
1443 || (GET_CODE (copy) == REG && REG_USERVAR_P (loc)
1444 && ! REG_USERVAR_P (copy))
1445 || (GET_CODE (copy) == REG
1446 && REGNO (copy) < FIRST_PSEUDO_REGISTER))
1448 temp = copy_to_mode_reg (GET_MODE (loc), copy);
1449 REG_USERVAR_P (temp) = REG_USERVAR_P (loc);
1450 if ((CONSTANT_P (copy) || FIXED_BASE_PLUS_P (copy))
1451 && REGNO (temp) < map->const_equiv_map_size)
1453 map->const_equiv_map[REGNO (temp)] = copy;
1454 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1458 map->reg_map[REGNO (loc)] = copy;
1460 else if (GET_CODE (loc) == CONCAT)
1462 /* This is the good case where the parameter is in a
1463 pair of separate pseudos.
1464 If it is read-only and our argument is a constant, set up the
1465 constant equivalence.
1467 If LOC is REG_USERVAR_P, the usual case, COPY must also have
1468 that flag set if it is a register.
1470 Also, don't allow hard registers here; they might not be valid
1471 when substituted into insns. */
1472 rtx locreal = gen_realpart (GET_MODE (XEXP (loc, 0)), loc);
1473 rtx locimag = gen_imagpart (GET_MODE (XEXP (loc, 0)), loc);
1474 rtx copyreal = gen_realpart (GET_MODE (locreal), copy);
1475 rtx copyimag = gen_imagpart (GET_MODE (locimag), copy);
/* Handle the real and imaginary halves independently, each exactly
   like the single-REG case above.  */
1477 if ((GET_CODE (copyreal) != REG && GET_CODE (copyreal) != SUBREG)
1478 || (GET_CODE (copyreal) == REG && REG_USERVAR_P (locreal)
1479 && ! REG_USERVAR_P (copyreal))
1480 || (GET_CODE (copyreal) == REG
1481 && REGNO (copyreal) < FIRST_PSEUDO_REGISTER))
1483 temp = copy_to_mode_reg (GET_MODE (locreal), copyreal);
1484 REG_USERVAR_P (temp) = REG_USERVAR_P (locreal);
1485 if ((CONSTANT_P (copyreal) || FIXED_BASE_PLUS_P (copyreal))
1486 && REGNO (temp) < map->const_equiv_map_size)
1488 map->const_equiv_map[REGNO (temp)] = copyreal;
1489 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1493 map->reg_map[REGNO (locreal)] = copyreal;
1495 if ((GET_CODE (copyimag) != REG && GET_CODE (copyimag) != SUBREG)
1496 || (GET_CODE (copyimag) == REG && REG_USERVAR_P (locimag)
1497 && ! REG_USERVAR_P (copyimag))
1498 || (GET_CODE (copyimag) == REG
1499 && REGNO (copyimag) < FIRST_PSEUDO_REGISTER))
1501 temp = copy_to_mode_reg (GET_MODE (locimag), copyimag);
1502 REG_USERVAR_P (temp) = REG_USERVAR_P (locimag);
1503 if ((CONSTANT_P (copyimag) || FIXED_BASE_PLUS_P (copyimag))
1504 && REGNO (temp) < map->const_equiv_map_size)
1506 map->const_equiv_map[REGNO (temp)] = copyimag;
1507 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1511 map->reg_map[REGNO (locimag)] = copyimag;
1517 /* Now do the parameters that will be placed in memory. */
1519 for (formal = DECL_ARGUMENTS (fndecl), i = 0;
1520 formal; formal = TREE_CHAIN (formal), i++)
1522 loc = RTVEC_ELT (arg_vector, i);
1524 if (GET_CODE (loc) == MEM
1525 /* Exclude case handled above. */
1526 && ! (GET_CODE (XEXP (loc, 0)) == REG
1527 && REGNO (XEXP (loc, 0)) > LAST_VIRTUAL_REGISTER))
1529 rtx note = emit_note (DECL_SOURCE_FILE (formal),
1530 DECL_SOURCE_LINE (formal));
1532 RTX_INTEGRATED_P (note) = 1;
1534 /* Compute the address in the area we reserved and store the
1536 temp = copy_rtx_and_substitute (loc, map);
1537 subst_constants (&temp, NULL_RTX, map);
1538 apply_change_group ();
1539 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
1540 temp = change_address (temp, VOIDmode, XEXP (temp, 0));
1541 store_expr (arg_trees[i], temp, 0);
1545 /* Deal with the places that the function puts its result.
1546 We are driven by what is placed into DECL_RESULT.
1548 Initially, we assume that we don't have anything special handling for
1549 REG_FUNCTION_RETURN_VALUE_P. */
1551 map->inline_target = 0;
1552 loc = DECL_RTL (DECL_RESULT (fndecl));
1553 if (TYPE_MODE (type) == VOIDmode)
1554 /* There is no return value to worry about. */
1556 else if (GET_CODE (loc) == MEM)
1558 if (! structure_value_addr || ! aggregate_value_p (DECL_RESULT (fndecl)))
1561 /* Pass the function the address in which to return a structure value.
1562 Note that a constructor can cause someone to call us with
1563 STRUCTURE_VALUE_ADDR, but the initialization takes place
1564 via the first parameter, rather than the struct return address.
1566 We have two cases: If the address is a simple register indirect,
1567 use the mapping mechanism to point that register to our structure
1568 return address. Otherwise, store the structure return value into
1569 the place that it will be referenced from. */
1571 if (GET_CODE (XEXP (loc, 0)) == REG)
1573 temp = force_reg (Pmode, structure_value_addr);
1574 map->reg_map[REGNO (XEXP (loc, 0))] = temp;
1575 if ((CONSTANT_P (structure_value_addr)
1576 || (GET_CODE (structure_value_addr) == PLUS
1577 && XEXP (structure_value_addr, 0) == virtual_stack_vars_rtx
1578 && GET_CODE (XEXP (structure_value_addr, 1)) == CONST_INT))
1579 && REGNO (temp) < map->const_equiv_map_size)
1581 map->const_equiv_map[REGNO (temp)] = structure_value_addr;
1582 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
1587 temp = copy_rtx_and_substitute (loc, map);
1588 subst_constants (&temp, NULL_RTX, map);
1589 apply_change_group ();
1590 emit_move_insn (temp, structure_value_addr);
1594 /* We will ignore the result value, so don't look at its structure.
1595 Note that preparations for an aggregate return value
1596 do need to be made (above) even if it will be ignored. */
1598 else if (GET_CODE (loc) == REG)
1600 /* The function returns an object in a register and we use the return
1601 value. Set up our target for remapping. */
1603 /* Machine mode function was declared to return. */
1604 enum machine_mode departing_mode = TYPE_MODE (type);
1605 /* (Possibly wider) machine mode it actually computes
1606 (for the sake of callers that fail to declare it right). */
1607 enum machine_mode arriving_mode
1608 = TYPE_MODE (TREE_TYPE (DECL_RESULT (fndecl)));
1611 /* Don't use MEMs as direct targets because on some machines
1612 substituting a MEM for a REG makes invalid insns.
1613 Let the combiner substitute the MEM if that is valid. */
1614 if (target == 0 || GET_CODE (target) != REG
1615 || GET_MODE (target) != departing_mode)
1616 target = gen_reg_rtx (departing_mode);
1618 /* If function's value was promoted before return,
1619 avoid machine mode mismatch when we substitute INLINE_TARGET.
1620 But TARGET is what we will return to the caller. */
1621 if (arriving_mode != departing_mode)
1622 reg_to_map = gen_rtx (SUBREG, arriving_mode, target, 0);
1624 reg_to_map = target;
1626 /* Usually, the result value is the machine's return register.
1627 Sometimes it may be a pseudo. Handle both cases. */
1628 if (REG_FUNCTION_VALUE_P (loc))
1629 map->inline_target = reg_to_map;
1631 map->reg_map[REGNO (loc)] = reg_to_map;
1634 /* Make new label equivalences for the labels in the called function. */
1635 for (i = min_labelno; i < max_labelno; i++)
1636 map->label_map[i] = gen_label_rtx ();
1638 /* Perform postincrements before actually calling the function. */
1641 /* Clean up stack so that variables might have smaller offsets. */
1642 do_pending_stack_adjust ();
1644 /* Save a copy of the location of const_equiv_map for mark_stores, called
1646 global_const_equiv_map = map->const_equiv_map;
1647 global_const_equiv_map_size = map->const_equiv_map_size;
1649 /* If the called function does an alloca, save and restore the
1650 stack pointer around the call. This saves stack space, but
1651 also is required if this inline is being done between two
1653 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1654 emit_stack_save (SAVE_BLOCK, &stack_save, NULL_RTX)
1656 /* Now copy the insns one by one. Do this in two passes, first the insns and
1657 then their REG_NOTES, just like save_for_inline. */
1659 /* This loop is very similar to the loop in copy_loop_body in unroll.c. */
1661 for (insn = insns; insn; insn = NEXT_INSN (insn))
1663 rtx copy, pattern, set;
1665 map->orig_asm_operands_vector = 0;
1667 switch (GET_CODE (insn))
1670 pattern = PATTERN (insn);
1671 set = single_set (insn);
1673 if (GET_CODE (pattern) == USE
1674 && GET_CODE (XEXP (pattern, 0)) == REG
1675 && REG_FUNCTION_VALUE_P (XEXP (pattern, 0)))
1676 /* The (USE (REG n)) at return from the function should
1677 be ignored since we are changing (REG n) into
1681 /* Ignore setting a function value that we don't want to use. */
1682 if (map->inline_target == 0
1684 && GET_CODE (SET_DEST (set)) == REG
1685 && REG_FUNCTION_VALUE_P (SET_DEST (set)))
1687 if (volatile_refs_p (SET_SRC (set)))
1691 /* If we must not delete the source,
1692 load it into a new temporary. */
1693 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1695 new_set = single_set (copy);
1700 = gen_reg_rtx (GET_MODE (SET_DEST (new_set)));
1702 /* If the source and destination are the same and it
1703 has a note on it, keep the insn. */
1704 else if (rtx_equal_p (SET_DEST (set), SET_SRC (set))
1705 && REG_NOTES (insn) != 0)
1706 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1711 /* If this is setting the static chain rtx, omit it. */
1712 else if (static_chain_value != 0
1714 && GET_CODE (SET_DEST (set)) == REG
1715 && rtx_equal_p (SET_DEST (set),
1716 static_chain_incoming_rtx))
1719 /* If this is setting the static chain pseudo, set it from
1720 the value we want to give it instead. */
1721 else if (static_chain_value != 0
1723 && rtx_equal_p (SET_SRC (set),
1724 static_chain_incoming_rtx))
1726 rtx newdest = copy_rtx_and_substitute (SET_DEST (set), map);
1728 copy = emit_move_insn (newdest, static_chain_value);
1729 static_chain_value = 0;
1732 copy = emit_insn (copy_rtx_and_substitute (pattern, map));
1733 /* REG_NOTES will be copied later. */
1736 /* If this insn is setting CC0, it may need to look at
1737 the insn that uses CC0 to see what type of insn it is.
1738 In that case, the call to recog via validate_change will
1739 fail. So don't substitute constants here. Instead,
1740 do it when we emit the following insn.
1742 For example, see the pyr.md file. That machine has signed and
1743 unsigned compares. The compare patterns must check the
1744 following branch insn to see what kind of compare to
1747 If the previous insn set CC0, substitute constants on it as
1749 if (sets_cc0_p (PATTERN (copy)) != 0)
1754 try_constants (cc0_insn, map);
1756 try_constants (copy, map);
1759 try_constants (copy, map);
/* A RETURN in the inlined body becomes a jump to a single local
   return label, emitted after the whole copied body below.  */
1764 if (GET_CODE (PATTERN (insn)) == RETURN)
1766 if (local_return_label == 0)
1767 local_return_label = gen_label_rtx ();
1768 pattern = gen_jump (local_return_label);
1771 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1773 copy = emit_jump_insn (pattern);
1777 try_constants (cc0_insn, map);
1780 try_constants (copy, map);
1782 /* If this used to be a conditional jump insn but whose branch
1783 direction is now known, we must do something special. */
1784 if (condjump_p (insn) && ! simplejump_p (insn) && map->last_pc_value)
1787 /* The previous insn set cc0 for us. So delete it. */
1788 delete_insn (PREV_INSN (copy));
1791 /* If this is now a no-op, delete it. */
1792 if (map->last_pc_value == pc_rtx)
1798 /* Otherwise, this is unconditional jump so we must put a
1799 BARRIER after it. We could do some dead code elimination
1800 here, but jump.c will do it just as well. */
1806 pattern = copy_rtx_and_substitute (PATTERN (insn), map);
1807 copy = emit_call_insn (pattern);
1809 /* Because the USAGE information potentially contains objects other
1810 than hard registers, we need to copy it. */
1811 CALL_INSN_FUNCTION_USAGE (copy) =
1812 copy_rtx_and_substitute (CALL_INSN_FUNCTION_USAGE (insn), map);
1816 try_constants (cc0_insn, map);
1819 try_constants (copy, map);
1821 /* Be lazy and assume CALL_INSNs clobber all hard registers. */
1822 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1823 map->const_equiv_map[i] = 0;
1827 copy = emit_label (map->label_map[CODE_LABEL_NUMBER (insn)]);
1828 LABEL_NAME (copy) = LABEL_NAME (insn);
1833 copy = emit_barrier ();
1837 /* It is important to discard function-end and function-beg notes,
1838 so we have only one of each in the current function.
1839 Also, NOTE_INSN_DELETED notes aren't useful (save_for_inline
1840 deleted these in the copy used for continuing compilation,
1841 not the copy used for inlining). */
1842 if (NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_END
1843 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_FUNCTION_BEG
1844 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_DELETED)
1845 copy = emit_note (NOTE_SOURCE_FILE (insn), NOTE_LINE_NUMBER (insn));
1856 RTX_INTEGRATED_P (copy) = 1;
1858 map->insn_map[INSN_UID (insn)] = copy;
1861 /* Now copy the REG_NOTES. Increment const_age, so that only constants
1862 from parameters can be substituted in. These are the only ones that
1863 are valid across the entire function. */
1865 for (insn = insns; insn; insn = NEXT_INSN (insn))
1866 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1867 && map->insn_map[INSN_UID (insn)]
1868 && REG_NOTES (insn))
1870 rtx tem = copy_rtx_and_substitute (REG_NOTES (insn), map);
1871 /* We must also do subst_constants, in case one of our parameters
1872 has const type and constant value. */
1873 subst_constants (&tem, NULL_RTX, map);
1874 apply_change_group ();
1875 REG_NOTES (map->insn_map[INSN_UID (insn)]) = tem;
1878 if (local_return_label)
1879 emit_label (local_return_label);
1881 /* Restore the stack pointer if we saved it above. */
1882 if (FUNCTION_FLAGS (header) & FUNCTION_FLAGS_CALLS_ALLOCA)
1883 emit_stack_restore (SAVE_BLOCK, stack_save, NULL_RTX);
1885 /* Make copies of the decls of the symbols in the inline function, so that
1886 the copies of the variables get declared in the current function. Set
1887 up things so that lookup_static_chain knows that to interpret registers
1888 in SAVE_EXPRs for TYPE_SIZEs as local. */
1890 inline_function_decl = fndecl;
1891 integrate_parm_decls (DECL_ARGUMENTS (fndecl), map, arg_vector);
1892 integrate_decl_tree ((tree) ORIGINAL_DECL_INITIAL (header), 0, map);
1893 inline_function_decl = 0;
1895 /* End the scope containing the copied formal parameter variables
1896 and copied LABEL_DECLs. */
1898 expand_end_bindings (getdecls (), 1, 1);
1899 block = poplevel (1, 1, 0);
1900 BLOCK_ABSTRACT_ORIGIN (block) = (DECL_ABSTRACT_ORIGIN (fndecl) == NULL
1901 ? fndecl : DECL_ABSTRACT_ORIGIN (fndecl));
1903 emit_line_note (input_filename, lineno);
/* For a structure return, the result lives in memory at
   STRUCTURE_VALUE_ADDR; hand that MEM back as TARGET.  */
1905 if (structure_value_addr)
1907 target = gen_rtx (MEM, TYPE_MODE (type),
1908 memory_address (TYPE_MODE (type), structure_value_addr));
1909 MEM_IN_STRUCT_P (target) = 1;
1914 /* Given a chain of PARM_DECLs, ARGS, copy each decl into a VAR_DECL,
1915 push all of those decls and give each one the corresponding home. */
1918 integrate_parm_decls (args, map, arg_vector)
1920 struct inline_remap *map;
/* For each PARM_DECL in ARGS, build a local VAR_DECL whose home is the
   remapped copy (via MAP) of the parm's location ARG_VECTOR[i] in the
   inlined body, and push it into the current binding contour.  */
1926 for (tail = args, i = 0; tail; tail = TREE_CHAIN (tail), i++)
1928 register tree decl = build_decl (VAR_DECL, DECL_NAME (tail),
1931 = copy_rtx_and_substitute (RTVEC_ELT (arg_vector, i), map);
1933 DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (tail);
1934 /* We really should be setting DECL_INCOMING_RTL to something reasonable
1935 here, but that's going to require some more work. */
1936 /* DECL_INCOMING_RTL (decl) = ?; */
1937 /* These args would always appear unused, if not for this. */
1938 TREE_USED (decl) = 1;
1939 /* Prevent warning for shadowing with these. */
1940 DECL_ABSTRACT_ORIGIN (decl) = tail;
1942 /* Fully instantiate the address with the equivalent form so that the
1943 debugging information contains the actual register, instead of the
1944 virtual register. Do this by not passing an insn to
1946 subst_constants (&new_decl_rtl, NULL_RTX, map);
1947 apply_change_group ();
1948 DECL_RTL (decl) = new_decl_rtl;
1952 /* Given a BLOCK node LET, push decls and levels so as to construct in the
1953 current function a tree of contexts isomorphic to the one that is given.
1955 LEVEL indicates how far down into the BLOCK tree is the node we are
1956 currently traversing. It is always zero except for recursive calls.
1958 MAP, if nonzero, is a pointer to an inline_remap map which indicates how
1959 registers used in the DECL_RTL field should be remapped. If it is zero,
1960 no mapping is necessary. */
1963 integrate_decl_tree (let, level, map)
1966 struct inline_remap *map;
/* Copy each variable declared directly in BLOCK node LET, remapping its
   DECL_RTL through MAP, then recurse on subblocks.  */
1973 for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
/* NOTE(review): the obstack switch presumably makes the copied decl
   allocation saveable across functions -- confirm against tree.c.  */
1978 push_obstacks_nochange ();
1979 saveable_allocation ();
1983 if (DECL_RTL (t) != 0)
1985 DECL_RTL (d) = copy_rtx_and_substitute (DECL_RTL (t), map);
1986 /* Fully instantiate the address with the equivalent form so that the
1987 debugging information contains the actual register, instead of the
1988 virtual register. Do this by not passing an insn to
1990 subst_constants (&DECL_RTL (d), NULL_RTX, map);
1991 apply_change_group ();
1993 /* These args would always appear unused, if not for this. */
1996 if (DECL_LANG_SPECIFIC (d))
1999 /* Must set DECL_ABSTRACT_ORIGIN here for local variables, to ensure
2000 that we don't get -Wshadow warnings. But don't set it here if
2001 pushdecl might return a duplicate decl, as that will result in
2002 incorrect DWARF debug info. */
2003 if (! DECL_EXTERNAL (d) || ! TREE_PUBLIC (d))
2004 /* Prevent warning for shadowing with these. */
2005 DECL_ABSTRACT_ORIGIN (d) = t;
2007 newd = pushdecl (d);
2009 /* If we didn't set DECL_ABSTRACT_ORIGIN above, then set it now.
2010 Simpler to just set it always rather than checking.
2011 If the decl we get back is the copy of 't' that we started with,
2012 then set the DECL_ABSTRACT_ORIGIN. Otherwise, we must have a
2013 duplicate decl, and we got the older one back. In that case, setting
2014 DECL_ABSTRACT_ORIGIN is not appropriate. */
2016 DECL_ABSTRACT_ORIGIN (d) = t;
/* Recursively copy the nested scopes.  */
2019 for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
2020 integrate_decl_tree (t, level + 1, map);
/* Close the binding level; NODE is the BLOCK built for the copied scope.  */
2024 node = poplevel (1, 0, 0);
2027 TREE_USED (node) = TREE_USED (let);
2028 BLOCK_ABSTRACT_ORIGIN (node) = let;
2033 /* Create a new copy of an rtx.
2034 Recursively copies the operands of the rtx,
2035 except for those few rtx codes that are sharable.
2037 We always return an rtx that is similar to that incoming rtx, with the
2038 exception of possibly changing a REG to a SUBREG or vice versa. No
2039 rtl is ever emitted.
2041 Handle constants that need to be placed in the constant pool by
2042 calling `force_const_mem'. */
/* Return a deep copy of ORIG with registers, labels and constant-pool
   references replaced according to MAP.  Dispatches on GET_CODE (orig);
   already-remapped or shareable rtx's are returned from the maps rather
   than re-copied.  */
2045 copy_rtx_and_substitute (orig, map)
2047 struct inline_remap *map;
2049 register rtx copy, temp;
2051 register RTX_CODE code;
2052 register enum machine_mode mode;
2053 register char *format_ptr;
2059 code = GET_CODE (orig);
2060 mode = GET_MODE (orig);
/* Register handling (presumably the REG case of the dispatch).  */
2065 /* If the stack pointer register shows up, it must be part of
2066 stack-adjustments (*not* because we eliminated the frame pointer!).
2067 Small hard registers are returned as-is. Pseudo-registers
2068 go through their `reg_map'. */
2069 regno = REGNO (orig);
2070 if (regno <= LAST_VIRTUAL_REGISTER)
2072 /* Some hard registers are also mapped,
2073 but others are not translated. */
2074 if (map->reg_map[regno] != 0)
2075 return map->reg_map[regno];
2077 /* If this is the virtual frame pointer, make space in current
2078 function's stack frame for the stack frame of the inline function.
2080 Copy the address of this area into a pseudo. Map
2081 virtual_stack_vars_rtx to this pseudo and set up a constant
2082 equivalence for it to be the address. This will substitute the
2083 address into insns where it can be substituted and use the new
2084 pseudo where it can't. */
2085 if (regno == VIRTUAL_STACK_VARS_REGNUM)
2088 int size = DECL_FRAME_SIZE (map->fndecl);
2092 loc = assign_stack_temp (BLKmode, size, 1);
2093 loc = XEXP (loc, 0);
2094 #ifdef FRAME_GROWS_DOWNWARD
2095 /* In this case, virtual_stack_vars_rtx points to one byte
2096 higher than the top of the frame area. So compute the offset
2097 to one byte higher than our substitute frame.
2098 Keep the fake frame pointer aligned like a real one. */
2099 rounded = CEIL_ROUND (size, BIGGEST_ALIGNMENT / BITS_PER_UNIT);
2100 loc = plus_constant (loc, rounded);
/* Record the pseudo plus a constant equivalence for its address, then
   emit the address computation at the start of the inlined body.  */
2102 map->reg_map[regno] = temp
2103 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2105 if (REGNO (temp) < map->const_equiv_map_size)
2107 map->const_equiv_map[REGNO (temp)] = loc;
2108 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2111 seq = gen_sequence ();
2113 emit_insn_after (seq, map->insns_at_start);
2116 else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
2118 /* Do the same for a block to contain any arguments referenced
   in memory; map virtual_incoming_args_rtx the same way.  */
2121 int size = FUNCTION_ARGS_SIZE (DECL_SAVED_INSNS (map->fndecl));
2124 loc = assign_stack_temp (BLKmode, size, 1);
2125 loc = XEXP (loc, 0);
2126 /* When arguments grow downward, the virtual incoming
2127 args pointer points to the top of the argument block,
2128 so the remapped location better do the same. */
2129 #ifdef ARGS_GROW_DOWNWARD
2130 loc = plus_constant (loc, size);
2132 map->reg_map[regno] = temp
2133 = force_reg (Pmode, force_operand (loc, NULL_RTX));
2135 if (REGNO (temp) < map->const_equiv_map_size)
2137 map->const_equiv_map[REGNO (temp)] = loc;
2138 map->const_age_map[REGNO (temp)] = CONST_AGE_PARM;
2141 seq = gen_sequence ();
2143 emit_insn_after (seq, map->insns_at_start);
2146 else if (REG_FUNCTION_VALUE_P (orig))
2148 /* This is a reference to the function return value. If
2149 the function doesn't have a return value, error. If the
2150 mode doesn't agree, make a SUBREG. */
2151 if (map->inline_target == 0)
2152 /* Must be unrolling loops or replicating code if we
2153 reach here, so return the register unchanged. */
2155 else if (mode != GET_MODE (map->inline_target))
2156 return gen_lowpart (mode, map->inline_target);
2158 return map->inline_target;
/* Ordinary pseudo: allocate its replacement on first use and carry
   the user-variable / loop-test / unchanging bits across.  */
2162 if (map->reg_map[regno] == NULL)
2164 map->reg_map[regno] = gen_reg_rtx (mode);
2165 REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
2166 REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
2167 RTX_UNCHANGING_P (map->reg_map[regno]) = RTX_UNCHANGING_P (orig);
2168 /* A reg with REG_FUNCTION_VALUE_P true will never reach here. */
2170 return map->reg_map[regno];
/* SUBREG (presumably): copy the inner expression, then rebuild.  */
2173 copy = copy_rtx_and_substitute (SUBREG_REG (orig), map);
2174 /* SUBREG is ordinary, but don't make nested SUBREGs. */
2175 if (GET_CODE (copy) == SUBREG)
2176 return gen_rtx (SUBREG, GET_MODE (orig), SUBREG_REG (copy),
2177 SUBREG_WORD (orig) + SUBREG_WORD (copy));
2178 else if (GET_CODE (copy) == CONCAT)
2179 return (subreg_realpart_p (orig) ? XEXP (copy, 0) : XEXP (copy, 1));
2181 return gen_rtx (SUBREG, GET_MODE (orig), copy,
2182 SUBREG_WORD (orig));
2186 /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
2187 to (use foo) if the original insn didn't have a subreg.
2188 Removing the subreg distorts the VAX movstrhi pattern
2189 by changing the mode of an operand. */
2190 copy = copy_rtx_and_substitute (XEXP (orig, 0), map);
2191 if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
2192 copy = SUBREG_REG (copy);
2193 return gen_rtx (code, VOIDmode, copy);
/* CODE_LABEL (presumably): return the remapped label, keeping its
   "preserve" flag in sync.  */
2196 LABEL_PRESERVE_P (map->label_map[CODE_LABEL_NUMBER (orig)])
2197 = LABEL_PRESERVE_P (orig);
2198 return map->label_map[CODE_LABEL_NUMBER (orig)];
/* LABEL_REF (presumably): nonlocal labels are kept as-is, local ones
   are remapped through label_map.  */
2201 copy = gen_rtx (LABEL_REF, mode,
2202 LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
2203 : map->label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
2204 LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);
2206 /* The fact that this label was previously nonlocal does not mean
2207 it still is, so we must check if it is within the range of
2208 this function's labels. */
2209 LABEL_REF_NONLOCAL_P (copy)
2210 = (LABEL_REF_NONLOCAL_P (orig)
2211 && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
2212 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));
2214 /* If we have made a nonlocal label local, it means that this
2215 inlined call will be referring to our nonlocal goto handler.
2216 So make sure we create one for this block; we normally would
2217 not since this is not otherwise considered a "call". */
2218 if (LABEL_REF_NONLOCAL_P (orig) && ! LABEL_REF_NONLOCAL_P (copy))
2219 function_call_count++;
2229 /* Symbols which represent the address of a label stored in the constant
2230 pool must be modified to point to a constant pool entry for the
2231 remapped label. Otherwise, symbols are returned unchanged. */
2232 if (CONSTANT_POOL_ADDRESS_P (orig))
2234 rtx constant = get_pool_constant (orig);
2235 if (GET_CODE (constant) == LABEL_REF)
2236 return XEXP (force_const_mem (Pmode,
2237 copy_rtx_and_substitute (constant,
2245 /* We have to make a new copy of this CONST_DOUBLE because we don't want
2246 to use the old value of CONST_DOUBLE_MEM. Also, this may be a
2247 duplicate of a CONST_DOUBLE we have already seen. */
2248 if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
2252 REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
2253 return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
2256 return immed_double_const (CONST_DOUBLE_LOW (orig),
2257 CONST_DOUBLE_HIGH (orig), VOIDmode);
2260 /* Make new constant pool entry for a constant
2261 that was in the pool of the inline function. */
2262 if (RTX_INTEGRATED_P (orig))
2264 /* If this was an address of a constant pool entry that itself
2265 had to be placed in the constant pool, it might not be a
2266 valid address. So the recursive call below might turn it
2267 into a register. In that case, it isn't a constant any
2268 more, so return it. This has the potential of changing a
2269 MEM into a REG, but we'll assume that it is safe. */
2270 temp = copy_rtx_and_substitute (XEXP (orig, 0), map);
2271 if (! CONSTANT_P (temp))
2273 return validize_mem (force_const_mem (GET_MODE (orig), temp));
2278 /* If from constant pool address, make new constant pool entry and
2279 return its address. */
2280 if (! RTX_INTEGRATED_P (orig))
2283 temp = force_const_mem (GET_MODE (orig),
2284 copy_rtx_and_substitute (XEXP (orig, 0), map));
2287 /* Legitimizing the address here is incorrect.
2289 The only ADDRESS rtx's that can reach here are ones created by
2290 save_constants. Hence the operand of the ADDRESS is always valid
2291 in this position of the instruction, since the original rtx without
2292 the ADDRESS was valid.
2294 The reason we don't legitimize the address here is that on the
2295 Sparc, the caller may have a (high ...) surrounding this ADDRESS.
2296 This code forces the operand of the address to a register, which
2297 fails because we can not take the HIGH part of a register.
2299 Also, change_address may create new registers. These registers
2300 will not have valid reg_map entries. This can cause try_constants()
2301 to fail because it assumes that all registers in the rtx have valid
2302 reg_map entries, and it may end up replacing one of these new
2303 registers with junk. */
2305 if (! memory_address_p (GET_MODE (temp), XEXP (temp, 0)))
2306 temp = change_address (temp, GET_MODE (temp), XEXP (temp, 0));
2309 return XEXP (temp, 0);
2312 /* If a single asm insn contains multiple output operands
2313 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
2314 We must make sure that the copied insn continues to share it. */
2315 if (map->orig_asm_operands_vector == XVEC (orig, 3))
2317 copy = rtx_alloc (ASM_OPERANDS);
2318 copy->volatil = orig->volatil;
2319 XSTR (copy, 0) = XSTR (orig, 0);
2320 XSTR (copy, 1) = XSTR (orig, 1);
2321 XINT (copy, 2) = XINT (orig, 2);
2322 XVEC (copy, 3) = map->copy_asm_operands_vector;
2323 XVEC (copy, 4) = map->copy_asm_constraints_vector;
2324 XSTR (copy, 5) = XSTR (orig, 5);
2325 XINT (copy, 6) = XINT (orig, 6);
2331 /* This is given special treatment because the first
2332 operand of a CALL is a (MEM ...) which may get
2333 forced into a register for cse. This is undesirable
2334 if function-address cse isn't wanted or if we won't do cse. */
2335 #ifndef NO_FUNCTION_CSE
2336 if (! (optimize && ! flag_no_function_cse))
2338 return gen_rtx (CALL, GET_MODE (orig),
2339 gen_rtx (MEM, GET_MODE (XEXP (orig, 0)),
2340 copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0), map)),
2341 copy_rtx_and_substitute (XEXP (orig, 1), map));
2345 /* Must be ifdefed out for loop unrolling to work. */
2351 /* If this is setting fp or ap, it means that we have a nonlocal goto.
2353 If the nonlocal goto is into the current function,
2354 this will result in unnecessarily bad code, but should work. */
2355 if (SET_DEST (orig) == virtual_stack_vars_rtx
2356 || SET_DEST (orig) == virtual_incoming_args_rtx)
2357 return gen_rtx (SET, VOIDmode, SET_DEST (orig),
2358 copy_rtx_and_substitute (SET_SRC (orig), map));
/* MEM (presumably): copy the node and its address, preserving the
   in-struct and volatile bits.  */
2362 copy = rtx_alloc (MEM);
2363 PUT_MODE (copy, mode);
2364 XEXP (copy, 0) = copy_rtx_and_substitute (XEXP (orig, 0), map);
2365 MEM_IN_STRUCT_P (copy) = MEM_IN_STRUCT_P (orig);
2366 MEM_VOLATILE_P (copy) = MEM_VOLATILE_P (orig);
2368 /* If doing function inlining, this MEM might not be const in the
2369 function that it is being inlined into, and thus may not be
2370 unchanging after function inlining. Constant pool references are
2371 handled elsewhere, so this doesn't lose RTX_UNCHANGING_P bits
   that matter.  */
2373 if (! map->integrating)
2374 RTX_UNCHANGING_P (copy) = RTX_UNCHANGING_P (orig);
/* Default: structurally copy the rtx, dispatching per operand on the
   rtx format string.  */
2379 copy = rtx_alloc (code);
2380 PUT_MODE (copy, mode);
2381 copy->in_struct = orig->in_struct;
2382 copy->volatil = orig->volatil;
2383 copy->unchanging = orig->unchanging;
2385 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2387 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2389 switch (*format_ptr++)
2395 XEXP (copy, i) = copy_rtx_and_substitute (XEXP (orig, i), map);
2399 /* Change any references to old-insns to point to the
2400 corresponding copied insns. */
2401 XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
2405 XVEC (copy, i) = XVEC (orig, i);
2406 if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
2408 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2409 for (j = 0; j < XVECLEN (copy, i); j++)
2410 XVECEXP (copy, i, j)
2411 = copy_rtx_and_substitute (XVECEXP (orig, i, j), map);
2416 XWINT (copy, i) = XWINT (orig, i);
2420 XINT (copy, i) = XINT (orig, i);
2424 XSTR (copy, i) = XSTR (orig, i);
/* First ASM_OPERANDS copied: remember its operand and constraint
   vectors so later ASM_OPERANDS of the same insn share them.  */
2432 if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
2434 map->orig_asm_operands_vector = XVEC (orig, 3);
2435 map->copy_asm_operands_vector = XVEC (copy, 3);
2436 map->copy_asm_constraints_vector = XVEC (copy, 4);
2442 /* Substitute known constant values into INSN, if that is valid. */
/* Substitute known constant equivalences into INSN via subst_constants,
   validate the result as a group, then record any new REG / pc / cc0
   equivalences that this insn establishes in MAP for later insns.  */
2445 try_constants (insn, map)
2447 struct inline_remap *map;
2452 subst_constants (&PATTERN (insn), insn, map);
2454 /* Apply the changes if they are valid; otherwise discard them. */
2455 apply_change_group ();
2457 /* Show we don't know the value of anything stored or clobbered. */
2458 note_stores (PATTERN (insn), mark_stores);
2459 map->last_pc_value = 0;
2461 map->last_cc0_value = 0;
2464 /* Set up any constant equivalences made in this insn. */
2465 for (i = 0; i < map->num_sets; i++)
2467 if (GET_CODE (map->equiv_sets[i].dest) == REG)
2469 int regno = REGNO (map->equiv_sets[i].dest);
2471 if (regno < map->const_equiv_map_size
2472 && (map->const_equiv_map[regno] == 0
2473 /* Following clause is a hack to make case work where GNU C++
2474 reassigns a variable to make cse work right. */
2475 || ! rtx_equal_p (map->const_equiv_map[regno],
2476 map->equiv_sets[i].equiv)))
2478 map->const_equiv_map[regno] = map->equiv_sets[i].equiv;
2479 map->const_age_map[regno] = map->const_age;
2482 else if (map->equiv_sets[i].dest == pc_rtx)
2483 map->last_pc_value = map->equiv_sets[i].equiv;
2485 else if (map->equiv_sets[i].dest == cc0_rtx)
2486 map->last_cc0_value = map->equiv_sets[i].equiv;
2491 /* Substitute known constants for pseudo regs in the contents of LOC,
2492 which are part of INSN.
2493 If INSN is zero, the substitution should always be done (this is used to
2495 These changes are taken out by try_constants if the result is not valid.
2497 Note that we are more concerned with determining when the result of a SET
2498 is a constant, for further propagation, than actually inserting constants
2499 into insns; cse will do the latter task better.
2501 This function is also used to adjust the addresses of items previously addressed
2502 via the virtual stack variable or virtual incoming arguments registers. */
/* Substitute known constant equivalences from MAP into *LOC, which is
   part of INSN.  All changes are queued with validate_change and later
   accepted or discarded as a group by try_constants.  */
2505 subst_constants (loc, insn, map)
2508 struct inline_remap *map;
2512 register enum rtx_code code;
2513 register char *format_ptr;
2514 int num_changes = num_validated_changes ();
2516 enum machine_mode op0_mode;
2518 code = GET_CODE (x);
/* cc0 reference (presumably): replace with the value last stored.  */
2533 validate_change (insn, loc, map->last_cc0_value, 1);
2539 /* The only thing we can do with a USE or CLOBBER is possibly do
2540 some substitutions in a MEM within it. */
2541 if (GET_CODE (XEXP (x, 0)) == MEM)
2542 subst_constants (&XEXP (XEXP (x, 0), 0), insn, map);
2546 /* Substitute for parms and known constants. Don't replace
2547 hard regs used as user variables with constants. */
2549 int regno = REGNO (x);
2551 if (! (regno < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
2552 && regno < map->const_equiv_map_size
2553 && map->const_equiv_map[regno] != 0
2554 && map->const_age_map[regno] >= map->const_age)
2555 validate_change (insn, loc, map->const_equiv_map[regno], 1);
2560 /* SUBREG applied to something other than a reg
2561 should be treated as ordinary, since that must
2562 be a special hack and we don't know how to treat it specially.
2563 Consider for example mulsidi3 in m68k.md.
2564 Ordinary SUBREG of a REG needs this special treatment. */
2565 if (GET_CODE (SUBREG_REG (x)) == REG)
2567 rtx inner = SUBREG_REG (x);
2570 /* We can't call subst_constants on &SUBREG_REG (x) because any
2571 constant or SUBREG wouldn't be valid inside our SUBREG. Instead,
2572 see what is inside, try to form the new SUBREG and see if that is
2573 valid. We handle two cases: extracting a full word in an
2574 integral mode and extracting the low part. */
2575 subst_constants (&inner, NULL_RTX, map);
2577 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
2578 && GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
2579 && GET_MODE (SUBREG_REG (x)) != VOIDmode)
2580 new = operand_subword (inner, SUBREG_WORD (x), 0,
2581 GET_MODE (SUBREG_REG (x)));
2583 if (new == 0 && subreg_lowpart_p (x))
2584 new = gen_lowpart_common (GET_MODE (x), inner);
2587 validate_change (insn, loc, new, 1);
/* MEM (presumably): substitute within the address, backing out if the
   address is no longer valid afterward.  */
2594 subst_constants (&XEXP (x, 0), insn, map);
2596 /* If a memory address got spoiled, change it back. */
2597 if (insn != 0 && num_validated_changes () != num_changes
2598 && !memory_address_p (GET_MODE (x), XEXP (x, 0)))
2599 cancel_changes (num_changes);
2604 /* Substitute constants in our source, and in any arguments to a
2605 complex (e.g., ZERO_EXTRACT) destination, but not in the destination
   itself.  */
2607 rtx *dest_loc = &SET_DEST (x);
2608 rtx dest = *dest_loc;
2611 subst_constants (&SET_SRC (x), insn, map);
2614 while (GET_CODE (*dest_loc) == ZERO_EXTRACT
2615 /* By convention, we always use ZERO_EXTRACT in the dest. */
2616 /* || GET_CODE (*dest_loc) == SIGN_EXTRACT */
2617 || GET_CODE (*dest_loc) == SUBREG
2618 || GET_CODE (*dest_loc) == STRICT_LOW_PART)
2620 if (GET_CODE (*dest_loc) == ZERO_EXTRACT)
2622 subst_constants (&XEXP (*dest_loc, 1), insn, map);
2623 subst_constants (&XEXP (*dest_loc, 2), insn, map);
2625 dest_loc = &XEXP (*dest_loc, 0);
2628 /* Do substitute in the address of a destination in memory. */
2629 if (GET_CODE (*dest_loc) == MEM)
2630 subst_constants (&XEXP (*dest_loc, 0), insn, map);
2632 /* Check for the case of DEST a SUBREG, both it and the underlying
2633 register are less than one word, and the SUBREG has the wider mode.
2634 In that case, we are really setting the underlying register to the
2635 source converted to the mode of DEST. So indicate that. */
2636 if (GET_CODE (dest) == SUBREG
2637 && GET_MODE_SIZE (GET_MODE (dest)) <= UNITS_PER_WORD
2638 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest))) <= UNITS_PER_WORD
2639 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
2640 <= GET_MODE_SIZE (GET_MODE (dest)))
2641 && (tem = gen_lowpart_if_possible (GET_MODE (SUBREG_REG (dest)),
2643 src = tem, dest = SUBREG_REG (dest);
2645 /* If storing a recognizable value save it for later recording. */
2646 if ((map->num_sets < MAX_RECOG_OPERANDS)
2647 && (CONSTANT_P (src)
2648 || (GET_CODE (src) == REG
2649 && (REGNO (src) == VIRTUAL_INCOMING_ARGS_REGNUM
2650 || REGNO (src) == VIRTUAL_STACK_VARS_REGNUM))
2651 || (GET_CODE (src) == PLUS
2652 && GET_CODE (XEXP (src, 0)) == REG
2653 && (REGNO (XEXP (src, 0)) == VIRTUAL_INCOMING_ARGS_REGNUM
2654 || REGNO (XEXP (src, 0)) == VIRTUAL_STACK_VARS_REGNUM)
2655 && CONSTANT_P (XEXP (src, 1)))
2656 || GET_CODE (src) == COMPARE
2661 && (src == pc_rtx || GET_CODE (src) == RETURN
2662 || GET_CODE (src) == LABEL_REF))))
2664 /* Normally, this copy won't do anything. But, if SRC is a COMPARE
2665 it will cause us to save the COMPARE with any constants
2666 substituted, which is what we want for later. */
2667 map->equiv_sets[map->num_sets].equiv = copy_rtx (src);
2668 map->equiv_sets[map->num_sets++].dest = dest;
/* Default: recurse on sub-expressions according to the format string.  */
2675 format_ptr = GET_RTX_FORMAT (code);
2677 /* If the first operand is an expression, save its mode for later. */
2678 if (*format_ptr == 'e')
2679 op0_mode = GET_MODE (XEXP (x, 0));
2681 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2683 switch (*format_ptr++)
2690 subst_constants (&XEXP (x, i), insn, map);
2700 if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
2703 for (j = 0; j < XVECLEN (x, i); j++)
2704 subst_constants (&XVECEXP (x, i, j), insn, map);
2713 /* If this is a commutative operation, move a constant to the second
2714 operand unless the second operand is already a CONST_INT. */
2715 if ((GET_RTX_CLASS (code) == 'c' || code == NE || code == EQ)
2716 && CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2718 rtx tem = XEXP (x, 0);
2719 validate_change (insn, &XEXP (x, 0), XEXP (x, 1), 1);
2720 validate_change (insn, &XEXP (x, 1), tem, 1);
2723 /* Simplify the expression in case we put in some constants. */
2724 switch (GET_RTX_CLASS (code))
2727 new = simplify_unary_operation (code, GET_MODE (x),
2728 XEXP (x, 0), op0_mode);
2733 enum machine_mode op_mode = GET_MODE (XEXP (x, 0));
2734 if (op_mode == VOIDmode)
2735 op_mode = GET_MODE (XEXP (x, 1));
2736 new = simplify_relational_operation (code, op_mode,
2737 XEXP (x, 0), XEXP (x, 1));
2738 #ifdef FLOAT_STORE_FLAG_VALUE
2739 if (new != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2740 new = ((new == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2741 : CONST_DOUBLE_FROM_REAL_VALUE (FLOAT_STORE_FLAG_VALUE,
2749 new = simplify_binary_operation (code, GET_MODE (x),
2750 XEXP (x, 0), XEXP (x, 1));
2755 new = simplify_ternary_operation (code, GET_MODE (x), op0_mode,
2756 XEXP (x, 0), XEXP (x, 1), XEXP (x, 2));
2761 validate_change (insn, loc, new, 1);
2764 /* Show that register modified no longer contain known constants. We are
2765 called from note_stores with parts of the new insn. */
/* note_stores callback: any register stored into or clobbered no longer
   holds a known constant, so clear its entries in the global
   constant-equivalence map.  X (the store expression) is unused here.  */
2768 mark_stores (dest, x)
2773 enum machine_mode mode;
2775 /* DEST is always the innermost thing set, except in the case of
2776 SUBREGs of hard registers. */
2778 if (GET_CODE (dest) == REG)
2779 regno = REGNO (dest), mode = GET_MODE (dest);
2780 else if (GET_CODE (dest) == SUBREG && GET_CODE (SUBREG_REG (dest)) == REG)
2782 regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
2783 mode = GET_MODE (SUBREG_REG (dest));
/* A hard-register store may span several consecutive registers;
   invalidate every one of them.  */
2788 int last_reg = (regno >= FIRST_PSEUDO_REGISTER ? regno
2789 : regno + HARD_REGNO_NREGS (regno, mode) - 1);
2792 for (i = regno; i <= last_reg; i++)
2793 if (i < global_const_equiv_map_size)
2794 global_const_equiv_map[i] = 0;
2798 /* If any CONST expressions with RTX_INTEGRATED_P are present in the rtx
2799 pointed to by PX, they represent constants in the constant pool.
2800 Replace these with a new memory reference obtained from force_const_mem.
2801 Similarly, ADDRESS expressions with RTX_INTEGRATED_P represent the
2802 address of a constant pool entry. Replace them with the address of
2803 a new constant pool entry obtained from force_const_mem. */
/* Walk the rtx at *PX, replacing the RTX_INTEGRATED_P markers left by
   save_constants with real constant-pool references (force_const_mem),
   and remaking CONST_DOUBLEs so stale CONST_DOUBLE_MEM data is never
   reused.  */
2806 restore_constants (px)
2816 if (GET_CODE (x) == CONST_DOUBLE)
2818 /* We have to make a new CONST_DOUBLE to ensure that we account for
2819 it correctly. Using the old CONST_DOUBLE_MEM data is wrong. */
2820 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2824 REAL_VALUE_FROM_CONST_DOUBLE (d, x);
2825 *px = CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
2828 *px = immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
2832 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == CONST)
2834 restore_constants (&XEXP (x, 0));
2835 *px = validize_mem (force_const_mem (GET_MODE (x), XEXP (x, 0)));
2837 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == SUBREG)
2839 /* This must be (subreg/i:M1 (const/i:M2 ...) 0). */
2840 rtx new = XEXP (SUBREG_REG (x), 0);
2842 restore_constants (&new);
2843 new = force_const_mem (GET_MODE (SUBREG_REG (x)), new);
2844 PUT_MODE (new, GET_MODE (x));
2845 *px = validize_mem (new);
2847 else if (RTX_INTEGRATED_P (x) && GET_CODE (x) == ADDRESS)
2849 restore_constants (&XEXP (x, 0));
2850 *px = XEXP (force_const_mem (GET_MODE (x), XEXP (x, 0)), 0);
/* Otherwise recurse into every operand, per the rtx format string.  */
2854 fmt = GET_RTX_FORMAT (GET_CODE (x));
2855 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
2860 for (j = 0; j < XVECLEN (x, i); j++)
2861 restore_constants (&XVECEXP (x, i, j));
2865 restore_constants (&XEXP (x, i));
2872 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
2873 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
2874 that it points to the node itself, thus indicating that the node is its
2875 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
2876 the given node is NULL, recursively descend the decl/block tree which
2877 it is the root of, and for each other ..._DECL or BLOCK node contained
2878 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
2879 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
2880 values to point to themselves. */
/* Make BLOCK node STMT its own abstract origin if none is set, then do
   the same recursively for its local decls and sub-blocks.  */
2883 set_block_origin_self (stmt)
2886 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
2888 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
2891 register tree local_decl;
2893 for (local_decl = BLOCK_VARS (stmt);
2894 local_decl != NULL_TREE;
2895 local_decl = TREE_CHAIN (local_decl))
2896 set_decl_origin_self (local_decl); /* Potential recursion. */
2900 register tree subblock;
2902 for (subblock = BLOCK_SUBBLOCKS (stmt);
2903 subblock != NULL_TREE;
2904 subblock = BLOCK_CHAIN (subblock))
2905 set_block_origin_self (subblock); /* Recurse. */
2910 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
2911 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
2912 node so that it points to the node itself, thus indicating that the
2913 node represents its own (abstract) origin. Additionally, if the
2914 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
2915 the decl/block tree of which the given node is the root, and for
2916 each other ..._DECL or BLOCK node contained therein whose
2917 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
2918 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
2919 point to themselves. */
/* Make DECL its own abstract origin if none is set; for a FUNCTION_DECL
   also cover its arguments and its body's outermost BLOCK.  */
2922 set_decl_origin_self (decl)
2925 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
2927 DECL_ABSTRACT_ORIGIN (decl) = decl;
2928 if (TREE_CODE (decl) == FUNCTION_DECL)
2932 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2933 DECL_ABSTRACT_ORIGIN (arg) = arg;
2934 if (DECL_INITIAL (decl) != NULL_TREE
2935 && DECL_INITIAL (decl) != error_mark_node)
2936 set_block_origin_self (DECL_INITIAL (decl));
2941 /* Given a pointer to some BLOCK node, and a boolean value to set the
2942 "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
2943 the given block, and for all local decls and all local sub-blocks
2944 (recursively) which are contained therein. */
/* Set BLOCK_ABSTRACT of STMT (a BLOCK) to SETTING, and recursively of
   all its local decls and sub-blocks.  */
2947 set_block_abstract_flags (stmt, setting)
2949 register int setting;
2951 BLOCK_ABSTRACT (stmt) = setting;
2954 register tree local_decl;
2956 for (local_decl = BLOCK_VARS (stmt);
2957 local_decl != NULL_TREE;
2958 local_decl = TREE_CHAIN (local_decl))
2959 set_decl_abstract_flags (local_decl, setting);
2963 register tree subblock;
2965 for (subblock = BLOCK_SUBBLOCKS (stmt);
2966 subblock != NULL_TREE;
2967 subblock = BLOCK_CHAIN (subblock))
2968 set_block_abstract_flags (subblock, setting);
2972 /* Given a pointer to some ..._DECL node, and a boolean value to set the
2973 "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
2974 given decl, and (in the case where the decl is a FUNCTION_DECL) also
2975 set the abstract flags for all of the parameters, local vars, local
2976 blocks and sub-blocks (recursively) to the same setting. */
/* Set DECL_ABSTRACT of DECL to SETTING; for a FUNCTION_DECL also mark
   its arguments and its body's BLOCK tree the same way.  */
2979 set_decl_abstract_flags (decl, setting)
2981 register int setting;
2983 DECL_ABSTRACT (decl) = setting;
2984 if (TREE_CODE (decl) == FUNCTION_DECL)
2988 for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
2989 DECL_ABSTRACT (arg) = setting;
2990 if (DECL_INITIAL (decl) != NULL_TREE
2991 && DECL_INITIAL (decl) != error_mark_node)
2992 set_block_abstract_flags (DECL_INITIAL (decl), setting);
2996 /* Output the assembly language code for the function FNDECL
2997 from its DECL_SAVED_INSNS. Used for inline functions that are output
2998 at end of compilation instead of where they came in the source. */
/* Emit assembly for FNDECL from its DECL_SAVED_INSNS: restore the saved
   per-function state, rebuild the constant pool, and run the rest of
   the compiler over the saved insn chain.  */
3001 output_inline_function (fndecl)
3006 int save_flag_no_inline = flag_no_inline;
3008 if (output_bytecode)
3010 warning ("`inline' ignored for bytecode output");
3014 /* Things we allocate from here on are part of this function, not
   of the caller; use function-local storage.  */
3016 temporary_allocation ();
3018 head = DECL_SAVED_INSNS (fndecl);
3019 current_function_decl = fndecl;
3021 /* This call is only used to initialize global variables. */
3022 init_function_start (fndecl, "lossage", 1);
3024 /* Redo parameter determinations in case the FUNCTION_...
3025 macros took machine-specific actions that need to be redone. */
3026 assign_parms (fndecl, 1);
3028 /* Set stack frame size. */
3029 assign_stack_local (BLKmode, DECL_FRAME_SIZE (fndecl), 0);
3031 restore_reg_data (FIRST_PARM_INSN (head));
3033 stack_slot_list = STACK_SLOT_LIST (head);
3034 forced_labels = FORCED_LABELS (head);
/* Re-derive the current_function_* state from the saved flag word.  */
3036 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_ALLOCA)
3037 current_function_calls_alloca = 1;
3039 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_SETJMP)
3040 current_function_calls_setjmp = 1;
3042 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_CALLS_LONGJMP)
3043 current_function_calls_longjmp = 1;
3045 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_STRUCT)
3046 current_function_returns_struct = 1;
3048 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_PCC_STRUCT)
3049 current_function_returns_pcc_struct = 1;
3051 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_NEEDS_CONTEXT)
3052 current_function_needs_context = 1;
3054 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_HAS_NONLOCAL_LABEL)
3055 current_function_has_nonlocal_label = 1;
3057 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_RETURNS_POINTER)
3058 current_function_returns_pointer = 1;
3060 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_CONST_POOL)
3061 current_function_uses_const_pool = 1;
3063 if (FUNCTION_FLAGS (head) & FUNCTION_FLAGS_USES_PIC_OFFSET_TABLE)
3064 current_function_uses_pic_offset_table = 1;
3066 current_function_outgoing_args_size = OUTGOING_ARGS_SIZE (head);
3067 current_function_pops_args = POPS_ARGS (head);
3069 /* This is the only thing that the expand_function_end call which used
3070 to be here actually does; that call can cause problems. */
3071 immediate_size_expand--;
3073 /* Find last insn and rebuild the constant pool. */
3074 for (last = FIRST_PARM_INSN (head);
3075 NEXT_INSN (last); last = NEXT_INSN (last))
3077 if (GET_RTX_CLASS (GET_CODE (last)) == 'i')
3079 restore_constants (&PATTERN (last));
3080 restore_constants (&REG_NOTES (last)); /* fixed: was mis-encoded as "®_NOTES" */
3084 set_new_first_and_last_insn (FIRST_PARM_INSN (head), last);
3085 set_new_first_and_last_label_num (FIRST_LABELNO (head), LAST_LABELNO (head));
3087 /* We must have already output DWARF debugging information for the
3088 original (abstract) inline function declaration/definition, so
3089 we want to make sure that the debugging information we generate
3090 for this special instance of the inline function refers back to
3091 the information we already generated. To make sure that happens,
3092 we simply have to set the DECL_ABSTRACT_ORIGIN for the function
3093 node (and for all of the local ..._DECL nodes which are its children)
3094 so that they all point to themselves. */
3096 set_decl_origin_self (fndecl);
3098 /* We're not deferring this any longer. */
3099 DECL_DEFER_OUTPUT (fndecl) = 0;
3101 /* Integrating function calls isn't safe anymore, so turn on
   flag_no_inline for the rest of this compilation (restored below).  */
3105 /* Compile this function all the way down to assembly code. */
3106 rest_of_compilation (fndecl);
3108 /* Reset flag_no_inline to its original value. */
3109 flag_no_inline = save_flag_no_inline;
3111 current_function_decl = 0;