1 /* Emit RTL for the GNU C-Compiler expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 /* Middle-to-low level generation of rtx code and insns.
27 This file contains the functions `gen_rtx', `gen_reg_rtx'
28 and `gen_label_rtx' that are the usual ways of creating rtl
29 expressions for most purposes.
31 It also has the functions for creating insns and linking
32 them in the doubly-linked chain.
34 The patterns of the insns are created by machine-dependent
35 routines in insn-emit.c, which is generated automatically from
36 the machine description. These routines use `gen_rtx' to make
37 the individual rtx's of the pattern; what is machine dependent
38 is the kind of rtx's they make and what arguments they use. */
50 #include "hard-reg-set.h"
52 #include "insn-config.h"
57 #include "basic-block.h"
60 #include "langhooks.h"
62 /* Commonly used modes. */
64 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
65 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
66 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
67 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
70 /* This is *not* reset after each function. It gives each CODE_LABEL
71 in the entire compilation a unique label number. */
73 static int label_num = 1;
75 /* Highest label number in current function.
76 Zero means use the value of label_num instead.
77 This is nonzero only when belatedly compiling an inline function. */
79 static int last_label_num;
81 /* Value label_num had when set_new_first_and_last_label_number was called.
82 If label_num has not changed since then, last_label_num is valid. */
84 static int base_label_num;
86 /* Nonzero means do not generate NOTEs for source line numbers. */
88 static int no_line_numbers;
90 /* Commonly used rtx's, so that we only need space for one copy.
91 These are initialized once for the entire compilation.
92 All of these except perhaps the floating-point CONST_DOUBLEs
93 are unique; no other rtx-object will be equal to any of these. */
95 rtx global_rtl[GR_MAX];
97 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
98 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
99 record a copy of const[012]_rtx. */
101 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
105 REAL_VALUE_TYPE dconst0;
106 REAL_VALUE_TYPE dconst1;
107 REAL_VALUE_TYPE dconst2;
108 REAL_VALUE_TYPE dconstm1;
110 /* All references to the following fixed hard registers go through
111 these unique rtl objects. On machines where the frame-pointer and
112 arg-pointer are the same register, they use the same unique object.
114 After register allocation, other rtl objects which used to be pseudo-regs
115 may be clobbered to refer to the frame-pointer register.
116 But references that were originally to the frame-pointer can be
117 distinguished from the others because they contain frame_pointer_rtx.
119 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
120 tricky: until register elimination has taken place hard_frame_pointer_rtx
121 should be used if it is being set, and frame_pointer_rtx otherwise. After
122 register elimination hard_frame_pointer_rtx should always be used.
123 On machines where the two registers are same (most) then these are the
126 In an inline procedure, the stack and frame pointer rtxs may not be
127 used for anything else. */
128 rtx struct_value_rtx; /* (REG:Pmode STRUCT_VALUE_REGNUM) */
129 rtx struct_value_incoming_rtx; /* (REG:Pmode STRUCT_VALUE_INCOMING_REGNUM) */
130 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
131 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
132 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
134 /* This is used to implement __builtin_return_address for some machines.
135 See for instance the MIPS port. */
136 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
138 /* We make one copy of (const_int C) where C is in
139 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
140 to save space during the compilation and simplify comparisons of
143 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
145 /* A hash table storing CONST_INTs whose absolute value is greater
146 than MAX_SAVED_CONST_INT. */
148 static htab_t const_int_htab;
150 /* A hash table storing memory attribute structures. */
151 static htab_t mem_attrs_htab;
153 /* start_sequence and gen_sequence can make a lot of rtx expressions which are
154 shortly thrown away. We use two mechanisms to prevent this waste:
156 For sizes up to 5 elements, we keep a SEQUENCE and its associated
157 rtvec for use by gen_sequence. One entry for each size is
158 sufficient because most cases are calls to gen_sequence followed by
159 immediately emitting the SEQUENCE. Reuse is safe since emitting a
160 sequence is destructive on the insn in it anyway and hence can't be
163 We do not bother to save this cached data over nested function calls.
164 Instead, we just reinitialize them. */
166 #define SEQUENCE_RESULT_SIZE 5
168 static rtx sequence_result[SEQUENCE_RESULT_SIZE];
170 /* During RTL generation, we also keep a list of free INSN rtl codes. */
171 static rtx free_insn;
173 #define first_insn (cfun->emit->x_first_insn)
174 #define last_insn (cfun->emit->x_last_insn)
175 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
176 #define last_linenum (cfun->emit->x_last_linenum)
177 #define last_filename (cfun->emit->x_last_filename)
178 #define first_label_num (cfun->emit->x_first_label_num)
180 static rtx make_jump_insn_raw PARAMS ((rtx));
181 static rtx make_call_insn_raw PARAMS ((rtx));
182 static rtx find_line_note PARAMS ((rtx));
183 static void mark_sequence_stack PARAMS ((struct sequence_stack *));
184 static rtx change_address_1 PARAMS ((rtx, enum machine_mode, rtx,
186 static void unshare_all_rtl_1 PARAMS ((rtx));
187 static void unshare_all_decls PARAMS ((tree));
188 static void reset_used_decls PARAMS ((tree));
189 static void mark_label_nuses PARAMS ((rtx));
190 static hashval_t const_int_htab_hash PARAMS ((const void *));
191 static int const_int_htab_eq PARAMS ((const void *,
193 static hashval_t mem_attrs_htab_hash PARAMS ((const void *));
194 static int mem_attrs_htab_eq PARAMS ((const void *,
196 static void mem_attrs_mark PARAMS ((const void *));
197 static mem_attrs *get_mem_attrs PARAMS ((HOST_WIDE_INT, tree, rtx,
200 static tree component_ref_for_mem_expr PARAMS ((tree));
201 static rtx gen_const_vector_0 PARAMS ((enum machine_mode));
203 /* Probability of the conditional branch currently proceeded by try_split.
204 Set to -1 otherwise. */
205 int split_branch_probability = -1;
207 /* Returns a hash code for X (which is a really a CONST_INT). */
210 const_int_htab_hash (x)
213 return (hashval_t) INTVAL ((const struct rtx_def *) x);
216 /* Returns non-zero if the value represented by X (which is really a
217 CONST_INT) is the same as that given by Y (which is really a
221 const_int_htab_eq (x, y)
225 return (INTVAL ((const struct rtx_def *) x) == *((const HOST_WIDE_INT *) y));
228 /* Returns a hash code for X (which is a really a mem_attrs *). */
231 mem_attrs_htab_hash (x)
234 mem_attrs *p = (mem_attrs *) x;
236 return (p->alias ^ (p->align * 1000)
237 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
238 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
242 /* Returns non-zero if the value represented by X (which is really a
243 mem_attrs *) is the same as that given by Y (which is also really a
247 mem_attrs_htab_eq (x, y)
251 mem_attrs *p = (mem_attrs *) x;
252 mem_attrs *q = (mem_attrs *) y;
254 return (p->alias == q->alias && p->expr == q->expr && p->offset == q->offset
255 && p->size == q->size && p->align == q->align);
258 /* This routine is called when we determine that we need a mem_attrs entry.
259 It marks the associated decl and RTL as being used, if present. */
265 mem_attrs *p = (mem_attrs *) x;
268 ggc_mark_tree (p->expr);
271 ggc_mark_rtx (p->offset);
274 ggc_mark_rtx (p->size);
277 /* Allocate a new mem_attrs structure and insert it into the hash table if
278 one identical to it is not already in the table. We are doing this for
282 get_mem_attrs (alias, expr, offset, size, align, mode)
288 enum machine_mode mode;
293 /* If everything is the default, we can just return zero. */
294 if (alias == 0 && expr == 0 && offset == 0
296 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
297 && (align == BITS_PER_UNIT
299 && mode != BLKmode && align == GET_MODE_ALIGNMENT (mode))))
304 attrs.offset = offset;
308 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
311 *slot = ggc_alloc (sizeof (mem_attrs));
312 memcpy (*slot, &attrs, sizeof (mem_attrs));
318 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
319 don't attempt to share with the various global pieces of rtl (such as
320 frame_pointer_rtx). */
323 gen_raw_REG (mode, regno)
324 enum machine_mode mode;
327 rtx x = gen_rtx_raw_REG (mode, regno);
328 ORIGINAL_REGNO (x) = regno;
332 /* There are some RTL codes that require special attention; the generation
333 functions do the raw handling. If you add to this list, modify
334 special_rtx in gengenrtl.c as well. */
337 gen_rtx_CONST_INT (mode, arg)
338 enum machine_mode mode ATTRIBUTE_UNUSED;
343 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
344 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
346 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
347 if (const_true_rtx && arg == STORE_FLAG_VALUE)
348 return const_true_rtx;
351 /* Look up the CONST_INT in the hash table. */
352 slot = htab_find_slot_with_hash (const_int_htab, &arg,
353 (hashval_t) arg, INSERT);
355 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
361 gen_int_mode (c, mode)
363 enum machine_mode mode;
365 return GEN_INT (trunc_int_for_mode (c, mode));
368 /* CONST_DOUBLEs needs special handling because their length is known
372 gen_rtx_CONST_DOUBLE (mode, arg0, arg1)
373 enum machine_mode mode;
374 HOST_WIDE_INT arg0, arg1;
376 rtx r = rtx_alloc (CONST_DOUBLE);
380 X0EXP (r, 0) = NULL_RTX;
384 for (i = GET_RTX_LENGTH (CONST_DOUBLE) - 1; i > 2; --i)
391 gen_rtx_REG (mode, regno)
392 enum machine_mode mode;
395 /* In case the MD file explicitly references the frame pointer, have
396 all such references point to the same frame pointer. This is
397 used during frame pointer elimination to distinguish the explicit
398 references to these registers from pseudos that happened to be
401 If we have eliminated the frame pointer or arg pointer, we will
402 be using it as a normal register, for example as a spill
403 register. In such cases, we might be accessing it in a mode that
404 is not Pmode and therefore cannot use the pre-allocated rtx.
406 Also don't do this when we are making new REGs in reload, since
407 we don't want to get confused with the real pointers. */
409 if (mode == Pmode && !reload_in_progress)
411 if (regno == FRAME_POINTER_REGNUM)
412 return frame_pointer_rtx;
413 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
414 if (regno == HARD_FRAME_POINTER_REGNUM)
415 return hard_frame_pointer_rtx;
417 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
418 if (regno == ARG_POINTER_REGNUM)
419 return arg_pointer_rtx;
421 #ifdef RETURN_ADDRESS_POINTER_REGNUM
422 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
423 return return_address_pointer_rtx;
425 if (regno == PIC_OFFSET_TABLE_REGNUM
426 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
427 return pic_offset_table_rtx;
428 if (regno == STACK_POINTER_REGNUM)
429 return stack_pointer_rtx;
432 return gen_raw_REG (mode, regno);
436 gen_rtx_MEM (mode, addr)
437 enum machine_mode mode;
440 rtx rt = gen_rtx_raw_MEM (mode, addr);
442 /* This field is not cleared by the mere allocation of the rtx, so
450 gen_rtx_SUBREG (mode, reg, offset)
451 enum machine_mode mode;
455 /* This is the most common failure type.
456 Catch it early so we can see who does it. */
457 if ((offset % GET_MODE_SIZE (mode)) != 0)
460 /* This check isn't usable right now because combine will
461 throw arbitrary crap like a CALL into a SUBREG in
462 gen_lowpart_for_combine so we must just eat it. */
464 /* Check for this too. */
465 if (offset >= GET_MODE_SIZE (GET_MODE (reg)))
468 return gen_rtx_fmt_ei (SUBREG, mode, reg, offset);
471 /* Generate a SUBREG representing the least-significant part of REG if MODE
472 is smaller than mode of REG, otherwise paradoxical SUBREG. */
475 gen_lowpart_SUBREG (mode, reg)
476 enum machine_mode mode;
479 enum machine_mode inmode;
481 inmode = GET_MODE (reg);
482 if (inmode == VOIDmode)
484 return gen_rtx_SUBREG (mode, reg,
485 subreg_lowpart_offset (mode, inmode));
488 /* rtx gen_rtx (code, mode, [element1, ..., elementn])
490 ** This routine generates an RTX of the size specified by
491 ** <code>, which is an RTX code. The RTX structure is initialized
492 ** from the arguments <element1> through <elementn>, which are
493 ** interpreted according to the specific RTX type's format. The
494 ** special machine mode associated with the rtx (if any) is specified
497 ** gen_rtx can be invoked in a way which resembles the lisp-like
498 ** rtx it will generate. For example, the following rtx structure:
500 ** (plus:QI (mem:QI (reg:SI 1))
501 ** (mem:QI (plusw:SI (reg:SI 2) (reg:SI 3))))
503 ** ...would be generated by the following C code:
505 ** gen_rtx (PLUS, QImode,
506 ** gen_rtx (MEM, QImode,
507 ** gen_rtx (REG, SImode, 1)),
508 ** gen_rtx (MEM, QImode,
509 ** gen_rtx (PLUS, SImode,
510 ** gen_rtx (REG, SImode, 2),
511 ** gen_rtx (REG, SImode, 3)))),
516 gen_rtx VPARAMS ((enum rtx_code code, enum machine_mode mode, ...))
518 int i; /* Array indices... */
519 const char *fmt; /* Current rtx's format... */
520 rtx rt_val; /* RTX to return to caller... */
523 VA_FIXEDARG (p, enum rtx_code, code);
524 VA_FIXEDARG (p, enum machine_mode, mode);
529 rt_val = gen_rtx_CONST_INT (mode, va_arg (p, HOST_WIDE_INT));
534 HOST_WIDE_INT arg0 = va_arg (p, HOST_WIDE_INT);
535 HOST_WIDE_INT arg1 = va_arg (p, HOST_WIDE_INT);
537 rt_val = gen_rtx_CONST_DOUBLE (mode, arg0, arg1);
542 rt_val = gen_rtx_REG (mode, va_arg (p, int));
546 rt_val = gen_rtx_MEM (mode, va_arg (p, rtx));
550 rt_val = rtx_alloc (code); /* Allocate the storage space. */
551 rt_val->mode = mode; /* Store the machine mode... */
553 fmt = GET_RTX_FORMAT (code); /* Find the right format... */
554 for (i = 0; i < GET_RTX_LENGTH (code); i++)
558 case '0': /* Unused field. */
561 case 'i': /* An integer? */
562 XINT (rt_val, i) = va_arg (p, int);
565 case 'w': /* A wide integer? */
566 XWINT (rt_val, i) = va_arg (p, HOST_WIDE_INT);
569 case 's': /* A string? */
570 XSTR (rt_val, i) = va_arg (p, char *);
573 case 'e': /* An expression? */
574 case 'u': /* An insn? Same except when printing. */
575 XEXP (rt_val, i) = va_arg (p, rtx);
578 case 'E': /* An RTX vector? */
579 XVEC (rt_val, i) = va_arg (p, rtvec);
582 case 'b': /* A bitmap? */
583 XBITMAP (rt_val, i) = va_arg (p, bitmap);
586 case 't': /* A tree? */
587 XTREE (rt_val, i) = va_arg (p, tree);
601 /* gen_rtvec (n, [rt1, ..., rtn])
603 ** This routine creates an rtvec and stores within it the
604 ** pointers to rtx's which are its arguments.
609 gen_rtvec VPARAMS ((int n, ...))
615 VA_FIXEDARG (p, int, n);
618 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
620 vector = (rtx *) alloca (n * sizeof (rtx));
622 for (i = 0; i < n; i++)
623 vector[i] = va_arg (p, rtx);
625 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
629 return gen_rtvec_v (save_n, vector);
633 gen_rtvec_v (n, argp)
641 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
643 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
645 for (i = 0; i < n; i++)
646 rt_val->elem[i] = *argp++;
651 /* Generate a REG rtx for a new pseudo register of mode MODE.
652 This pseudo is assigned the next sequential register number. */
656 enum machine_mode mode;
658 struct function *f = cfun;
661 /* Don't let anything called after initial flow analysis create new
666 if (generating_concat_p
667 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
668 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
670 /* For complex modes, don't make a single pseudo.
671 Instead, make a CONCAT of two pseudos.
672 This allows noncontiguous allocation of the real and imaginary parts,
673 which makes much better code. Besides, allocating DCmode
674 pseudos overstrains reload on some machines like the 386. */
675 rtx realpart, imagpart;
676 int size = GET_MODE_UNIT_SIZE (mode);
677 enum machine_mode partmode
678 = mode_for_size (size * BITS_PER_UNIT,
679 (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
680 ? MODE_FLOAT : MODE_INT),
683 realpart = gen_reg_rtx (partmode);
684 imagpart = gen_reg_rtx (partmode);
685 return gen_rtx_CONCAT (mode, realpart, imagpart);
688 /* Make sure regno_pointer_align, regno_decl, and regno_reg_rtx are large
689 enough to have an element for this pseudo reg number. */
691 if (reg_rtx_no == f->emit->regno_pointer_align_length)
693 int old_size = f->emit->regno_pointer_align_length;
698 new = xrealloc (f->emit->regno_pointer_align, old_size * 2);
699 memset (new + old_size, 0, old_size);
700 f->emit->regno_pointer_align = (unsigned char *) new;
702 new1 = (rtx *) xrealloc (f->emit->x_regno_reg_rtx,
703 old_size * 2 * sizeof (rtx));
704 memset (new1 + old_size, 0, old_size * sizeof (rtx));
705 regno_reg_rtx = new1;
707 new2 = (tree *) xrealloc (f->emit->regno_decl,
708 old_size * 2 * sizeof (tree));
709 memset (new2 + old_size, 0, old_size * sizeof (tree));
710 f->emit->regno_decl = new2;
712 f->emit->regno_pointer_align_length = old_size * 2;
715 val = gen_raw_REG (mode, reg_rtx_no);
716 regno_reg_rtx[reg_rtx_no++] = val;
720 /* Identify REG (which may be a CONCAT) as a user register. */
726 if (GET_CODE (reg) == CONCAT)
728 REG_USERVAR_P (XEXP (reg, 0)) = 1;
729 REG_USERVAR_P (XEXP (reg, 1)) = 1;
731 else if (GET_CODE (reg) == REG)
732 REG_USERVAR_P (reg) = 1;
737 /* Identify REG as a probable pointer register and show its alignment
738 as ALIGN, if nonzero. */
741 mark_reg_pointer (reg, align)
745 if (! REG_POINTER (reg))
747 REG_POINTER (reg) = 1;
750 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
752 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
753 /* We can no-longer be sure just how aligned this pointer is */
754 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
757 /* Return 1 plus largest pseudo reg number used in the current function. */
765 /* Return 1 + the largest label number used so far in the current function. */
770 if (last_label_num && label_num == base_label_num)
771 return last_label_num;
775 /* Return first label number used in this function (if any were used). */
778 get_first_label_num ()
780 return first_label_num;
783 /* Return the final regno of X, which is a SUBREG of a hard
786 subreg_hard_regno (x, check_mode)
790 enum machine_mode mode = GET_MODE (x);
791 unsigned int byte_offset, base_regno, final_regno;
792 rtx reg = SUBREG_REG (x);
794 /* This is where we attempt to catch illegal subregs
795 created by the compiler. */
796 if (GET_CODE (x) != SUBREG
797 || GET_CODE (reg) != REG)
799 base_regno = REGNO (reg);
800 if (base_regno >= FIRST_PSEUDO_REGISTER)
802 if (check_mode && ! HARD_REGNO_MODE_OK (base_regno, GET_MODE (reg)))
805 /* Catch non-congruent offsets too. */
806 byte_offset = SUBREG_BYTE (x);
807 if ((byte_offset % GET_MODE_SIZE (mode)) != 0)
810 final_regno = subreg_regno (x);
815 /* Return a value representing some low-order bits of X, where the number
816 of low-order bits is given by MODE. Note that no conversion is done
817 between floating-point and fixed-point values, rather, the bit
818 representation is returned.
820 This function handles the cases in common between gen_lowpart, below,
821 and two variants in cse.c and combine.c. These are the cases that can
822 be safely handled at all points in the compilation.
824 If this is not a case we can handle, return 0. */
827 gen_lowpart_common (mode, x)
828 enum machine_mode mode;
831 int msize = GET_MODE_SIZE (mode);
832 int xsize = GET_MODE_SIZE (GET_MODE (x));
835 if (GET_MODE (x) == mode)
838 /* MODE must occupy no more words than the mode of X. */
839 if (GET_MODE (x) != VOIDmode
840 && ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
841 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
844 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
845 if (GET_MODE_CLASS (mode) == MODE_FLOAT
846 && GET_MODE (x) != VOIDmode && msize > xsize)
849 offset = subreg_lowpart_offset (mode, GET_MODE (x));
851 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
852 && (GET_MODE_CLASS (mode) == MODE_INT
853 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
855 /* If we are getting the low-order part of something that has been
856 sign- or zero-extended, we can either just use the object being
857 extended or make a narrower extension. If we want an even smaller
858 piece than the size of the object being extended, call ourselves
861 This case is used mostly by combine and cse. */
863 if (GET_MODE (XEXP (x, 0)) == mode)
865 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
866 return gen_lowpart_common (mode, XEXP (x, 0));
867 else if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
868 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
870 else if (GET_CODE (x) == SUBREG || GET_CODE (x) == REG
871 || GET_CODE (x) == CONCAT)
872 return simplify_gen_subreg (mode, x, GET_MODE (x), offset);
873 /* If X is a CONST_INT or a CONST_DOUBLE, extract the appropriate bits
874 from the low-order part of the constant. */
875 else if ((GET_MODE_CLASS (mode) == MODE_INT
876 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
877 && GET_MODE (x) == VOIDmode
878 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
880 /* If MODE is twice the host word size, X is already the desired
881 representation. Otherwise, if MODE is wider than a word, we can't
882 do this. If MODE is exactly a word, return just one CONST_INT. */
884 if (GET_MODE_BITSIZE (mode) >= 2 * HOST_BITS_PER_WIDE_INT)
886 else if (GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT)
888 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
889 return (GET_CODE (x) == CONST_INT ? x
890 : GEN_INT (CONST_DOUBLE_LOW (x)));
893 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
894 HOST_WIDE_INT val = (GET_CODE (x) == CONST_INT ? INTVAL (x)
895 : CONST_DOUBLE_LOW (x));
897 /* Sign extend to HOST_WIDE_INT. */
898 val = trunc_int_for_mode (val, mode);
900 return (GET_CODE (x) == CONST_INT && INTVAL (x) == val ? x
905 #ifndef REAL_ARITHMETIC
906 /* If X is an integral constant but we want it in floating-point, it
907 must be the case that we have a union of an integer and a floating-point
908 value. If the machine-parameters allow it, simulate that union here
909 and return the result. The two-word and single-word cases are
912 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
913 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
914 || flag_pretend_float)
915 && GET_MODE_CLASS (mode) == MODE_FLOAT
916 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
917 && GET_CODE (x) == CONST_INT
918 && sizeof (float) * HOST_BITS_PER_CHAR == HOST_BITS_PER_WIDE_INT)
920 union {HOST_WIDE_INT i; float d; } u;
923 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
925 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
926 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
927 || flag_pretend_float)
928 && GET_MODE_CLASS (mode) == MODE_FLOAT
929 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
930 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
931 && GET_MODE (x) == VOIDmode
932 && (sizeof (double) * HOST_BITS_PER_CHAR
933 == 2 * HOST_BITS_PER_WIDE_INT))
935 union {HOST_WIDE_INT i[2]; double d; } u;
936 HOST_WIDE_INT low, high;
938 if (GET_CODE (x) == CONST_INT)
939 low = INTVAL (x), high = low >> (HOST_BITS_PER_WIDE_INT -1);
941 low = CONST_DOUBLE_LOW (x), high = CONST_DOUBLE_HIGH (x);
942 #ifdef HOST_WORDS_BIG_ENDIAN
943 u.i[0] = high, u.i[1] = low;
945 u.i[0] = low, u.i[1] = high;
947 return CONST_DOUBLE_FROM_REAL_VALUE (u.d, mode);
950 /* Similarly, if this is converting a floating-point value into a
951 single-word integer. Only do this is the host and target parameters are
954 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
955 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
956 || flag_pretend_float)
957 && (GET_MODE_CLASS (mode) == MODE_INT
958 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
959 && GET_CODE (x) == CONST_DOUBLE
960 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
961 && GET_MODE_BITSIZE (mode) == BITS_PER_WORD)
962 return constant_subword (x, (offset / UNITS_PER_WORD), GET_MODE (x));
964 /* Similarly, if this is converting a floating-point value into a
965 two-word integer, we can do this one word at a time and make an
966 integer. Only do this is the host and target parameters are
969 else if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
970 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
971 || flag_pretend_float)
972 && (GET_MODE_CLASS (mode) == MODE_INT
973 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
974 && GET_CODE (x) == CONST_DOUBLE
975 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
976 && GET_MODE_BITSIZE (mode) == 2 * BITS_PER_WORD)
978 rtx lowpart, highpart;
980 lowpart = constant_subword (x,
981 (offset / UNITS_PER_WORD) + WORDS_BIG_ENDIAN,
983 highpart = constant_subword (x,
984 (offset / UNITS_PER_WORD) + (! WORDS_BIG_ENDIAN),
986 if (lowpart && GET_CODE (lowpart) == CONST_INT
987 && highpart && GET_CODE (highpart) == CONST_INT)
988 return immed_double_const (INTVAL (lowpart), INTVAL (highpart), mode);
990 #else /* ifndef REAL_ARITHMETIC */
992 /* When we have a FP emulator, we can handle all conversions between
993 FP and integer operands. This simplifies reload because it
994 doesn't have to deal with constructs like (subreg:DI
995 (const_double:SF ...)) or (subreg:DF (const_int ...)). */
996 /* Single-precision floats are always 32-bits and double-precision
997 floats are always 64-bits. */
999 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1000 && GET_MODE_BITSIZE (mode) == 32
1001 && GET_CODE (x) == CONST_INT)
1007 r = REAL_VALUE_FROM_TARGET_SINGLE (i);
1008 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1010 else if (GET_MODE_CLASS (mode) == MODE_FLOAT
1011 && GET_MODE_BITSIZE (mode) == 64
1012 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
1013 && GET_MODE (x) == VOIDmode)
1017 HOST_WIDE_INT low, high;
1019 if (GET_CODE (x) == CONST_INT)
1022 high = low >> (HOST_BITS_PER_WIDE_INT - 1);
1026 low = CONST_DOUBLE_LOW (x);
1027 high = CONST_DOUBLE_HIGH (x);
1030 #if HOST_BITS_PER_WIDE_INT == 32
1031 /* REAL_VALUE_TARGET_DOUBLE takes the addressing order of the
1033 if (WORDS_BIG_ENDIAN)
1034 i[0] = high, i[1] = low;
1036 i[0] = low, i[1] = high;
1041 r = REAL_VALUE_FROM_TARGET_DOUBLE (i);
1042 return CONST_DOUBLE_FROM_REAL_VALUE (r, mode);
1044 else if ((GET_MODE_CLASS (mode) == MODE_INT
1045 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
1046 && GET_CODE (x) == CONST_DOUBLE
1047 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
1050 long i[4]; /* Only the low 32 bits of each 'long' are used. */
1051 int endian = WORDS_BIG_ENDIAN ? 1 : 0;
1053 /* Convert 'r' into an array of four 32-bit words in target word
1055 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
1056 switch (GET_MODE_BITSIZE (GET_MODE (x)))
1059 REAL_VALUE_TO_TARGET_SINGLE (r, i[3 * endian]);
1062 i[3 - 3 * endian] = 0;
1065 REAL_VALUE_TO_TARGET_DOUBLE (r, i + 2 * endian);
1066 i[2 - 2 * endian] = 0;
1067 i[3 - 2 * endian] = 0;
1070 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i + endian);
1071 i[3 - 3 * endian] = 0;
1074 REAL_VALUE_TO_TARGET_LONG_DOUBLE (r, i);
1079 /* Now, pack the 32-bit elements of the array into a CONST_DOUBLE
1081 #if HOST_BITS_PER_WIDE_INT == 32
1082 return immed_double_const (i[3 * endian], i[1 + endian], mode);
1084 if (HOST_BITS_PER_WIDE_INT != 64)
1087 return immed_double_const ((((unsigned long) i[3 * endian])
1088 | ((HOST_WIDE_INT) i[1 + endian] << 32)),
1089 (((unsigned long) i[2 - endian])
1090 | ((HOST_WIDE_INT) i[3 - 3 * endian] << 32)),
1094 #endif /* ifndef REAL_ARITHMETIC */
1096 /* Otherwise, we can't do this. */
1100 /* Return the real part (which has mode MODE) of a complex value X.
1101 This always comes at the low address in memory. */
1104 gen_realpart (mode, x)
1105 enum machine_mode mode;
1108 if (WORDS_BIG_ENDIAN
1109 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1111 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1113 ("can't access real part of complex value in hard register");
1114 else if (WORDS_BIG_ENDIAN)
1115 return gen_highpart (mode, x);
1117 return gen_lowpart (mode, x);
1120 /* Return the imaginary part (which has mode MODE) of a complex value X.
1121 This always comes at the high address in memory. */
1124 gen_imagpart (mode, x)
1125 enum machine_mode mode;
1128 if (WORDS_BIG_ENDIAN)
1129 return gen_lowpart (mode, x);
1130 else if (! WORDS_BIG_ENDIAN
1131 && GET_MODE_BITSIZE (mode) < BITS_PER_WORD
1133 && REGNO (x) < FIRST_PSEUDO_REGISTER)
1135 ("can't access imaginary part of complex value in hard register");
1137 return gen_highpart (mode, x);
1140 /* Return 1 iff X, assumed to be a SUBREG,
1141 refers to the real part of the complex value in its containing reg.
1142 Complex values are always stored with the real part in the first word,
1143 regardless of WORDS_BIG_ENDIAN. */
1146 subreg_realpart_p (x)
1149 if (GET_CODE (x) != SUBREG)
1152 return ((unsigned int) SUBREG_BYTE (x)
1153 < GET_MODE_UNIT_SIZE (GET_MODE (SUBREG_REG (x))));
1156 /* Assuming that X is an rtx (e.g., MEM, REG or SUBREG) for a value,
1157 return an rtx (MEM, SUBREG, or CONST_INT) that refers to the
1158 least-significant part of X.
1159 MODE specifies how big a part of X to return;
1160 it usually should not be larger than a word.
1161 If X is a MEM whose address is a QUEUED, the value may be so also. */
1164 gen_lowpart (mode, x)
1165 enum machine_mode mode;
1168 rtx result = gen_lowpart_common (mode, x);
1172 else if (GET_CODE (x) == REG)
1174 /* Must be a hard reg that's not valid in MODE. */
1175 result = gen_lowpart_common (mode, copy_to_reg (x));
1180 else if (GET_CODE (x) == MEM)
1182 /* The only additional case we can do is MEM. */
1184 if (WORDS_BIG_ENDIAN)
1185 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
1186 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
1188 if (BYTES_BIG_ENDIAN)
1189 /* Adjust the address so that the address-after-the-data
1191 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
1192 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
1194 return adjust_address (x, mode, offset);
1196 else if (GET_CODE (x) == ADDRESSOF)
1197 return gen_lowpart (mode, force_reg (GET_MODE (x), x));
1202 /* Like `gen_lowpart', but refer to the most significant part.
1203 This is used to access the imaginary part of a complex number. */
1206 gen_highpart (mode, x)
1207 enum machine_mode mode;
1210 unsigned int msize = GET_MODE_SIZE (mode);
1213 /* This case loses if X is a subreg. To catch bugs early,
1214 complain if an invalid MODE is used even in other cases. */
1215 if (msize > UNITS_PER_WORD
1216 && msize != GET_MODE_UNIT_SIZE (GET_MODE (x)))
1219 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1220 subreg_highpart_offset (mode, GET_MODE (x)));
1222 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1223 the target if we have a MEM. gen_highpart must return a valid operand,
1224 emitting code if necessary to do so. */
1225 if (result != NULL_RTX && GET_CODE (result) == MEM)
1226 result = validize_mem (result);
1233 /* Like gen_highpart_mode, but accept mode of EXP operand in case EXP can
1234 be VOIDmode constant. */
1236 gen_highpart_mode (outermode, innermode, exp)
1237 enum machine_mode outermode, innermode;
1240 if (GET_MODE (exp) != VOIDmode)
1242 if (GET_MODE (exp) != innermode)
1244 return gen_highpart (outermode, exp);
1246 return simplify_gen_subreg (outermode, exp, innermode,
1247 subreg_highpart_offset (outermode, innermode));
1249 /* Return offset in bytes to get OUTERMODE low part
1250 of the value in mode INNERMODE stored in memory in target format. */
1253 subreg_lowpart_offset (outermode, innermode)
1254 enum machine_mode outermode, innermode;
1256 unsigned int offset = 0;
1257 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1261 if (WORDS_BIG_ENDIAN)
1262 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1263 if (BYTES_BIG_ENDIAN)
1264 offset += difference % UNITS_PER_WORD;
1270 /* Return offset in bytes to get OUTERMODE high part
1271 of the value in mode INNERMODE stored in memory in target format. */
1273 subreg_highpart_offset (outermode, innermode)
1274 enum machine_mode outermode, innermode;
1276 unsigned int offset = 0;
1277 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1279 if (GET_MODE_SIZE (innermode) < GET_MODE_SIZE (outermode))
1284 if (! WORDS_BIG_ENDIAN)
1285 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1286 if (! BYTES_BIG_ENDIAN)
1287 offset += difference % UNITS_PER_WORD;
1293 /* Return 1 iff X, assumed to be a SUBREG,
1294 refers to the least significant part of its containing reg.
1295 If X is not a SUBREG, always return 1 (it is its own low part!). */
1298 subreg_lowpart_p (x)
1301 if (GET_CODE (x) != SUBREG)
1303 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1306 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1307 == SUBREG_BYTE (x));
1311 /* Helper routine for all the constant cases of operand_subword.
1312 Some places invoke this directly. */
1315 constant_subword (op, offset, mode)
1318 enum machine_mode mode;
1320 int size_ratio = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
1323 /* If OP is already an integer word, return it. */
1324 if (GET_MODE_CLASS (mode) == MODE_INT
1325 && GET_MODE_SIZE (mode) == UNITS_PER_WORD)
1328 #ifdef REAL_ARITHMETIC
1329 /* The output is some bits, the width of the target machine's word.
1330 A wider-word host can surely hold them in a CONST_INT. A narrower-word
1332 if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1333 && GET_MODE_CLASS (mode) == MODE_FLOAT
1334 && GET_MODE_BITSIZE (mode) == 64
1335 && GET_CODE (op) == CONST_DOUBLE)
1340 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1341 REAL_VALUE_TO_TARGET_DOUBLE (rv, k);
1343 /* We handle 32-bit and >= 64-bit words here. Note that the order in
1344 which the words are written depends on the word endianness.
1345 ??? This is a potential portability problem and should
1346 be fixed at some point.
1348 We must exercise caution with the sign bit. By definition there
1349 are 32 significant bits in K; there may be more in a HOST_WIDE_INT.
1350 Consider a host with a 32-bit long and a 64-bit HOST_WIDE_INT.
1351 So we explicitly mask and sign-extend as necessary. */
1352 if (BITS_PER_WORD == 32)
1355 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1356 return GEN_INT (val);
1358 #if HOST_BITS_PER_WIDE_INT >= 64
1359 else if (BITS_PER_WORD >= 64 && offset == 0)
1361 val = k[! WORDS_BIG_ENDIAN];
1362 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1363 val |= (HOST_WIDE_INT) k[WORDS_BIG_ENDIAN] & 0xffffffff;
1364 return GEN_INT (val);
1367 else if (BITS_PER_WORD == 16)
1369 val = k[offset >> 1];
1370 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1372 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1373 return GEN_INT (val);
1378 else if (HOST_BITS_PER_WIDE_INT >= BITS_PER_WORD
1379 && GET_MODE_CLASS (mode) == MODE_FLOAT
1380 && GET_MODE_BITSIZE (mode) > 64
1381 && GET_CODE (op) == CONST_DOUBLE)
1386 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1387 REAL_VALUE_TO_TARGET_LONG_DOUBLE (rv, k);
1389 if (BITS_PER_WORD == 32)
1392 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1393 return GEN_INT (val);
1395 #if HOST_BITS_PER_WIDE_INT >= 64
1396 else if (BITS_PER_WORD >= 64 && offset <= 1)
1398 val = k[offset * 2 + ! WORDS_BIG_ENDIAN];
1399 val = (((val & 0xffffffff) ^ 0x80000000) - 0x80000000) << 32;
1400 val |= (HOST_WIDE_INT) k[offset * 2 + WORDS_BIG_ENDIAN] & 0xffffffff;
1401 return GEN_INT (val);
1407 #else /* no REAL_ARITHMETIC */
1408 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1409 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1410 || flag_pretend_float)
1411 && GET_MODE_CLASS (mode) == MODE_FLOAT
1412 && GET_MODE_SIZE (mode) == 2 * UNITS_PER_WORD
1413 && GET_CODE (op) == CONST_DOUBLE)
1415 /* The constant is stored in the host's word-ordering,
1416 but we want to access it in the target's word-ordering. Some
1417 compilers don't like a conditional inside macro args, so we have two
1418 copies of the return. */
1419 #ifdef HOST_WORDS_BIG_ENDIAN
1420 return GEN_INT (offset == WORDS_BIG_ENDIAN
1421 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1423 return GEN_INT (offset != WORDS_BIG_ENDIAN
1424 ? CONST_DOUBLE_HIGH (op) : CONST_DOUBLE_LOW (op));
1427 #endif /* no REAL_ARITHMETIC */
1429 /* Single word float is a little harder, since single- and double-word
1430 values often do not have the same high-order bits. We have already
1431 verified that we want the only defined word of the single-word value. */
1432 #ifdef REAL_ARITHMETIC
1433 if (GET_MODE_CLASS (mode) == MODE_FLOAT
1434 && GET_MODE_BITSIZE (mode) == 32
1435 && GET_CODE (op) == CONST_DOUBLE)
1440 REAL_VALUE_FROM_CONST_DOUBLE (rv, op);
1441 REAL_VALUE_TO_TARGET_SINGLE (rv, l);
1443 /* Sign extend from known 32-bit value to HOST_WIDE_INT. */
1445 val = ((val & 0xffffffff) ^ 0x80000000) - 0x80000000;
1447 if (BITS_PER_WORD == 16)
1449 if ((offset & 1) == ! WORDS_BIG_ENDIAN)
1451 val = ((val & 0xffff) ^ 0x8000) - 0x8000;
1454 return GEN_INT (val);
1457 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1458 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1459 || flag_pretend_float)
1460 && sizeof (float) * 8 == HOST_BITS_PER_WIDE_INT
1461 && GET_MODE_CLASS (mode) == MODE_FLOAT
1462 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1463 && GET_CODE (op) == CONST_DOUBLE)
1466 union {float f; HOST_WIDE_INT i; } u;
1468 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1471 return GEN_INT (u.i);
1473 if (((HOST_FLOAT_FORMAT == TARGET_FLOAT_FORMAT
1474 && HOST_BITS_PER_WIDE_INT == BITS_PER_WORD)
1475 || flag_pretend_float)
1476 && sizeof (double) * 8 == HOST_BITS_PER_WIDE_INT
1477 && GET_MODE_CLASS (mode) == MODE_FLOAT
1478 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
1479 && GET_CODE (op) == CONST_DOUBLE)
1482 union {double d; HOST_WIDE_INT i; } u;
1484 REAL_VALUE_FROM_CONST_DOUBLE (d, op);
1487 return GEN_INT (u.i);
1489 #endif /* no REAL_ARITHMETIC */
1491 /* The only remaining cases that we can handle are integers.
1492 Convert to proper endianness now since these cases need it.
1493 At this point, offset == 0 means the low-order word.
1495 We do not want to handle the case when BITS_PER_WORD <= HOST_BITS_PER_INT
1496 in general. However, if OP is (const_int 0), we can just return
1499 if (op == const0_rtx)
1502 if (GET_MODE_CLASS (mode) != MODE_INT
1503 || (GET_CODE (op) != CONST_INT && GET_CODE (op) != CONST_DOUBLE)
1504 || BITS_PER_WORD > HOST_BITS_PER_WIDE_INT)
1507 if (WORDS_BIG_ENDIAN)
1508 offset = GET_MODE_SIZE (mode) / UNITS_PER_WORD - 1 - offset;
1510 /* Find out which word on the host machine this value is in and get
1511 it from the constant. */
1512 val = (offset / size_ratio == 0
1513 ? (GET_CODE (op) == CONST_INT ? INTVAL (op) : CONST_DOUBLE_LOW (op))
1514 : (GET_CODE (op) == CONST_INT
1515 ? (INTVAL (op) < 0 ? ~0 : 0) : CONST_DOUBLE_HIGH (op)));
1517 /* Get the value we want into the low bits of val. */
1518 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT)
1519 val = ((val >> ((offset % size_ratio) * BITS_PER_WORD)));
1521 val = trunc_int_for_mode (val, word_mode);
1523 return GEN_INT (val);
1526 /* Return subword OFFSET of operand OP.
1527 The word number, OFFSET, is interpreted as the word number starting
1528 at the low-order address. OFFSET 0 is the low-order word if not
1529 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1531 If we cannot extract the required word, we return zero. Otherwise,
1532 an rtx corresponding to the requested word will be returned.
1534 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1535 reload has completed, a valid address will always be returned. After
1536 reload, if a valid address cannot be returned, we return zero.
1538 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1539 it is the responsibility of the caller.
1541 MODE is the mode of OP in case it is a CONST_INT.
1543 ??? This is still rather broken for some cases. The problem for the
1544 moment is that all callers of this thing provide no 'goal mode' to
1545 tell us to work with. This exists because all callers were written
1546 in a word based SUBREG world.
1547 Now use of this function can be deprecated by simplify_subreg in most
1552 operand_subword (op, offset, validate_address, mode)
1554 unsigned int offset;
1555 int validate_address;
1556 enum machine_mode mode;
1558 if (mode == VOIDmode)
1559 mode = GET_MODE (op);
1561 if (mode == VOIDmode)
1564 /* If OP is narrower than a word, fail. */
1566 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1569 /* If we want a word outside OP, return zero. */
1571 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1574 /* Form a new MEM at the requested address. */
1575 if (GET_CODE (op) == MEM)
1577 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1579 if (! validate_address)
1582 else if (reload_completed)
1584 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1588 return replace_equiv_address (new, XEXP (new, 0));
1591 /* Rest can be handled by simplify_subreg. */
1592 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1595 /* Similar to `operand_subword', but never return 0. If we can't extract
1596 the required subword, put OP into a register and try again. If that fails,
1597 abort. We always validate the address in this case.
1599 MODE is the mode of OP, in case it is CONST_INT. */
1602 operand_subword_force (op, offset, mode)
1604 unsigned int offset;
1605 enum machine_mode mode;
1607 rtx result = operand_subword (op, offset, 1, mode);
1612 if (mode != BLKmode && mode != VOIDmode)
1614 /* If this is a register which can not be accessed by words, copy it
1615 to a pseudo register. */
1616 if (GET_CODE (op) == REG)
1617 op = copy_to_reg (op);
1619 op = force_reg (mode, op);
1622 result = operand_subword (op, offset, 1, mode);
1629 /* Given a compare instruction, swap the operands.
1630 A test instruction is changed into a compare of 0 against the operand. */
1633 reverse_comparison (insn)
1636 rtx body = PATTERN (insn);
1639 if (GET_CODE (body) == SET)
1640 comp = SET_SRC (body);
1642 comp = SET_SRC (XVECEXP (body, 0, 0));
1644 if (GET_CODE (comp) == COMPARE)
1646 rtx op0 = XEXP (comp, 0);
1647 rtx op1 = XEXP (comp, 1);
1648 XEXP (comp, 0) = op1;
1649 XEXP (comp, 1) = op0;
1653 rtx new = gen_rtx_COMPARE (VOIDmode,
1654 CONST0_RTX (GET_MODE (comp)), comp);
1655 if (GET_CODE (body) == SET)
1656 SET_SRC (body) = new;
1658 SET_SRC (XVECEXP (body, 0, 0)) = new;
1662 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1663 or (2) a component ref of something variable. Represent the later with
1664 a NULL expression. */
1667 component_ref_for_mem_expr (ref)
1670 tree inner = TREE_OPERAND (ref, 0);
1672 if (TREE_CODE (inner) == COMPONENT_REF)
1673 inner = component_ref_for_mem_expr (inner);
1676 tree placeholder_ptr = 0;
1678 /* Now remove any conversions: they don't change what the underlying
1679 object is. Likewise for SAVE_EXPR. Also handle PLACEHOLDER_EXPR. */
1680 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1681 || TREE_CODE (inner) == NON_LVALUE_EXPR
1682 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1683 || TREE_CODE (inner) == SAVE_EXPR
1684 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
1685 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
1686 inner = find_placeholder (inner, &placeholder_ptr);
1688 inner = TREE_OPERAND (inner, 0);
1690 if (! DECL_P (inner))
1694 if (inner == TREE_OPERAND (ref, 0))
1697 return build (COMPONENT_REF, TREE_TYPE (ref), inner,
1698 TREE_OPERAND (ref, 1));
1701 /* Given REF, a MEM, and T, either the type of X or the expression
1702 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1703 if we are making a new object of this type. */
1706 set_mem_attributes (ref, t, objectp)
1711 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1712 tree expr = MEM_EXPR (ref);
1713 rtx offset = MEM_OFFSET (ref);
1714 rtx size = MEM_SIZE (ref);
1715 unsigned int align = MEM_ALIGN (ref);
1718 /* It can happen that type_for_mode was given a mode for which there
1719 is no language-level type. In which case it returns NULL, which
1724 type = TYPE_P (t) ? t : TREE_TYPE (t);
1726 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1727 wrong answer, as it assumes that DECL_RTL already has the right alias
1728 info. Callers should not set DECL_RTL until after the call to
1729 set_mem_attributes. */
1730 if (DECL_P (t) && ref == DECL_RTL_IF_SET (t))
1733 /* Get the alias set from the expression or type (perhaps using a
1734 front-end routine) and use it. */
1735 alias = get_alias_set (t);
1737 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
1738 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
1739 RTX_UNCHANGING_P (ref)
1740 |= ((lang_hooks.honor_readonly
1741 && (TYPE_READONLY (type) || TREE_READONLY (t)))
1742 || (! TYPE_P (t) && TREE_CONSTANT (t)));
1744 /* If we are making an object of this type, or if this is a DECL, we know
1745 that it is a scalar if the type is not an aggregate. */
1746 if ((objectp || DECL_P (t)) && ! AGGREGATE_TYPE_P (type))
1747 MEM_SCALAR_P (ref) = 1;
1749 /* We can set the alignment from the type if we are making an object,
1750 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1751 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1752 align = MAX (align, TYPE_ALIGN (type));
1754 /* If the size is known, we can set that. */
1755 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1756 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1758 /* If T is not a type, we may be able to deduce some more information about
1762 maybe_set_unchanging (ref, t);
1763 if (TREE_THIS_VOLATILE (t))
1764 MEM_VOLATILE_P (ref) = 1;
1766 /* Now remove any conversions: they don't change what the underlying
1767 object is. Likewise for SAVE_EXPR. */
1768 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1769 || TREE_CODE (t) == NON_LVALUE_EXPR
1770 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1771 || TREE_CODE (t) == SAVE_EXPR)
1772 t = TREE_OPERAND (t, 0);
1774 /* If this expression can't be addressed (e.g., it contains a reference
1775 to a non-addressable field), show we don't change its alias set. */
1776 if (! can_address_p (t))
1777 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1779 /* If this is a decl, set the attributes of the MEM from it. */
1783 offset = const0_rtx;
1784 size = (DECL_SIZE_UNIT (t)
1785 && host_integerp (DECL_SIZE_UNIT (t), 1)
1786 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1787 align = DECL_ALIGN (t);
1790 /* If this is a constant, we know the alignment. */
1791 else if (TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
1793 align = TYPE_ALIGN (type);
1794 #ifdef CONSTANT_ALIGNMENT
1795 align = CONSTANT_ALIGNMENT (t, align);
1799 /* If this is a field reference and not a bit-field, record it. */
1800 /* ??? There is some information that can be gleened from bit-fields,
1801 such as the word offset in the structure that might be modified.
1802 But skip it for now. */
1803 else if (TREE_CODE (t) == COMPONENT_REF
1804 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1806 expr = component_ref_for_mem_expr (t);
1807 offset = const0_rtx;
1808 /* ??? Any reason the field size would be different than
1809 the size we got from the type? */
1812 /* If this is an array reference, look for an outer field reference. */
1813 else if (TREE_CODE (t) == ARRAY_REF)
1815 tree off_tree = size_zero_node;
1820 = fold (build (PLUS_EXPR, sizetype,
1821 fold (build (MULT_EXPR, sizetype,
1822 TREE_OPERAND (t, 1),
1823 TYPE_SIZE_UNIT (TREE_TYPE (t)))),
1825 t = TREE_OPERAND (t, 0);
1827 while (TREE_CODE (t) == ARRAY_REF);
1829 if (TREE_CODE (t) == COMPONENT_REF)
1831 expr = component_ref_for_mem_expr (t);
1832 if (host_integerp (off_tree, 1))
1833 offset = GEN_INT (tree_low_cst (off_tree, 1));
1834 /* ??? Any reason the field size would be different than
1835 the size we got from the type? */
1840 /* Now set the attributes we computed above. */
1842 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1844 /* If this is already known to be a scalar or aggregate, we are done. */
1845 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1848 /* If it is a reference into an aggregate, this is part of an aggregate.
1849 Otherwise we don't know. */
1850 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1851 || TREE_CODE (t) == ARRAY_RANGE_REF
1852 || TREE_CODE (t) == BIT_FIELD_REF)
1853 MEM_IN_STRUCT_P (ref) = 1;
1856 /* Set the alias set of MEM to SET. */
1859 set_mem_alias_set (mem, set)
1863 #ifdef ENABLE_CHECKING
1864 /* If the new and old alias sets don't conflict, something is wrong. */
1865 if (!alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)))
1869 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1870 MEM_SIZE (mem), MEM_ALIGN (mem),
1874 /* Set the alignment of MEM to ALIGN bits. */
1877 set_mem_align (mem, align)
1881 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1882 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1886 /* Set the expr for MEM to EXPR. */
1889 set_mem_expr (mem, expr)
1894 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1895 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1898 /* Set the offset of MEM to OFFSET. */
1901 set_mem_offset (mem, offset)
1904 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1905 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1909 /* Set the size of MEM to SIZE. */
1912 set_mem_size (mem, size)
1915 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1916 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1920 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1921 and its address changed to ADDR. (VOIDmode means don't change the mode.
1922 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1923 returned memory location is required to be valid. The memory
1924 attributes are not changed. */
1927 change_address_1 (memref, mode, addr, validate)
1929 enum machine_mode mode;
1935 if (GET_CODE (memref) != MEM)
1937 if (mode == VOIDmode)
1938 mode = GET_MODE (memref);
1940 addr = XEXP (memref, 0);
1944 if (reload_in_progress || reload_completed)
1946 if (! memory_address_p (mode, addr))
1950 addr = memory_address (mode, addr);
1953 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1956 new = gen_rtx_MEM (mode, addr);
1957 MEM_COPY_ATTRIBUTES (new, memref);
1961 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1962 way we are changing MEMREF, so we only preserve the alias set. */
1965 change_address (memref, mode, addr)
1967 enum machine_mode mode;
1970 rtx new = change_address_1 (memref, mode, addr, 1);
1971 enum machine_mode mmode = GET_MODE (new);
1974 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0,
1975 mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode)),
1976 (mmode == BLKmode ? BITS_PER_UNIT
1977 : GET_MODE_ALIGNMENT (mmode)),
1983 /* Return a memory reference like MEMREF, but with its mode changed
1984 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1985 nonzero, the memory address is forced to be valid.
1986 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1987 and caller is responsible for adjusting MEMREF base register. */
1990 adjust_address_1 (memref, mode, offset, validate, adjust)
1992 enum machine_mode mode;
1993 HOST_WIDE_INT offset;
1994 int validate, adjust;
1996 rtx addr = XEXP (memref, 0);
1998 rtx memoffset = MEM_OFFSET (memref);
2000 unsigned int memalign = MEM_ALIGN (memref);
2002 /* ??? Prefer to create garbage instead of creating shared rtl.
2003 This may happen even if offset is non-zero -- consider
2004 (plus (plus reg reg) const_int) -- so do this always. */
2005 addr = copy_rtx (addr);
2009 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2010 object, we can merge it into the LO_SUM. */
2011 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2013 && (unsigned HOST_WIDE_INT) offset
2014 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2015 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
2016 plus_constant (XEXP (addr, 1), offset));
2018 addr = plus_constant (addr, offset);
2021 new = change_address_1 (memref, mode, addr, validate);
2023 /* Compute the new values of the memory attributes due to this adjustment.
2024 We add the offsets and update the alignment. */
2026 memoffset = GEN_INT (offset + INTVAL (memoffset));
2028 /* Compute the new alignment by taking the MIN of the alignment and the
2029 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2034 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2036 /* We can compute the size in a number of ways. */
2037 if (GET_MODE (new) != BLKmode)
2038 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
2039 else if (MEM_SIZE (memref))
2040 size = plus_constant (MEM_SIZE (memref), -offset);
2042 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2043 memoffset, size, memalign, GET_MODE (new));
2045 /* At some point, we should validate that this offset is within the object,
2046 if all the appropriate values are known. */
2050 /* Return a memory reference like MEMREF, but with its mode changed
2051 to MODE and its address changed to ADDR, which is assumed to be
2052 MEMREF offseted by OFFSET bytes. If VALIDATE is
2053 nonzero, the memory address is forced to be valid. */
2056 adjust_automodify_address_1 (memref, mode, addr, offset, validate)
2058 enum machine_mode mode;
2060 HOST_WIDE_INT offset;
2063 memref = change_address_1 (memref, VOIDmode, addr, validate);
2064 return adjust_address_1 (memref, mode, offset, validate, 0);
2067 /* Return a memory reference like MEMREF, but whose address is changed by
2068 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2069 known to be in OFFSET (possibly 1). */
2072 offset_address (memref, offset, pow2)
2077 rtx new, addr = XEXP (memref, 0);
2079 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2081 /* At this point we don't know _why_ the address is invalid. It
2082 could have secondary memory refereces, multiplies or anything.
2084 However, if we did go and rearrange things, we can wind up not
2085 being able to recognize the magic around pic_offset_table_rtx.
2086 This stuff is fragile, and is yet another example of why it is
2087 bad to expose PIC machinery too early. */
2088 if (! memory_address_p (GET_MODE (memref), new)
2089 && GET_CODE (addr) == PLUS
2090 && XEXP (addr, 0) == pic_offset_table_rtx)
2092 addr = force_reg (GET_MODE (addr), addr);
2093 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
2096 update_temp_slot_address (XEXP (memref, 0), new);
2097 new = change_address_1 (memref, VOIDmode, new, 1);
2099 /* Update the alignment to reflect the offset. Reset the offset, which
2102 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2103 MIN (MEM_ALIGN (memref),
2104 (unsigned HOST_WIDE_INT) pow2 * BITS_PER_UNIT),
2109 /* Return a memory reference like MEMREF, but with its address changed to
2110 ADDR. The caller is asserting that the actual piece of memory pointed
2111 to is the same, just the form of the address is being changed, such as
2112 by putting something into a register. */
2115 replace_equiv_address (memref, addr)
2119 /* change_address_1 copies the memory attribute structure without change
2120 and that's exactly what we want here. */
2121 update_temp_slot_address (XEXP (memref, 0), addr);
2122 return change_address_1 (memref, VOIDmode, addr, 1);
2125 /* Likewise, but the reference is not required to be valid. */
2128 replace_equiv_address_nv (memref, addr)
2132 return change_address_1 (memref, VOIDmode, addr, 0);
2135 /* Return a memory reference like MEMREF, but with its mode widened to
2136 MODE and offset by OFFSET. This would be used by targets that e.g.
2137 cannot issue QImode memory operations and have to use SImode memory
2138 operations plus masking logic. */
2141 widen_memory_access (memref, mode, offset)
2143 enum machine_mode mode;
2144 HOST_WIDE_INT offset;
2146 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2147 tree expr = MEM_EXPR (new);
2148 rtx memoffset = MEM_OFFSET (new);
2149 unsigned int size = GET_MODE_SIZE (mode);
2151 /* If we don't know what offset we were at within the expression, then
2152 we can't know if we've overstepped the bounds. */
2158 if (TREE_CODE (expr) == COMPONENT_REF)
2160 tree field = TREE_OPERAND (expr, 1);
2162 if (! DECL_SIZE_UNIT (field))
2168 /* Is the field at least as large as the access? If so, ok,
2169 otherwise strip back to the containing structure. */
2170 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2171 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2172 && INTVAL (memoffset) >= 0)
2175 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
2181 expr = TREE_OPERAND (expr, 0);
2182 memoffset = (GEN_INT (INTVAL (memoffset)
2183 + tree_low_cst (DECL_FIELD_OFFSET (field), 1)
2184 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2187 /* Similarly for the decl. */
2188 else if (DECL_P (expr)
2189 && DECL_SIZE_UNIT (expr)
2190 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2191 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2192 && (! memoffset || INTVAL (memoffset) >= 0))
2196 /* The widened memory access overflows the expression, which means
2197 that it could alias another expression. Zap it. */
2204 memoffset = NULL_RTX;
2206 /* The widened memory may alias other stuff, so zap the alias set. */
2207 /* ??? Maybe use get_alias_set on any remaining expression. */
2209 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2210 MEM_ALIGN (new), mode);
2215 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2222 label = gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX,
2223 NULL_RTX, label_num++, NULL, NULL);
2225 LABEL_NUSES (label) = 0;
2226 LABEL_ALTERNATE_NAME (label) = NULL;
2230 /* For procedure integration. */
2232 /* Install new pointers to the first and last insns in the chain.
2233 Also, set cur_insn_uid to one higher than the last in use.
2234 Used for an inline-procedure after copying the insn chain. */
2237 set_new_first_and_last_insn (first, last)
2246 for (insn = first; insn; insn = NEXT_INSN (insn))
2247 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2252 /* Set the range of label numbers found in the current function.
2253 This is used when belatedly compiling an inline function. */
2256 set_new_first_and_last_label_num (first, last)
2259 base_label_num = label_num;
2260 first_label_num = first;
2261 last_label_num = last;
2264 /* Set the last label number found in the current function.
2265 This is used when belatedly compiling an inline function. */
2268 set_new_last_label_num (last)
2271 base_label_num = label_num;
2272 last_label_num = last;
2275 /* Restore all variables describing the current status from the structure *P.
2276 This is used after a nested function. */
2279 restore_emit_status (p)
2280 struct function *p ATTRIBUTE_UNUSED;
2283 clear_emit_caches ();
2286 /* Clear out all parts of the state in F that can safely be discarded
2287 after the function has been compiled, to let garbage collection
2288 reclaim the memory. */
2291 free_emit_status (f)
2294 free (f->emit->x_regno_reg_rtx);
2295 free (f->emit->regno_pointer_align);
2296 free (f->emit->regno_decl);
2301 /* Go through all the RTL insn bodies and copy any invalid shared
2302 structure. This routine should only be called once. */
2305 unshare_all_rtl (fndecl, insn)
2311 /* Make sure that virtual parameters are not shared. */
2312 for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
2313 SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
2315 /* Make sure that virtual stack slots are not shared. */
2316 unshare_all_decls (DECL_INITIAL (fndecl));
2318 /* Unshare just about everything else. */
2319 unshare_all_rtl_1 (insn);
2321 /* Make sure the addresses of stack slots found outside the insn chain
2322 (such as, in DECL_RTL of a variable) are not shared
2323 with the insn chain.
2325 This special care is necessary when the stack slot MEM does not
2326 actually appear in the insn chain. If it does appear, its address
2327 is unshared from all else at that point. */
2328 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2331 /* Go through all the RTL insn bodies and copy any invalid shared
2332 structure, again. This is a fairly expensive thing to do so it
2333 should be done sparingly. */
2336 unshare_all_rtl_again (insn)
2342 for (p = insn; p; p = NEXT_INSN (p))
2345 reset_used_flags (PATTERN (p));
2346 reset_used_flags (REG_NOTES (p));
2347 reset_used_flags (LOG_LINKS (p));
2350 /* Make sure that virtual stack slots are not shared. */
2351 reset_used_decls (DECL_INITIAL (cfun->decl));
2353 /* Make sure that virtual parameters are not shared. */
2354 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2355 reset_used_flags (DECL_RTL (decl));
2357 reset_used_flags (stack_slot_list);
2359 unshare_all_rtl (cfun->decl, insn);
2362 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2363 Assumes the mark bits are cleared at entry. */
2366 unshare_all_rtl_1 (insn)
2369 for (; insn; insn = NEXT_INSN (insn))
2372 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2373 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2374 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2378 /* Go through all virtual stack slots of a function and copy any
2379 shared structure. */
2381 unshare_all_decls (blk)
2386 /* Copy shared decls. */
2387 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2388 if (DECL_RTL_SET_P (t))
2389 SET_DECL_RTL (t, copy_rtx_if_shared (DECL_RTL (t)));
2391 /* Now process sub-blocks. */
2392 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2393 unshare_all_decls (t);
2396 /* Go through all virtual stack slots of a function and mark them as
2399 reset_used_decls (blk)
2405 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2406 if (DECL_RTL_SET_P (t))
2407 reset_used_flags (DECL_RTL (t));
2409 /* Now process sub-blocks. */
2410 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2411 reset_used_decls (t);
2414 /* Similar to `copy_rtx' except that if MAY_SHARE is present, it is
2415 placed in the result directly, rather than being copied. MAY_SHARE is
2416 either a MEM of an EXPR_LIST of MEMs. */
2419 copy_most_rtx (orig, may_share)
2426 const char *format_ptr;
2428 if (orig == may_share
2429 || (GET_CODE (may_share) == EXPR_LIST
2430 && in_expr_list_p (may_share, orig)))
2433 code = GET_CODE (orig);
2451 copy = rtx_alloc (code);
2452 PUT_MODE (copy, GET_MODE (orig));
2453 copy->in_struct = orig->in_struct;
2454 copy->volatil = orig->volatil;
2455 copy->unchanging = orig->unchanging;
2456 copy->integrated = orig->integrated;
2457 copy->frame_related = orig->frame_related;
2459 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
2461 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
2463 switch (*format_ptr++)
2466 XEXP (copy, i) = XEXP (orig, i);
2467 if (XEXP (orig, i) != NULL && XEXP (orig, i) != may_share)
2468 XEXP (copy, i) = copy_most_rtx (XEXP (orig, i), may_share);
2472 XEXP (copy, i) = XEXP (orig, i);
2477 XVEC (copy, i) = XVEC (orig, i);
2478 if (XVEC (orig, i) != NULL)
2480 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
2481 for (j = 0; j < XVECLEN (copy, i); j++)
2482 XVECEXP (copy, i, j)
2483 = copy_most_rtx (XVECEXP (orig, i, j), may_share);
2488 XWINT (copy, i) = XWINT (orig, i);
2493 XINT (copy, i) = XINT (orig, i);
2497 XTREE (copy, i) = XTREE (orig, i);
2502 XSTR (copy, i) = XSTR (orig, i);
2506 /* Copy this through the wide int field; that's safest. */
2507 X0WINT (copy, i) = X0WINT (orig, i);
2517 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2518 Recursively does the same for subexpressions. */
2521 copy_rtx_if_shared (orig)
2527 const char *format_ptr;
2533 code = GET_CODE (x);
2535 /* These types may be freely shared. */
2549 /* SCRATCH must be shared because they represent distinct values. */
2553 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2554 a LABEL_REF, it isn't sharable. */
2555 if (GET_CODE (XEXP (x, 0)) == PLUS
2556 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2557 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2566 /* The chain of insns is not being copied. */
2570 /* A MEM is allowed to be shared if its address is constant.
2572 We used to allow sharing of MEMs which referenced
2573 virtual_stack_vars_rtx or virtual_incoming_args_rtx, but
2574 that can lose. instantiate_virtual_regs will not unshare
2575 the MEMs, and combine may change the structure of the address
2576 because it looks safe and profitable in one context, but
2577 in some other context it creates unrecognizable RTL. */
2578 if (CONSTANT_ADDRESS_P (XEXP (x, 0)))
2587 /* This rtx may not be shared. If it has already been seen,
2588 replace it with a copy of itself. */
2594 copy = rtx_alloc (code);
2596 (sizeof (*copy) - sizeof (copy->fld)
2597 + sizeof (copy->fld[0]) * GET_RTX_LENGTH (code)));
2603 /* Now scan the subexpressions recursively.
2604 We can store any replaced subexpressions directly into X
2605 since we know X is not shared! Any vectors in X
2606 must be copied if X was copied. */
2608 format_ptr = GET_RTX_FORMAT (code);
2610 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2612 switch (*format_ptr++)
2615 XEXP (x, i) = copy_rtx_if_shared (XEXP (x, i));
2619 if (XVEC (x, i) != NULL)
2622 int len = XVECLEN (x, i);
2624 if (copied && len > 0)
2625 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2626 for (j = 0; j < len; j++)
2627 XVECEXP (x, i, j) = copy_rtx_if_shared (XVECEXP (x, i, j));
2635 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2636 to look for shared sub-parts. */
2639 reset_used_flags (x)
2644 const char *format_ptr;
2649 code = GET_CODE (x);
2651 /* These types may be freely shared so we needn't do any resetting
2673 /* The chain of insns is not being copied. */
2682 format_ptr = GET_RTX_FORMAT (code);
2683 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2685 switch (*format_ptr++)
2688 reset_used_flags (XEXP (x, i));
2692 for (j = 0; j < XVECLEN (x, i); j++)
2693 reset_used_flags (XVECEXP (x, i, j));
2699 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2700 Return X or the rtx for the pseudo reg the value of X was copied into.
2701 OTHER must be valid as a SET_DEST. */
2704 make_safe_from (x, other)
2708 switch (GET_CODE (other))
2711 other = SUBREG_REG (other);
2713 case STRICT_LOW_PART:
2716 other = XEXP (other, 0);
2722 if ((GET_CODE (other) == MEM
2724 && GET_CODE (x) != REG
2725 && GET_CODE (x) != SUBREG)
2726 || (GET_CODE (other) == REG
2727 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2728 || reg_mentioned_p (other, x))))
2730 rtx temp = gen_reg_rtx (GET_MODE (x));
2731 emit_move_insn (temp, x);
2737 /* Emission of insns (adding them to the doubly-linked list). */
2739 /* Return the first insn of the current sequence or current function. */
2747 /* Return the last insn emitted in current sequence or current function. */
2755 /* Specify a new insn as the last in the chain. */
2758 set_last_insn (insn)
2761 if (NEXT_INSN (insn) != 0)
2766 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2769 get_last_insn_anywhere ()
2771 struct sequence_stack *stack;
2774 for (stack = seq_stack; stack; stack = stack->next)
2775 if (stack->last != 0)
2780 /* Return a number larger than any instruction's uid in this function. */
2785 return cur_insn_uid;
2788 /* Renumber instructions so that no instruction UIDs are wasted. */
2791 renumber_insns (stream)
2796 /* If we're not supposed to renumber instructions, don't. */
2797 if (!flag_renumber_insns)
2800 /* If there aren't that many instructions, then it's not really
2801 worth renumbering them. */
2802 if (flag_renumber_insns == 1 && get_max_uid () < 25000)
2807 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2810 fprintf (stream, "Renumbering insn %d to %d\n",
2811 INSN_UID (insn), cur_insn_uid);
2812 INSN_UID (insn) = cur_insn_uid++;
2816 /* Return the next insn. If it is a SEQUENCE, return the first insn
2825 insn = NEXT_INSN (insn);
2826 if (insn && GET_CODE (insn) == INSN
2827 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2828 insn = XVECEXP (PATTERN (insn), 0, 0);
2834 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2838 previous_insn (insn)
2843 insn = PREV_INSN (insn);
2844 if (insn && GET_CODE (insn) == INSN
2845 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2846 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2852 /* Return the next insn after INSN that is not a NOTE. This routine does not
2853 look inside SEQUENCEs. */
2856 next_nonnote_insn (insn)
2861 insn = NEXT_INSN (insn);
2862 if (insn == 0 || GET_CODE (insn) != NOTE)
2869 /* Return the previous insn before INSN that is not a NOTE. This routine does
2870 not look inside SEQUENCEs. */
2873 prev_nonnote_insn (insn)
2878 insn = PREV_INSN (insn);
2879 if (insn == 0 || GET_CODE (insn) != NOTE)
2886 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2887 or 0, if there is none. This routine does not look inside
2891 next_real_insn (insn)
2896 insn = NEXT_INSN (insn);
2897 if (insn == 0 || GET_CODE (insn) == INSN
2898 || GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN)
2905 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2906 or 0, if there is none. This routine does not look inside
2910 prev_real_insn (insn)
2915 insn = PREV_INSN (insn);
2916 if (insn == 0 || GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN
2917 || GET_CODE (insn) == JUMP_INSN)
2924 /* Find the next insn after INSN that really does something. This routine
2925 does not look inside SEQUENCEs. Until reload has completed, this is the
2926 same as next_real_insn. */
2929 active_insn_p (insn)
2932 return (GET_CODE (insn) == CALL_INSN || GET_CODE (insn) == JUMP_INSN
2933 || (GET_CODE (insn) == INSN
2934 && (! reload_completed
2935 || (GET_CODE (PATTERN (insn)) != USE
2936 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2940 next_active_insn (insn)
2945 insn = NEXT_INSN (insn);
2946 if (insn == 0 || active_insn_p (insn))
2953 /* Find the last insn before INSN that really does something. This routine
2954 does not look inside SEQUENCEs. Until reload has completed, this is the
2955 same as prev_real_insn. */
2958 prev_active_insn (insn)
2963 insn = PREV_INSN (insn);
2964 if (insn == 0 || active_insn_p (insn))
2971 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2979 insn = NEXT_INSN (insn);
2980 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
2987 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2995 insn = PREV_INSN (insn);
2996 if (insn == 0 || GET_CODE (insn) == CODE_LABEL)
3004 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3005 and REG_CC_USER notes so we can find it. */
3008 link_cc0_insns (insn)
3011 rtx user = next_nonnote_insn (insn);
3013 if (GET_CODE (user) == INSN && GET_CODE (PATTERN (user)) == SEQUENCE)
3014 user = XVECEXP (PATTERN (user), 0, 0);
3016 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3018 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3021 /* Return the next insn that uses CC0 after INSN, which is assumed to
3022 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3023 applied to the result of this function should yield INSN).
3025 Normally, this is simply the next insn. However, if a REG_CC_USER note
3026 is present, it contains the insn that uses CC0.
3028 Return 0 if we can't find the insn. */
3031 next_cc0_user (insn)
3034 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3037 return XEXP (note, 0);
3039 insn = next_nonnote_insn (insn);
3040 if (insn && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
3041 insn = XVECEXP (PATTERN (insn), 0, 0);
3043 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3049 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3050 note, it is the previous insn. */
3053 prev_cc0_setter (insn)
3056 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3059 return XEXP (note, 0);
3061 insn = prev_nonnote_insn (insn);
3062 if (! sets_cc0_p (PATTERN (insn)))
3069 /* Increment the label uses for all labels present in rtx. */
3079 code = GET_CODE (x);
3080 if (code == LABEL_REF)
3081 LABEL_NUSES (XEXP (x, 0))++;
3083 fmt = GET_RTX_FORMAT (code);
3084 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3087 mark_label_nuses (XEXP (x, i));
3088 else if (fmt[i] == 'E')
3089 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3090 mark_label_nuses (XVECEXP (x, i, j));
3095 /* Try splitting insns that can be split for better scheduling.
3096 PAT is the pattern which might split.
3097 TRIAL is the insn providing PAT.
3098 LAST is non-zero if we should return the last insn of the sequence produced.
3100 If this routine succeeds in splitting, it returns the first or last
3101 replacement insn depending on the value of LAST. Otherwise, it
3102 returns TRIAL. If the insn to be returned can be split, it will be. */
3105 try_split (pat, trial, last)
3109 rtx before = PREV_INSN (trial);
3110 rtx after = NEXT_INSN (trial);
3111 int has_barrier = 0;
3116 if (any_condjump_p (trial)
3117 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3118 split_branch_probability = INTVAL (XEXP (note, 0));
3119 probability = split_branch_probability;
3121 seq = split_insns (pat, trial);
3123 split_branch_probability = -1;
3125 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3126 We may need to handle this specially. */
3127 if (after && GET_CODE (after) == BARRIER)
3130 after = NEXT_INSN (after);
3135 /* SEQ can either be a SEQUENCE or the pattern of a single insn.
3136 The latter case will normally arise only when being done so that
3137 it, in turn, will be split (SFmode on the 29k is an example). */
3138 if (GET_CODE (seq) == SEQUENCE)
3142 /* Avoid infinite loop if any insn of the result matches
3143 the original pattern. */
3144 for (i = 0; i < XVECLEN (seq, 0); i++)
3145 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN
3146 && rtx_equal_p (PATTERN (XVECEXP (seq, 0, i)), pat))
3150 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3151 if (GET_CODE (XVECEXP (seq, 0, i)) == JUMP_INSN)
3153 rtx insn = XVECEXP (seq, 0, i);
3154 mark_jump_label (PATTERN (insn),
3155 XVECEXP (seq, 0, i), 0);
3157 if (probability != -1
3158 && any_condjump_p (insn)
3159 && !find_reg_note (insn, REG_BR_PROB, 0))
3161 /* We can preserve the REG_BR_PROB notes only if exactly
3162 one jump is created, otherwise the machine description
3163 is responsible for this step using
3164 split_branch_probability variable. */
3168 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3169 GEN_INT (probability),
3174 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3175 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3176 if (GET_CODE (trial) == CALL_INSN)
3177 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3178 if (GET_CODE (XVECEXP (seq, 0, i)) == CALL_INSN)
3179 CALL_INSN_FUNCTION_USAGE (XVECEXP (seq, 0, i))
3180 = CALL_INSN_FUNCTION_USAGE (trial);
3182 /* Copy notes, particularly those related to the CFG. */
3183 for (note = REG_NOTES (trial); note ; note = XEXP (note, 1))
3185 switch (REG_NOTE_KIND (note))
3188 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3190 rtx insn = XVECEXP (seq, 0, i);
3191 if (GET_CODE (insn) == CALL_INSN
3192 || (flag_non_call_exceptions
3193 && may_trap_p (PATTERN (insn))))
3195 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3203 case REG_ALWAYS_RETURN:
3204 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3206 rtx insn = XVECEXP (seq, 0, i);
3207 if (GET_CODE (insn) == CALL_INSN)
3209 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3215 case REG_NON_LOCAL_GOTO:
3216 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3218 rtx insn = XVECEXP (seq, 0, i);
3219 if (GET_CODE (insn) == JUMP_INSN)
3221 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3232 /* If there are LABELS inside the split insns increment the
3233 usage count so we don't delete the label. */
3234 if (GET_CODE (trial) == INSN)
3235 for (i = XVECLEN (seq, 0) - 1; i >= 0; i--)
3236 if (GET_CODE (XVECEXP (seq, 0, i)) == INSN)
3237 mark_label_nuses (PATTERN (XVECEXP (seq, 0, i)));
3239 tem = emit_insn_after (seq, trial);
3241 delete_related_insns (trial);
3243 emit_barrier_after (tem);
3245 /* Recursively call try_split for each new insn created; by the
3246 time control returns here that insn will be fully split, so
3247 set LAST and continue from the insn after the one returned.
3248 We can't use next_active_insn here since AFTER may be a note.
3249 Ignore deleted insns, which can be occur if not optimizing. */
3250 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3251 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3252 tem = try_split (PATTERN (tem), tem, 1);
3254 /* Avoid infinite loop if the result matches the original pattern. */
3255 else if (rtx_equal_p (seq, pat))
3259 PATTERN (trial) = seq;
3260 INSN_CODE (trial) = -1;
3261 try_split (seq, trial, last);
3264 /* Return either the first or the last insn, depending on which was
3267 ? (after ? PREV_INSN (after) : last_insn)
3268 : NEXT_INSN (before);
3274 /* Make and return an INSN rtx, initializing all its slots.
3275 Store PATTERN in the pattern slots. */
3278 make_insn_raw (pattern)
3283 insn = rtx_alloc (INSN);
3285 INSN_UID (insn) = cur_insn_uid++;
3286 PATTERN (insn) = pattern;
3287 INSN_CODE (insn) = -1;
3288 LOG_LINKS (insn) = NULL;
3289 REG_NOTES (insn) = NULL;
3291 #ifdef ENABLE_RTL_CHECKING
3294 && (returnjump_p (insn)
3295 || (GET_CODE (insn) == SET
3296 && SET_DEST (insn) == pc_rtx)))
3298 warning ("ICE: emit_insn used where emit_jump_insn needed:\n");
3306 /* Like `make_insn' but make a JUMP_INSN instead of an insn. */
3309 make_jump_insn_raw (pattern)
3314 insn = rtx_alloc (JUMP_INSN);
3315 INSN_UID (insn) = cur_insn_uid++;
3317 PATTERN (insn) = pattern;
3318 INSN_CODE (insn) = -1;
3319 LOG_LINKS (insn) = NULL;
3320 REG_NOTES (insn) = NULL;
3321 JUMP_LABEL (insn) = NULL;
3326 /* Like `make_insn' but make a CALL_INSN instead of an insn. */
3329 make_call_insn_raw (pattern)
3334 insn = rtx_alloc (CALL_INSN);
3335 INSN_UID (insn) = cur_insn_uid++;
3337 PATTERN (insn) = pattern;
3338 INSN_CODE (insn) = -1;
3339 LOG_LINKS (insn) = NULL;
3340 REG_NOTES (insn) = NULL;
3341 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3346 /* Add INSN to the end of the doubly-linked list.
3347 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3353 PREV_INSN (insn) = last_insn;
3354 NEXT_INSN (insn) = 0;
3356 if (NULL != last_insn)
3357 NEXT_INSN (last_insn) = insn;
3359 if (NULL == first_insn)
3365 /* Add INSN into the doubly-linked list after insn AFTER. This and
3366 the next should be the only functions called to insert an insn once
3367 delay slots have been filled since only they know how to update a
3371 add_insn_after (insn, after)
3374 rtx next = NEXT_INSN (after);
3377 if (optimize && INSN_DELETED_P (after))
3380 NEXT_INSN (insn) = next;
3381 PREV_INSN (insn) = after;
3385 PREV_INSN (next) = insn;
3386 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3387 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3389 else if (last_insn == after)
3393 struct sequence_stack *stack = seq_stack;
3394 /* Scan all pending sequences too. */
3395 for (; stack; stack = stack->next)
3396 if (after == stack->last)
3406 if (basic_block_for_insn
3407 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3408 && (bb = BLOCK_FOR_INSN (after)))
3410 set_block_for_insn (insn, bb);
3411 /* Should not happen as first in the BB is always
3412 either NOTE or LABEL. */
3413 if (bb->end == after
3414 /* Avoid clobbering of structure when creating new BB. */
3415 && GET_CODE (insn) != BARRIER
3416 && (GET_CODE (insn) != NOTE
3417 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3421 NEXT_INSN (after) = insn;
3422 if (GET_CODE (after) == INSN && GET_CODE (PATTERN (after)) == SEQUENCE)
3424 rtx sequence = PATTERN (after);
3425 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3429 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3430 the previous should be the only functions called to insert an insn once
3431 delay slots have been filled since only they know how to update a
3435 add_insn_before (insn, before)
3438 rtx prev = PREV_INSN (before);
3441 if (optimize && INSN_DELETED_P (before))
3444 PREV_INSN (insn) = prev;
3445 NEXT_INSN (insn) = before;
3449 NEXT_INSN (prev) = insn;
3450 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3452 rtx sequence = PATTERN (prev);
3453 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3456 else if (first_insn == before)
3460 struct sequence_stack *stack = seq_stack;
3461 /* Scan all pending sequences too. */
3462 for (; stack; stack = stack->next)
3463 if (before == stack->first)
3465 stack->first = insn;
3473 if (basic_block_for_insn
3474 && (unsigned int) INSN_UID (before) < basic_block_for_insn->num_elements
3475 && (bb = BLOCK_FOR_INSN (before)))
3477 set_block_for_insn (insn, bb);
3478 /* Should not happen as first in the BB is always
3479 either NOTE or LABEl. */
3480 if (bb->head == insn
3481 /* Avoid clobbering of structure when creating new BB. */
3482 && GET_CODE (insn) != BARRIER
3483 && (GET_CODE (insn) != NOTE
3484 || NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK))
3488 PREV_INSN (before) = insn;
3489 if (GET_CODE (before) == INSN && GET_CODE (PATTERN (before)) == SEQUENCE)
3490 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3493 /* Remove an insn from its doubly-linked list. This function knows how
3494 to handle sequences. */
3499 rtx next = NEXT_INSN (insn);
3500 rtx prev = PREV_INSN (insn);
3505 NEXT_INSN (prev) = next;
3506 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
3508 rtx sequence = PATTERN (prev);
3509 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3512 else if (first_insn == insn)
3516 struct sequence_stack *stack = seq_stack;
3517 /* Scan all pending sequences too. */
3518 for (; stack; stack = stack->next)
3519 if (insn == stack->first)
3521 stack->first = next;
3531 PREV_INSN (next) = prev;
3532 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
3533 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3535 else if (last_insn == insn)
3539 struct sequence_stack *stack = seq_stack;
3540 /* Scan all pending sequences too. */
3541 for (; stack; stack = stack->next)
3542 if (insn == stack->last)
3551 if (basic_block_for_insn
3552 && (unsigned int) INSN_UID (insn) < basic_block_for_insn->num_elements
3553 && (bb = BLOCK_FOR_INSN (insn)))
3555 if (bb->head == insn)
3557 /* Never ever delete the basic block note without deleting whole
3559 if (GET_CODE (insn) == NOTE)
3563 if (bb->end == insn)
3568 /* Delete all insns made since FROM.
3569 FROM becomes the new last instruction. */
3572 delete_insns_since (from)
3578 NEXT_INSN (from) = 0;
3582 /* This function is deprecated, please use sequences instead.
3584 Move a consecutive bunch of insns to a different place in the chain.
3585 The insns to be moved are those between FROM and TO.
3586 They are moved to a new position after the insn AFTER.
3587 AFTER must not be FROM or TO or any insn in between.
3589 This function does not know about SEQUENCEs and hence should not be
3590 called after delay-slot filling has been done. */
3593 reorder_insns_nobb (from, to, after)
3594 rtx from, to, after;
3596 /* Splice this bunch out of where it is now. */
3597 if (PREV_INSN (from))
3598 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3600 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3601 if (last_insn == to)
3602 last_insn = PREV_INSN (from);
3603 if (first_insn == from)
3604 first_insn = NEXT_INSN (to);
3606 /* Make the new neighbors point to it and it to them. */
3607 if (NEXT_INSN (after))
3608 PREV_INSN (NEXT_INSN (after)) = to;
3610 NEXT_INSN (to) = NEXT_INSN (after);
3611 PREV_INSN (from) = after;
3612 NEXT_INSN (after) = from;
3613 if (after == last_insn)
3617 /* Same as function above, but take care to update BB boundaries. */
3619 reorder_insns (from, to, after)
3620 rtx from, to, after;
3622 rtx prev = PREV_INSN (from);
3623 basic_block bb, bb2;
3625 reorder_insns_nobb (from, to, after);
3627 if (basic_block_for_insn
3628 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
3629 && (bb = BLOCK_FOR_INSN (after)))
3633 if (basic_block_for_insn
3634 && ((unsigned int) INSN_UID (from)
3635 < basic_block_for_insn->num_elements)
3636 && (bb2 = BLOCK_FOR_INSN (from)))
3642 if (bb->end == after)
3645 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3646 set_block_for_insn (x, bb);
3650 /* Return the line note insn preceding INSN. */
3653 find_line_note (insn)
3656 if (no_line_numbers)
3659 for (; insn; insn = PREV_INSN (insn))
3660 if (GET_CODE (insn) == NOTE
3661 && NOTE_LINE_NUMBER (insn) >= 0)
3667 /* Like reorder_insns, but inserts line notes to preserve the line numbers
3668 of the moved insns when debugging. This may insert a note between AFTER
3669 and FROM, and another one after TO. */
3672 reorder_insns_with_line_notes (from, to, after)
3673 rtx from, to, after;
3675 rtx from_line = find_line_note (from);
3676 rtx after_line = find_line_note (after);
3678 reorder_insns (from, to, after);
3680 if (from_line == after_line)
3684 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3685 NOTE_LINE_NUMBER (from_line),
3688 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3689 NOTE_LINE_NUMBER (after_line),
3693 /* Remove unnecessary notes from the instruction stream. */
3696 remove_unnecessary_notes ()
3698 rtx block_stack = NULL_RTX;
3699 rtx eh_stack = NULL_RTX;
3704 /* We must not remove the first instruction in the function because
3705 the compiler depends on the first instruction being a note. */
3706 for (insn = NEXT_INSN (get_insns ()); insn; insn = next)
3708 /* Remember what's next. */
3709 next = NEXT_INSN (insn);
3711 /* We're only interested in notes. */
3712 if (GET_CODE (insn) != NOTE)
3715 switch (NOTE_LINE_NUMBER (insn))
3717 case NOTE_INSN_DELETED:
3718 case NOTE_INSN_LOOP_END_TOP_COND:
3722 case NOTE_INSN_EH_REGION_BEG:
3723 eh_stack = alloc_INSN_LIST (insn, eh_stack);
3726 case NOTE_INSN_EH_REGION_END:
3727 /* Too many end notes. */
3728 if (eh_stack == NULL_RTX)
3730 /* Mismatched nesting. */
3731 if (NOTE_EH_HANDLER (XEXP (eh_stack, 0)) != NOTE_EH_HANDLER (insn))
3734 eh_stack = XEXP (eh_stack, 1);
3735 free_INSN_LIST_node (tmp);
3738 case NOTE_INSN_BLOCK_BEG:
3739 /* By now, all notes indicating lexical blocks should have
3740 NOTE_BLOCK filled in. */
3741 if (NOTE_BLOCK (insn) == NULL_TREE)
3743 block_stack = alloc_INSN_LIST (insn, block_stack);
3746 case NOTE_INSN_BLOCK_END:
3747 /* Too many end notes. */
3748 if (block_stack == NULL_RTX)
3750 /* Mismatched nesting. */
3751 if (NOTE_BLOCK (XEXP (block_stack, 0)) != NOTE_BLOCK (insn))
3754 block_stack = XEXP (block_stack, 1);
3755 free_INSN_LIST_node (tmp);
3757 /* Scan back to see if there are any non-note instructions
3758 between INSN and the beginning of this block. If not,
3759 then there is no PC range in the generated code that will
3760 actually be in this block, so there's no point in
3761 remembering the existence of the block. */
3762 for (tmp = PREV_INSN (insn); tmp ; tmp = PREV_INSN (tmp))
3764 /* This block contains a real instruction. Note that we
3765 don't include labels; if the only thing in the block
3766 is a label, then there are still no PC values that
3767 lie within the block. */
3771 /* We're only interested in NOTEs. */
3772 if (GET_CODE (tmp) != NOTE)
3775 if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_BEG)
3777 /* We just verified that this BLOCK matches us with
3778 the block_stack check above. Never delete the
3779 BLOCK for the outermost scope of the function; we
3780 can refer to names from that scope even if the
3781 block notes are messed up. */
3782 if (! is_body_block (NOTE_BLOCK (insn))
3783 && (*debug_hooks->ignore_block) (NOTE_BLOCK (insn)))
3790 else if (NOTE_LINE_NUMBER (tmp) == NOTE_INSN_BLOCK_END)
3791 /* There's a nested block. We need to leave the
3792 current block in place since otherwise the debugger
3793 wouldn't be able to show symbols from our block in
3794 the nested block. */
3800 /* Too many begin notes. */
3801 if (block_stack || eh_stack)
3806 /* Emit an insn of given code and pattern
3807 at a specified place within the doubly-linked list. */
3809 /* Make an instruction with body PATTERN
3810 and output it before the instruction BEFORE. */
3813 emit_insn_before (pattern, before)
3814 rtx pattern, before;
3818 if (GET_CODE (pattern) == SEQUENCE)
3822 for (i = 0; i < XVECLEN (pattern, 0); i++)
3824 insn = XVECEXP (pattern, 0, i);
3825 add_insn_before (insn, before);
3830 insn = make_insn_raw (pattern);
3831 add_insn_before (insn, before);
3837 /* Make an instruction with body PATTERN and code JUMP_INSN
3838 and output it before the instruction BEFORE. */
3841 emit_jump_insn_before (pattern, before)
3842 rtx pattern, before;
3846 if (GET_CODE (pattern) == SEQUENCE)
3847 insn = emit_insn_before (pattern, before);
3850 insn = make_jump_insn_raw (pattern);
3851 add_insn_before (insn, before);
3857 /* Make an instruction with body PATTERN and code CALL_INSN
3858 and output it before the instruction BEFORE. */
3861 emit_call_insn_before (pattern, before)
3862 rtx pattern, before;
3866 if (GET_CODE (pattern) == SEQUENCE)
3867 insn = emit_insn_before (pattern, before);
3870 insn = make_call_insn_raw (pattern);
3871 add_insn_before (insn, before);
3872 PUT_CODE (insn, CALL_INSN);
3878 /* Make an insn of code BARRIER
3879 and output it before the insn BEFORE. */
3882 emit_barrier_before (before)
3885 rtx insn = rtx_alloc (BARRIER);
3887 INSN_UID (insn) = cur_insn_uid++;
3889 add_insn_before (insn, before);
3893 /* Emit the label LABEL before the insn BEFORE. */
3896 emit_label_before (label, before)
3899 /* This can be called twice for the same label as a result of the
3900 confusion that follows a syntax error! So make it harmless. */
3901 if (INSN_UID (label) == 0)
3903 INSN_UID (label) = cur_insn_uid++;
3904 add_insn_before (label, before);
3910 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3913 emit_note_before (subtype, before)
3917 rtx note = rtx_alloc (NOTE);
3918 INSN_UID (note) = cur_insn_uid++;
3919 NOTE_SOURCE_FILE (note) = 0;
3920 NOTE_LINE_NUMBER (note) = subtype;
3922 add_insn_before (note, before);
3926 /* Make an insn of code INSN with body PATTERN
3927 and output it after the insn AFTER. */
3930 emit_insn_after (pattern, after)
3935 if (GET_CODE (pattern) == SEQUENCE)
3939 for (i = 0; i < XVECLEN (pattern, 0); i++)
3941 insn = XVECEXP (pattern, 0, i);
3942 add_insn_after (insn, after);
3948 insn = make_insn_raw (pattern);
3949 add_insn_after (insn, after);
3955 /* Similar to emit_insn_after, except that line notes are to be inserted so
3956 as to act as if this insn were at FROM. */
3959 emit_insn_after_with_line_notes (pattern, after, from)
3960 rtx pattern, after, from;
3962 rtx from_line = find_line_note (from);
3963 rtx after_line = find_line_note (after);
3964 rtx insn = emit_insn_after (pattern, after);
3967 emit_line_note_after (NOTE_SOURCE_FILE (from_line),
3968 NOTE_LINE_NUMBER (from_line),
3972 emit_line_note_after (NOTE_SOURCE_FILE (after_line),
3973 NOTE_LINE_NUMBER (after_line),
3977 /* Make an insn of code JUMP_INSN with body PATTERN
3978 and output it after the insn AFTER. */
3981 emit_jump_insn_after (pattern, after)
3986 if (GET_CODE (pattern) == SEQUENCE)
3987 insn = emit_insn_after (pattern, after);
3990 insn = make_jump_insn_raw (pattern);
3991 add_insn_after (insn, after);
3997 /* Make an insn of code BARRIER
3998 and output it after the insn AFTER. */
4001 emit_barrier_after (after)
4004 rtx insn = rtx_alloc (BARRIER);
4006 INSN_UID (insn) = cur_insn_uid++;
4008 add_insn_after (insn, after);
4012 /* Emit the label LABEL after the insn AFTER. */
4015 emit_label_after (label, after)
4018 /* This can be called twice for the same label
4019 as a result of the confusion that follows a syntax error!
4020 So make it harmless. */
4021 if (INSN_UID (label) == 0)
4023 INSN_UID (label) = cur_insn_uid++;
4024 add_insn_after (label, after);
4030 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4033 emit_note_after (subtype, after)
4037 rtx note = rtx_alloc (NOTE);
4038 INSN_UID (note) = cur_insn_uid++;
4039 NOTE_SOURCE_FILE (note) = 0;
4040 NOTE_LINE_NUMBER (note) = subtype;
4041 add_insn_after (note, after);
4045 /* Emit a line note for FILE and LINE after the insn AFTER. */
4048 emit_line_note_after (file, line, after)
4055 if (no_line_numbers && line > 0)
4061 note = rtx_alloc (NOTE);
4062 INSN_UID (note) = cur_insn_uid++;
4063 NOTE_SOURCE_FILE (note) = file;
4064 NOTE_LINE_NUMBER (note) = line;
4065 add_insn_after (note, after);
4069 /* Make an insn of code INSN with pattern PATTERN
4070 and add it to the end of the doubly-linked list.
4071 If PATTERN is a SEQUENCE, take the elements of it
4072 and emit an insn for each element.
4074 Returns the last insn emitted. */
4080 rtx insn = last_insn;
4082 if (GET_CODE (pattern) == SEQUENCE)
4086 for (i = 0; i < XVECLEN (pattern, 0); i++)
4088 insn = XVECEXP (pattern, 0, i);
4094 insn = make_insn_raw (pattern);
4101 /* Emit the insns in a chain starting with INSN.
4102 Return the last insn emitted. */
4112 rtx next = NEXT_INSN (insn);
4121 /* Emit the insns in a chain starting with INSN and place them in front of
4122 the insn BEFORE. Return the last insn emitted. */
4125 emit_insns_before (insn, before)
4133 rtx next = NEXT_INSN (insn);
4134 add_insn_before (insn, before);
4142 /* Emit the insns in a chain starting with FIRST and place them in back of
4143 the insn AFTER. Return the last insn emitted. */
4146 emit_insns_after (first, after)
4160 if (basic_block_for_insn
4161 && (unsigned int) INSN_UID (after) < basic_block_for_insn->num_elements
4162 && (bb = BLOCK_FOR_INSN (after)))
4164 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4165 set_block_for_insn (last, bb);
4166 set_block_for_insn (last, bb);
4167 if (bb->end == after)
4171 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4174 after_after = NEXT_INSN (after);
4176 NEXT_INSN (after) = first;
4177 PREV_INSN (first) = after;
4178 NEXT_INSN (last) = after_after;
4180 PREV_INSN (after_after) = last;
4182 if (after == last_insn)
4187 /* Make an insn of code JUMP_INSN with pattern PATTERN
4188 and add it to the end of the doubly-linked list. */
4191 emit_jump_insn (pattern)
4194 if (GET_CODE (pattern) == SEQUENCE)
4195 return emit_insn (pattern);
4198 rtx insn = make_jump_insn_raw (pattern);
4204 /* Make an insn of code CALL_INSN with pattern PATTERN
4205 and add it to the end of the doubly-linked list. */
4208 emit_call_insn (pattern)
4211 if (GET_CODE (pattern) == SEQUENCE)
4212 return emit_insn (pattern);
4215 rtx insn = make_call_insn_raw (pattern);
4217 PUT_CODE (insn, CALL_INSN);
4222 /* Add the label LABEL to the end of the doubly-linked list. */
4228 /* This can be called twice for the same label
4229 as a result of the confusion that follows a syntax error!
4230 So make it harmless. */
4231 if (INSN_UID (label) == 0)
4233 INSN_UID (label) = cur_insn_uid++;
4239 /* Make an insn of code BARRIER
4240 and add it to the end of the doubly-linked list. */
4245 rtx barrier = rtx_alloc (BARRIER);
4246 INSN_UID (barrier) = cur_insn_uid++;
4251 /* Make an insn of code NOTE
4252 with data-fields specified by FILE and LINE
4253 and add it to the end of the doubly-linked list,
4254 but only if line-numbers are desired for debugging info. */
4257 emit_line_note (file, line)
4261 set_file_and_line_for_stmt (file, line);
4264 if (no_line_numbers)
4268 return emit_note (file, line);
4271 /* Make an insn of code NOTE
4272 with data-fields specified by FILE and LINE
4273 and add it to the end of the doubly-linked list.
4274 If it is a line-number NOTE, omit it if it matches the previous one. */
4277 emit_note (file, line)
4285 if (file && last_filename && !strcmp (file, last_filename)
4286 && line == last_linenum)
4288 last_filename = file;
4289 last_linenum = line;
4292 if (no_line_numbers && line > 0)
4298 note = rtx_alloc (NOTE);
4299 INSN_UID (note) = cur_insn_uid++;
4300 NOTE_SOURCE_FILE (note) = file;
4301 NOTE_LINE_NUMBER (note) = line;
4306 /* Emit a NOTE, and don't omit it even if LINE is the previous note. */
4309 emit_line_note_force (file, line)
4314 return emit_line_note (file, line);
4317 /* Cause next statement to emit a line note even if the line number
4318 has not changed. This is used at the beginning of a function. */
4321 force_next_line_note ()
4326 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4327 note of this type already exists, remove it first. */
4330 set_unique_reg_note (insn, kind, datum)
4335 rtx note = find_reg_note (insn, kind, NULL_RTX);
4341 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4342 has multiple sets (some callers assume single_set
4343 means the insn only has one set, when in fact it
4344 means the insn only has one * useful * set). */
4345 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4352 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4353 It serves no useful purpose and breaks eliminate_regs. */
4354 if (GET_CODE (datum) == ASM_OPERANDS)
4364 XEXP (note, 0) = datum;
4368 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4369 return REG_NOTES (insn);
4372 /* Return an indication of which type of insn should have X as a body.
4373 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4379 if (GET_CODE (x) == CODE_LABEL)
4381 if (GET_CODE (x) == CALL)
4383 if (GET_CODE (x) == RETURN)
4385 if (GET_CODE (x) == SET)
4387 if (SET_DEST (x) == pc_rtx)
4389 else if (GET_CODE (SET_SRC (x)) == CALL)
4394 if (GET_CODE (x) == PARALLEL)
4397 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4398 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4400 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4401 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4403 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4404 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4410 /* Emit the rtl pattern X as an appropriate kind of insn.
4411 If X is a label, it is simply added into the insn chain. */
4417 enum rtx_code code = classify_insn (x);
4419 if (code == CODE_LABEL)
4420 return emit_label (x);
4421 else if (code == INSN)
4422 return emit_insn (x);
4423 else if (code == JUMP_INSN)
4425 rtx insn = emit_jump_insn (x);
4426 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4427 return emit_barrier ();
4430 else if (code == CALL_INSN)
4431 return emit_call_insn (x);
4436 /* Begin emitting insns to a sequence which can be packaged in an
4437 RTL_EXPR. If this sequence will contain something that might cause
4438 the compiler to pop arguments to function calls (because those
4439 pops have previously been deferred; see INHIBIT_DEFER_POP for more
4440 details), use do_pending_stack_adjust before calling this function.
4441 That will ensure that the deferred pops are not accidentally
4442 emitted in the middle of this sequence. */
4447 struct sequence_stack *tem;
4449 tem = (struct sequence_stack *) xmalloc (sizeof (struct sequence_stack));
4451 tem->next = seq_stack;
4452 tem->first = first_insn;
4453 tem->last = last_insn;
4454 tem->sequence_rtl_expr = seq_rtl_expr;
4462 /* Similarly, but indicate that this sequence will be placed in T, an
4463 RTL_EXPR. See the documentation for start_sequence for more
4464 information about how to use this function. */
4467 start_sequence_for_rtl_expr (t)
4475 /* Set up the insn chain starting with FIRST as the current sequence,
4476 saving the previously current one. See the documentation for
4477 start_sequence for more information about how to use this function. */
4480 push_to_sequence (first)
4487 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4493 /* Set up the insn chain from a chain stort in FIRST to LAST. */
4496 push_to_full_sequence (first, last)
4502 /* We really should have the end of the insn chain here. */
4503 if (last && NEXT_INSN (last))
4507 /* Set up the outer-level insn chain
4508 as the current sequence, saving the previously current one. */
4511 push_topmost_sequence ()
4513 struct sequence_stack *stack, *top = NULL;
4517 for (stack = seq_stack; stack; stack = stack->next)
4520 first_insn = top->first;
4521 last_insn = top->last;
4522 seq_rtl_expr = top->sequence_rtl_expr;
4525 /* After emitting to the outer-level insn chain, update the outer-level
4526 insn chain, and restore the previous saved state. */
4529 pop_topmost_sequence ()
4531 struct sequence_stack *stack, *top = NULL;
4533 for (stack = seq_stack; stack; stack = stack->next)
4536 top->first = first_insn;
4537 top->last = last_insn;
4538 /* ??? Why don't we save seq_rtl_expr here? */
4543 /* After emitting to a sequence, restore previous saved state.
4545 To get the contents of the sequence just made, you must call
4546 `gen_sequence' *before* calling here.
4548 If the compiler might have deferred popping arguments while
4549 generating this sequence, and this sequence will not be immediately
4550 inserted into the instruction stream, use do_pending_stack_adjust
4551 before calling gen_sequence. That will ensure that the deferred
4552 pops are inserted into this sequence, and not into some random
4553 location in the instruction stream. See INHIBIT_DEFER_POP for more
4554 information about deferred popping of arguments. */
4559 struct sequence_stack *tem = seq_stack;
4561 first_insn = tem->first;
4562 last_insn = tem->last;
4563 seq_rtl_expr = tem->sequence_rtl_expr;
4564 seq_stack = tem->next;
4569 /* This works like end_sequence, but records the old sequence in FIRST
4573 end_full_sequence (first, last)
4576 *first = first_insn;
4581 /* Return 1 if currently emitting into a sequence. */
4586 return seq_stack != 0;
4589 /* Generate a SEQUENCE rtx containing the insns already emitted
4590 to the current sequence.
4592 This is how the gen_... function from a DEFINE_EXPAND
4593 constructs the SEQUENCE that it returns. */
4603 /* Count the insns in the chain. */
4605 for (tem = first_insn; tem; tem = NEXT_INSN (tem))
4608 /* If only one insn, return it rather than a SEQUENCE.
4609 (Now that we cache SEQUENCE expressions, it isn't worth special-casing
4610 the case of an empty list.)
4611 We only return the pattern of an insn if its code is INSN and it
4612 has no notes. This ensures that no information gets lost. */
4614 && ! RTX_FRAME_RELATED_P (first_insn)
4615 && GET_CODE (first_insn) == INSN
4616 /* Don't throw away any reg notes. */
4617 && REG_NOTES (first_insn) == 0)
4618 return PATTERN (first_insn);
4620 result = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (len));
4622 for (i = 0, tem = first_insn; tem; tem = NEXT_INSN (tem), i++)
4623 XVECEXP (result, 0, i) = tem;
4628 /* Put the various virtual registers into REGNO_REG_RTX. */
4631 init_virtual_regs (es)
4632 struct emit_status *es;
4634 rtx *ptr = es->x_regno_reg_rtx;
4635 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4636 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4637 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4638 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4639 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4643 clear_emit_caches ()
4647 /* Clear the start_sequence/gen_sequence cache. */
4648 for (i = 0; i < SEQUENCE_RESULT_SIZE; i++)
4649 sequence_result[i] = 0;
4653 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4654 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4655 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4656 static int copy_insn_n_scratches;
4658 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4659 copied an ASM_OPERANDS.
4660 In that case, it is the original input-operand vector. */
4661 static rtvec orig_asm_operands_vector;
4663 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4664 copied an ASM_OPERANDS.
4665 In that case, it is the copied input-operand vector. */
4666 static rtvec copy_asm_operands_vector;
4668 /* Likewise for the constraints vector. */
4669 static rtvec orig_asm_constraints_vector;
4670 static rtvec copy_asm_constraints_vector;
4672 /* Recursively create a new copy of an rtx for copy_insn.
4673 This function differs from copy_rtx in that it handles SCRATCHes and
4674 ASM_OPERANDs properly.
4675 Normally, this function is not used directly; use copy_insn as front end.
4676 However, you could first copy an insn pattern with copy_insn and then use
4677 this function afterwards to properly copy any REG_NOTEs containing
4687 const char *format_ptr;
4689 code = GET_CODE (orig);
4706 for (i = 0; i < copy_insn_n_scratches; i++)
4707 if (copy_insn_scratch_in[i] == orig)
4708 return copy_insn_scratch_out[i];
4712 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4713 a LABEL_REF, it isn't sharable. */
4714 if (GET_CODE (XEXP (orig, 0)) == PLUS
4715 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4716 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4720 /* A MEM with a constant address is not sharable. The problem is that
4721 the constant address may need to be reloaded. If the mem is shared,
4722 then reloading one copy of this mem will cause all copies to appear
4723 to have been reloaded. */
4729 copy = rtx_alloc (code);
4731 /* Copy the various flags, and other information. We assume that
4732 all fields need copying, and then clear the fields that should
4733 not be copied. That is the sensible default behavior, and forces
4734 us to explicitly document why we are *not* copying a flag. */
4735 memcpy (copy, orig, sizeof (struct rtx_def) - sizeof (rtunion));
4737 /* We do not copy the USED flag, which is used as a mark bit during
4738 walks over the RTL. */
4741 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4742 if (GET_RTX_CLASS (code) == 'i')
4746 copy->frame_related = 0;
4749 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4751 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4753 copy->fld[i] = orig->fld[i];
4754 switch (*format_ptr++)
4757 if (XEXP (orig, i) != NULL)
4758 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4763 if (XVEC (orig, i) == orig_asm_constraints_vector)
4764 XVEC (copy, i) = copy_asm_constraints_vector;
4765 else if (XVEC (orig, i) == orig_asm_operands_vector)
4766 XVEC (copy, i) = copy_asm_operands_vector;
4767 else if (XVEC (orig, i) != NULL)
4769 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4770 for (j = 0; j < XVECLEN (copy, i); j++)
4771 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4782 /* These are left unchanged. */
4790 if (code == SCRATCH)
4792 i = copy_insn_n_scratches++;
4793 if (i >= MAX_RECOG_OPERANDS)
4795 copy_insn_scratch_in[i] = orig;
4796 copy_insn_scratch_out[i] = copy;
4798 else if (code == ASM_OPERANDS)
4800 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4801 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4802 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4803 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4809 /* Create a new copy of an rtx.
4810 This function differs from copy_rtx in that it handles SCRATCHes and
4811 ASM_OPERANDs properly.
4812 INSN doesn't really have to be a full INSN; it could be just the
4818 copy_insn_n_scratches = 0;
4819 orig_asm_operands_vector = 0;
4820 orig_asm_constraints_vector = 0;
4821 copy_asm_operands_vector = 0;
4822 copy_asm_constraints_vector = 0;
4823 return copy_insn_1 (insn);
4826 /* Initialize data structures and variables in this file
4827 before generating rtl for each function. */
4832 struct function *f = cfun;
4834 f->emit = (struct emit_status *) xmalloc (sizeof (struct emit_status));
4837 seq_rtl_expr = NULL;
4839 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4842 first_label_num = label_num;
4846 clear_emit_caches ();
4848 /* Init the tables that describe all the pseudo regs. */
4850 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4852 f->emit->regno_pointer_align
4853 = (unsigned char *) xcalloc (f->emit->regno_pointer_align_length,
4854 sizeof (unsigned char));
4857 = (rtx *) xcalloc (f->emit->regno_pointer_align_length, sizeof (rtx));
4860 = (tree *) xcalloc (f->emit->regno_pointer_align_length, sizeof (tree));
4862 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
4863 init_virtual_regs (f->emit);
4865 /* Indicate that the virtual registers and stack locations are
4867 REG_POINTER (stack_pointer_rtx) = 1;
4868 REG_POINTER (frame_pointer_rtx) = 1;
4869 REG_POINTER (hard_frame_pointer_rtx) = 1;
4870 REG_POINTER (arg_pointer_rtx) = 1;
4872 REG_POINTER (virtual_incoming_args_rtx) = 1;
4873 REG_POINTER (virtual_stack_vars_rtx) = 1;
4874 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4875 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4876 REG_POINTER (virtual_cfa_rtx) = 1;
4878 #ifdef STACK_BOUNDARY
4879 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4880 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4881 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4882 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4884 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4885 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4886 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4887 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4888 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
4891 #ifdef INIT_EXPANDERS
4896 /* Mark SS for GC. */
4899 mark_sequence_stack (ss)
4900 struct sequence_stack *ss;
4904 ggc_mark_rtx (ss->first);
4905 ggc_mark_tree (ss->sequence_rtl_expr);
4910 /* Mark ES for GC. */
4913 mark_emit_status (es)
4914 struct emit_status *es;
4923 for (i = es->regno_pointer_align_length, r = es->x_regno_reg_rtx,
4925 i > 0; --i, ++r, ++t)
4931 mark_sequence_stack (es->sequence_stack);
4932 ggc_mark_tree (es->sequence_rtl_expr);
4933 ggc_mark_rtx (es->x_first_insn);
4936 /* Generate the constant 0. */
4939 gen_const_vector_0 (mode)
4940 enum machine_mode mode;
4945 enum machine_mode inner;
4947 units = GET_MODE_NUNITS (mode);
4948 inner = GET_MODE_INNER (mode);
4950 v = rtvec_alloc (units);
4952 /* We need to call this function after we to set CONST0_RTX first. */
4953 if (!CONST0_RTX (inner))
4956 for (i = 0; i < units; ++i)
4957 RTVEC_ELT (v, i) = CONST0_RTX (inner);
4959 tem = gen_rtx_CONST_VECTOR (mode, v);
4963 /* Create some permanent unique rtl objects shared between all functions.
4964 LINE_NUMBERS is nonzero if line numbers are to be generated. */
4967 init_emit_once (line_numbers)
4971 enum machine_mode mode;
4972 enum machine_mode double_mode;
4974 /* Initialize the CONST_INT and memory attribute hash tables. */
4975 const_int_htab = htab_create (37, const_int_htab_hash,
4976 const_int_htab_eq, NULL);
4977 ggc_add_deletable_htab (const_int_htab, 0, 0);
4979 mem_attrs_htab = htab_create (37, mem_attrs_htab_hash,
4980 mem_attrs_htab_eq, NULL);
4981 ggc_add_deletable_htab (mem_attrs_htab, 0, mem_attrs_mark);
4983 no_line_numbers = ! line_numbers;
4985 /* Compute the word and byte modes. */
4987 byte_mode = VOIDmode;
4988 word_mode = VOIDmode;
4989 double_mode = VOIDmode;
4991 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
4992 mode = GET_MODE_WIDER_MODE (mode))
4994 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
4995 && byte_mode == VOIDmode)
4998 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
4999 && word_mode == VOIDmode)
5003 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5004 mode = GET_MODE_WIDER_MODE (mode))
5006 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5007 && double_mode == VOIDmode)
5011 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5013 /* Assign register numbers to the globally defined register rtx.
5014 This must be done at runtime because the register number field
5015 is in a union and some compilers can't initialize unions. */
5017 pc_rtx = gen_rtx (PC, VOIDmode);
5018 cc0_rtx = gen_rtx (CC0, VOIDmode);
5019 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5020 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5021 if (hard_frame_pointer_rtx == 0)
5022 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5023 HARD_FRAME_POINTER_REGNUM);
5024 if (arg_pointer_rtx == 0)
5025 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5026 virtual_incoming_args_rtx =
5027 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5028 virtual_stack_vars_rtx =
5029 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5030 virtual_stack_dynamic_rtx =
5031 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5032 virtual_outgoing_args_rtx =
5033 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5034 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5036 /* These rtx must be roots if GC is enabled. */
5037 ggc_add_rtx_root (global_rtl, GR_MAX);
5039 #ifdef INIT_EXPANDERS
5040 /* This is to initialize {init|mark|free}_machine_status before the first
5041 call to push_function_context_to. This is needed by the Chill front
5042 end which calls push_function_context_to before the first call to
5043 init_function_start. */
5047 /* Create the unique rtx's for certain rtx codes and operand values. */
5049 /* Don't use gen_rtx here since gen_rtx in this case
5050 tries to use these variables. */
5051 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5052 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5053 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5054 ggc_add_rtx_root (const_int_rtx, 2 * MAX_SAVED_CONST_INT + 1);
5056 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5057 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5058 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5060 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5062 dconst0 = REAL_VALUE_ATOF ("0", double_mode);
5063 dconst1 = REAL_VALUE_ATOF ("1", double_mode);
5064 dconst2 = REAL_VALUE_ATOF ("2", double_mode);
5065 dconstm1 = REAL_VALUE_ATOF ("-1", double_mode);
5067 for (i = 0; i <= 2; i++)
5069 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
5070 mode = GET_MODE_WIDER_MODE (mode))
5072 rtx tem = rtx_alloc (CONST_DOUBLE);
5073 union real_extract u;
5075 /* Zero any holes in a structure. */
5076 memset ((char *) &u, 0, sizeof u);
5077 u.d = i == 0 ? dconst0 : i == 1 ? dconst1 : dconst2;
5079 /* Avoid trailing garbage in the rtx. */
5080 if (sizeof (u) < sizeof (HOST_WIDE_INT))
5081 CONST_DOUBLE_LOW (tem) = 0;
5082 if (sizeof (u) < 2 * sizeof (HOST_WIDE_INT))
5083 CONST_DOUBLE_HIGH (tem) = 0;
5085 memcpy (&CONST_DOUBLE_LOW (tem), &u, sizeof u);
5086 CONST_DOUBLE_CHAIN (tem) = NULL_RTX;
5087 PUT_MODE (tem, mode);
5089 const_tiny_rtx[i][(int) mode] = tem;
5092 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5094 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
5095 mode = GET_MODE_WIDER_MODE (mode))
5096 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5098 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5100 mode = GET_MODE_WIDER_MODE (mode))
5101 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5104 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5106 mode = GET_MODE_WIDER_MODE (mode))
5107 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5109 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5111 mode = GET_MODE_WIDER_MODE (mode))
5112 const_tiny_rtx[0][(int) mode] = gen_const_vector_0 (mode);
5114 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5115 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5116 const_tiny_rtx[0][i] = const0_rtx;
5118 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5119 if (STORE_FLAG_VALUE == 1)
5120 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5122 /* For bounded pointers, `&const_tiny_rtx[0][0]' is not the same as
5123 `(rtx *) const_tiny_rtx'. The former has bounds that only cover
5124 `const_tiny_rtx[0]', whereas the latter has bounds that cover all. */
5125 ggc_add_rtx_root ((rtx *) const_tiny_rtx, sizeof const_tiny_rtx / sizeof (rtx));
5126 ggc_add_rtx_root (&const_true_rtx, 1);
5128 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5129 return_address_pointer_rtx
5130 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5134 struct_value_rtx = STRUCT_VALUE;
5136 struct_value_rtx = gen_rtx_REG (Pmode, STRUCT_VALUE_REGNUM);
5139 #ifdef STRUCT_VALUE_INCOMING
5140 struct_value_incoming_rtx = STRUCT_VALUE_INCOMING;
5142 #ifdef STRUCT_VALUE_INCOMING_REGNUM
5143 struct_value_incoming_rtx
5144 = gen_rtx_REG (Pmode, STRUCT_VALUE_INCOMING_REGNUM);
5146 struct_value_incoming_rtx = struct_value_rtx;
5150 #ifdef STATIC_CHAIN_REGNUM
5151 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5153 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5154 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5155 static_chain_incoming_rtx
5156 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5159 static_chain_incoming_rtx = static_chain_rtx;
5163 static_chain_rtx = STATIC_CHAIN;
5165 #ifdef STATIC_CHAIN_INCOMING
5166 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5168 static_chain_incoming_rtx = static_chain_rtx;
5172 if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5173 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5175 ggc_add_rtx_root (&pic_offset_table_rtx, 1);
5176 ggc_add_rtx_root (&struct_value_rtx, 1);
5177 ggc_add_rtx_root (&struct_value_incoming_rtx, 1);
5178 ggc_add_rtx_root (&static_chain_rtx, 1);
5179 ggc_add_rtx_root (&static_chain_incoming_rtx, 1);
5180 ggc_add_rtx_root (&return_address_pointer_rtx, 1);
5183 /* Query and clear/ restore no_line_numbers. This is used by the
5184 switch / case handling in stmt.c to give proper line numbers in
5185 warnings about unreachable code. */
5188 force_line_numbers ()
5190 int old = no_line_numbers;
5192 no_line_numbers = 0;
5194 force_next_line_note ();
5199 restore_line_number_status (old_value)
5202 no_line_numbers = old_value;