/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
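
/* Worked example of the heuristic above (an illustrative sketch, not
   part of the original file; the target numbers are assumptions).  On
   a target with MOVE_MAX == 4 and the default MOVE_RATIO of 15, a
   word-aligned 32-byte copy costs 8 SImode moves, which is below the
   ratio, so move_by_pieces is preferred over a movstr or libcall.  */
#if 0
static int
example_want_move_by_pieces (size)
     unsigned HOST_WIDE_INT size;
{
  /* 32 is the alignment in bits; this is what MOVE_BY_PIECES_P
     expands to.  */
  return move_by_pieces_ninsns (size, 32) < (unsigned int) MOVE_RATIO;
}
#endif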
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
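
/* Illustrative sketch (not in the original file): after init_expr_once
   has run, the two tables record, per mode, whether a plain move
   between a hard register and memory was recognized; a hypothetical
   consumer would test both directions like this.  */
#if 0
static int
example_mode_has_direct_moves (mode)
     enum machine_mode mode;
{
  return direct_load[(int) mode] != 0 && direct_store[(int) mode] != 0;
}
#endif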
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}
/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
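
/* Usage sketch for the queueing machinery (illustrative only; VAR and
   INC are hypothetical operands).  A post-increment is expanded by
   queueing the increment, protecting every use of the old value, and
   eventually flushing the queue.  */
#if 0
static rtx
example_post_increment (var, inc)
     rtx var, inc;
{
  rtx queued = enqueue_insn (var,
			     gen_move_insn (var,
					    gen_rtx_PLUS (GET_MODE (var),
							  var, inc)));
  /* protect_from_queue must wrap any use of a QUEUED before it is put
     into an insn; here it yields the pre-increment value.  */
  rtx old_value = protect_from_queue (queued, 0);

  emit_queue ();		/* Emit the queued increment itself.  */
  return old_value;
}
#endif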
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }
  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
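
/* Usage sketch (illustrative, not from the original file): sign-extend
   a SImode value into a fresh DImode pseudo.  convert_move picks a
   direct extend insn, a multiword expansion, or shifts as available.  */
#if 0
static rtx
example_sign_extend_si_to_di (src)
     rtx src;			/* SImode value */
{
  rtx dst = gen_reg_rtx (DImode);

  convert_move (dst, src, 0);	/* UNSIGNEDP == 0: sign extension.  */
  return dst;
}
#endif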
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
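
/* Worked example of the CONST_INT special case above (illustrative;
   the host width is an assumption).  On a host with 32-bit
   HOST_WIDE_INT, converting the QImode constant -1 to unsigned DImode
   must produce the constant 0xff with a zero high word, not a
   sign-extended all-ones value; the masking path above guarantees
   that.  */
#if 0
static rtx
example_unsigned_qi_to_di ()
{
  rtx x = GEN_INT (-1);		/* QImode constant, all bits set */

  /* Takes the special-case path: VAL is masked to 255 and paired with
     a zero high word, rather than gen_lowpart's sign-extended
     interpretation.  */
  return convert_modes (DImode, QImode, x, 1);
}
#endif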
/* This macro determines the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
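
/* Worked example of the decomposition above (illustrative; the target
   numbers are assumptions).  With MOVE_MAX_PIECES == 4, a word-aligned
   15-byte copy is emitted greedily per mode: 15/4 = 3 SImode moves
   (remainder 3), 3/2 = 1 HImode move (remainder 1), then 1 QImode
   move -- the same arithmetic move_by_pieces_ninsns performs.  */
#if 0
static void
example_copy_fifteen_bytes (dst, src)
     rtx dst, src;		/* BLKmode MEMs, 32-bit aligned */
{
  move_by_pieces (dst, src, (unsigned HOST_WIDE_INT) 15, 32);
}
#endif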
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size)
     rtx x, y;
     rtx size;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif

      /* If we are initializing a readonly value, show the above call
	 clobbered it.  Otherwise, a load from it may erroneously be hoisted
	 from a loop.  */
      if (RTX_UNCHANGING_P (x))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
    }

  return retval;
}
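
/* Usage sketch (illustrative; the addresses are hypothetical Pmode
   pseudos).  emit_block_move picks among move_by_pieces, a movstr
   pattern, and a memcpy/bcopy call depending on size and alignment.  */
#if 0
static void
example_block_copy (dst_addr, src_addr)
     rtx dst_addr, src_addr;
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  set_mem_align (dst, 32);
  set_mem_align (src, 32);
  emit_block_move (dst, src, GEN_INT (64));
}
#endif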
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD
      && BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize)
     rtx dst, orig_src;
     int ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if ((bytepos == 0
	       && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	      || (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		  && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1)))))
	    {
	      tmps[i] = XEXP (src, bytepos != 0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     0, 1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src),
					   GET_MODE_SIZE (GET_MODE (src)), 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
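
/* Illustrative sketch of the PARALLEL form emit_group_load consumes
   (the register numbers are hypothetical): a 16-byte block split into
   two DImode registers at byte offsets 0 and 8.  */
#if 0
static void
example_load_two_register_group (src)
     rtx src;			/* 16-byte BLKmode MEM */
{
  rtx dst
    = gen_rtx_PARALLEL (VOIDmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 4),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 5),
						      GEN_INT (8))));

  emit_group_load (dst, src, 16);
}
#endif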
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  */

void
emit_group_store (orig_dst, src, ssize)
     rtx orig_dst, src;
     int ssize;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize);
      emit_group_load (dst, temp, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.

     If FUNCTION_ARG_REG_LITTLE_ENDIAN is set and convert_to_mode does a copy,
     the wrong part of the register gets copied so we fake a type conversion
     in place.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    {
      if (FUNCTION_ARG_REG_LITTLE_ENDIAN)
	srcreg = simplify_gen_subreg (word_mode, srcreg, GET_MODE (srcreg), 0);
      else
	srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
    }

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN
      && !FUNCTION_ARG_REG_LITTLE_ENDIAN
      && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);
    }

  return tgtblk;
}
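
/* Worked example of the copy loop above (illustrative; the sizes are
   assumptions).  For a 6-byte struct with 16-bit alignment on a 32-bit
   BYTES_BIG_ENDIAN target, bitsize == 16 and big_endian_correction ==
   16, so the three halfword pieces come from source bit offsets 16, 0
   (second word), 16 and land at destination bit offsets 0, 16, 0 --
   skipping the unused high-order bytes of the final source word.  */
#if 0
static rtx
example_struct_return_to_stack (srcreg, type)
     rtx srcreg;		/* hard registers holding the return value */
     tree type;			/* the 6-byte structure type */
{
  /* A null target asks copy_blkmode_from_reg to create and return a
     stack temporary.  */
  return copy_blkmode_from_reg (NULL_RTX, srcreg, type);
}
#endif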
2252 /* Add a USE expression for REG to the (possibly empty) list pointed
2253 to by CALL_FUSAGE. REG must denote a hard register. */
2256 use_reg (call_fusage, reg)
2257 rtx *call_fusage, reg;
2259 if (GET_CODE (reg) != REG
2260 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2264 = gen_rtx_EXPR_LIST (VOIDmode,
2265 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2268 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2269 starting at REGNO. All of these registers must be hard registers. */
2272 use_regs (call_fusage, regno, nregs)
2279 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2282 for (i = 0; i < nregs; i++)
2283 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2286 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2287 PARALLEL REGS. This is for calls that pass values in multiple
2288 non-contiguous locations. The Irix 6 ABI has examples of this. */
2291 use_group_regs (call_fusage, regs)
2297 for (i = 0; i < XVECLEN (regs, 0); i++)
2299 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2301 /* A NULL entry means the parameter goes both on the stack and in
2302 registers. This can also be a MEM for targets that pass values
2303 partially on the stack and partially in registers. */
2304 if (reg != 0 && GET_CODE (reg) == REG)
2305 use_reg (call_fusage, reg);
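/* A hedged illustration (the rtl below is a sketch, not from this file):
   a value passed in two non-contiguous registers might be described as

     (parallel [(expr_list (reg:SI 4) (const_int 0))
                (expr_list (reg:SI 6) (const_int 8))])

   use_group_regs then adds a USE for regs 4 and 6, while a NULL or MEM
   entry, as noted above, is left alone.  */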
2311 can_store_by_pieces (len, constfun, constfundata, align)
2312 unsigned HOST_WIDE_INT len;
2313 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2317 unsigned HOST_WIDE_INT max_size, l;
2318 HOST_WIDE_INT offset = 0;
2319 enum machine_mode mode, tmode;
2320 enum insn_code icode;
2324 if (! MOVE_BY_PIECES_P (len, align))
2327 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2328 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2329 align = MOVE_MAX * BITS_PER_UNIT;
2331 /* We would first store what we can in the largest integer mode, then go to
2332 successively smaller modes. */
2335 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2340 max_size = MOVE_MAX_PIECES + 1;
2341 while (max_size > 1)
2343 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2344 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2345 if (GET_MODE_SIZE (tmode) < max_size)
2348 if (mode == VOIDmode)
2351 icode = mov_optab->handlers[(int) mode].insn_code;
2352 if (icode != CODE_FOR_nothing
2353 && align >= GET_MODE_ALIGNMENT (mode))
2355 unsigned int size = GET_MODE_SIZE (mode);
2362 cst = (*constfun) (constfundata, offset, mode);
2363 if (!LEGITIMATE_CONSTANT_P (cst))
2373 max_size = GET_MODE_SIZE (mode);
2376 /* The code above should have handled everything. */
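/* A hedged usage sketch for the predicate above; the identifiers are
   illustrative only, not part of this file:

     static rtx
     example_constfun (data, offset, mode)
          PTR data ATTRIBUTE_UNUSED;
          HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
          enum machine_mode mode;
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, example_constfun, NULL, align))
       store_by_pieces (to, len, example_constfun, NULL, align);

   Checking first matters because store_by_pieces aborts when the
   piecewise strategy is not usable.  */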
2384 /* Generate several move instructions to store LEN bytes generated by
2385 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2386 pointer which will be passed as argument in every CONSTFUN call.
2387 ALIGN is maximum alignment we can assume. */
2390 store_by_pieces (to, len, constfun, constfundata, align)
2392 unsigned HOST_WIDE_INT len;
2393 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2397 struct store_by_pieces data;
2399 if (! MOVE_BY_PIECES_P (len, align))
2401 to = protect_from_queue (to, 1);
2402 data.constfun = constfun;
2403 data.constfundata = constfundata;
2406 store_by_pieces_1 (&data, align);
2409 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2410 rtx with BLKmode). The caller must pass TO through protect_from_queue
2411 before calling. ALIGN is maximum alignment we can assume. */
2414 clear_by_pieces (to, len, align)
2416 unsigned HOST_WIDE_INT len;
2419 struct store_by_pieces data;
2421 data.constfun = clear_by_pieces_1;
2422 data.constfundata = NULL;
2425 store_by_pieces_1 (&data, align);
2428 /* Callback routine for clear_by_pieces.
2429 Return const0_rtx unconditionally. */
2432 clear_by_pieces_1 (data, offset, mode)
2433 PTR data ATTRIBUTE_UNUSED;
2434 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2435 enum machine_mode mode ATTRIBUTE_UNUSED;
2440 /* Subroutine of clear_by_pieces and store_by_pieces.
2441 Generate several move instructions to store LEN bytes of block TO. (A MEM
2442 rtx with BLKmode). The caller must pass TO through protect_from_queue
2443 before calling. ALIGN is maximum alignment we can assume. */
2446 store_by_pieces_1 (data, align)
2447 struct store_by_pieces *data;
2450 rtx to_addr = XEXP (data->to, 0);
2451 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2452 enum machine_mode mode = VOIDmode, tmode;
2453 enum insn_code icode;
2456 data->to_addr = to_addr;
2458 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2459 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2461 data->explicit_inc_to = 0;
2463 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2465 data->offset = data->len;
2467 /* If storing requires more than two move insns,
2468 copy addresses to registers (to make displacements shorter)
2469 and use post-increment if available. */
2470 if (!data->autinc_to
2471 && move_by_pieces_ninsns (data->len, align) > 2)
2473 /* Determine the main mode we'll be using. */
2474 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2475 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2476 if (GET_MODE_SIZE (tmode) < max_size)
2479 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2481 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2482 data->autinc_to = 1;
2483 data->explicit_inc_to = -1;
2486 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2487 && ! data->autinc_to)
2489 data->to_addr = copy_addr_to_reg (to_addr);
2490 data->autinc_to = 1;
2491 data->explicit_inc_to = 1;
2494 if ( !data->autinc_to && CONSTANT_P (to_addr))
2495 data->to_addr = copy_addr_to_reg (to_addr);
2498 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2499 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2500 align = MOVE_MAX * BITS_PER_UNIT;
2502 /* First store what we can in the largest integer mode, then go to
2503 successively smaller modes. */
2505 while (max_size > 1)
2507 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2508 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2509 if (GET_MODE_SIZE (tmode) < max_size)
2512 if (mode == VOIDmode)
2515 icode = mov_optab->handlers[(int) mode].insn_code;
2516 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2517 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2519 max_size = GET_MODE_SIZE (mode);
2522 /* The code above should have handled everything. */
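/* A hedged worked example of the descent above, assuming
   MOVE_MAX_PIECES == 8, a fully aligned target, and DI/SI/HI/QI move
   patterns: for len == 11, DImode is chosen first and stores 8 bytes;
   SImode is selected next but stores nothing, since only 3 bytes
   remain; HImode stores 2 bytes and QImode the final byte.  */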
2527 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2528 with move instructions for mode MODE. GENFUN is the gen_... function
2529 to make a move insn for that mode. DATA has all the other info. */
2532 store_by_pieces_2 (genfun, mode, data)
2533 rtx (*genfun) PARAMS ((rtx, ...));
2534 enum machine_mode mode;
2535 struct store_by_pieces *data;
2537 unsigned int size = GET_MODE_SIZE (mode);
2540 while (data->len >= size)
2543 data->offset -= size;
2545 if (data->autinc_to)
2546 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2549 to1 = adjust_address (data->to, mode, data->offset);
2551 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2552 emit_insn (gen_add2_insn (data->to_addr,
2553 GEN_INT (-(HOST_WIDE_INT) size)));
2555 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2556 emit_insn ((*genfun) (to1, cst));
2558 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2559 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2561 if (! data->reverse)
2562 data->offset += size;
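/* A hedged note on the explicit-increment path above: when
   explicit_inc_to > 0, each move through data->to_addr is followed by
   an explicit add of SIZE, so two SImode stores emit, conceptually,
   store, add 4, store, add 4, rather than relying on a POST_INC
   address inside the MEM itself.  */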
2568 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2569 its length in bytes. */
2572 clear_storage (object, size)
2576 #ifdef TARGET_MEM_FUNCTIONS
2578 tree call_expr, arg_list;
2581 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2582 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2584 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2585 just move a zero. Otherwise, do this a piece at a time. */
2586 if (GET_MODE (object) != BLKmode
2587 && GET_CODE (size) == CONST_INT
2588 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2589 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2592 object = protect_from_queue (object, 1);
2593 size = protect_from_queue (size, 0);
2595 if (GET_CODE (size) == CONST_INT
2596 && MOVE_BY_PIECES_P (INTVAL (size), align))
2597 clear_by_pieces (object, INTVAL (size), align);
2600 /* Try the most limited insn first, because there's no point
2601 including more than one in the machine description unless
2602 the more limited one has some advantage. */
2604 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2605 enum machine_mode mode;
2607 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2608 mode = GET_MODE_WIDER_MODE (mode))
2610 enum insn_code code = clrstr_optab[(int) mode];
2611 insn_operand_predicate_fn pred;
2613 if (code != CODE_FOR_nothing
2614 /* We don't need MODE to be narrower than
2615 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2616 the mode mask, as it is returned by the macro, it will
2617 definitely be less than the actual mode mask. */
2618 && ((GET_CODE (size) == CONST_INT
2619 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2620 <= (GET_MODE_MASK (mode) >> 1)))
2621 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2622 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2623 || (*pred) (object, BLKmode))
2624 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2625 || (*pred) (opalign, VOIDmode)))
2628 rtx last = get_last_insn ();
2631 op1 = convert_to_mode (mode, size, 1);
2632 pred = insn_data[(int) code].operand[1].predicate;
2633 if (pred != 0 && ! (*pred) (op1, mode))
2634 op1 = copy_to_mode_reg (mode, op1);
2636 pat = GEN_FCN ((int) code) (object, op1, opalign);
2643 delete_insns_since (last);
2647 /* OBJECT or SIZE may have been passed through protect_from_queue.
2649 It is unsafe to save the value generated by protect_from_queue
2650 and reuse it later. Consider what happens if emit_queue is
2651 called before the return value from protect_from_queue is used.
2653 Expansion of the CALL_EXPR below will call emit_queue before
2654 we are finished emitting RTL for argument setup. So if we are
2655 not careful we could get the wrong value for an argument.
2657 To avoid this problem we go ahead and emit code to copy OBJECT
2658 and SIZE into new pseudos. We can then place those new pseudos
2659 into an RTL_EXPR and use them later, even after a call to
2662 Note this is not strictly needed for library calls since they
2663 do not call emit_queue before loading their arguments. However,
2664 we may need to have library calls call emit_queue in the future
2665 since failing to do so could cause problems for targets which
2666 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2667 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2669 #ifdef TARGET_MEM_FUNCTIONS
2670 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2672 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2673 TREE_UNSIGNED (integer_type_node));
2674 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2677 #ifdef TARGET_MEM_FUNCTIONS
2678 /* It is incorrect to use the libcall calling conventions to call
2679 memset in this context.
2681 This could be a user call to memset and the user may wish to
2682 examine the return value from memset.
2684 For targets where libcalls and normal calls have different
2685 conventions for returning pointers, we could end up generating
2688 So instead of using a libcall sequence we build up a suitable
2689 CALL_EXPR and expand the call in the normal fashion. */
2690 if (fn == NULL_TREE)
2694 /* This was copied from except.c; I don't know whether all of this is
2695 necessary in this context or not. */
2696 fn = get_identifier ("memset");
2697 fntype = build_pointer_type (void_type_node);
2698 fntype = build_function_type (fntype, NULL_TREE);
2699 fn = build_decl (FUNCTION_DECL, fn, fntype);
2700 ggc_add_tree_root (&fn, 1);
2701 DECL_EXTERNAL (fn) = 1;
2702 TREE_PUBLIC (fn) = 1;
2703 DECL_ARTIFICIAL (fn) = 1;
2704 TREE_NOTHROW (fn) = 1;
2705 make_decl_rtl (fn, NULL);
2706 assemble_external (fn);
2709 /* We need to make an argument list for the function call.
2711 memset has three arguments: the first is a void * address, the
2712 second an integer with the initialization value, the last is a
2713 size_t byte count to set. */
2715 = build_tree_list (NULL_TREE,
2716 make_tree (build_pointer_type (void_type_node),
2718 TREE_CHAIN (arg_list)
2719 = build_tree_list (NULL_TREE,
2720 make_tree (integer_type_node, const0_rtx));
2721 TREE_CHAIN (TREE_CHAIN (arg_list))
2722 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2723 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2725 /* Now we have to build up the CALL_EXPR itself. */
2726 call_expr = build1 (ADDR_EXPR,
2727 build_pointer_type (TREE_TYPE (fn)), fn);
2728 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2729 call_expr, arg_list, NULL_TREE);
2730 TREE_SIDE_EFFECTS (call_expr) = 1;
2732 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2734 emit_library_call (bzero_libfunc, LCT_NORMAL,
2735 VOIDmode, 2, object, Pmode, size,
2736 TYPE_MODE (integer_type_node));
2739 /* If we are initializing a readonly value, show the above call
2740 clobbered it. Otherwise, a load from it may erroneously be
2741 hoisted from a loop. */
2742 if (RTX_UNCHANGING_P (object))
2743 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
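/* A hedged usage sketch: a caller clearing a 16-byte BLKmode MEM would
   write

     clear_storage (object, GEN_INT (16));

   and, depending on alignment and target support, the code above picks
   clear_by_pieces, a clrstrM pattern, or the memset/bzero call.  */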
2750 /* Generate code to copy Y into X.
2751 Both Y and X must have the same mode, except that
2752 Y can be a constant with VOIDmode.
2753 This mode cannot be BLKmode; use emit_block_move for that.
2755 Return the last instruction emitted. */
2758 emit_move_insn (x, y)
2761 enum machine_mode mode = GET_MODE (x);
2762 rtx y_cst = NULL_RTX;
2765 x = protect_from_queue (x, 1);
2766 y = protect_from_queue (y, 0);
2768 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2771 /* Never force constant_p_rtx to memory. */
2772 if (GET_CODE (y) == CONSTANT_P_RTX)
2774 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2777 y = force_const_mem (mode, y);
2780 /* If X or Y are memory references, verify that their addresses are valid
2782 if (GET_CODE (x) == MEM
2783 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2784 && ! push_operand (x, GET_MODE (x)))
2786 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2787 x = validize_mem (x);
2789 if (GET_CODE (y) == MEM
2790 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2792 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2793 y = validize_mem (y);
2795 if (mode == BLKmode)
2798 last_insn = emit_move_insn_1 (x, y);
2800 if (y_cst && GET_CODE (x) == REG)
2801 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
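/* A hedged usage sketch: loading a constant into a fresh pseudo,

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   uses the SImode mov pattern directly; a constant that fails
   LEGITIMATE_CONSTANT_P would instead be forced into memory first,
   with the REG_EQUAL note above preserving the original value.  */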
2806 /* Low level part of emit_move_insn.
2807 Called just like emit_move_insn, but assumes X and Y
2808 are basically valid. */
2811 emit_move_insn_1 (x, y)
2814 enum machine_mode mode = GET_MODE (x);
2815 enum machine_mode submode;
2816 enum mode_class class = GET_MODE_CLASS (mode);
2818 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2821 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2823 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2825 /* Expand complex moves by moving real part and imag part, if possible. */
2826 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2827 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2829 (class == MODE_COMPLEX_INT
2830 ? MODE_INT : MODE_FLOAT),
2832 && (mov_optab->handlers[(int) submode].insn_code
2833 != CODE_FOR_nothing))
2835 /* Don't split destination if it is a stack push. */
2836 int stack = push_operand (x, GET_MODE (x));
2838 #ifdef PUSH_ROUNDING
2839 /* In case we output to the stack, but the size is smaller than the
2840 machine can push exactly, we need to use move instructions. */
2842 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2843 != GET_MODE_SIZE (submode)))
2846 HOST_WIDE_INT offset1, offset2;
2848 /* Do not use anti_adjust_stack, since we don't want to update
2849 stack_pointer_delta. */
2850 temp = expand_binop (Pmode,
2851 #ifdef STACK_GROWS_DOWNWARD
2859 (GET_MODE_SIZE (GET_MODE (x)))),
2860 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2862 if (temp != stack_pointer_rtx)
2863 emit_move_insn (stack_pointer_rtx, temp);
2865 #ifdef STACK_GROWS_DOWNWARD
2867 offset2 = GET_MODE_SIZE (submode);
2869 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2870 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2871 + GET_MODE_SIZE (submode));
2874 emit_move_insn (change_address (x, submode,
2875 gen_rtx_PLUS (Pmode,
2877 GEN_INT (offset1))),
2878 gen_realpart (submode, y));
2879 emit_move_insn (change_address (x, submode,
2880 gen_rtx_PLUS (Pmode,
2882 GEN_INT (offset2))),
2883 gen_imagpart (submode, y));
2887 /* If this is a stack push, push the highpart first, so it
2888 will be in the argument order.
2890 In that case, change_address is used only to convert
2891 the mode, not to change the address. */
2894 /* Note that the real part always precedes the imag part in memory
2895 regardless of machine's endianness. */
2896 #ifdef STACK_GROWS_DOWNWARD
2897 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2898 (gen_rtx_MEM (submode, XEXP (x, 0)),
2899 gen_imagpart (submode, y)));
2900 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2901 (gen_rtx_MEM (submode, XEXP (x, 0)),
2902 gen_realpart (submode, y)));
2904 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2905 (gen_rtx_MEM (submode, XEXP (x, 0)),
2906 gen_realpart (submode, y)));
2907 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2908 (gen_rtx_MEM (submode, XEXP (x, 0)),
2909 gen_imagpart (submode, y)));
2914 rtx realpart_x, realpart_y;
2915 rtx imagpart_x, imagpart_y;
2917 /* If this is a complex value with each part being smaller than a
2918 word, the usual calling sequence will likely pack the pieces into
2919 a single register. Unfortunately, SUBREG of hard registers only
2920 deals in terms of words, so we have a problem converting input
2921 arguments to the CONCAT of two registers that is used elsewhere
2922 for complex values. If this is before reload, we can copy it into
2923 memory and reload. FIXME, we should see about using extract and
2924 insert on integer registers, but complex short and complex char
2925 variables should be rarely used. */
2926 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2927 && (reload_in_progress | reload_completed) == 0)
2930 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2932 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2934 if (packed_dest_p || packed_src_p)
2936 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2937 ? MODE_FLOAT : MODE_INT);
2939 enum machine_mode reg_mode
2940 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2942 if (reg_mode != BLKmode)
2944 rtx mem = assign_stack_temp (reg_mode,
2945 GET_MODE_SIZE (mode), 0);
2946 rtx cmem = adjust_address (mem, mode, 0);
2949 = N_("function using short complex types cannot be inline");
2953 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2955 emit_move_insn_1 (cmem, y);
2956 return emit_move_insn_1 (sreg, mem);
2960 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2962 emit_move_insn_1 (mem, sreg);
2963 return emit_move_insn_1 (x, cmem);
2969 realpart_x = gen_realpart (submode, x);
2970 realpart_y = gen_realpart (submode, y);
2971 imagpart_x = gen_imagpart (submode, x);
2972 imagpart_y = gen_imagpart (submode, y);
2974 /* Show the output dies here. This is necessary for SUBREGs
2975 of pseudos since we cannot track their lifetimes correctly;
2976 hard regs shouldn't appear here except as return values.
2977 We never want to emit such a clobber after reload. */
2979 && ! (reload_in_progress || reload_completed)
2980 && (GET_CODE (realpart_x) == SUBREG
2981 || GET_CODE (imagpart_x) == SUBREG))
2982 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2984 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2985 (realpart_x, realpart_y));
2986 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2987 (imagpart_x, imagpart_y));
2990 return get_last_insn ();
2993 /* This will handle any multi-word mode that lacks a move_insn pattern.
2994 However, you will get better code if you define such patterns,
2995 even if they must turn into multiple assembler instructions. */
2996 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3003 #ifdef PUSH_ROUNDING
3005 /* If X is a push on the stack, do the push now and replace
3006 X with a reference to the stack pointer. */
3007 if (push_operand (x, GET_MODE (x)))
3012 /* Do not use anti_adjust_stack, since we don't want to update
3013 stack_pointer_delta. */
3014 temp = expand_binop (Pmode,
3015 #ifdef STACK_GROWS_DOWNWARD
3023 (GET_MODE_SIZE (GET_MODE (x)))),
3024 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3026 if (temp != stack_pointer_rtx)
3027 emit_move_insn (stack_pointer_rtx, temp);
3029 code = GET_CODE (XEXP (x, 0));
3031 /* Just hope that small offsets off SP are OK. */
3032 if (code == POST_INC)
3033 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3034 GEN_INT (-((HOST_WIDE_INT)
3035 GET_MODE_SIZE (GET_MODE (x)))));
3036 else if (code == POST_DEC)
3037 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3038 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3040 temp = stack_pointer_rtx;
3042 x = change_address (x, VOIDmode, temp);
3046 /* If we are in reload, see if either operand is a MEM whose address
3047 is scheduled for replacement. */
3048 if (reload_in_progress && GET_CODE (x) == MEM
3049 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3050 x = replace_equiv_address_nv (x, inner);
3051 if (reload_in_progress && GET_CODE (y) == MEM
3052 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3053 y = replace_equiv_address_nv (y, inner);
3059 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3062 rtx xpart = operand_subword (x, i, 1, mode);
3063 rtx ypart = operand_subword (y, i, 1, mode);
3065 /* If we can't get a part of Y, put Y into memory if it is a
3066 constant. Otherwise, force it into a register. If we still
3067 can't get a part of Y, abort. */
3068 if (ypart == 0 && CONSTANT_P (y))
3070 y = force_const_mem (mode, y);
3071 ypart = operand_subword (y, i, 1, mode);
3073 else if (ypart == 0)
3074 ypart = operand_subword_force (y, i, mode);
3076 if (xpart == 0 || ypart == 0)
3079 need_clobber |= (GET_CODE (xpart) == SUBREG);
3081 last_insn = emit_move_insn (xpart, ypart);
3084 seq = gen_sequence ();
3087 /* Show the output dies here. This is necessary for SUBREGs
3088 of pseudos since we cannot track their lifetimes correctly;
3089 hard regs shouldn't appear here except as return values.
3090 We never want to emit such a clobber after reload. */
3092 && ! (reload_in_progress || reload_completed)
3093 && need_clobber != 0)
3094 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3104 /* Pushing data onto the stack. */
3106 /* Push a block of length SIZE (perhaps variable)
3107 and return an rtx to address the beginning of the block.
3108 Note that it is not possible for the value returned to be a QUEUED.
3109 The value may be virtual_outgoing_args_rtx.
3111 EXTRA is the number of bytes of padding to push in addition to SIZE.
3112 BELOW nonzero means this padding comes at low addresses;
3113 otherwise, the padding comes at high addresses. */
3116 push_block (size, extra, below)
3122 size = convert_modes (Pmode, ptr_mode, size, 1);
3123 if (CONSTANT_P (size))
3124 anti_adjust_stack (plus_constant (size, extra));
3125 else if (GET_CODE (size) == REG && extra == 0)
3126 anti_adjust_stack (size);
3129 temp = copy_to_mode_reg (Pmode, size);
3131 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3132 temp, 0, OPTAB_LIB_WIDEN);
3133 anti_adjust_stack (temp);
3136 #ifndef STACK_GROWS_DOWNWARD
3142 temp = virtual_outgoing_args_rtx;
3143 if (extra != 0 && below)
3144 temp = plus_constant (temp, extra);
3148 if (GET_CODE (size) == CONST_INT)
3149 temp = plus_constant (virtual_outgoing_args_rtx,
3150 -INTVAL (size) - (below ? 0 : extra));
3151 else if (extra != 0 && !below)
3152 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3153 negate_rtx (Pmode, plus_constant (size, extra)));
3155 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3156 negate_rtx (Pmode, size));
3159 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
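/* A hedged worked example, assuming STACK_GROWS_DOWNWARD: for
   size == (const_int 16), extra == 0 and below == 0, the code above
   anti-adjusts the stack by 16 bytes and returns an address for
   virtual_outgoing_args_rtx - 16, the low end of the block just
   allocated.  */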
3162 #ifdef PUSH_ROUNDING
3164 /* Emit single push insn. */
3167 emit_single_push_insn (mode, x, type)
3169 enum machine_mode mode;
3173 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3175 enum insn_code icode;
3176 insn_operand_predicate_fn pred;
3178 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3179 /* If there is a push pattern, use it. Otherwise try the old way of
3180 throwing a MEM representing the push operation to the move expander. */
3181 icode = push_optab->handlers[(int) mode].insn_code;
3182 if (icode != CODE_FOR_nothing)
3184 if (((pred = insn_data[(int) icode].operand[0].predicate)
3185 && !((*pred) (x, mode))))
3186 x = force_reg (mode, x);
3187 emit_insn (GEN_FCN (icode) (x));
3190 if (GET_MODE_SIZE (mode) == rounded_size)
3191 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3194 #ifdef STACK_GROWS_DOWNWARD
3195 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3196 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3198 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3199 GEN_INT (rounded_size));
3201 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3204 dest = gen_rtx_MEM (mode, dest_addr);
3208 set_mem_attributes (dest, type, 1);
3210 if (flag_optimize_sibling_calls)
3211 /* Function incoming arguments may overlap with sibling call
3212 outgoing arguments and we cannot allow reordering of reads
3213 from function arguments with stores to outgoing arguments
3214 of sibling calls. */
3215 set_mem_alias_set (dest, 0);
3217 emit_move_insn (dest, x);
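/* A hedged example, assuming STACK_PUSH_CODE == PRE_DEC, a 32-bit
   Pmode, and no pushM pattern: pushing a SImode value whose size
   equals its PUSH_ROUNDING amount emits, conceptually,

     (set (mem:SI (pre_dec:SI (reg:SI sp))) x)

   while a rounded size that differs from GET_MODE_SIZE goes through
   the PRE_MODIFY address built above instead.  */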
3221 /* Generate code to push X onto the stack, assuming it has mode MODE and
3223 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3225 SIZE is an rtx for the size of data to be copied (in bytes),
3226 needed only if X is BLKmode.
3228 ALIGN (in bits) is maximum alignment we can assume.
3230 If PARTIAL and REG are both nonzero, then copy that many of the first
3231 words of X into registers starting with REG, and push the rest of X.
3232 The amount of space pushed is decreased by PARTIAL words,
3233 rounded *down* to a multiple of PARM_BOUNDARY.
3234 REG must be a hard register in this case.
3235 If REG is zero but PARTIAL is not, take all other actions for an
3236 argument partially in registers, but do not actually load any registers.
3239 EXTRA is the amount in bytes of extra space to leave next to this arg.
3240 This is ignored if an argument block has already been allocated.
3242 On a machine that lacks real push insns, ARGS_ADDR is the address of
3243 the bottom of the argument block for this call. We use indexing off there
3244 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3245 argument block has not been preallocated.
3247 ARGS_SO_FAR is the size of args previously pushed for this call.
3249 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3250 for arguments passed in registers. If nonzero, it will be the number
3251 of bytes required. */
3254 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3255 args_addr, args_so_far, reg_parm_stack_space,
3258 enum machine_mode mode;
3267 int reg_parm_stack_space;
3271 enum direction stack_direction
3272 #ifdef STACK_GROWS_DOWNWARD
3278 /* Decide where to pad the argument: `downward' for below,
3279 `upward' for above, or `none' for don't pad it.
3280 Default is below for small data on big-endian machines; else above. */
3281 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3283 /* Invert direction if stack is post-decrement.
3285 if (STACK_PUSH_CODE == POST_DEC)
3286 if (where_pad != none)
3287 where_pad = (where_pad == downward ? upward : downward);
3289 xinner = x = protect_from_queue (x, 0);
3291 if (mode == BLKmode)
3293 /* Copy a block into the stack, entirely or partially. */
3296 int used = partial * UNITS_PER_WORD;
3297 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3305 /* USED is now the # of bytes we need not copy to the stack
3306 because registers will take care of them. */
3309 xinner = adjust_address (xinner, BLKmode, used);
3311 /* If the partial register-part of the arg counts in its stack size,
3312 skip the part of stack space corresponding to the registers.
3313 Otherwise, start copying to the beginning of the stack space,
3314 by setting SKIP to 0. */
3315 skip = (reg_parm_stack_space == 0) ? 0 : used;
3317 #ifdef PUSH_ROUNDING
3318 /* Do it with several push insns if that doesn't take lots of insns
3319 and if there is no difficulty with push insns that skip bytes
3320 on the stack for alignment purposes. */
3323 && GET_CODE (size) == CONST_INT
3325 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3326 /* Here we avoid the case of a structure whose weak alignment
3327 forces many pushes of a small amount of data,
3328 and such small pushes do rounding that causes trouble. */
3329 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3330 || align >= BIGGEST_ALIGNMENT
3331 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3332 == (align / BITS_PER_UNIT)))
3333 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3335 /* Push padding now if padding above and stack grows down,
3336 or if padding below and stack grows up.
3337 But if space already allocated, this has already been done. */
3338 if (extra && args_addr == 0
3339 && where_pad != none && where_pad != stack_direction)
3340 anti_adjust_stack (GEN_INT (extra));
3342 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3345 #endif /* PUSH_ROUNDING */
3349 /* Otherwise make space on the stack and copy the data
3350 to the address of that space. */
3352 /* Deduct words put into registers from the size we must copy. */
3355 if (GET_CODE (size) == CONST_INT)
3356 size = GEN_INT (INTVAL (size) - used);
3358 size = expand_binop (GET_MODE (size), sub_optab, size,
3359 GEN_INT (used), NULL_RTX, 0,
3363 /* Get the address of the stack space.
3364 In this case, we do not deal with EXTRA separately.
3365 A single stack adjust will do. */
3368 temp = push_block (size, extra, where_pad == downward);
3371 else if (GET_CODE (args_so_far) == CONST_INT)
3372 temp = memory_address (BLKmode,
3373 plus_constant (args_addr,
3374 skip + INTVAL (args_so_far)));
3376 temp = memory_address (BLKmode,
3377 plus_constant (gen_rtx_PLUS (Pmode,
3381 target = gen_rtx_MEM (BLKmode, temp);
3385 set_mem_attributes (target, type, 1);
3386 /* Function incoming arguments may overlap with sibling call
3387 outgoing arguments and we cannot allow reordering of reads
3388 from function arguments with stores to outgoing arguments
3389 of sibling calls. */
3390 set_mem_alias_set (target, 0);
3393 set_mem_align (target, align);
3395 /* TEMP is the address of the block. Copy the data there. */
3396 if (GET_CODE (size) == CONST_INT
3397 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3399 move_by_pieces (target, xinner, INTVAL (size), align);
3404 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3405 enum machine_mode mode;
3407 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3409 mode = GET_MODE_WIDER_MODE (mode))
3411 enum insn_code code = movstr_optab[(int) mode];
3412 insn_operand_predicate_fn pred;
3414 if (code != CODE_FOR_nothing
3415 && ((GET_CODE (size) == CONST_INT
3416 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3417 <= (GET_MODE_MASK (mode) >> 1)))
3418 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3419 && (!(pred = insn_data[(int) code].operand[0].predicate)
3420 || ((*pred) (target, BLKmode)))
3421 && (!(pred = insn_data[(int) code].operand[1].predicate)
3422 || ((*pred) (xinner, BLKmode)))
3423 && (!(pred = insn_data[(int) code].operand[3].predicate)
3424 || ((*pred) (opalign, VOIDmode))))
3426 rtx op2 = convert_to_mode (mode, size, 1);
3427 rtx last = get_last_insn ();
3430 pred = insn_data[(int) code].operand[2].predicate;
3431 if (pred != 0 && ! (*pred) (op2, mode))
3432 op2 = copy_to_mode_reg (mode, op2);
3434 pat = GEN_FCN ((int) code) (target, xinner,
3442 delete_insns_since (last);
3447 if (!ACCUMULATE_OUTGOING_ARGS)
3449 /* If the source is referenced relative to the stack pointer,
3450 copy it to another register to stabilize it. We do not need
3451 to do this if we know that we won't be changing sp. */
3453 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3454 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3455 temp = copy_to_reg (temp);
3458 /* Make inhibit_defer_pop nonzero around the library call
3459 to force it to pop the bcopy-arguments right away. */
3461 #ifdef TARGET_MEM_FUNCTIONS
3462 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3463 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3464 convert_to_mode (TYPE_MODE (sizetype),
3465 size, TREE_UNSIGNED (sizetype)),
3466 TYPE_MODE (sizetype));
3468 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3469 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3470 convert_to_mode (TYPE_MODE (integer_type_node),
3472 TREE_UNSIGNED (integer_type_node)),
3473 TYPE_MODE (integer_type_node));
3478 else if (partial > 0)
3480 /* Scalar partly in registers. */
3482 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3485 /* # words of start of argument
3486 that we must make space for but need not store. */
3487 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3488 int args_offset = INTVAL (args_so_far);
3491 /* Push padding now if padding above and stack grows down,
3492 or if padding below and stack grows up.
3493 But if space already allocated, this has already been done. */
3494 if (extra && args_addr == 0
3495 && where_pad != none && where_pad != stack_direction)
3496 anti_adjust_stack (GEN_INT (extra));
3498 /* If we make space by pushing it, we might as well push
3499 the real data. Otherwise, we can leave OFFSET nonzero
3500 and leave the space uninitialized. */
3504 /* Now NOT_STACK gets the number of words that we don't need to
3505 allocate on the stack. */
3506 not_stack = partial - offset;
3508 /* If the partial register-part of the arg counts in its stack size,
3509 skip the part of stack space corresponding to the registers.
3510 Otherwise, start copying to the beginning of the stack space,
3511 by setting SKIP to 0. */
3512 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3514 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3515 x = validize_mem (force_const_mem (mode, x));
3517 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3518 SUBREGs of such registers are not allowed. */
3519 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3520 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3521 x = copy_to_reg (x);
3523 /* Loop over all the words allocated on the stack for this arg. */
3524 /* We can do it by words, because any scalar bigger than a word
3525 has a size that is a multiple of a word. */
3526 #ifndef PUSH_ARGS_REVERSED
3527 for (i = not_stack; i < size; i++)
3529 for (i = size - 1; i >= not_stack; i--)
3531 if (i >= not_stack + offset)
3532 emit_push_insn (operand_subword_force (x, i, mode),
3533 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3535 GEN_INT (args_offset + ((i - not_stack + skip)
3537 reg_parm_stack_space, alignment_pad);
3542 rtx target = NULL_RTX;
3545 /* Push padding now if padding above and stack grows down,
3546 or if padding below and stack grows up.
3547 But if space already allocated, this has already been done. */
3548 if (extra && args_addr == 0
3549 && where_pad != none && where_pad != stack_direction)
3550 anti_adjust_stack (GEN_INT (extra));
3552 #ifdef PUSH_ROUNDING
3553 if (args_addr == 0 && PUSH_ARGS)
3554 emit_single_push_insn (mode, x, type);
3558 if (GET_CODE (args_so_far) == CONST_INT)
3560 = memory_address (mode,
3561 plus_constant (args_addr,
3562 INTVAL (args_so_far)));
3564 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3567 dest = gen_rtx_MEM (mode, addr);
3570 set_mem_attributes (dest, type, 1);
3571 /* Function incoming arguments may overlap with sibling call
3572 outgoing arguments and we cannot allow reordering of reads
3573 from function arguments with stores to outgoing arguments
3574 of sibling calls. */
3575 set_mem_alias_set (dest, 0);
3578 emit_move_insn (dest, x);
3584 /* If part should go in registers, copy that part
3585 into the appropriate registers. Do this now, at the end,
3586 since mem-to-mem copies above may do function calls. */
3587 if (partial > 0 && reg != 0)
3589 /* Handle calls that pass values in multiple non-contiguous locations.
3590 The Irix 6 ABI has examples of this. */
3591 if (GET_CODE (reg) == PARALLEL)
3592 emit_group_load (reg, x, -1); /* ??? size? */
3594 move_block_to_reg (REGNO (reg), x, partial, mode);
3597 if (extra && args_addr == 0 && where_pad == stack_direction)
3598 anti_adjust_stack (GEN_INT (extra));
3600 if (alignment_pad && args_addr == 0)
3601 anti_adjust_stack (alignment_pad);
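/* A hedged usage sketch of the interface above (the argument values
   are illustrative): pushing a BLKmode argument from calls.c-style
   code looks roughly like

     emit_push_insn (arg, BLKmode, type, size_rtx, align, partial,
                     reg, extra, args_addr, args_so_far,
                     reg_parm_stack_space, alignment_pad);

   where a nonzero PARTIAL routes the first words into REG at the end,
   as handled just above.  */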
3604 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3612 /* Only registers can be subtargets. */
3613 || GET_CODE (x) != REG
3614 /* If the register is readonly, it can't be set more than once. */
3615 || RTX_UNCHANGING_P (x)
3616 /* Don't use hard regs to avoid extending their life. */
3617 || REGNO (x) < FIRST_PSEUDO_REGISTER
3618 /* Avoid subtargets inside loops,
3619 since they hide some invariant expressions. */
3620 || preserve_subexpressions_p ())
3624 /* Expand an assignment that stores the value of FROM into TO.
3625 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3626 (This may contain a QUEUED rtx;
3627 if the value is constant, this rtx is a constant.)
3628 Otherwise, the returned value is NULL_RTX.
3630 SUGGEST_REG is no longer actually used.
3631 It used to mean, copy the value through a register
3632 and return that register, if that is possible.
3633 We now use WANT_VALUE to decide whether to do this. */
3636 expand_assignment (to, from, want_value, suggest_reg)
3639 int suggest_reg ATTRIBUTE_UNUSED;
3644 /* Don't crash if the lhs of the assignment was erroneous. */
3646 if (TREE_CODE (to) == ERROR_MARK)
3648 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3649 return want_value ? result : NULL_RTX;
3652 /* Assignment of a structure component needs special treatment
3653 if the structure component's rtx is not simply a MEM.
3654 Assignment of an array element at a constant index, and assignment of
3655 an array element in an unaligned packed structure field, has the same problem. */
3658 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3659 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3661 enum machine_mode mode1;
3662 HOST_WIDE_INT bitsize, bitpos;
3670 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3671 &unsignedp, &volatilep);
3673 /* If we are going to use store_bit_field and extract_bit_field,
3674 make sure to_rtx will be safe for multiple use. */
3676 if (mode1 == VOIDmode && want_value)
3677 tem = stabilize_reference (tem);
3679 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3683 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3685 if (GET_CODE (to_rtx) != MEM)
3688 #ifdef POINTERS_EXTEND_UNSIGNED
3689 if (GET_MODE (offset_rtx) != Pmode)
3690 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3692 if (GET_MODE (offset_rtx) != ptr_mode)
3693 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3696 /* A constant address in TO_RTX can have VOIDmode; we must not try
3697 to call force_reg for that case. Avoid that case. */
3698 if (GET_CODE (to_rtx) == MEM
3699 && GET_MODE (to_rtx) == BLKmode
3700 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3702 && (bitpos % bitsize) == 0
3703 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3704 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3706 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3710 to_rtx = offset_address (to_rtx, offset_rtx,
3711 highest_pow2_factor_for_type (TREE_TYPE (to),
3715 if (GET_CODE (to_rtx) == MEM)
3717 tree old_expr = MEM_EXPR (to_rtx);
3719 /* If the field is at offset zero, we could have been given the
3720 DECL_RTX of the parent struct. Don't munge it. */
3721 to_rtx = shallow_copy_rtx (to_rtx);
3723 set_mem_attributes (to_rtx, to, 0);
3725 /* If we changed MEM_EXPR, that means we're now referencing
3726 the COMPONENT_REF, which means that MEM_OFFSET must be
3727 relative to that field. But we've not yet reflected BITPOS
3728 in TO_RTX. This will be done in store_field. Adjust for
3729 that by biasing MEM_OFFSET by -bitpos. */
3730 if (MEM_EXPR (to_rtx) != old_expr && MEM_OFFSET (to_rtx)
3731 && (bitpos / BITS_PER_UNIT) != 0)
3732 set_mem_offset (to_rtx, GEN_INT (INTVAL (MEM_OFFSET (to_rtx))
3733 - (bitpos / BITS_PER_UNIT)));
3736 /* Deal with volatile and readonly fields. The former is only done
3737 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3738 if (volatilep && GET_CODE (to_rtx) == MEM)
3740 if (to_rtx == orig_to_rtx)
3741 to_rtx = copy_rtx (to_rtx);
3742 MEM_VOLATILE_P (to_rtx) = 1;
3745 if (TREE_CODE (to) == COMPONENT_REF
3746 && TREE_READONLY (TREE_OPERAND (to, 1)))
3748 if (to_rtx == orig_to_rtx)
3749 to_rtx = copy_rtx (to_rtx);
3750 RTX_UNCHANGING_P (to_rtx) = 1;
3753 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3755 if (to_rtx == orig_to_rtx)
3756 to_rtx = copy_rtx (to_rtx);
3757 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3760 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3762 /* Spurious cast for HPUX compiler. */
3763 ? ((enum machine_mode)
3764 TYPE_MODE (TREE_TYPE (to)))
3766 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3768 preserve_temp_slots (result);
3772 /* If the value is meaningful, convert RESULT to the proper mode.
3773 Otherwise, return nothing. */
3774 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3775 TYPE_MODE (TREE_TYPE (from)),
3777 TREE_UNSIGNED (TREE_TYPE (to)))
3781 /* If the rhs is a function call and its value is not an aggregate,
3782 call the function before we start to compute the lhs.
3783 This is needed for correct code for cases such as
3784 val = setjmp (buf) on machines where reference to val
3785 requires loading up part of an address in a separate insn.
3787 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3788 since it might be a promoted variable where the zero- or sign- extension
3789 needs to be done. Handling this in the normal way is safe because no
3790 computation is done before the call. */
3791 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3792 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3793 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3794 && GET_CODE (DECL_RTL (to)) == REG))
3799 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3801 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3803 /* Handle calls that return values in multiple non-contiguous locations.
3804 The Irix 6 ABI has examples of this. */
3805 if (GET_CODE (to_rtx) == PARALLEL)
3806 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3807 else if (GET_MODE (to_rtx) == BLKmode)
3808 emit_block_move (to_rtx, value, expr_size (from));
3811 #ifdef POINTERS_EXTEND_UNSIGNED
3812 if (POINTER_TYPE_P (TREE_TYPE (to))
3813 && GET_MODE (to_rtx) != GET_MODE (value))
3814 value = convert_memory_address (GET_MODE (to_rtx), value);
3816 emit_move_insn (to_rtx, value);
3818 preserve_temp_slots (to_rtx);
3821 return want_value ? to_rtx : NULL_RTX;
3824 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3825 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3828 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3830 /* Don't move directly into a return register. */
3831 if (TREE_CODE (to) == RESULT_DECL
3832 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3837 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3839 if (GET_CODE (to_rtx) == PARALLEL)
3840 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3842 emit_move_insn (to_rtx, temp);
3844 preserve_temp_slots (to_rtx);
3847 return want_value ? to_rtx : NULL_RTX;
3850 /* In case we are returning the contents of an object which overlaps
3851 the place the value is being stored, use a safe function when copying
3852 a value through a pointer into a structure value return block. */
3853 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3854 && current_function_returns_struct
3855 && !current_function_returns_pcc_struct)
3860 size = expr_size (from);
3861 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3863 #ifdef TARGET_MEM_FUNCTIONS
3864 emit_library_call (memmove_libfunc, LCT_NORMAL,
3865 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3866 XEXP (from_rtx, 0), Pmode,
3867 convert_to_mode (TYPE_MODE (sizetype),
3868 size, TREE_UNSIGNED (sizetype)),
3869 TYPE_MODE (sizetype));
3871 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3872 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3873 XEXP (to_rtx, 0), Pmode,
3874 convert_to_mode (TYPE_MODE (integer_type_node),
3875 size, TREE_UNSIGNED (integer_type_node)),
3876 TYPE_MODE (integer_type_node));
3879 preserve_temp_slots (to_rtx);
3882 return want_value ? to_rtx : NULL_RTX;
3885 /* Compute FROM and store the value in the rtx we got. */
3888 result = store_expr (from, to_rtx, want_value);
3889 preserve_temp_slots (result);
3892 return want_value ? result : NULL_RTX;
3895 /* Generate code for computing expression EXP,
3896 and storing the value into TARGET.
3897 TARGET may contain a QUEUED rtx.
3899 If WANT_VALUE is nonzero, return a copy of the value
3900 not in TARGET, so that we can be sure to use the proper
3901 value in a containing expression even if TARGET has something
3902 else stored in it. If possible, we copy the value through a pseudo
3903 and return that pseudo. Or, if the value is constant, we try to
3904 return the constant. In some cases, we return a pseudo
3905 copied *from* TARGET.
3907 If the mode is BLKmode then we may return TARGET itself.
3908 It turns out that in BLKmode it doesn't cause a problem,
3909 because C has no operators that could combine two different
3910 assignments into the same BLKmode object with different values
3911 with no sequence point. Will other languages need this to be more thorough?
3914 If WANT_VALUE is 0, we return NULL, to make sure
3915 to catch quickly any cases where the caller uses the value
3916 and fails to set WANT_VALUE. */
3919 store_expr (exp, target, want_value)
3925 int dont_return_target = 0;
3926 int dont_store_target = 0;
3928 if (TREE_CODE (exp) == COMPOUND_EXPR)
3930 /* Perform first part of compound expression, then assign from second
3932 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3934 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3936 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3938 /* For conditional expression, get safe form of the target. Then
3939 test the condition, doing the appropriate assignment on either
3940 side. This avoids the creation of unnecessary temporaries.
3941 For non-BLKmode, it is more efficient not to do this. */
3943 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3946 target = protect_from_queue (target, 1);
3948 do_pending_stack_adjust ();
3950 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3951 start_cleanup_deferral ();
3952 store_expr (TREE_OPERAND (exp, 1), target, 0);
3953 end_cleanup_deferral ();
3955 emit_jump_insn (gen_jump (lab2));
3958 start_cleanup_deferral ();
3959 store_expr (TREE_OPERAND (exp, 2), target, 0);
3960 end_cleanup_deferral ();
3965 return want_value ? target : NULL_RTX;
3967 else if (queued_subexp_p (target))
3968 /* If target contains a postincrement, let's not risk
3969 using it as the place to generate the rhs. */
3971 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3973 /* Expand EXP into a new pseudo. */
3974 temp = gen_reg_rtx (GET_MODE (target));
3975 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3978 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3980 /* If target is volatile, ANSI requires accessing the value
3981 *from* the target, if it is accessed. So make that happen.
3982 In no case return the target itself. */
3983 if (! MEM_VOLATILE_P (target) && want_value)
3984 dont_return_target = 1;
3986 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3987 && GET_MODE (target) != BLKmode)
3988 /* If target is in memory and caller wants value in a register instead,
3989 arrange that. Pass TARGET as target for expand_expr so that,
3990 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3991 We know expand_expr will not use the target in that case.
3992 Don't do this if TARGET is volatile because we are supposed
3993 to write it and then read it. */
3995 temp = expand_expr (exp, target, GET_MODE (target), 0);
3996 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3998 /* If TEMP is already in the desired TARGET, only copy it from
3999 memory and don't store it there again. */
4001 || (rtx_equal_p (temp, target)
4002 && ! side_effects_p (temp) && ! side_effects_p (target)))
4003 dont_store_target = 1;
4004 temp = copy_to_reg (temp);
4006 dont_return_target = 1;
4008 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4009 /* If this is a scalar in a register that is stored in a wider mode
4010 than the declared mode, compute the result into its declared mode
4011 and then convert to the wider mode. Our value is the computed
4014 rtx inner_target = 0;
4016 /* If we don't want a value, we can do the conversion inside EXP,
4017 which will often result in some optimizations. Do the conversion
4018 in two steps: first change the signedness, if needed, then
4019 the extend. But don't do this if the type of EXP is a subtype
4020 of something else since then the conversion might involve
4021 more than just converting modes. */
4022 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4023 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4025 if (TREE_UNSIGNED (TREE_TYPE (exp))
4026 != SUBREG_PROMOTED_UNSIGNED_P (target))
4029 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4033 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4034 SUBREG_PROMOTED_UNSIGNED_P (target)),
4037 inner_target = SUBREG_REG (target);
4040 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4042 /* If TEMP is a volatile MEM and we want a result value, make
4043 the access now so it gets done only once. Likewise if
4044 it contains TARGET. */
4045 if (GET_CODE (temp) == MEM && want_value
4046 && (MEM_VOLATILE_P (temp)
4047 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4048 temp = copy_to_reg (temp);
4050 /* If TEMP is a VOIDmode constant, use convert_modes to make
4051 sure that we properly convert it. */
4052 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4054 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4055 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4056 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4057 GET_MODE (target), temp,
4058 SUBREG_PROMOTED_UNSIGNED_P (target));
4061 convert_move (SUBREG_REG (target), temp,
4062 SUBREG_PROMOTED_UNSIGNED_P (target));
4064 /* If we promoted a constant, change the mode back down to match
4065 target. Otherwise, the caller might get confused by a result whose
4066 mode is larger than expected. */
4068 if (want_value && GET_MODE (temp) != GET_MODE (target))
4070 if (GET_MODE (temp) != VOIDmode)
4072 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4073 SUBREG_PROMOTED_VAR_P (temp) = 1;
4074 SUBREG_PROMOTED_UNSIGNED_P (temp)
4075 = SUBREG_PROMOTED_UNSIGNED_P (target);
4078 temp = convert_modes (GET_MODE (target),
4079 GET_MODE (SUBREG_REG (target)),
4080 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4083 return want_value ? temp : NULL_RTX;
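/* A hedged worked example of the promoted-SUBREG case above: a `short'
   variable promoted to SImode is represented as
   (subreg:HI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set; the store
   computes the value in the narrow mode and convert_move then sign-
   or zero-extends it into reg N according to
   SUBREG_PROMOTED_UNSIGNED_P.  */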
4087 temp = expand_expr (exp, target, GET_MODE (target), 0);
4088 /* Return TARGET if it's a specified hardware register.
4089 If TARGET is a volatile mem ref, either return TARGET
4090 or return a reg copied *from* TARGET; ANSI requires this.
4092 Otherwise, if TEMP is not TARGET, return TEMP
4093 if it is constant (for efficiency),
4094 or if we really want the correct value. */
4095 if (!(target && GET_CODE (target) == REG
4096 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4097 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4098 && ! rtx_equal_p (temp, target)
4099 && (CONSTANT_P (temp) || want_value))
4100 dont_return_target = 1;
4103 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4104 the same as that of TARGET, adjust the constant. This is needed, for
4105 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4107 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4108 && TREE_CODE (exp) != ERROR_MARK
4109 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4110 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4111 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4113 /* If value was not generated in the target, store it there.
4114 Convert the value to TARGET's type first if necessary.
4115 If TEMP and TARGET compare equal according to rtx_equal_p, but
4116 one or both of them are volatile memory refs, we have to distinguish
4118 - expand_expr has used TARGET. In this case, we must not generate
4119 another copy. This can be detected by TARGET being equal according
4121 - expand_expr has not used TARGET - that means that the source just
4122 happens to have the same RTX form. Since temp will have been created
4123 by expand_expr, it will compare unequal according to == .
4124 We must generate a copy in this case, to reach the correct number
4125 of volatile memory references. */
4127 if ((! rtx_equal_p (temp, target)
4128 || (temp != target && (side_effects_p (temp)
4129 || side_effects_p (target))))
4130 && TREE_CODE (exp) != ERROR_MARK
4131 && ! dont_store_target
4132 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4133 but TARGET is not a valid memory reference, TEMP will differ
4134 from TARGET although it is really the same location. */
4135 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4136 || target != DECL_RTL_IF_SET (exp)))
4138 target = protect_from_queue (target, 1);
4139 if (GET_MODE (temp) != GET_MODE (target)
4140 && GET_MODE (temp) != VOIDmode)
4142 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4143 if (dont_return_target)
4145 /* In this case, we will return TEMP,
4146 so make sure it has the proper mode.
4147 But don't forget to store the value into TARGET. */
4148 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4149 emit_move_insn (target, temp);
4152 convert_move (target, temp, unsignedp);
4155 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4157 /* Handle copying a string constant into an array. The string
4158 constant may be shorter than the array. So copy just the string's
4159 actual length, and clear the rest. First get the size of the data
4160 type of the string, which is actually the size of the target. */
4161 rtx size = expr_size (exp);
4163 if (GET_CODE (size) == CONST_INT
4164 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4165 emit_block_move (target, temp, size);
4168 /* Compute the size of the data to copy from the string. */
4170 = size_binop (MIN_EXPR,
4171 make_tree (sizetype, size),
4172 size_int (TREE_STRING_LENGTH (exp)));
4173 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4177 /* Copy that much. */
4178 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4179 emit_block_move (target, temp, copy_size_rtx);
4181 /* Figure out how much is left in TARGET that we have to clear.
4182 Do all calculations in ptr_mode. */
4183 if (GET_CODE (copy_size_rtx) == CONST_INT)
4185 size = plus_constant (size, -INTVAL (copy_size_rtx));
4186 target = adjust_address (target, BLKmode,
4187 INTVAL (copy_size_rtx));
4191 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4192 copy_size_rtx, NULL_RTX, 0,
4195 #ifdef POINTERS_EXTEND_UNSIGNED
4196 if (GET_MODE (copy_size_rtx) != Pmode)
4197 copy_size_rtx = convert_memory_address (Pmode,
4201 target = offset_address (target, copy_size_rtx,
4202 highest_pow2_factor (copy_size));
4203 label = gen_label_rtx ();
4204 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4205 GET_MODE (size), 0, label);
4208 if (size != const0_rtx)
4209 clear_storage (target, size);
4215 /* Handle calls that return values in multiple non-contiguous locations.
4216 The Irix 6 ABI has examples of this. */
4217 else if (GET_CODE (target) == PARALLEL)
4218 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4219 else if (GET_MODE (temp) == BLKmode)
4220 emit_block_move (target, temp, expr_size (exp));
4222 emit_move_insn (target, temp);
4225 /* If we don't want a value, return NULL_RTX. */
4229 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4230 ??? The latter test doesn't seem to make sense. */
4231 else if (dont_return_target && GET_CODE (temp) != MEM)
4234 /* Return TARGET itself if it is a hard register. */
4235 else if (want_value && GET_MODE (target) != BLKmode
4236 && ! (GET_CODE (target) == REG
4237 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4238 return copy_to_reg (target);
4244 /* Return 1 if EXP just contains zeros. */
4252 switch (TREE_CODE (exp))
4256 case NON_LVALUE_EXPR:
4257 case VIEW_CONVERT_EXPR:
4258 return is_zeros_p (TREE_OPERAND (exp, 0));
4261 return integer_zerop (exp);
4265 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4268 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4271 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4272 elt = TREE_CHAIN (elt))
4273 if (!is_zeros_p (TREE_VALUE (elt)))
4279 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4280 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4281 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4282 if (! is_zeros_p (TREE_VALUE (elt)))
4292 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4295 mostly_zeros_p (exp)
4298 if (TREE_CODE (exp) == CONSTRUCTOR)
4300 int elts = 0, zeros = 0;
4301 tree elt = CONSTRUCTOR_ELTS (exp);
4302 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4304 /* If there are no ranges of true bits, it is all zero. */
4305 return elt == NULL_TREE;
4307 for (; elt; elt = TREE_CHAIN (elt))
4309 /* We do not handle the case where the index is a RANGE_EXPR,
4310 so the statistic will be somewhat inaccurate.
4311 We do make a more accurate count in store_constructor itself,
4312 and since this function is only used for nested array elements,
4313 this should be close enough. */
4314 if (mostly_zeros_p (TREE_VALUE (elt)))
4319 return 4 * zeros >= 3 * elts;
4322 return is_zeros_p (exp);
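/* As a worked example of the 3/4 test above: a constructor written
   roughly as { 0, 0, 0, 5 } gives elts == 4 and zeros == 3, and
   4 * 3 >= 3 * 4 holds, so mostly_zeros_p returns 1; for
   { 0, 1, 2, 3 }, zeros == 1 and it returns 0.  */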
4325 /* Helper function for store_constructor.
4326 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4327 TYPE is the type of the CONSTRUCTOR, not the element type.
4328 CLEARED is as for store_constructor.
4329 ALIAS_SET is the alias set to use for any stores.
4331 This provides a recursive shortcut back to store_constructor when it isn't
4332 necessary to go through store_field. This is so that we can pass through
4333 the cleared field to let store_constructor know that we may not have to
4334 clear a substructure if the outer structure has already been cleared. */
4337 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4340 unsigned HOST_WIDE_INT bitsize;
4341 HOST_WIDE_INT bitpos;
4342 enum machine_mode mode;
4347 if (TREE_CODE (exp) == CONSTRUCTOR
4348 && bitpos % BITS_PER_UNIT == 0
4349 /* If we have a non-zero bitpos for a register target, then we just
4350 let store_field do the bitfield handling. This is unlikely to
4351 generate unnecessary clear instructions anyway. */
4352 && (bitpos == 0 || GET_CODE (target) == MEM))
4354 if (GET_CODE (target) == MEM)
4356 = adjust_address (target,
4357 GET_MODE (target) == BLKmode
4359 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4360 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4363 /* Update the alias set, if required. */
4364 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4365 && MEM_ALIAS_SET (target) != 0)
4367 target = copy_rtx (target);
4368 set_mem_alias_set (target, alias_set);
4371 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4374 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4378 /* Store the value of constructor EXP into the rtx TARGET.
4379 TARGET is either a REG or a MEM; we know it cannot conflict, since
4380 safe_from_p has been called.
4381 CLEARED is true if TARGET is known to have been zeroed.
4382 SIZE is the number of bytes of TARGET we are allowed to modify: this
4383 may not be the same as the size of EXP if we are assigning to a field
4384 which has been packed to exclude padding bits. */
4387 store_constructor (exp, target, cleared, size)
4393 tree type = TREE_TYPE (exp);
4394 #ifdef WORD_REGISTER_OPERATIONS
4395 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4398 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4399 || TREE_CODE (type) == QUAL_UNION_TYPE)
4403 /* We either clear the aggregate or indicate the value is dead. */
4404 if ((TREE_CODE (type) == UNION_TYPE
4405 || TREE_CODE (type) == QUAL_UNION_TYPE)
4407 && ! CONSTRUCTOR_ELTS (exp))
4408 /* If the constructor is empty, clear the union. */
4410 clear_storage (target, expr_size (exp));
4414 /* If we are building a static constructor into a register,
4415 set the initial value as zero so we can fold the value into
4416 a constant. But if more than one register is involved,
4417 this probably loses. */
4418 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4419 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4421 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4425 /* If the constructor has fewer fields than the structure
4426 or if we are initializing the structure to mostly zeros,
4427 clear the whole structure first. Don't do this if TARGET is a
4428 register whose mode size isn't equal to SIZE since clear_storage
4429 can't handle this case. */
4430 else if (! cleared && size > 0
4431 && ((list_length (CONSTRUCTOR_ELTS (exp))
4432 != fields_length (type))
4433 || mostly_zeros_p (exp))
4434 && (GET_CODE (target) != REG
4435 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4436 == size)))
4438 clear_storage (target, GEN_INT (size));
4443 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4445 /* Store each element of the constructor into
4446 the corresponding field of TARGET. */
4448 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4450 tree field = TREE_PURPOSE (elt);
4451 tree value = TREE_VALUE (elt);
4452 enum machine_mode mode;
4453 HOST_WIDE_INT bitsize;
4454 HOST_WIDE_INT bitpos = 0;
4457 rtx to_rtx = target;
4459 /* Just ignore missing fields.
4460 We cleared the whole structure, above,
4461 if any fields are missing. */
4465 if (cleared && is_zeros_p (value))
4468 if (host_integerp (DECL_SIZE (field), 1))
4469 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4473 unsignedp = TREE_UNSIGNED (field);
4474 mode = DECL_MODE (field);
4475 if (DECL_BIT_FIELD (field))
4478 offset = DECL_FIELD_OFFSET (field);
4479 if (host_integerp (offset, 0)
4480 && host_integerp (bit_position (field), 0))
4482 bitpos = int_bit_position (field);
4486 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4492 if (contains_placeholder_p (offset))
4493 offset = build (WITH_RECORD_EXPR, sizetype,
4494 offset, make_tree (TREE_TYPE (exp), target));
4496 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4497 if (GET_CODE (to_rtx) != MEM)
4500 #ifdef POINTERS_EXTEND_UNSIGNED
4501 if (GET_MODE (offset_rtx) != Pmode)
4502 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4504 if (GET_MODE (offset_rtx) != ptr_mode)
4505 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4508 to_rtx = offset_address (to_rtx, offset_rtx,
4509 highest_pow2_factor (offset));
4512 if (TREE_READONLY (field))
4514 if (GET_CODE (to_rtx) == MEM)
4515 to_rtx = copy_rtx (to_rtx);
4517 RTX_UNCHANGING_P (to_rtx) = 1;
4520 #ifdef WORD_REGISTER_OPERATIONS
4521 /* If this initializes a field that is smaller than a word, at the
4522 start of a word, try to widen it to a full word.
4523 This special case allows us to output C++ member function
4524 initializations in a form that the optimizers can understand. */
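	  /* A rough worked instance (illustrative only): with 32-bit
	     words, initializing a 16-bit integer field at bitpos 0
	     with the constant 5 is widened to a full-word store of 5,
	     or of 5 << 16 when BYTES_BIG_ENDIAN, so the optimizers
	     see an ordinary word-sized move.  */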
4525 if (GET_CODE (target) == REG
4526 && bitsize < BITS_PER_WORD
4527 && bitpos % BITS_PER_WORD == 0
4528 && GET_MODE_CLASS (mode) == MODE_INT
4529 && TREE_CODE (value) == INTEGER_CST
4531 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4533 tree type = TREE_TYPE (value);
4535 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4537 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4538 value = convert (type, value);
4541 if (BYTES_BIG_ENDIAN)
4543 = fold (build (LSHIFT_EXPR, type, value,
4544 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4545 bitsize = BITS_PER_WORD;
4550 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4551 && DECL_NONADDRESSABLE_P (field))
4553 to_rtx = copy_rtx (to_rtx);
4554 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4557 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4558 value, type, cleared,
4559 get_alias_set (TREE_TYPE (field)));
4562 else if (TREE_CODE (type) == ARRAY_TYPE
4563 || TREE_CODE (type) == VECTOR_TYPE)
4568 tree domain = TYPE_DOMAIN (type);
4569 tree elttype = TREE_TYPE (type);
4571 HOST_WIDE_INT minelt = 0;
4572 HOST_WIDE_INT maxelt = 0;
4574 /* Vectors are like arrays, but the domain is stored via an array type indirectly. */
4576 if (TREE_CODE (type) == VECTOR_TYPE)
4578 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4579 the same field as TYPE_DOMAIN, we are not guaranteed that it is set. */
4581 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4582 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4585 const_bounds_p = (TYPE_MIN_VALUE (domain)
4586 && TYPE_MAX_VALUE (domain)
4587 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4588 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4590 /* If we have constant bounds for the range of the type, get them. */
4593 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4594 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4597 /* If the constructor has fewer elements than the array,
4598 clear the whole array first. Similarly if this is
4599 a static constructor of a non-BLKmode object. */
4600 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4604 HOST_WIDE_INT count = 0, zero_count = 0;
4605 need_to_clear = ! const_bounds_p;
4607 /* This loop is a more accurate version of the loop in
4608 mostly_zeros_p (it handles RANGE_EXPR in an index).
4609 It is also needed to check for missing elements. */
4610 for (elt = CONSTRUCTOR_ELTS (exp);
4611 elt != NULL_TREE && ! need_to_clear;
4612 elt = TREE_CHAIN (elt))
4614 tree index = TREE_PURPOSE (elt);
4615 HOST_WIDE_INT this_node_count;
4617 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4619 tree lo_index = TREE_OPERAND (index, 0);
4620 tree hi_index = TREE_OPERAND (index, 1);
4622 if (! host_integerp (lo_index, 1)
4623 || ! host_integerp (hi_index, 1))
4629 this_node_count = (tree_low_cst (hi_index, 1)
4630 - tree_low_cst (lo_index, 1) + 1);
4633 this_node_count = 1;
4635 count += this_node_count;
4636 if (mostly_zeros_p (TREE_VALUE (elt)))
4637 zero_count += this_node_count;
4640 /* Clear the entire array first if there are any missing elements,
4641 or if the incidence of zero elements is >= 75%. */
4643 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4647 if (need_to_clear && size > 0)
4652 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4654 clear_storage (target, GEN_INT (size));
4658 else if (REG_P (target))
4659 /* Inform later passes that the old value is dead. */
4660 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4662 /* Store each element of the constructor into
4663 the corresponding element of TARGET, determined
4664 by counting the elements. */
4665 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4667 elt = TREE_CHAIN (elt), i++)
4669 enum machine_mode mode;
4670 HOST_WIDE_INT bitsize;
4671 HOST_WIDE_INT bitpos;
4673 tree value = TREE_VALUE (elt);
4674 tree index = TREE_PURPOSE (elt);
4675 rtx xtarget = target;
4677 if (cleared && is_zeros_p (value))
4680 unsignedp = TREE_UNSIGNED (elttype);
4681 mode = TYPE_MODE (elttype);
4682 if (mode == BLKmode)
4683 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4684 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4687 bitsize = GET_MODE_BITSIZE (mode);
4689 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4691 tree lo_index = TREE_OPERAND (index, 0);
4692 tree hi_index = TREE_OPERAND (index, 1);
4693 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4694 struct nesting *loop;
4695 HOST_WIDE_INT lo, hi, count;
4698 /* If the range is constant and "small", unroll the loop. */
4700 && host_integerp (lo_index, 0)
4701 && host_integerp (hi_index, 0)
4702 && (lo = tree_low_cst (lo_index, 0),
4703 hi = tree_low_cst (hi_index, 0),
4704 count = hi - lo + 1,
4705 (GET_CODE (target) != MEM
4707 || (host_integerp (TYPE_SIZE (elttype), 1)
4708 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4711 lo -= minelt; hi -= minelt;
4712 for (; lo <= hi; lo++)
4714 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4716 if (GET_CODE (target) == MEM
4717 && !MEM_KEEP_ALIAS_SET_P (target)
4718 && TREE_CODE (type) == ARRAY_TYPE
4719 && TYPE_NONALIASED_COMPONENT (type))
4721 target = copy_rtx (target);
4722 MEM_KEEP_ALIAS_SET_P (target) = 1;
4725 store_constructor_field
4726 (target, bitsize, bitpos, mode, value, type, cleared,
4727 get_alias_set (elttype));
4732 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4733 loop_top = gen_label_rtx ();
4734 loop_end = gen_label_rtx ();
4736 unsignedp = TREE_UNSIGNED (domain);
4738 index = build_decl (VAR_DECL, NULL_TREE, domain);
4741 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4743 SET_DECL_RTL (index, index_r);
4744 if (TREE_CODE (value) == SAVE_EXPR
4745 && SAVE_EXPR_RTL (value) == 0)
4747 /* Make sure value gets expanded once before the loop. */
4749 expand_expr (value, const0_rtx, VOIDmode, 0);
4752 store_expr (lo_index, index_r, 0);
4753 loop = expand_start_loop (0);
4755 /* Assign value to element index. */
4757 = convert (ssizetype,
4758 fold (build (MINUS_EXPR, TREE_TYPE (index),
4759 index, TYPE_MIN_VALUE (domain))));
4760 position = size_binop (MULT_EXPR, position,
4762 TYPE_SIZE_UNIT (elttype)));
4764 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4765 xtarget = offset_address (target, pos_rtx,
4766 highest_pow2_factor (position));
4767 xtarget = adjust_address (xtarget, mode, 0);
4768 if (TREE_CODE (value) == CONSTRUCTOR)
4769 store_constructor (value, xtarget, cleared,
4770 bitsize / BITS_PER_UNIT);
4772 store_expr (value, xtarget, 0);
4774 expand_exit_loop_if_false (loop,
4775 build (LT_EXPR, integer_type_node,
4778 expand_increment (build (PREINCREMENT_EXPR,
4780 index, integer_one_node), 0, 0);
4782 emit_label (loop_end);
4785 else if ((index != 0 && ! host_integerp (index, 0))
4786 || ! host_integerp (TYPE_SIZE (elttype), 1))
4791 index = ssize_int (1);
4794 index = convert (ssizetype,
4795 fold (build (MINUS_EXPR, index,
4796 TYPE_MIN_VALUE (domain))));
4798 position = size_binop (MULT_EXPR, index,
4800 TYPE_SIZE_UNIT (elttype)));
4801 xtarget = offset_address (target,
4802 expand_expr (position, 0, VOIDmode, 0),
4803 highest_pow2_factor (position));
4804 xtarget = adjust_address (xtarget, mode, 0);
4805 store_expr (value, xtarget, 0);
4810 bitpos = ((tree_low_cst (index, 0) - minelt)
4811 * tree_low_cst (TYPE_SIZE (elttype), 1));
4813 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4815 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4816 && TREE_CODE (type) == ARRAY_TYPE
4817 && TYPE_NONALIASED_COMPONENT (type))
4819 target = copy_rtx (target);
4820 MEM_KEEP_ALIAS_SET_P (target) = 1;
4823 store_constructor_field (target, bitsize, bitpos, mode, value,
4824 type, cleared, get_alias_set (elttype));
4830 /* Set constructor assignments. */
4831 else if (TREE_CODE (type) == SET_TYPE)
4833 tree elt = CONSTRUCTOR_ELTS (exp);
4834 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4835 tree domain = TYPE_DOMAIN (type);
4836 tree domain_min, domain_max, bitlength;
4838 /* The default implementation strategy is to extract the constant
4839 parts of the constructor, use that to initialize the target,
4840 and then "or" in whatever non-constant ranges we need in addition.
4842 If a large set is all zero or all ones, it is
4843 probably better to set it using memset (if available) or bzero.
4844 Also, if a large set has just a single range, it may also be
4845 better to first clear the set (using bzero/memset), and then
4846 set the bits we want. */
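      /* For instance (a sketch in Pascal-style set notation): the
	 constructor [1..3, 10] is entirely constant, so its bits can
	 be emitted as literal words below, whereas a variable range
	 such as [lo..hi] must be "or"ed in at run time through the
	 __setbits library call further down.  */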
4848 /* Check for all zeros. */
4849 if (elt == NULL_TREE && size > 0)
4852 clear_storage (target, GEN_INT (size));
4856 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4857 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4858 bitlength = size_binop (PLUS_EXPR,
4859 size_diffop (domain_max, domain_min),
4862 nbits = tree_low_cst (bitlength, 1);
4864 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4865 are "complicated" (more than one range), initialize (the
4866 constant parts) by copying from a constant. */
4867 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4868 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4870 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4871 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4872 char *bit_buffer = (char *) alloca (nbits);
4873 HOST_WIDE_INT word = 0;
4874 unsigned int bit_pos = 0;
4875 unsigned int ibit = 0;
4876 unsigned int offset = 0; /* In bytes from beginning of set. */
4878 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4881 if (bit_buffer[ibit])
4883 if (BYTES_BIG_ENDIAN)
4884 word |= (1 << (set_word_size - 1 - bit_pos));
4886 word |= 1 << bit_pos;
4890 if (bit_pos >= set_word_size || ibit == nbits)
4892 if (word != 0 || ! cleared)
4894 rtx datum = GEN_INT (word);
4897 /* The assumption here is that it is safe to use
4898 XEXP if the set is multi-word, but not if
4899 it's single-word. */
4900 if (GET_CODE (target) == MEM)
4901 to_rtx = adjust_address (target, mode, offset);
4902 else if (offset == 0)
4906 emit_move_insn (to_rtx, datum);
4913 offset += set_word_size / BITS_PER_UNIT;
4918 /* Don't bother clearing storage if the set is all ones. */
4919 if (TREE_CHAIN (elt) != NULL_TREE
4920 || (TREE_PURPOSE (elt) == NULL_TREE
4922 : ( ! host_integerp (TREE_VALUE (elt), 0)
4923 || ! host_integerp (TREE_PURPOSE (elt), 0)
4924 || (tree_low_cst (TREE_VALUE (elt), 0)
4925 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4926 != (HOST_WIDE_INT) nbits))))
4927 clear_storage (target, expr_size (exp));
4929 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4931 /* Start of range of element or NULL. */
4932 tree startbit = TREE_PURPOSE (elt);
4933 /* End of range of element, or element value. */
4934 tree endbit = TREE_VALUE (elt);
4935 #ifdef TARGET_MEM_FUNCTIONS
4936 HOST_WIDE_INT startb, endb;
4938 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4940 bitlength_rtx = expand_expr (bitlength,
4941 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4943 /* Handle a non-range tuple element like [ expr ]. */
4944 if (startbit == NULL_TREE)
4946 startbit = save_expr (endbit);
4950 startbit = convert (sizetype, startbit);
4951 endbit = convert (sizetype, endbit);
4952 if (! integer_zerop (domain_min))
4954 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4955 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4957 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4958 EXPAND_CONST_ADDRESS);
4959 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4960 EXPAND_CONST_ADDRESS);
4966 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4969 emit_move_insn (targetx, target);
4972 else if (GET_CODE (target) == MEM)
4977 #ifdef TARGET_MEM_FUNCTIONS
4978 /* Optimization: If startbit and endbit are
4979 constants divisible by BITS_PER_UNIT,
4980 call memset instead. */
4981 if (TREE_CODE (startbit) == INTEGER_CST
4982 && TREE_CODE (endbit) == INTEGER_CST
4983 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4984 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
4986 emit_library_call (memset_libfunc, LCT_NORMAL,
4988 plus_constant (XEXP (targetx, 0),
4989 startb / BITS_PER_UNIT),
4991 constm1_rtx, TYPE_MODE (integer_type_node),
4992 GEN_INT ((endb - startb) / BITS_PER_UNIT),
4993 TYPE_MODE (sizetype));
4997 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4998 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4999 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5000 startbit_rtx, TYPE_MODE (sizetype),
5001 endbit_rtx, TYPE_MODE (sizetype));
5004 emit_move_insn (target, targetx);
5012 /* Store the value of EXP (an expression tree)
5013 into a subfield of TARGET which has mode MODE and occupies
5014 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5015 If MODE is VOIDmode, it means that we are storing into a bit-field.
5017 If VALUE_MODE is VOIDmode, return nothing in particular.
5018 UNSIGNEDP is not used in this case.
5020 Otherwise, return an rtx for the value stored. This rtx
5021 has mode VALUE_MODE if that is convenient to do.
5022 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5024 TYPE is the type of the underlying object,
5026 ALIAS_SET is the alias set for the destination. This value will
5027 (in general) be different from that for TARGET, since TARGET is a
5028 reference to the containing structure. */
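/* An illustrative call (hypothetical types; the exact layout is of
   course target-dependent): for an assignment such as

	struct { int a : 3; int b : 5; } s;
	s.b = x;

   the caller would pass BITSIZE == 5, MODE == VOIDmode (a bit-field),
   and, with a typical little-endian layout, BITPOS == 3.  */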
5031 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5034 HOST_WIDE_INT bitsize;
5035 HOST_WIDE_INT bitpos;
5036 enum machine_mode mode;
5038 enum machine_mode value_mode;
5043 HOST_WIDE_INT width_mask = 0;
5045 if (TREE_CODE (exp) == ERROR_MARK)
5048 /* If we have nothing to store, do nothing unless the expression has
5049 side-effects. */
5050 if (bitsize == 0)
5051 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5052 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5053 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5055 /* If we are storing into an unaligned field of an aligned union that is
5056 in a register, we may have the mode of TARGET being an integer mode but
5057 MODE == BLKmode. In that case, get an aligned object whose size and
5058 alignment are the same as TARGET and store TARGET into it (we can avoid
5059 the store if the field being stored is the entire width of TARGET). Then
5060 call ourselves recursively to store the field into a BLKmode version of
5061 that object. Finally, load from the object into TARGET. This is not
5062 very efficient in general, but should only be slightly more expensive
5063 than the otherwise-required unaligned accesses. Perhaps this can be
5064 cleaned up later. */
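  /* A sketch of the sequence just described, assuming a 32-bit TARGET
     register and a 16-bit field: allocate a 32-bit stack temporary
     OBJECT, copy TARGET into it (skippable when the field covers all
     32 bits), recurse to store into a BLKmode view of OBJECT, then
     move OBJECT back into TARGET.  */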
5067 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5071 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5073 rtx blk_object = adjust_address (object, BLKmode, 0);
5075 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5076 emit_move_insn (object, target);
5078 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5081 emit_move_insn (target, object);
5083 /* We want to return the BLKmode version of the data. */
5087 if (GET_CODE (target) == CONCAT)
5089 /* We're storing into a struct containing a single __complex. */
5093 return store_expr (exp, target, 0);
5096 /* If the structure is in a register or if the component
5097 is a bit field, we cannot use addressing to access it.
5098 Use bit-field techniques or SUBREG to store in it. */
5100 if (mode == VOIDmode
5101 || (mode != BLKmode && ! direct_store[(int) mode]
5102 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5103 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5104 || GET_CODE (target) == REG
5105 || GET_CODE (target) == SUBREG
5106 /* If the field isn't aligned enough to store as an ordinary memref,
5107 store it as a bit field. */
5108 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5109 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5110 || bitpos % GET_MODE_ALIGNMENT (mode)))
5111 /* If the RHS and field are a constant size and the size of the
5112 RHS isn't the same size as the bitfield, we must use bitfield
5113 techniques. */
5114 || (bitsize >= 0
5115 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5116 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5118 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5120 /* If BITSIZE is narrower than the size of the type of EXP
5121 we will be narrowing TEMP. Normally, what's wanted are the
5122 low-order bits. However, if EXP's type is a record and this is
5123 a big-endian machine, we want the upper BITSIZE bits. */
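      /* Concretely (an illustrative case): narrowing a 32-bit record
	 value into a 16-bit field on a big-endian target shifts TEMP
	 right by 32 - 16 == 16 bits, so the upper half, holding the
	 record's first bytes, becomes the value stored.  */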
5124 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5125 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5126 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5127 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5128 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5132 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5134 if (mode != VOIDmode && mode != BLKmode
5135 && mode != TYPE_MODE (TREE_TYPE (exp)))
5136 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5138 /* If the modes of TARGET and TEMP are both BLKmode, both
5139 must be in memory and BITPOS must be aligned on a byte
5140 boundary. If so, we simply do a block copy. */
5141 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5143 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5144 || bitpos % BITS_PER_UNIT != 0)
5147 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5148 emit_block_move (target, temp,
5149 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5152 return value_mode == VOIDmode ? const0_rtx : target;
5155 /* Store the value in the bitfield. */
5156 store_bit_field (target, bitsize, bitpos, mode, temp,
5157 int_size_in_bytes (type));
5159 if (value_mode != VOIDmode)
5161 /* The caller wants an rtx for the value.
5162 If possible, avoid refetching from the bitfield itself. */
5164 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5167 enum machine_mode tmode;
5169 tmode = GET_MODE (temp);
5170 if (tmode == VOIDmode)
5174 return expand_and (tmode, temp,
5175 GEN_INT (trunc_int_for_mode (width_mask,
5179 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5180 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5181 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5184 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5185 NULL_RTX, value_mode, VOIDmode,
5186 int_size_in_bytes (type));
5192 rtx addr = XEXP (target, 0);
5193 rtx to_rtx = target;
5195 /* If a value is wanted, it must be the lhs;
5196 so make the address stable for multiple use. */
5198 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5199 && ! CONSTANT_ADDRESS_P (addr)
5200 /* A frame-pointer reference is already stable. */
5201 && ! (GET_CODE (addr) == PLUS
5202 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5203 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5204 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5205 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5207 /* Now build a reference to just the desired component. */
5209 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5211 if (to_rtx == target)
5212 to_rtx = copy_rtx (to_rtx);
5214 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5215 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5216 set_mem_alias_set (to_rtx, alias_set);
5218 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5222 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5223 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5224 codes and find the ultimate containing object, which we return.
5226 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5227 bit position, and *PUNSIGNEDP to the signedness of the field.
5228 If the position of the field is variable, we store a tree
5229 giving the variable offset (in units) in *POFFSET.
5230 This offset is in addition to the bit position.
5231 If the position is not variable, we store 0 in *POFFSET.
5233 If any of the extraction expressions is volatile,
5234 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5236 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5237 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5240 If the field describes a variable-sized object, *PMODE is set to
5241 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5242 this case, but the address of the object can be found. */
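/* A hypothetical example (assuming 32-bit int and 16-bit short): for

	struct s { int pad; short f[4]; } *p;
	... p->f[2] ...

   a call on the ARRAY_REF returns the INDIRECT_REF *p as the
   containing object, with *PBITSIZE == 16, *PBITPOS == 32 + 2 * 16
   == 64, *POFFSET == 0 since everything is constant, and *PMODE the
   mode of short on such a target.  */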
5245 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5246 punsignedp, pvolatilep)
5248 HOST_WIDE_INT *pbitsize;
5249 HOST_WIDE_INT *pbitpos;
5251 enum machine_mode *pmode;
5256 enum machine_mode mode = VOIDmode;
5257 tree offset = size_zero_node;
5258 tree bit_offset = bitsize_zero_node;
5259 tree placeholder_ptr = 0;
5262 /* First get the mode, signedness, and size. We do this from just the
5263 outermost expression. */
5264 if (TREE_CODE (exp) == COMPONENT_REF)
5266 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5267 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5268 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5270 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5272 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5274 size_tree = TREE_OPERAND (exp, 1);
5275 *punsignedp = TREE_UNSIGNED (exp);
5279 mode = TYPE_MODE (TREE_TYPE (exp));
5280 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5282 if (mode == BLKmode)
5283 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5285 *pbitsize = GET_MODE_BITSIZE (mode);
5290 if (! host_integerp (size_tree, 1))
5291 mode = BLKmode, *pbitsize = -1;
5293 *pbitsize = tree_low_cst (size_tree, 1);
5296 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5297 and find the ultimate containing object. */
5300 if (TREE_CODE (exp) == BIT_FIELD_REF)
5301 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5302 else if (TREE_CODE (exp) == COMPONENT_REF)
5304 tree field = TREE_OPERAND (exp, 1);
5305 tree this_offset = DECL_FIELD_OFFSET (field);
5307 /* If this field hasn't been filled in yet, don't go
5308 past it. This should only happen when folding expressions
5309 made during type construction. */
5310 if (this_offset == 0)
5312 else if (! TREE_CONSTANT (this_offset)
5313 && contains_placeholder_p (this_offset))
5314 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5316 offset = size_binop (PLUS_EXPR, offset, this_offset);
5317 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5318 DECL_FIELD_BIT_OFFSET (field));
5320 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5323 else if (TREE_CODE (exp) == ARRAY_REF
5324 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5326 tree index = TREE_OPERAND (exp, 1);
5327 tree array = TREE_OPERAND (exp, 0);
5328 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5329 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5330 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5332 /* We assume all arrays have sizes that are a multiple of a byte.
5333 First subtract the lower bound, if any, in the type of the
5334 index, then convert to sizetype and multiply by the size of the array element. */
5336 if (low_bound != 0 && ! integer_zerop (low_bound))
5337 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5340 /* If the index has a self-referential type, pass it to a
5341 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5342 component to one. */
5343 if (! TREE_CONSTANT (index)
5344 && contains_placeholder_p (index))
5345 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5346 if (! TREE_CONSTANT (unit_size)
5347 && contains_placeholder_p (unit_size))
5348 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5350 offset = size_binop (PLUS_EXPR, offset,
5351 size_binop (MULT_EXPR,
5352 convert (sizetype, index),
5356 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5358 tree new = find_placeholder (exp, &placeholder_ptr);
5360 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5361 We might have been called from tree optimization where we
5362 haven't set up an object yet. */
5370 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5371 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5372 && ! ((TREE_CODE (exp) == NOP_EXPR
5373 || TREE_CODE (exp) == CONVERT_EXPR)
5374 && (TYPE_MODE (TREE_TYPE (exp))
5375 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5378 /* If any reference in the chain is volatile, the effect is volatile. */
5379 if (TREE_THIS_VOLATILE (exp))
5382 exp = TREE_OPERAND (exp, 0);
5385 /* If OFFSET is constant, see if we can return the whole thing as a
5386 constant bit position. Otherwise, split it up. */
5387 if (host_integerp (offset, 0)
5388 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5390 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5391 && host_integerp (tem, 0))
5392 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5394 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5400 /* Return 1 if T is an expression that get_inner_reference handles. */
5403 handled_component_p (t)
5406 switch (TREE_CODE (t))
5411 case ARRAY_RANGE_REF:
5412 case NON_LVALUE_EXPR:
5413 case VIEW_CONVERT_EXPR:
5418 return (TYPE_MODE (TREE_TYPE (t))
5419 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5426 /* Given an rtx VALUE that may contain additions and multiplications, return
5427 an equivalent value that just refers to a register, memory, or constant.
5428 This is done by generating instructions to perform the arithmetic and
5429 returning a pseudo-register containing the value.
5431 The returned value may be a REG, SUBREG, MEM or constant. */
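/* For example (illustrative): given the rtx

	(plus:SI (reg:SI 60) (const_int 4))

   force_operand emits an add computing the sum into a pseudo (or into
   TARGET when convenient) and returns that register, leaving the
   caller with a "simple" operand.  */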
5434 force_operand (value, target)
5438 /* Use subtarget as the target for operand 0 of a binary operation. */
5439 rtx subtarget = get_subtarget (target);
5440 enum rtx_code code = GET_CODE (value);
5442 /* Check for a PIC address load. */
5443 if ((code == PLUS || code == MINUS)
5444 && XEXP (value, 0) == pic_offset_table_rtx
5445 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5446 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5447 || GET_CODE (XEXP (value, 1)) == CONST))
5450 subtarget = gen_reg_rtx (GET_MODE (value));
5451 emit_move_insn (subtarget, value);
5455 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5458 target = gen_reg_rtx (GET_MODE (value));
5459 convert_move (target, force_operand (XEXP (value, 0), NULL),
5460 code == ZERO_EXTEND);
5464 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5466 op2 = XEXP (value, 1);
5467 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5469 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5472 op2 = negate_rtx (GET_MODE (value), op2);
5475 /* Check for an addition with OP2 a constant integer and our first
5476 operand a PLUS of a virtual register and something else. In that
5477 case, we want to emit the sum of the virtual register and the
5478 constant first and then add the other value. This allows virtual
5479 register instantiation to simply modify the constant rather than
5480 creating another one around this addition. */
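      /* An illustrative instance:

	     (plus (plus (reg virtual-stack-vars) (reg 61)) (const_int 8))

	 is emitted as virtual-stack-vars + 8 first, which instantiation
	 can later fold into a single frame offset, and only then is
	 register 61 added in.  */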
5481 if (code == PLUS && GET_CODE (op2) == CONST_INT
5482 && GET_CODE (XEXP (value, 0)) == PLUS
5483 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5484 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5485 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5487 rtx temp = expand_simple_binop (GET_MODE (value), code,
5488 XEXP (XEXP (value, 0), 0), op2,
5489 subtarget, 0, OPTAB_LIB_WIDEN);
5490 return expand_simple_binop (GET_MODE (value), code, temp,
5491 force_operand (XEXP (XEXP (value,
5493 target, 0, OPTAB_LIB_WIDEN);
5496 op1 = force_operand (XEXP (value, 0), subtarget);
5497 op2 = force_operand (op2, NULL_RTX);
5501 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5503 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5504 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5505 target, 1, OPTAB_LIB_WIDEN);
5507 return expand_divmod (0,
5508 FLOAT_MODE_P (GET_MODE (value))
5509 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5510 GET_MODE (value), op1, op2, target, 0);
5513 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5517 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5521 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5525 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5526 target, 0, OPTAB_LIB_WIDEN);
5529 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5530 target, 1, OPTAB_LIB_WIDEN);
5533 if (GET_RTX_CLASS (code) == '1')
5535 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5536 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5539 #ifdef INSN_SCHEDULING
5540 /* On machines that have insn scheduling, we want all memory references to be
5541 explicit, so we need to deal with such paradoxical SUBREGs. */
5542 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5543 && (GET_MODE_SIZE (GET_MODE (value))
5544 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5546 = simplify_gen_subreg (GET_MODE (value),
5547 force_reg (GET_MODE (SUBREG_REG (value)),
5548 force_operand (SUBREG_REG (value),
5550 GET_MODE (SUBREG_REG (value)),
5551 SUBREG_BYTE (value));
5557 /* Subroutine of expand_expr: return nonzero iff there is no way that
5558 EXP can reference X, which is being modified. TOP_P is nonzero if this
5559 call is going to be used to determine whether we need a temporary
5560 for EXP, as opposed to a recursive call to this function.
5562 It is always safe for this routine to return zero since it merely
5563 searches for optimization opportunities. */
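/* For instance (a sketch): when expanding an assignment like
   a[i] = b[i] + c, the expander can ask whether the rtx holding a[i]
   is safe from the tree b[i] + c; if b might alias a, we return 0 and
   the caller uses a temporary, which is always correct, merely
   slower.  */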
5566 safe_from_p (x, exp, top_p)
5573 static tree save_expr_list;
5576 /* If EXP has varying size, we MUST use a target since we currently
5577 have no way of allocating temporaries of variable size
5578 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5579 So we assume here that something at a higher level has prevented a
5580 clash. This is somewhat bogus, but the best we can do. Only
5581 do this when X is BLKmode and when we are at the top level. */
5582 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5583 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5584 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5585 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5586 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5588 && GET_MODE (x) == BLKmode)
5589 /* If X is in the outgoing argument area, it is always safe. */
5590 || (GET_CODE (x) == MEM
5591 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5592 || (GET_CODE (XEXP (x, 0)) == PLUS
5593 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5596 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5597 find the underlying pseudo. */
5598 if (GET_CODE (x) == SUBREG)
5601 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5605 /* A SAVE_EXPR might appear many times in the expression passed to the
5606 top-level safe_from_p call, and if it has a complex subexpression,
5607 examining it multiple times could result in a combinatorial explosion.
5608 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5609 with optimization took about 28 minutes to compile -- even though it was
5610 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5611 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5612 we have processed. Note that the only test of top_p was above. */
5621 rtn = safe_from_p (x, exp, 0);
5623 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5624 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5629 /* Now look at our tree code and possibly recurse. */
5630 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5633 exp_rtl = DECL_RTL_IF_SET (exp);
5640 if (TREE_CODE (exp) == TREE_LIST)
5641 return ((TREE_VALUE (exp) == 0
5642 || safe_from_p (x, TREE_VALUE (exp), 0))
5643 && (TREE_CHAIN (exp) == 0
5644 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5645 else if (TREE_CODE (exp) == ERROR_MARK)
5646 return 1; /* An already-visited SAVE_EXPR? */
5651 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5655 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5656 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5660 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5661 the expression. If it is set, we conflict iff we are that rtx or
5662 both are in memory. Otherwise, we check all operands of the
5663 expression recursively. */
5665 switch (TREE_CODE (exp))
5668 /* If the operand is static or we are static, we can't conflict.
5669 Likewise if we don't conflict with the operand at all. */
5670 if (staticp (TREE_OPERAND (exp, 0))
5671 || TREE_STATIC (exp)
5672 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5675 /* Otherwise, the only way this can conflict is if we are taking
5676 the address of a DECL whose address is part of X, which is very rare. */
5678 exp = TREE_OPERAND (exp, 0);
5681 if (!DECL_RTL_SET_P (exp)
5682 || GET_CODE (DECL_RTL (exp)) != MEM)
5685 exp_rtl = XEXP (DECL_RTL (exp), 0);
5690 if (GET_CODE (x) == MEM
5691 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5692 get_alias_set (exp)))
5697 /* Assume that the call will clobber all hard registers and all of memory. */
5699 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5700 || GET_CODE (x) == MEM)
5705 /* If a sequence exists, we would have to scan every instruction
5706 in the sequence to see if it was safe. This is probably not worthwhile. */
5708 if (RTL_EXPR_SEQUENCE (exp))
5711 exp_rtl = RTL_EXPR_RTL (exp);
5714 case WITH_CLEANUP_EXPR:
5715 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5718 case CLEANUP_POINT_EXPR:
5719 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5722 exp_rtl = SAVE_EXPR_RTL (exp);
5726 /* If we've already scanned this, don't do it again. Otherwise,
5727 show we've scanned it and record for clearing the flag if we're
5729 if (TREE_PRIVATE (exp))
5732 TREE_PRIVATE (exp) = 1;
5733 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5735 TREE_PRIVATE (exp) = 0;
5739 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5743 /* The only operand we look at is operand 1. The rest aren't
5744 part of the expression. */
5745 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5747 case METHOD_CALL_EXPR:
5748 /* This takes an rtx argument, but shouldn't appear here. */
5755 /* If we have an rtx, we do not need to scan our operands. */
5759 nops = first_rtl_op (TREE_CODE (exp));
5760 for (i = 0; i < nops; i++)
5761 if (TREE_OPERAND (exp, i) != 0
5762 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5765 /* If this is a language-specific tree code, it may require
5766 special handling. */
5767 if ((unsigned int) TREE_CODE (exp)
5768 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5769 && !(*lang_hooks.safe_from_p) (x, exp))
5773 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5777 if (GET_CODE (exp_rtl) == SUBREG)
5779 exp_rtl = SUBREG_REG (exp_rtl);
5780 if (GET_CODE (exp_rtl) == REG
5781 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5785 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5786 are memory and they conflict. */
5787 return ! (rtx_equal_p (x, exp_rtl)
5788 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5789 && true_dependence (exp_rtl, VOIDmode, x,
5790 rtx_addr_varies_p)));
5793 /* If we reach here, it is safe. */
5797 /* Subroutine of expand_expr: return rtx if EXP is a
5798 variable or parameter; else return 0. */
5805 switch (TREE_CODE (exp))
5809 return DECL_RTL (exp);
5815 #ifdef MAX_INTEGER_COMPUTATION_MODE
5818 check_max_integer_computation_mode (exp)
5821 enum tree_code code;
5822 enum machine_mode mode;
5824 /* Strip any NOPs that don't change the mode. */
5826 code = TREE_CODE (exp);
5828 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5829 if (code == NOP_EXPR
5830 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5833 /* First check the type of the overall operation. We need only look at
5834 unary, binary and relational operations. */
5835 if (TREE_CODE_CLASS (code) == '1'
5836 || TREE_CODE_CLASS (code) == '2'
5837 || TREE_CODE_CLASS (code) == '<')
5839 mode = TYPE_MODE (TREE_TYPE (exp));
5840 if (GET_MODE_CLASS (mode) == MODE_INT
5841 && mode > MAX_INTEGER_COMPUTATION_MODE)
5842 internal_error ("unsupported wide integer operation");
5845 /* Check operand of a unary op. */
5846 if (TREE_CODE_CLASS (code) == '1')
5848 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5849 if (GET_MODE_CLASS (mode) == MODE_INT
5850 && mode > MAX_INTEGER_COMPUTATION_MODE)
5851 internal_error ("unsupported wide integer operation");
5854 /* Check operands of a binary/comparison op. */
5855 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5857 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5858 if (GET_MODE_CLASS (mode) == MODE_INT
5859 && mode > MAX_INTEGER_COMPUTATION_MODE)
5860 internal_error ("unsupported wide integer operation");
5862 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5863 if (GET_MODE_CLASS (mode) == MODE_INT
5864 && mode > MAX_INTEGER_COMPUTATION_MODE)
5865 internal_error ("unsupported wide integer operation");
5870 /* Return the highest power of two that EXP is known to be a multiple of.
5871 This is used in updating alignment of MEMs in array references. */
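/* For example: for the expression i * 12 + 8 this returns
   MIN (highest_pow2_factor (i * 12), highest_pow2_factor (8))
   == MIN (1 * 4, 8) == 4, since 4 is the largest power of two known
   to divide both terms (the unknown factor i contributes 1).  */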
5873 static HOST_WIDE_INT
5874 highest_pow2_factor (exp)
5877 HOST_WIDE_INT c0, c1;
5879 switch (TREE_CODE (exp))
5882 /* We can find the lowest bit that's a one. If the low
5883 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5884 We need to handle this case since we can find it in a COND_EXPR,
5885 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5886 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
5888 if (TREE_CONSTANT_OVERFLOW (exp))
5889 return BIGGEST_ALIGNMENT;
5892 /* Note: tree_low_cst is intentionally not used here,
5893 we don't care about the upper bits. */
5894 c0 = TREE_INT_CST_LOW (exp);
5896 return c0 ? c0 : BIGGEST_ALIGNMENT;
5900 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5901 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5902 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5903 return MIN (c0, c1);
5906 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5907 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5910 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5912 if (integer_pow2p (TREE_OPERAND (exp, 1))
5913 && host_integerp (TREE_OPERAND (exp, 1), 1))
5915 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5916 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5917 return MAX (1, c0 / c1);
5921 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5922 case SAVE_EXPR: case WITH_RECORD_EXPR:
5923 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5926 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5929 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5930 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5931 return MIN (c0, c1);
5940 /* Similar, except that it is known that the expression must be a multiple
5941 of the alignment of TYPE. */
5943 static HOST_WIDE_INT
5944 highest_pow2_factor_for_type (type, exp)
5948 HOST_WIDE_INT type_align, factor;
5950 factor = highest_pow2_factor (exp);
5951 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
5952 return MAX (factor, type_align);
5955 /* Return an object on the placeholder list that matches EXP, a
5956 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5957 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
5958 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
5959 is a location which initially points to a starting location in the
5960 placeholder list (zero means start of the list) and where a pointer into
5961 the placeholder list at which the object is found is placed. */
5964 find_placeholder (exp, plist)
5968 tree type = TREE_TYPE (exp);
5969 tree placeholder_expr;
5971 for (placeholder_expr
5972 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
5973 placeholder_expr != 0;
5974 placeholder_expr = TREE_CHAIN (placeholder_expr))
5976 tree need_type = TYPE_MAIN_VARIANT (type);
5979 /* Find the outermost reference that is of the type we want. If none,
5980 see if any object has a type that is a pointer to the type we want. */
5982 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5983 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5984 || TREE_CODE (elt) == COND_EXPR)
5985 ? TREE_OPERAND (elt, 1)
5986 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5987 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5988 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5989 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5990 ? TREE_OPERAND (elt, 0) : 0))
5991 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5994 *plist = placeholder_expr;
5998 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6000 = ((TREE_CODE (elt) == COMPOUND_EXPR
6001 || TREE_CODE (elt) == COND_EXPR)
6002 ? TREE_OPERAND (elt, 1)
6003 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6004 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6005 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6006 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6007 ? TREE_OPERAND (elt, 0) : 0))
6008 if (POINTER_TYPE_P (TREE_TYPE (elt))
6009 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6013 *plist = placeholder_expr;
6014 return build1 (INDIRECT_REF, need_type, elt);
6021 /* expand_expr: generate code for computing expression EXP.
6022 An rtx for the computed value is returned. The value is never null.
6023 In the case of a void EXP, const0_rtx is returned.
6025 The value may be stored in TARGET if TARGET is nonzero.
6026 TARGET is just a suggestion; callers must assume that
6027 the rtx returned may not be the same as TARGET.
6029 If TARGET is CONST0_RTX, it means that the value will be ignored.
6031 If TMODE is not VOIDmode, it suggests generating the
6032 result in mode TMODE. But this is done only when convenient.
6033 Otherwise, TMODE is ignored and the value generated in its natural mode.
6034 TMODE is just a suggestion; callers must assume that
6035 the rtx returned may not have mode TMODE.
6037 Note that TARGET may have neither TMODE nor MODE. In that case, it
6038 probably will not be used.
6040 If MODIFIER is EXPAND_SUM then when EXP is an addition
6041 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6042 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6043 products as above, or REG or MEM, or constant.
6044 Ordinarily in such cases we would output mul or add instructions
6045 and then return a pseudo reg containing the sum.
6047 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6048 it also marks a label as absolutely required (it can't be dead).
6049 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6050 This is used for outputting expressions used in initializers.
6052 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6053 with a constant address even if that address is not normally legitimate.
6054 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
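/* For example (illustrative): expanding the address computation p + 4
   with MODIFIER == EXPAND_SUM may simply return the rtx
   (plus (reg) (const_int 4)) instead of emitting an add insn, letting
   the caller fold the constant into a memory address.  */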
6057 expand_expr (exp, target, tmode, modifier)
6060 enum machine_mode tmode;
6061 enum expand_modifier modifier;
6064 tree type = TREE_TYPE (exp);
6065 int unsignedp = TREE_UNSIGNED (type);
6066 enum machine_mode mode;
6067 enum tree_code code = TREE_CODE (exp);
6069 rtx subtarget, original_target;
6073 /* Handle ERROR_MARK before anybody tries to access its type. */
6074 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6076 op0 = CONST0_RTX (tmode);
6082 mode = TYPE_MODE (type);
6083 /* Use subtarget as the target for operand 0 of a binary operation. */
6084 subtarget = get_subtarget (target);
6085 original_target = target;
6086 ignore = (target == const0_rtx
6087 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6088 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6089 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6090 && TREE_CODE (type) == VOID_TYPE));
6092 /* If we are going to ignore this result, we need only do something
6093 if there is a side-effect somewhere in the expression. If there
6094 is, short-circuit the most common cases here. Note that we must
6095 not call expand_expr with anything but const0_rtx in case this
6096 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6100 if (! TREE_SIDE_EFFECTS (exp))
6103 /* Ensure we reference a volatile object even if value is ignored, but
6104 don't do this if all we are doing is taking its address. */
6105 if (TREE_THIS_VOLATILE (exp)
6106 && TREE_CODE (exp) != FUNCTION_DECL
6107 && mode != VOIDmode && mode != BLKmode
6108 && modifier != EXPAND_CONST_ADDRESS)
6110 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6111 if (GET_CODE (temp) == MEM)
6112 temp = copy_to_reg (temp);
6116 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6117 || code == INDIRECT_REF || code == BUFFER_REF)
6118 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6121 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6122 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6124 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6125 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6128 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6129 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6130 /* If the second operand has no side effects, just evaluate the first. */
6132 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6134 else if (code == BIT_FIELD_REF)
6136 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6137 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6138 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6145 #ifdef MAX_INTEGER_COMPUTATION_MODE
6146 /* Only check stuff here if the mode we want is different from the mode
6147 of the expression; if it's the same, check_max_integer_computation_mode
6148 will handle it. Do we really need to check this stuff at all? */
6151 && GET_MODE (target) != mode
6152 && TREE_CODE (exp) != INTEGER_CST
6153 && TREE_CODE (exp) != PARM_DECL
6154 && TREE_CODE (exp) != ARRAY_REF
6155 && TREE_CODE (exp) != ARRAY_RANGE_REF
6156 && TREE_CODE (exp) != COMPONENT_REF
6157 && TREE_CODE (exp) != BIT_FIELD_REF
6158 && TREE_CODE (exp) != INDIRECT_REF
6159 && TREE_CODE (exp) != CALL_EXPR
6160 && TREE_CODE (exp) != VAR_DECL
6161 && TREE_CODE (exp) != RTL_EXPR)
6163 enum machine_mode mode = GET_MODE (target);
6165 if (GET_MODE_CLASS (mode) == MODE_INT
6166 && mode > MAX_INTEGER_COMPUTATION_MODE)
6167 internal_error ("unsupported wide integer operation");
6171 && TREE_CODE (exp) != INTEGER_CST
6172 && TREE_CODE (exp) != PARM_DECL
6173 && TREE_CODE (exp) != ARRAY_REF
6174 && TREE_CODE (exp) != ARRAY_RANGE_REF
6175 && TREE_CODE (exp) != COMPONENT_REF
6176 && TREE_CODE (exp) != BIT_FIELD_REF
6177 && TREE_CODE (exp) != INDIRECT_REF
6178 && TREE_CODE (exp) != VAR_DECL
6179 && TREE_CODE (exp) != CALL_EXPR
6180 && TREE_CODE (exp) != RTL_EXPR
6181 && GET_MODE_CLASS (tmode) == MODE_INT
6182 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6183 internal_error ("unsupported wide integer operation");
6185 check_max_integer_computation_mode (exp);
6188 /* If we will do cse, generate all results into pseudo registers
6189 since 1) that allows cse to find more things
6190 and 2) otherwise cse could produce an insn the machine
6191 cannot support. An exception is a CONSTRUCTOR into a multi-word
6192 MEM: that's much more likely to be most efficient into the MEM. */
6194 if (! cse_not_expected && mode != BLKmode && target
6195 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6196 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6203 tree function = decl_function_context (exp);
6204 /* Handle using a label in a containing function. */
6205 if (function != current_function_decl
6206 && function != inline_function_decl && function != 0)
6208 struct function *p = find_function_data (function);
6209 p->expr->x_forced_labels
6210 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6211 p->expr->x_forced_labels);
6215 if (modifier == EXPAND_INITIALIZER)
6216 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6221 temp = gen_rtx_MEM (FUNCTION_MODE,
6222 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6223 if (function != current_function_decl
6224 && function != inline_function_decl && function != 0)
6225 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6230 if (DECL_RTL (exp) == 0)
6232 error_with_decl (exp, "prior parameter's size depends on `%s'");
6233 return CONST0_RTX (mode);
6236 /* ... fall through ... */
6239 /* If a static var's type was incomplete when the decl was written,
6240 but the type is complete now, lay out the decl now. */
6241 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6242 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6244 rtx value = DECL_RTL_IF_SET (exp);
6246 layout_decl (exp, 0);
6248 /* If the RTL was already set, update its mode and memory attributes. */
6252 PUT_MODE (value, DECL_MODE (exp));
6253 SET_DECL_RTL (exp, 0);
6254 set_mem_attributes (value, exp, 1);
6255 SET_DECL_RTL (exp, value);
6259 /* ... fall through ... */
6263 if (DECL_RTL (exp) == 0)
6266 /* Ensure variable marked as used even if it doesn't go through
6267 a parser. If it hasn't been used yet, write out an external definition. */
6269 if (! TREE_USED (exp))
6271 assemble_external (exp);
6272 TREE_USED (exp) = 1;
6275 /* Show we haven't gotten RTL for this yet. */
6278 /* Handle variables inherited from containing functions. */
6279 context = decl_function_context (exp);
6281 /* We treat inline_function_decl as an alias for the current function
6282 because that is the inline function whose vars, types, etc.
6283 are being merged into the current function.
6284 See expand_inline_function. */
6286 if (context != 0 && context != current_function_decl
6287 && context != inline_function_decl
6288 /* If var is static, we don't need a static chain to access it. */
6289 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6290 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6294 /* Mark as non-local and addressable. */
6295 DECL_NONLOCAL (exp) = 1;
6296 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6298 mark_addressable (exp);
6299 if (GET_CODE (DECL_RTL (exp)) != MEM)
6301 addr = XEXP (DECL_RTL (exp), 0);
6302 if (GET_CODE (addr) == MEM)
6304 = replace_equiv_address (addr,
6305 fix_lexical_addr (XEXP (addr, 0), exp));
6307 addr = fix_lexical_addr (addr, exp);
6309 temp = replace_equiv_address (DECL_RTL (exp), addr);
6312 /* This is the case of an array whose size is to be determined
6313 from its initializer, while the initializer is still being parsed. */
6316 else if (GET_CODE (DECL_RTL (exp)) == MEM
6317 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6318 temp = validize_mem (DECL_RTL (exp));
6320 /* If DECL_RTL is memory, we are in the normal case and either
6321 the address is not valid or it is not a register and -fforce-addr
6322 is specified, get the address into a register. */
6324 else if (GET_CODE (DECL_RTL (exp)) == MEM
6325 && modifier != EXPAND_CONST_ADDRESS
6326 && modifier != EXPAND_SUM
6327 && modifier != EXPAND_INITIALIZER
6328 && (! memory_address_p (DECL_MODE (exp),
6329 XEXP (DECL_RTL (exp), 0))
6331 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6332 temp = replace_equiv_address (DECL_RTL (exp),
6333 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6335 /* If we got something, return it. But first, set the alignment
6336 if the address is a register. */
6339 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6340 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6345 /* If the mode of DECL_RTL does not match that of the decl, it
6346 must be a promoted value. We return a SUBREG of the wanted mode,
6347 but mark it so that we know that it was already extended. */
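/* For example, on a target that promotes QImode variables into SImode
   registers, DECL_RTL here is an SImode REG while DECL_MODE is QImode;
   we return something like (subreg:QI (reg:SI N) 0) with
   SUBREG_PROMOTED_VAR_P set so later code can reuse the extension.  */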
6349 if (GET_CODE (DECL_RTL (exp)) == REG
6350 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6352 /* Get the signedness used for this variable. Ensure we get the
6353 same mode we got when the variable was declared. */
6354 if (GET_MODE (DECL_RTL (exp))
6355 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6356 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6359 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6360 SUBREG_PROMOTED_VAR_P (temp) = 1;
6361 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6365 return DECL_RTL (exp);
6368 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6369 TREE_INT_CST_HIGH (exp), mode);
6371 /* ??? If overflow is set, fold will have done an incomplete job,
6372 which can result in (plus xx (const_int 0)), which can get
6373 simplified by validate_replace_rtx during virtual register
6374 instantiation, which can result in unrecognizable insns.
6375 Avoid this by forcing all overflows into registers. */
6376 if (TREE_CONSTANT_OVERFLOW (exp)
6377 && modifier != EXPAND_INITIALIZER)
6378 temp = force_reg (mode, temp);
6383 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6386 /* If optimized, generate immediate CONST_DOUBLE
6387 which will be turned into memory by reload if necessary.
6389 We used to force a register so that loop.c could see it. But
6390 this does not allow gen_* patterns to perform optimizations with
6391 the constants. It also produces two insns in cases like "x = 1.0;".
6392 On most machines, floating-point constants are not permitted in
6393 many insns, so we'd end up copying it to a register in any case.
6395 Now, we do the copying in expand_binop, if appropriate. */
6396 return immed_real_const (exp);
6400 if (! TREE_CST_RTL (exp))
6401 output_constant_def (exp, 1);
6403 /* TREE_CST_RTL probably contains a constant address.
6404 On RISC machines where a constant address isn't valid,
6405 make some insns to get that address into a register. */
6406 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6407 && modifier != EXPAND_CONST_ADDRESS
6408 && modifier != EXPAND_INITIALIZER
6409 && modifier != EXPAND_SUM
6410 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6412 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6413 return replace_equiv_address (TREE_CST_RTL (exp),
6414 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6415 return TREE_CST_RTL (exp);
6417 case EXPR_WITH_FILE_LOCATION:
6420 const char *saved_input_filename = input_filename;
6421 int saved_lineno = lineno;
6422 input_filename = EXPR_WFL_FILENAME (exp);
6423 lineno = EXPR_WFL_LINENO (exp);
6424 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6425 emit_line_note (input_filename, lineno);
6426 /* Possibly avoid switching back and forth here. */
6427 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6428 input_filename = saved_input_filename;
6429 lineno = saved_lineno;
6434 context = decl_function_context (exp);
6436 /* If this SAVE_EXPR was at global context, assume we are an
6437 initialization function and move it into our context. */
6439 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6441 /* We treat inline_function_decl as an alias for the current function
6442 because that is the inline function whose vars, types, etc.
6443 are being merged into the current function.
6444 See expand_inline_function. */
6445 if (context == current_function_decl || context == inline_function_decl)
6448 /* If this is non-local, handle it. */
6451 /* The following call just exists to abort if the context is
6452 not that of a containing function. */
6453 find_function_data (context);
6455 temp = SAVE_EXPR_RTL (exp);
6456 if (temp && GET_CODE (temp) == REG)
6458 put_var_into_stack (exp);
6459 temp = SAVE_EXPR_RTL (exp);
6461 if (temp == 0 || GET_CODE (temp) != MEM)
6464 replace_equiv_address (temp,
6465 fix_lexical_addr (XEXP (temp, 0), exp));
6467 if (SAVE_EXPR_RTL (exp) == 0)
6469 if (mode == VOIDmode)
6472 temp = assign_temp (build_qualified_type (type,
6474 | TYPE_QUAL_CONST)),
6477 SAVE_EXPR_RTL (exp) = temp;
6478 if (!optimize && GET_CODE (temp) == REG)
6479 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6482 /* If the mode of TEMP does not match that of the expression, it
6483 must be a promoted value. We pass store_expr a SUBREG of the
6484 wanted mode but mark it so that we know that it was already
6485 extended. Note that `unsignedp' was modified above in this case. */
6488 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6490 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6491 SUBREG_PROMOTED_VAR_P (temp) = 1;
6492 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6495 if (temp == const0_rtx)
6496 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6498 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6500 TREE_USED (exp) = 1;
6503 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6504 must be a promoted value. We return a SUBREG of the wanted mode,
6505 but mark it so that we know that it was already extended. */
6507 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6508 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6510 /* Compute the signedness and make the proper SUBREG. */
6511 promote_mode (type, mode, &unsignedp, 0);
6512 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6513 SUBREG_PROMOTED_VAR_P (temp) = 1;
6514 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6518 return SAVE_EXPR_RTL (exp);
6523 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6524 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6528 case PLACEHOLDER_EXPR:
6530 tree old_list = placeholder_list;
6531 tree placeholder_expr = 0;
6533 exp = find_placeholder (exp, &placeholder_expr);
6537 placeholder_list = TREE_CHAIN (placeholder_expr);
6538 temp = expand_expr (exp, original_target, tmode, modifier);
6539 placeholder_list = old_list;
6543 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6546 case WITH_RECORD_EXPR:
6547 /* Put the object on the placeholder list, expand our first operand,
6548 and pop the list. */
6549 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6551 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6553 placeholder_list = TREE_CHAIN (placeholder_list);
6557 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6558 expand_goto (TREE_OPERAND (exp, 0));
6560 expand_computed_goto (TREE_OPERAND (exp, 0));
6564 expand_exit_loop_if_false (NULL,
6565 invert_truthvalue (TREE_OPERAND (exp, 0)));
6568 case LABELED_BLOCK_EXPR:
6569 if (LABELED_BLOCK_BODY (exp))
6570 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6571 /* Should perhaps use expand_label, but this is simpler and safer. */
6572 do_pending_stack_adjust ();
6573 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6576 case EXIT_BLOCK_EXPR:
6577 if (EXIT_BLOCK_RETURN (exp))
6578 sorry ("returned value in block_exit_expr");
6579 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6584 expand_start_loop (1);
6585 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6593 tree vars = TREE_OPERAND (exp, 0);
6594 int vars_need_expansion = 0;
6596 /* Need to open a binding contour here because
6597 if there are any cleanups they must be contained here. */
6598 expand_start_bindings (2);
6600 /* Mark the corresponding BLOCK for output in its proper place. */
6601 if (TREE_OPERAND (exp, 2) != 0
6602 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6603 insert_block (TREE_OPERAND (exp, 2));
6605 /* If VARS have not yet been expanded, expand them now. */
6608 if (!DECL_RTL_SET_P (vars))
6610 vars_need_expansion = 1;
6613 expand_decl_init (vars);
6614 vars = TREE_CHAIN (vars);
6617 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6619 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6625 if (RTL_EXPR_SEQUENCE (exp))
6627 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6629 emit_insns (RTL_EXPR_SEQUENCE (exp));
6630 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6632 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6633 free_temps_for_rtl_expr (exp);
6634 return RTL_EXPR_RTL (exp);
6637 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6643 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6644 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6649 /* All elts simple constants => refer to a constant in memory. But
6650 if this is a non-BLKmode mode, let it store a field at a time
6651 since that should make a CONST_INT or CONST_DOUBLE when we
6652 fold. Likewise, if we have a target we can use, it is best to
6653 store directly into the target unless the type is large enough
6654 that memcpy will be used. If we are making an initializer and
6655 all operands are constant, put it in memory as well. */
6656 else if ((TREE_STATIC (exp)
6657 && ((mode == BLKmode
6658 && ! (target != 0 && safe_from_p (target, exp, 1)))
6659 || TREE_ADDRESSABLE (exp)
6660 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6661 && (! MOVE_BY_PIECES_P
6662 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6664 && ! mostly_zeros_p (exp))))
6665 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6667 rtx constructor = output_constant_def (exp, 1);
6669 if (modifier != EXPAND_CONST_ADDRESS
6670 && modifier != EXPAND_INITIALIZER
6671 && modifier != EXPAND_SUM)
6672 constructor = validize_mem (constructor);
6678 /* Handle calls that pass values in multiple non-contiguous
6679 locations. The Irix 6 ABI has examples of this. */
6680 if (target == 0 || ! safe_from_p (target, exp, 1)
6681 || GET_CODE (target) == PARALLEL)
6683 = assign_temp (build_qualified_type (type,
6685 | (TREE_READONLY (exp)
6686 * TYPE_QUAL_CONST))),
6687 0, TREE_ADDRESSABLE (exp), 1);
6689 store_constructor (exp, target, 0, int_expr_size (exp));
6695 tree exp1 = TREE_OPERAND (exp, 0);
6697 tree string = string_constant (exp1, &index);
6699 /* Try to optimize reads from const strings. */
6701 && TREE_CODE (string) == STRING_CST
6702 && TREE_CODE (index) == INTEGER_CST
6703 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6704 && GET_MODE_CLASS (mode) == MODE_INT
6705 && GET_MODE_SIZE (mode) == 1
6706 && modifier != EXPAND_WRITE)
6708 GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (string)
6709 [TREE_INT_CST_LOW (index)], mode));
6711 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6712 op0 = memory_address (mode, op0);
6713 temp = gen_rtx_MEM (mode, op0);
6714 set_mem_attributes (temp, exp, 0);
6716 /* If we are writing to this object and its type is a record with
6717 readonly fields, we must mark it as readonly so it will
6718 conflict with readonly references to those fields. */
6719 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6720 RTX_UNCHANGING_P (temp) = 1;
6726 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6730 tree array = TREE_OPERAND (exp, 0);
6731 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6732 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6733 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6736 /* Optimize the special-case of a zero lower bound.
6738 We convert the low_bound to sizetype to avoid some problems
6739 with constant folding. (E.g. suppose the lower bound is 1,
6740 and its mode is QI. Without the conversion, (ARRAY
6741 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6742 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6744 if (! integer_zerop (low_bound))
6745 index = size_diffop (index, convert (sizetype, low_bound));
6747 /* Fold an expression like: "foo"[2].
6748 This is not done in fold so it won't happen inside &.
6749 Don't fold if this is for wide characters since it's too
6750 difficult to do correctly and this is a very rare case. */
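/* For example, "foo"[2] reaches this point with INDEX 2, which is
   within TREE_STRING_LENGTH, so we can return GEN_INT ('o') directly
   instead of emitting a load from the string in memory.  */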
6752 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6753 && TREE_CODE (array) == STRING_CST
6754 && TREE_CODE (index) == INTEGER_CST
6755 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6756 && GET_MODE_CLASS (mode) == MODE_INT
6757 && GET_MODE_SIZE (mode) == 1)
6759 GEN_INT (trunc_int_for_mode (TREE_STRING_POINTER (array)
6760 [TREE_INT_CST_LOW (index)], mode));
6762 /* If this is a constant index into a constant array,
6763 just get the value from the array. Handle both the cases when
6764 we have an explicit constructor and when our operand is a variable
6765 that was declared const. */
6767 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6768 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6769 && TREE_CODE (index) == INTEGER_CST
6770 && 0 > compare_tree_int (index,
6771 list_length (CONSTRUCTOR_ELTS
6772 (TREE_OPERAND (exp, 0)))))
6776 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6777 i = TREE_INT_CST_LOW (index);
6778 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6782 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6786 else if (optimize >= 1
6787 && modifier != EXPAND_CONST_ADDRESS
6788 && modifier != EXPAND_INITIALIZER
6789 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6790 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6791 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6793 if (TREE_CODE (index) == INTEGER_CST)
6795 tree init = DECL_INITIAL (array);
6797 if (TREE_CODE (init) == CONSTRUCTOR)
6801 for (elem = CONSTRUCTOR_ELTS (init);
6803 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6804 elem = TREE_CHAIN (elem))
6807 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6808 return expand_expr (fold (TREE_VALUE (elem)), target,
6811 else if (TREE_CODE (init) == STRING_CST
6812 && 0 > compare_tree_int (index,
6813 TREE_STRING_LENGTH (init)))
6815 tree type = TREE_TYPE (TREE_TYPE (init));
6816 enum machine_mode mode = TYPE_MODE (type);
6818 if (GET_MODE_CLASS (mode) == MODE_INT
6819 && GET_MODE_SIZE (mode) == 1)
6820 return GEN_INT (trunc_int_for_mode
6821 (TREE_STRING_POINTER (init)
6822 [TREE_INT_CST_LOW (index)], mode));
6831 case ARRAY_RANGE_REF:
6832 /* If the operand is a CONSTRUCTOR, we can just extract the
6833 appropriate field if it is present. Don't do this if we have
6834 already written the data since we want to refer to that copy
6835 and varasm.c assumes that's what we'll do. */
6836 if (code == COMPONENT_REF
6837 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6838 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6842 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6843 elt = TREE_CHAIN (elt))
6844 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6845 /* We can normally use the value of the field in the
6846 CONSTRUCTOR. However, if this is a bitfield in
6847 an integral mode that we can fit in a HOST_WIDE_INT,
6848 we must mask only the number of bits in the bitfield,
6849 since this is done implicitly by the constructor. If
6850 the bitfield does not meet either of those conditions,
6851 we can't do this optimization. */
6852 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6853 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6855 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6856 <= HOST_BITS_PER_WIDE_INT))))
6858 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6859 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6861 HOST_WIDE_INT bitsize
6862 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6863 enum machine_mode imode
6864 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6866 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6868 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6869 op0 = expand_and (imode, op0, op1, target);
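/* For example, an unsigned 3-bit field gives OP1 = (1 << 3) - 1 = 7,
   and the AND above keeps only the low three bits, mimicking the
   masking a store through the constructor would have done.  */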
6874 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6877 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6879 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6889 enum machine_mode mode1;
6890 HOST_WIDE_INT bitsize, bitpos;
6893 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6894 &mode1, &unsignedp, &volatilep);
6897 /* If we got back the original object, something is wrong. Perhaps
6898 we are evaluating an expression too early. In any event, don't
6899 infinitely recurse. */
6903 /* If TEM's type is a union of variable size, pass TARGET to the inner
6904 computation, since it will need a temporary and TARGET is known
6905 to suffice for that. This occurs in unchecked conversion in Ada. */
6909 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6910 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6912 ? target : NULL_RTX),
6914 (modifier == EXPAND_INITIALIZER
6915 || modifier == EXPAND_CONST_ADDRESS)
6916 ? modifier : EXPAND_NORMAL);
6918 /* If this is a constant, put it into a register if it is a
6919 legitimate constant and OFFSET is 0; put it into memory if it isn't. */
6920 if (CONSTANT_P (op0))
6922 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6923 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6925 op0 = force_reg (mode, op0);
6927 op0 = validize_mem (force_const_mem (mode, op0));
6932 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
6934 /* If this object is in a register, put it into memory.
6935 This case can't occur in C, but can in Ada if we have
6936 unchecked conversion of an expression from a scalar type to
6937 an array or record type. */
6938 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6939 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6941 /* If the operand is a SAVE_EXPR, we can deal with this by
6942 forcing the SAVE_EXPR into memory. */
6943 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6945 put_var_into_stack (TREE_OPERAND (exp, 0));
6946 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6951 = build_qualified_type (TREE_TYPE (tem),
6952 (TYPE_QUALS (TREE_TYPE (tem))
6953 | TYPE_QUAL_CONST));
6954 rtx memloc = assign_temp (nt, 1, 1, 1);
6956 emit_move_insn (memloc, op0);
6961 if (GET_CODE (op0) != MEM)
6964 #ifdef POINTERS_EXTEND_UNSIGNED
6965 if (GET_MODE (offset_rtx) != Pmode)
6966 offset_rtx = convert_memory_address (Pmode, offset_rtx);
6968 if (GET_MODE (offset_rtx) != ptr_mode)
6969 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6972 /* A constant address in OP0 can have VOIDmode; we must not try
6973 to call force_reg in that case, so avoid it. */
6974 if (GET_CODE (op0) == MEM
6975 && GET_MODE (op0) == BLKmode
6976 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6978 && (bitpos % bitsize) == 0
6979 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6980 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6982 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6986 op0 = offset_address (op0, offset_rtx,
6987 highest_pow2_factor (offset));
6990 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6991 record its alignment as BIGGEST_ALIGNMENT. */
6992 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
6993 && is_aligning_offset (offset, tem))
6994 set_mem_align (op0, BIGGEST_ALIGNMENT);
6996 /* Don't forget about volatility even if this is a bitfield. */
6997 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6999 if (op0 == orig_op0)
7000 op0 = copy_rtx (op0);
7002 MEM_VOLATILE_P (op0) = 1;
7005 /* The following code doesn't handle CONCAT.
7006 Assume only bitpos == 0 can be used for CONCAT, due to
7007 one-element arrays having the same mode as their element. */
7008 if (GET_CODE (op0) == CONCAT)
7010 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7015 /* In cases where an aligned union has an unaligned object
7016 as a field, we might be extracting a BLKmode value from
7017 an integer-mode (e.g., SImode) object. Handle this case
7018 by doing the extract into an object as wide as the field
7019 (which we know to be the width of a basic mode), then
7020 storing into memory, and changing the mode to BLKmode. */
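/* A concrete case: a 3-byte BLKmode field packed inside an SImode
   union member is fetched below as an SImode bit-field, spilled to a
   stack temporary, and then re-accessed with its mode changed to
   BLKmode.  */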
7021 if (mode1 == VOIDmode
7022 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7023 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7024 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7025 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7026 && modifier != EXPAND_CONST_ADDRESS
7027 && modifier != EXPAND_INITIALIZER)
7028 /* If the field isn't aligned enough to fetch as a memref,
7029 fetch it as a bit field. */
7030 || (mode1 != BLKmode
7031 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7032 && ((TYPE_ALIGN (TREE_TYPE (tem))
7033 < GET_MODE_ALIGNMENT (mode))
7034 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7035 /* If the type and the field are a constant size and the
7036 size of the type isn't the same size as the bitfield,
7037 we must use bitfield operations. */
7039 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7041 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7044 enum machine_mode ext_mode = mode;
7046 if (ext_mode == BLKmode
7047 && ! (target != 0 && GET_CODE (op0) == MEM
7048 && GET_CODE (target) == MEM
7049 && bitpos % BITS_PER_UNIT == 0))
7050 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7052 if (ext_mode == BLKmode)
7054 /* In this case, BITPOS must start at a byte boundary and
7055 TARGET, if specified, must be a MEM. */
7056 if (GET_CODE (op0) != MEM
7057 || (target != 0 && GET_CODE (target) != MEM)
7058 || bitpos % BITS_PER_UNIT != 0)
7061 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7063 target = assign_temp (type, 0, 1, 1);
7065 emit_block_move (target, op0,
7066 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7072 op0 = validize_mem (op0);
7074 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7075 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7077 op0 = extract_bit_field (op0, bitsize, bitpos,
7078 unsignedp, target, ext_mode, ext_mode,
7079 int_size_in_bytes (TREE_TYPE (tem)));
7081 /* If the result is a record type and BITSIZE is narrower than
7082 the mode of OP0, an integral mode, and this is a big endian
7083 machine, we must put the field into the high-order bits. */
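/* For example, an 8-bit field extracted into a 32-bit OP0 on a
   big-endian machine is shifted left by 32 - 8 = 24 bits below, so
   the field ends up in the high-order byte as the record layout
   requires.  */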
7084 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7085 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7086 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7087 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7088 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7092 if (mode == BLKmode)
7094 rtx new = assign_temp (build_qualified_type
7095 (type_for_mode (ext_mode, 0),
7096 TYPE_QUAL_CONST), 0, 1, 1);
7098 emit_move_insn (new, op0);
7099 op0 = copy_rtx (new);
7100 PUT_MODE (op0, BLKmode);
7101 set_mem_attributes (op0, exp, 1);
7107 /* If the result is BLKmode, use that to access the object now as well. */
7109 if (mode == BLKmode)
7112 /* Get a reference to just this component. */
7113 if (modifier == EXPAND_CONST_ADDRESS
7114 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7115 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7117 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7119 if (op0 == orig_op0)
7120 op0 = copy_rtx (op0);
7122 set_mem_attributes (op0, exp, 0);
7123 if (GET_CODE (XEXP (op0, 0)) == REG)
7124 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7126 MEM_VOLATILE_P (op0) |= volatilep;
7127 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7128 || modifier == EXPAND_CONST_ADDRESS
7129 || modifier == EXPAND_INITIALIZER)
7131 else if (target == 0)
7132 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7134 convert_move (target, op0, unsignedp);
7140 rtx insn, before = get_last_insn (), vtbl_ref;
7142 /* Evaluate the interior expression. */
7143 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7146 /* Get or create an instruction off which to hang a note. */
7147 if (REG_P (subtarget))
7150 insn = get_last_insn ();
7153 if (! INSN_P (insn))
7154 insn = prev_nonnote_insn (insn);
7158 target = gen_reg_rtx (GET_MODE (subtarget));
7159 insn = emit_move_insn (target, subtarget);
7162 /* Collect the data for the note. */
7163 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7164 vtbl_ref = plus_constant (vtbl_ref,
7165 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7166 /* Discard the initial CONST that was added. */
7167 vtbl_ref = XEXP (vtbl_ref, 0);
7170 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7175 /* Intended for a reference to a buffer of a file-object in Pascal.
7176 But it's not certain that a special tree code will really be
7177 necessary for these. INDIRECT_REF might work for them. */
7183 /* Pascal set IN expression.
7186 rlo = set_low - (set_low%bits_per_word);
7187 the_word = set [ (index - rlo)/bits_per_word ];
7188 bit_index = index % bits_per_word;
7189 bitmask = 1 << bit_index;
7190 return !!(the_word & bitmask); */
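/* A worked instance of the algorithm above, assuming 8-bit units and
   set_low = 0: for index 10, the_word = set[10/8] = set[1],
   bit_index = 10 % 8 = 2, bitmask = 1 << 2 = 4, so we test bit 2 of
   the second byte of the set.  */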
7192 tree set = TREE_OPERAND (exp, 0);
7193 tree index = TREE_OPERAND (exp, 1);
7194 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7195 tree set_type = TREE_TYPE (set);
7196 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7197 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7198 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7199 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7200 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7201 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7202 rtx setaddr = XEXP (setval, 0);
7203 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7205 rtx diff, quo, rem, addr, bit, result;
7207 /* If domain is empty, answer is no. Likewise if index is constant
7208 and out of bounds. */
7209 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7210 && TREE_CODE (set_low_bound) == INTEGER_CST
7211 && tree_int_cst_lt (set_high_bound, set_low_bound))
7212 || (TREE_CODE (index) == INTEGER_CST
7213 && TREE_CODE (set_low_bound) == INTEGER_CST
7214 && tree_int_cst_lt (index, set_low_bound))
7215 || (TREE_CODE (set_high_bound) == INTEGER_CST
7216 && TREE_CODE (index) == INTEGER_CST
7217 && tree_int_cst_lt (set_high_bound, index))))
7221 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7223 /* If we get here, we have to generate the code for both cases
7224 (in range and out of range). */
7226 op0 = gen_label_rtx ();
7227 op1 = gen_label_rtx ();
7229 if (! (GET_CODE (index_val) == CONST_INT
7230 && GET_CODE (lo_r) == CONST_INT))
7231 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7232 GET_MODE (index_val), iunsignedp, op1);
7234 if (! (GET_CODE (index_val) == CONST_INT
7235 && GET_CODE (hi_r) == CONST_INT))
7236 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7237 GET_MODE (index_val), iunsignedp, op1);
7239 /* Calculate the element number of bit zero in the first word of the set. */
7241 if (GET_CODE (lo_r) == CONST_INT)
7242 rlow = GEN_INT (INTVAL (lo_r)
7243 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7245 rlow = expand_binop (index_mode, and_optab, lo_r,
7246 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7247 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7249 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7250 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7252 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7253 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7254 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7255 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7257 addr = memory_address (byte_mode,
7258 expand_binop (index_mode, add_optab, diff,
7259 setaddr, NULL_RTX, iunsignedp,
7262 /* Extract the bit we want to examine. */
7263 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7264 gen_rtx_MEM (byte_mode, addr),
7265 make_tree (TREE_TYPE (index), rem),
7267 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7268 GET_MODE (target) == byte_mode ? target : 0,
7269 1, OPTAB_LIB_WIDEN);
7271 if (result != target)
7272 convert_move (target, result, 1);
7274 /* Output the code to handle the out-of-range case. */
7277 emit_move_insn (target, const0_rtx);
7282 case WITH_CLEANUP_EXPR:
7283 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7285 WITH_CLEANUP_EXPR_RTL (exp)
7286 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7287 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7289 /* That's it for this cleanup. */
7290 TREE_OPERAND (exp, 1) = 0;
7292 return WITH_CLEANUP_EXPR_RTL (exp);
7294 case CLEANUP_POINT_EXPR:
7296 /* Start a new binding layer that will keep track of all cleanup
7297 actions to be performed. */
7298 expand_start_bindings (2);
7300 target_temp_slot_level = temp_slot_level;
7302 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7303 /* If we're going to use this value, load it up now. */
7305 op0 = force_not_mem (op0);
7306 preserve_temp_slots (op0);
7307 expand_end_bindings (NULL_TREE, 0, 0);
7312 /* Check for a built-in function. */
7313 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7314 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7316 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7318 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7319 == BUILT_IN_FRONTEND)
7320 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7322 return expand_builtin (exp, target, subtarget, tmode, ignore);
7325 return expand_call (exp, target, ignore);
7327 case NON_LVALUE_EXPR:
7330 case REFERENCE_EXPR:
7331 if (TREE_OPERAND (exp, 0) == error_mark_node)
7334 if (TREE_CODE (type) == UNION_TYPE)
7336 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7338 /* If both input and output are BLKmode, this conversion isn't doing
7339 anything except possibly changing memory attributes. */
7340 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7342 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7345 result = copy_rtx (result);
7346 set_mem_attributes (result, exp, 0);
7351 target = assign_temp (type, 0, 1, 1);
7353 if (GET_CODE (target) == MEM)
7354 /* Store data into beginning of memory target. */
7355 store_expr (TREE_OPERAND (exp, 0),
7356 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7358 else if (GET_CODE (target) == REG)
7359 /* Store this field into a union of the proper type. */
7360 store_field (target,
7361 MIN ((int_size_in_bytes (TREE_TYPE
7362 (TREE_OPERAND (exp, 0)))
7364 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7365 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7366 VOIDmode, 0, type, 0);
7370 /* Return the entire union. */
7374 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7376 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7379 /* If the signedness of the conversion differs and OP0 is
7380 a promoted SUBREG, clear that indication since we now
7381 have to do the proper extension. */
7382 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7383 && GET_CODE (op0) == SUBREG)
7384 SUBREG_PROMOTED_VAR_P (op0) = 0;
7389 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7390 if (GET_MODE (op0) == mode)
7393 /* If OP0 is a constant, just convert it into the proper mode. */
7394 if (CONSTANT_P (op0))
7396 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7397 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7399 if (modifier == EXPAND_INITIALIZER)
7400 return simplify_gen_subreg (mode, op0, inner_mode,
7401 subreg_lowpart_offset (mode,
7404 return convert_modes (mode, inner_mode, op0,
7405 TREE_UNSIGNED (inner_type));
7408 if (modifier == EXPAND_INITIALIZER)
7409 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7413 convert_to_mode (mode, op0,
7414 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7416 convert_move (target, op0,
7417 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7420 case VIEW_CONVERT_EXPR:
7421 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7423 /* If the input and output modes are both the same, we are done.
7424 Otherwise, if neither mode is BLKmode and both are within a word, we
7425 can use gen_lowpart. If neither is true, make sure the operand is
7426 in memory and convert the MEM to the new mode. */
7427 if (TYPE_MODE (type) == GET_MODE (op0))
7429 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7430 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7431 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7432 op0 = gen_lowpart (TYPE_MODE (type), op0);
7433 else if (GET_CODE (op0) != MEM)
7435 /* If the operand is not a MEM, force it into memory. Since we
7436 are going to be changing the mode of the MEM, don't call
7437 force_const_mem for constants because we don't allow pool
7438 constants to change mode. */
7439 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7441 if (TREE_ADDRESSABLE (exp))
7444 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7446 = assign_stack_temp_for_type
7447 (TYPE_MODE (inner_type),
7448 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7450 emit_move_insn (target, op0);
7454 /* At this point, OP0 is in the correct mode. If the output type is such
7455 that the operand is known to be aligned, indicate that it is.
7456 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7458 if (GET_CODE (op0) == MEM)
7460 op0 = copy_rtx (op0);
7462 if (TYPE_ALIGN_OK (type))
7463 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7464 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7465 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7467 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7468 HOST_WIDE_INT temp_size
7469 = MAX (int_size_in_bytes (inner_type),
7470 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7471 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7472 temp_size, 0, type);
7473 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7475 if (TREE_ADDRESSABLE (exp))
7478 if (GET_MODE (op0) == BLKmode)
7479 emit_block_move (new_with_op0_mode, op0,
7480 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7482 emit_move_insn (new_with_op0_mode, op0);
7487 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7493 /* We come here from MINUS_EXPR when the second operand is a constant. */
7496 this_optab = ! unsignedp && flag_trapv
7497 && (GET_MODE_CLASS (mode) == MODE_INT)
7498 ? addv_optab : add_optab;
7500 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7501 something else, make sure we add the register to the constant and
7502 then to the other thing. This case can occur during strength
7503 reduction and doing it this way will produce better code if the
7504 frame pointer or argument pointer is eliminated.
7506 fold-const.c will ensure that the constant is always in the inner
7507 PLUS_EXPR, so the only case we need to do anything about is if
7508 sp, ap, or fp is our second argument, in which case we must swap
7509 the innermost first argument and our second argument. */
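/* For example, (X + 4) + FP is rewritten below as (FP + 4) + X, so
   the constant stays attached to the frame pointer and can be folded
   away when that register is eliminated.  */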
7511 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7512 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7513 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7514 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7515 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7516 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7518 tree t = TREE_OPERAND (exp, 1);
7520 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7521 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7524 /* If the result is to be ptr_mode and we are adding an integer to
7525 something, we might be forming a constant. So try to use
7526 plus_constant. If it produces a sum and we can't accept it,
7527 use force_operand. This allows P = &ARR[const] to generate
7528 efficient code on machines where a SYMBOL_REF is not a valid address.
7531 If this is an EXPAND_SUM call, always return the sum. */
7532 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7533 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7535 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7536 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7537 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7541 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7543 /* Use immed_double_const to ensure that the constant is
7544 truncated according to the mode of OP1, then sign extended
7545 to a HOST_WIDE_INT. Using the constant directly can result
7546 in non-canonical RTL in a 64x32 cross compile. */
7548 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7550 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7551 op1 = plus_constant (op1, INTVAL (constant_part));
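/* For example, in a 64-bit-host, 32-bit-target compile, an SImode
   constant with all bits set must become the canonical (const_int -1);
   the immed_double_const above performs that sign-extension, whereas
   using the raw low part 0xffffffff would not.  */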
7552 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7553 op1 = force_operand (op1, target);
7557 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7558 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7559 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7563 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7564 (modifier == EXPAND_INITIALIZER
7565 ? EXPAND_INITIALIZER : EXPAND_SUM));
7566 if (! CONSTANT_P (op0))
7568 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7569 VOIDmode, modifier);
7570 /* Don't go to both_summands if modifier
7571 says it's not right to return a PLUS. */
7572 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7576 /* Use immed_double_const to ensure that the constant is
7577 truncated according to the mode of OP1, then sign extended
7578 to a HOST_WIDE_INT. Using the constant directly can result
7579 in non-canonical RTL in a 64x32 cross compile. */
7581 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7583 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7584 op0 = plus_constant (op0, INTVAL (constant_part));
7585 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7586 op0 = force_operand (op0, target);
7591 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7594 /* No sense saving up arithmetic to be done
7595 if it's all in the wrong mode to form part of an address.
7596 And force_operand won't know whether to sign-extend or zero-extend. */
7598 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7599 || mode != ptr_mode)
7601 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7602 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7603 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7609 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7610 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7613 /* Make sure any term that's a sum with a constant comes last. */
7614 if (GET_CODE (op0) == PLUS
7615 && CONSTANT_P (XEXP (op0, 1)))
7621 /* If adding to a sum including a constant,
7622 associate it to put the constant outside. */
7623 if (GET_CODE (op1) == PLUS
7624 && CONSTANT_P (XEXP (op1, 1)))
7626 rtx constant_term = const0_rtx;
7628 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7631 /* Ensure that MULT comes first if there is one. */
7632 else if (GET_CODE (op0) == MULT)
7633 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7635 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7637 /* Let's also eliminate constants from op0 if possible. */
7638 op0 = eliminate_constant_term (op0, &constant_term);
7640 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7641 their sum should be a constant. Form it into OP1, since the
7642 result we want will then be OP0 + OP1. */
7644 temp = simplify_binary_operation (PLUS, mode, constant_term,
7649 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7652 /* Put a constant term last and put a multiplication first. */
7653 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7654 temp = op1, op1 = op0, op0 = temp;
7656 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7657 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7660 /* For initializers, we are allowed to return a MINUS of two
7661 symbolic constants. Here we handle all cases when both operands are constant. */
7663 /* Handle difference of two symbolic constants,
7664 for the sake of an initializer. */
7665 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7666 && really_constant_p (TREE_OPERAND (exp, 0))
7667 && really_constant_p (TREE_OPERAND (exp, 1)))
7669 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
7671 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
7674 /* If the last operand is a CONST_INT, use plus_constant of
7675 the negated constant. Else make the MINUS. */
7676 if (GET_CODE (op1) == CONST_INT)
7677 return plus_constant (op0, - INTVAL (op1));
7679 return gen_rtx_MINUS (mode, op0, op1);
7681 /* Convert A - const to A + (-const). */
7682 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7684 tree negated = fold (build1 (NEGATE_EXPR, type,
7685 TREE_OPERAND (exp, 1)));
7687 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7688 /* If we can't negate the constant in TYPE, leave it alone and
7689 expand_binop will negate it for us. We used to try to do it
7690 here in the signed version of TYPE, but that doesn't work
7691 on POINTER_TYPEs. */;
7694 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7698 this_optab = ! unsignedp && flag_trapv
7699 && (GET_MODE_CLASS(mode) == MODE_INT)
7700 ? subv_optab : sub_optab;
7704 /* If first operand is constant, swap them.
7705 Thus the following special case checks need only
7706 check the second operand. */
7707 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7709 tree t1 = TREE_OPERAND (exp, 0);
7710 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7711 TREE_OPERAND (exp, 1) = t1;
7714 /* Attempt to return something suitable for generating an
7715 indexed address, for machines that support that. */
7717 if (modifier == EXPAND_SUM && mode == ptr_mode
7718 && host_integerp (TREE_OPERAND (exp, 1), 0))
7720 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7723 /* If we knew for certain that this is arithmetic for an array
7724 reference, and we knew the bounds of the array, then we could
7725 apply the distributive law across (PLUS X C) for constant C.
7726 Without such knowledge, we risk overflowing the computation
7727 when both X and C are large, but X+C isn't. */
7728 /* ??? Could perhaps special-case EXP being unsigned and C being
7729 positive. In that case we are certain that X+C is no smaller
7730 than X and so the transformed expression will overflow iff the
7731 original would have. */
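/* For example, with 32-bit ints, X = 2^30 and C = -2^30 + 1 give
   X + C = 1, so (X + C) * 4 is just 4, but the distributed form
   would compute X * 4, which overflows.  */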
7733 if (GET_CODE (op0) != REG)
7734 op0 = force_operand (op0, NULL_RTX);
7735 if (GET_CODE (op0) != REG)
7736 op0 = copy_to_mode_reg (mode, op0);
7739 gen_rtx_MULT (mode, op0,
7740 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
7743 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7746 /* Check for multiplying things that have been extended
7747 from a narrower type. If this machine supports multiplying
7748 in that narrower type with a result in the desired type,
7749 do it that way, and avoid the explicit type-conversion. */
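/* For example, (int) h1 * (int) h2 with HImode H1 and H2 widened to
   SImode can use a widening-multiply pattern (via smul_widen_optab
   or umul_widen_optab below) that multiplies the narrow operands and
   produces the SImode product directly.  */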
7750 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7751 && TREE_CODE (type) == INTEGER_TYPE
7752 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7753 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7754 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7755 && int_fits_type_p (TREE_OPERAND (exp, 1),
7756 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7757 /* Don't use a widening multiply if a shift will do. */
7758 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7759 > HOST_BITS_PER_WIDE_INT)
7760 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7762 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7763 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7765 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7766 /* If both operands are extended, they must either both
7767 be zero-extended or both be sign-extended. */
7768 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7770 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7772 enum machine_mode innermode
7773 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7774 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7775 ? smul_widen_optab : umul_widen_optab);
7776 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7777 ? umul_widen_optab : smul_widen_optab);
7778 if (mode == GET_MODE_WIDER_MODE (innermode))
7780 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7782 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7783 NULL_RTX, VOIDmode, 0);
7784 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7785 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7788 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7789 NULL_RTX, VOIDmode, 0);
7792 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7793 && innermode == word_mode)
7796 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7797 NULL_RTX, VOIDmode, 0);
7798 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7799 op1 = convert_modes (innermode, mode,
7800 expand_expr (TREE_OPERAND (exp, 1),
7801 NULL_RTX, VOIDmode, 0),
7804 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7805 NULL_RTX, VOIDmode, 0);
7806 temp = expand_binop (mode, other_optab, op0, op1, target,
7807 unsignedp, OPTAB_LIB_WIDEN);
7808 htem = expand_mult_highpart_adjust (innermode,
7809 gen_highpart (innermode, temp),
7811 gen_highpart (innermode, temp),
7813 emit_move_insn (gen_highpart (innermode, temp), htem);
7818 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7819 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7820 return expand_mult (mode, op0, op1, target, unsignedp);
7822 case TRUNC_DIV_EXPR:
7823 case FLOOR_DIV_EXPR:
7825 case ROUND_DIV_EXPR:
7826 case EXACT_DIV_EXPR:
7827 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7829 /* Possible optimization: compute the dividend with EXPAND_SUM;
7830 then, if the divisor is constant, we can optimize the case
7831 where some terms of the dividend have coefficients divisible by it. */
7832 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7833 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7834 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7837 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7838 saving an expensive divide. If not, combine will rebuild the original computation. */
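/* For example, with -funsafe-math-optimizations, x/y and z/y both
   become multiplications by the same 1.0/y, so one reciprocal can be
   computed once and reused; if that never pays off, combine folds
   x * (1.0/y) back into x/y.  */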
7840 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7841 && TREE_CODE (type) == REAL_TYPE
7842 && !real_onep (TREE_OPERAND (exp, 0)))
7843 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7844 build (RDIV_EXPR, type,
7845 build_real (type, dconst1),
7846 TREE_OPERAND (exp, 1))),
7847 target, tmode, unsignedp);
7848 this_optab = sdiv_optab;
7851 case TRUNC_MOD_EXPR:
7852 case FLOOR_MOD_EXPR:
7854 case ROUND_MOD_EXPR:
7855 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7857 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7858 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7859 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7861 case FIX_ROUND_EXPR:
7862 case FIX_FLOOR_EXPR:
7864 abort (); /* Not used for C. */
7866 case FIX_TRUNC_EXPR:
7867 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7869 target = gen_reg_rtx (mode);
7870 expand_fix (target, op0, unsignedp);
7874 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7876 target = gen_reg_rtx (mode);
7877 /* expand_float can't figure out what to do if FROM has VOIDmode.
7878 So give it the correct mode. With -O, cse will optimize this. */
7879 if (GET_MODE (op0) == VOIDmode)
7880 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7882 expand_float (target, op0,
7883 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7887 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7888 temp = expand_unop (mode,
7889 ! unsignedp && flag_trapv
7890 && (GET_MODE_CLASS(mode) == MODE_INT)
7891 ? negv_optab : neg_optab, op0, target, 0);
7897 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7899 /* Handle complex values specially. */
7900 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7901 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7902 return expand_complex_abs (mode, op0, target, unsignedp);
7904 /* Unsigned abs is simply the operand. Testing here means we don't
7905 risk generating incorrect code below. */
7906 if (TREE_UNSIGNED (type))
7909 return expand_abs (mode, op0, target, unsignedp,
7910 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7914 target = original_target;
7915 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7916 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7917 || GET_MODE (target) != mode
7918 || (GET_CODE (target) == REG
7919 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7920 target = gen_reg_rtx (mode);
7921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7922 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7924 /* First try to do it with a special MIN or MAX instruction.
7925 If that does not win, use a conditional jump to select the proper value. */
7927 this_optab = (TREE_UNSIGNED (type)
7928 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7929 : (code == MIN_EXPR ? smin_optab : smax_optab));
7931 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7936 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7939 if (GET_CODE (target) == MEM)
7940 target = gen_reg_rtx (mode);
7943 emit_move_insn (target, op0);
7945 op0 = gen_label_rtx ();
7947 /* If this mode is an integer too wide to compare properly,
7948 compare word by word. Rely on cse to optimize constant cases. */
7949 if (GET_MODE_CLASS (mode) == MODE_INT
7950 && ! can_compare_p (GE, mode, ccp_jump))
7952 if (code == MAX_EXPR)
7953 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7954 target, op1, NULL_RTX, op0);
7956 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7957 op1, target, NULL_RTX, op0);
7961 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7962 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7963 unsignedp, mode, NULL_RTX, NULL_RTX,
7966 emit_move_insn (target, op1);
7971 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7972 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7978 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7979 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7984 /* ??? Can optimize bitwise operations with one arg constant.
7985 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7986 and (a bitwise1 b) bitwise2 b (etc)
7987 but that is probably not worthwhile. */
7989 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7990 boolean values when we want in all cases to compute both of them. In
7991 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7992 as actual zero-or-1 values and then bitwise anding. In cases where
7993 there cannot be any side effects, better code would be made by
7994 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7995 how to recognize those cases. */
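/* For example, for (f () != 0) & (g () != 0), TRUTH_AND_EXPR
   evaluates both comparisons to 0-or-1 values and ANDs them, whereas
   TRUTH_ANDIF_EXPR would branch after f () and might skip g ()
   entirely.  */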
7997 case TRUTH_AND_EXPR:
7999 this_optab = and_optab;
8004 this_optab = ior_optab;
8007 case TRUTH_XOR_EXPR:
8009 this_optab = xor_optab;
8016 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8018 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8019 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8022 /* Could determine the answer when only additive constants differ. Also,
8023 the addition of one can be handled by changing the condition. */
8030 case UNORDERED_EXPR:
8037 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8041 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8042 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8044 && GET_CODE (original_target) == REG
8045 && (GET_MODE (original_target)
8046 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8048 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8051 /* If temp is constant, we can just compute the result. */
8052 if (GET_CODE (temp) == CONST_INT)
8054 if (INTVAL (temp) != 0)
8055 emit_move_insn (target, const1_rtx);
8057 emit_move_insn (target, const0_rtx);
8062 if (temp != original_target)
8064 enum machine_mode mode1 = GET_MODE (temp);
8065 if (mode1 == VOIDmode)
8066 mode1 = tmode != VOIDmode ? tmode : mode;
8068 temp = copy_to_mode_reg (mode1, temp);
8071 op1 = gen_label_rtx ();
8072 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8073 GET_MODE (temp), unsignedp, op1);
8074 emit_move_insn (temp, const1_rtx);
8079 /* If no set-flag instruction, must generate a conditional
8080 store into a temporary variable. Drop through
8081 and handle this like && and ||. */
8083 case TRUTH_ANDIF_EXPR:
8084 case TRUTH_ORIF_EXPR:
8086 && (target == 0 || ! safe_from_p (target, exp, 1)
8087 /* Make sure we don't have a hard reg (such as function's return
8088 value) live across basic blocks, if not optimizing. */
8089 || (!optimize && GET_CODE (target) == REG
8090 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8091 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8094 emit_clr_insn (target);
8096 op1 = gen_label_rtx ();
8097 jumpifnot (exp, op1);
8100 emit_0_to_1_insn (target);
8103 return ignore ? const0_rtx : target;
8105 case TRUTH_NOT_EXPR:
8106 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8107 /* The parser is careful to generate TRUTH_NOT_EXPR
8108 only with operands that are always zero or one. */
8109 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8110 target, 1, OPTAB_LIB_WIDEN);
8116 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8118 return expand_expr (TREE_OPERAND (exp, 1),
8119 (ignore ? const0_rtx : target),
8123 /* If we would have a "singleton" (see below) were it not for a
8124 conversion in each arm, bring that conversion back out. */
8125 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8126 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8127 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8128 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8130 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8131 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8133 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8134 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8135 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8136 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8137 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8138 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8139 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8140 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8141 return expand_expr (build1 (NOP_EXPR, type,
8142 build (COND_EXPR, TREE_TYPE (iftrue),
8143 TREE_OPERAND (exp, 0),
8145 target, tmode, modifier);
8149 /* Note that COND_EXPRs whose type is a structure or union
8150 are required to be constructed to contain assignments of
8151 a temporary variable, so that we can evaluate them here
8152 for side effect only. If type is void, we must do likewise. */
8154 /* If an arm of the branch requires a cleanup,
8155 only that cleanup is performed. */
8158 tree binary_op = 0, unary_op = 0;
8160 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8161 convert it to our mode, if necessary. */
8162 if (integer_onep (TREE_OPERAND (exp, 1))
8163 && integer_zerop (TREE_OPERAND (exp, 2))
8164 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8168 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8173 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8174 if (GET_MODE (op0) == mode)
8178 target = gen_reg_rtx (mode);
8179 convert_move (target, op0, unsignedp);
8183 /* Check for X ? A + B : A. If we have this, we can copy A to the
8184 output and conditionally add B. Similarly for unary operations.
8185 Don't do this if X has side-effects because those side effects
8186 might affect A or B and the "?" operation is a sequence point in
8187 ANSI. (operand_equal_p tests for side effects.) */
8189 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8190 && operand_equal_p (TREE_OPERAND (exp, 2),
8191 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8192 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8193 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8194 && operand_equal_p (TREE_OPERAND (exp, 1),
8195 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8196 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8197 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8198 && operand_equal_p (TREE_OPERAND (exp, 2),
8199 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8200 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8201 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8202 && operand_equal_p (TREE_OPERAND (exp, 1),
8203 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8204 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8206 /* If we are not to produce a result, we have no target. Otherwise,
8207 if a target was specified use it; it will not be used as an
8208 intermediate target unless it is safe. If no target, use a
8213 else if (original_target
8214 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8215 || (singleton && GET_CODE (original_target) == REG
8216 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8217 && original_target == var_rtx (singleton)))
8218 && GET_MODE (original_target) == mode
8219 #ifdef HAVE_conditional_move
8220 && (! can_conditionally_move_p (mode)
8221 || GET_CODE (original_target) == REG
8222 || TREE_ADDRESSABLE (type))
8224 && (GET_CODE (original_target) != MEM
8225 || TREE_ADDRESSABLE (type)))
8226 temp = original_target;
8227 else if (TREE_ADDRESSABLE (type))
8230 temp = assign_temp (type, 0, 0, 1);
8232 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8233 do the test of X as a store-flag operation, do this as
8234 A + ((X != 0) << log C). Similarly for other simple binary
8235 operators. Only do for C == 1 if BRANCH_COST is low. */
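/* A hypothetical example (not from the original source): for
   `x > y ? a + 4 : a' with BRANCH_COST >= 3, the code below
   computes `a + ((x > y) << 2)' using a store-flag insn and a
   shift, avoiding a branch entirely.  */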
8236 if (temp && singleton && binary_op
8237 && (TREE_CODE (binary_op) == PLUS_EXPR
8238 || TREE_CODE (binary_op) == MINUS_EXPR
8239 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8240 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8241 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8242 : integer_onep (TREE_OPERAND (binary_op, 1)))
8243 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8246 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8247 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8248 ? addv_optab : add_optab)
8249 : TREE_CODE (binary_op) == MINUS_EXPR
8250 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8251 ? subv_optab : sub_optab)
8252 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8255 /* If we had X ? A : A + 1, do this as A + (X == 0).
8257 We have to invert the truth value here and then put it
8258 back later if do_store_flag fails. We cannot simply copy
8259 TREE_OPERAND (exp, 0) to another variable and modify that
8260 because invert_truthvalue can modify the tree pointed to
8262 if (singleton == TREE_OPERAND (exp, 1))
8263 TREE_OPERAND (exp, 0)
8264 = invert_truthvalue (TREE_OPERAND (exp, 0));
8266 result = do_store_flag (TREE_OPERAND (exp, 0),
8267 (safe_from_p (temp, singleton, 1)
8269 mode, BRANCH_COST <= 1);
8271 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8272 result = expand_shift (LSHIFT_EXPR, mode, result,
8273 build_int_2 (tree_log2
8277 (safe_from_p (temp, singleton, 1)
8278 ? temp : NULL_RTX), 0);
8282 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8283 return expand_binop (mode, boptab, op1, result, temp,
8284 unsignedp, OPTAB_LIB_WIDEN);
8286 else if (singleton == TREE_OPERAND (exp, 1))
8287 TREE_OPERAND (exp, 0)
8288 = invert_truthvalue (TREE_OPERAND (exp, 0));
8291 do_pending_stack_adjust ();
8293 op0 = gen_label_rtx ();
8295 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8299 /* If the target conflicts with the other operand of the
8300 binary op, we can't use it. Also, we can't use the target
8301 if it is a hard register, because evaluating the condition
8302 might clobber it. */
8304 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8305 || (GET_CODE (temp) == REG
8306 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8307 temp = gen_reg_rtx (mode);
8308 store_expr (singleton, temp, 0);
8311 expand_expr (singleton,
8312 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8313 if (singleton == TREE_OPERAND (exp, 1))
8314 jumpif (TREE_OPERAND (exp, 0), op0);
8316 jumpifnot (TREE_OPERAND (exp, 0), op0);
8318 start_cleanup_deferral ();
8319 if (binary_op && temp == 0)
8320 /* Just touch the other operand. */
8321 expand_expr (TREE_OPERAND (binary_op, 1),
8322 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8324 store_expr (build (TREE_CODE (binary_op), type,
8325 make_tree (type, temp),
8326 TREE_OPERAND (binary_op, 1)),
8329 store_expr (build1 (TREE_CODE (unary_op), type,
8330 make_tree (type, temp)),
8334 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8335 comparison operator. If we have one of these cases, set the
8336 output to A, branch on A (cse will merge these two references),
8337 then set the output to FOO. */
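/* An illustrative sketch (not in the original source): for
   `x != 0 ? x : y' we emit roughly

       temp = x;
       if (x != 0) goto op0;
       temp = y;
     op0:

   so cse can merge the two references to X.  */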
8339 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8340 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8341 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8342 TREE_OPERAND (exp, 1), 0)
8343 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8344 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8345 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8347 if (GET_CODE (temp) == REG
8348 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8349 temp = gen_reg_rtx (mode);
8350 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8351 jumpif (TREE_OPERAND (exp, 0), op0);
8353 start_cleanup_deferral ();
8354 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8358 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8359 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8360 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8361 TREE_OPERAND (exp, 2), 0)
8362 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8363 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8364 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8366 if (GET_CODE (temp) == REG
8367 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8368 temp = gen_reg_rtx (mode);
8369 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8370 jumpifnot (TREE_OPERAND (exp, 0), op0);
8372 start_cleanup_deferral ();
8373 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8378 op1 = gen_label_rtx ();
8379 jumpifnot (TREE_OPERAND (exp, 0), op0);
8381 start_cleanup_deferral ();
8383 /* One branch of the cond can be void if it never returns. For
8384 example, A ? throw : E. */
8386 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8387 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8389 expand_expr (TREE_OPERAND (exp, 1),
8390 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8391 end_cleanup_deferral ();
8393 emit_jump_insn (gen_jump (op1));
8396 start_cleanup_deferral ();
8398 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8399 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8401 expand_expr (TREE_OPERAND (exp, 2),
8402 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8405 end_cleanup_deferral ();
8416 /* Something needs to be initialized, but we didn't know
8417 where that thing was when building the tree. For example,
8418 it could be the return value of a function, or a parameter
8419 to a function which is laid out on the stack, or a temporary
8420 variable which must be passed by reference.
8422 We guarantee that the expression will either be constructed
8423 or copied into our original target. */
8425 tree slot = TREE_OPERAND (exp, 0);
8426 tree cleanups = NULL_TREE;
8429 if (TREE_CODE (slot) != VAR_DECL)
8433 target = original_target;
8435 /* Set this here so that if we get a target that refers to a
8436 register variable that's already been used, put_reg_into_stack
8437 knows that it should fix up those uses. */
8438 TREE_USED (slot) = 1;
8442 if (DECL_RTL_SET_P (slot))
8444 target = DECL_RTL (slot);
8445 /* If we have already expanded the slot, don't do it again. */
8447 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8452 target = assign_temp (type, 2, 0, 1);
8453 /* All temp slots at this level must not conflict. */
8454 preserve_temp_slots (target);
8455 SET_DECL_RTL (slot, target);
8456 if (TREE_ADDRESSABLE (slot))
8457 put_var_into_stack (slot);
8459 /* Since SLOT is not known to the called function
8460 to belong to its stack frame, we must build an explicit
8461 cleanup. This case occurs when we must build up a reference
8462 to pass the reference as an argument. In this case,
8463 it is very likely that such a reference need not be built here. */
8466 if (TREE_OPERAND (exp, 2) == 0)
8467 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8468 cleanups = TREE_OPERAND (exp, 2);
8473 /* This case does occur when expanding a parameter which
8474 needs to be constructed on the stack. The target
8475 is the actual stack address that we want to initialize.
8476 The function we call will perform the cleanup in this case. */
8478 /* If we have already assigned it space, use that space,
8479 not the target that we were passed in, as our target
8480 parameter is only a hint. */
8481 if (DECL_RTL_SET_P (slot))
8483 target = DECL_RTL (slot);
8484 /* If we have already expanded the slot, don't do it again. */
8486 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8491 SET_DECL_RTL (slot, target);
8492 /* If we must have an addressable slot, then make sure that
8493 the RTL that we just stored in slot is OK. */
8494 if (TREE_ADDRESSABLE (slot))
8495 put_var_into_stack (slot);
8499 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8500 /* Mark it as expanded. */
8501 TREE_OPERAND (exp, 1) = NULL_TREE;
8503 store_expr (exp1, target, 0);
8505 expand_decl_cleanup (NULL_TREE, cleanups);
8512 tree lhs = TREE_OPERAND (exp, 0);
8513 tree rhs = TREE_OPERAND (exp, 1);
8515 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8521 /* If lhs is complex, expand calls in rhs before computing it.
8522 That's so we don't compute a pointer and save it over a
8523 call. If lhs is simple, compute it first so we can give it
8524 as a target if the rhs is just a call. This avoids an
8525 extra temp and copy and that prevents a partial-subsumption
8526 which makes bad code. Actually we could treat
8527 component_ref's of vars like vars. */
8529 tree lhs = TREE_OPERAND (exp, 0);
8530 tree rhs = TREE_OPERAND (exp, 1);
8534 /* Check for |= or &= of a bitfield of size one into another bitfield
8535 of size 1. In this case, (unless we need the result of the
8536 assignment) we can do this more efficiently with a
8537 test followed by an assignment, if necessary.
8539 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8540 things change so we do, this code should be enhanced to
8543 && TREE_CODE (lhs) == COMPONENT_REF
8544 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8545 || TREE_CODE (rhs) == BIT_AND_EXPR)
8546 && TREE_OPERAND (rhs, 0) == lhs
8547 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8548 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8549 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8551 rtx label = gen_label_rtx ();
8553 do_jump (TREE_OPERAND (rhs, 1),
8554 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8555 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8556 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8557 (TREE_CODE (rhs) == BIT_IOR_EXPR
8559 : integer_zero_node)),
8561 do_pending_stack_adjust ();
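/* An illustrative sketch (not in the original source): for one-bit
   fields, `s.a |= s.b' becomes roughly `if (s.b) s.a = 1;' and
   `s.a &= s.b' becomes `if (! s.b) s.a = 0;', avoiding a
   read-modify-write of the destination bit.  */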
8566 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8572 if (!TREE_OPERAND (exp, 0))
8573 expand_null_return ();
8575 expand_return (TREE_OPERAND (exp, 0));
8578 case PREINCREMENT_EXPR:
8579 case PREDECREMENT_EXPR:
8580 return expand_increment (exp, 0, ignore);
8582 case POSTINCREMENT_EXPR:
8583 case POSTDECREMENT_EXPR:
8584 /* Faster to treat as pre-increment if result is not used. */
8585 return expand_increment (exp, ! ignore, ignore);
8588 /* Are we taking the address of a nested function? */
8589 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8590 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8591 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8592 && ! TREE_STATIC (exp))
8594 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8595 op0 = force_operand (op0, target);
8597 /* If we are taking the address of something erroneous, just return a zero. */
8599 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8601 /* If we are taking the address of a constant and are at the
8602 top level, we have to use output_constant_def since we can't
8603 call force_const_mem at top level. */
8605 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8606 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8608 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8611 /* We make sure to pass const0_rtx down if we came in with
8612 ignore set, to avoid doing the cleanups twice for the same expression. */
8613 op0 = expand_expr (TREE_OPERAND (exp, 0),
8614 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8615 (modifier == EXPAND_INITIALIZER
8616 ? modifier : EXPAND_CONST_ADDRESS));
8618 /* If we are going to ignore the result, OP0 will have been set
8619 to const0_rtx, so just return it. Don't get confused and
8620 think we are taking the address of the constant. */
8624 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8625 clever and return a REG when given a MEM. */
8626 op0 = protect_from_queue (op0, 1);
8628 /* We would like the object in memory. If it is a constant, we can
8629 have it be statically allocated into memory. For a non-constant,
8630 we need to allocate some memory and store the value into it. */
8632 if (CONSTANT_P (op0))
8633 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8635 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8636 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8637 || GET_CODE (op0) == PARALLEL)
8639 /* If the operand is a SAVE_EXPR, we can deal with this by
8640 forcing the SAVE_EXPR into memory. */
8641 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8643 put_var_into_stack (TREE_OPERAND (exp, 0));
8644 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8648 /* If this object is in a register, it can't be BLKmode. */
8649 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8650 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8652 if (GET_CODE (op0) == PARALLEL)
8653 /* Handle calls that pass values in multiple
8654 non-contiguous locations. The Irix 6 ABI has examples of this. */
8656 emit_group_store (memloc, op0,
8657 int_size_in_bytes (inner_type));
8659 emit_move_insn (memloc, op0);
8665 if (GET_CODE (op0) != MEM)
8668 mark_temp_addr_taken (op0);
8669 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8671 op0 = XEXP (op0, 0);
8672 #ifdef POINTERS_EXTEND_UNSIGNED
8673 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8674 && mode == ptr_mode)
8675 op0 = convert_memory_address (ptr_mode, op0);
8680 /* If OP0 is not aligned at least as much as the type requires, we
8681 need to make a temporary, copy OP0 to it, and take the address of
8682 the temporary. We want to use the alignment of the type, not of
8683 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8684 the test for BLKmode means that can't happen. The test for
8685 BLKmode is because we never make mis-aligned MEMs with
8688 We don't need to do this at all if the machine doesn't have
8689 strict alignment. */
8690 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8691 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8693 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8695 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8697 = assign_stack_temp_for_type
8698 (TYPE_MODE (inner_type),
8699 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8700 : int_size_in_bytes (inner_type),
8701 1, build_qualified_type (inner_type,
8702 (TYPE_QUALS (inner_type)
8703 | TYPE_QUAL_CONST)));
8705 if (TYPE_ALIGN_OK (inner_type))
8708 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8712 op0 = force_operand (XEXP (op0, 0), target);
8716 && GET_CODE (op0) != REG
8717 && modifier != EXPAND_CONST_ADDRESS
8718 && modifier != EXPAND_INITIALIZER
8719 && modifier != EXPAND_SUM)
8720 op0 = force_reg (Pmode, op0);
8722 if (GET_CODE (op0) == REG
8723 && ! REG_USERVAR_P (op0))
8724 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8726 #ifdef POINTERS_EXTEND_UNSIGNED
8727 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8728 && mode == ptr_mode)
8729 op0 = convert_memory_address (ptr_mode, op0);
8734 case ENTRY_VALUE_EXPR:
8737 /* COMPLEX type for Extended Pascal & Fortran */
8740 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8743 /* Get the rtx code of the operands. */
8744 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8745 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8748 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8752 /* Move the real (op0) and imaginary (op1) parts to their location. */
8753 emit_move_insn (gen_realpart (mode, target), op0);
8754 emit_move_insn (gen_imagpart (mode, target), op1);
8756 insns = get_insns ();
8759 /* Complex construction should appear as a single unit. */
8760 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8761 each with a separate pseudo as destination.
8762 It's not correct for flow to treat them as a unit. */
8763 if (GET_CODE (target) != CONCAT)
8764 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8772 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8773 return gen_realpart (mode, op0);
8776 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8777 return gen_imagpart (mode, op0);
8781 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8785 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8788 target = gen_reg_rtx (mode);
8792 /* Store the realpart and the negated imagpart to target. */
8793 emit_move_insn (gen_realpart (partmode, target),
8794 gen_realpart (partmode, op0));
8796 imag_t = gen_imagpart (partmode, target);
8797 temp = expand_unop (partmode,
8798 ! unsignedp && flag_trapv
8799 && (GET_MODE_CLASS (partmode) == MODE_INT)
8800 ? negv_optab : neg_optab,
8801 gen_imagpart (partmode, op0), imag_t, 0);
8803 emit_move_insn (imag_t, temp);
8805 insns = get_insns ();
8808 /* Conjugate should appear as a single unit.
8809 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8810 each with a separate pseudo as destination.
8811 It's not correct for flow to treat them as a unit. */
8812 if (GET_CODE (target) != CONCAT)
8813 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8820 case TRY_CATCH_EXPR:
8822 tree handler = TREE_OPERAND (exp, 1);
8824 expand_eh_region_start ();
8826 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8828 expand_eh_region_end_cleanup (handler);
8833 case TRY_FINALLY_EXPR:
8835 tree try_block = TREE_OPERAND (exp, 0);
8836 tree finally_block = TREE_OPERAND (exp, 1);
8837 rtx finally_label = gen_label_rtx ();
8838 rtx done_label = gen_label_rtx ();
8839 rtx return_link = gen_reg_rtx (Pmode);
8840 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8841 (tree) finally_label, (tree) return_link);
8842 TREE_SIDE_EFFECTS (cleanup) = 1;
8844 /* Start a new binding layer that will keep track of all cleanup
8845 actions to be performed. */
8846 expand_start_bindings (2);
8848 target_temp_slot_level = temp_slot_level;
8850 expand_decl_cleanup (NULL_TREE, cleanup);
8851 op0 = expand_expr (try_block, target, tmode, modifier);
8853 preserve_temp_slots (op0);
8854 expand_end_bindings (NULL_TREE, 0, 0);
8855 emit_jump (done_label);
8856 emit_label (finally_label);
8857 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8858 emit_indirect_jump (return_link);
8859 emit_label (done_label);
8863 case GOTO_SUBROUTINE_EXPR:
8865 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8866 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8867 rtx return_address = gen_label_rtx ();
8868 emit_move_insn (return_link,
8869 gen_rtx_LABEL_REF (Pmode, return_address));
8871 emit_label (return_address);
8876 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8879 return get_exception_pointer (cfun);
8882 /* Function descriptors are not valid except as
8883 initialization constants, and should not be expanded. */
8887 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8890 /* Here to do an ordinary binary operator, generating an instruction
8891 from the optab already placed in `this_optab'. */
8893 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8895 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8896 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8898 temp = expand_binop (mode, this_optab, op0, op1, target,
8899 unsignedp, OPTAB_LIB_WIDEN);
8905 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
8906 when applied to the address of EXP, produces an address known to be
8907 aligned more than BIGGEST_ALIGNMENT. */
8910 is_aligning_offset (offset, exp)
8914 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
8915 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8916 || TREE_CODE (offset) == NOP_EXPR
8917 || TREE_CODE (offset) == CONVERT_EXPR
8918 || TREE_CODE (offset) == WITH_RECORD_EXPR)
8919 offset = TREE_OPERAND (offset, 0);
8921 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8922 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8923 if (TREE_CODE (offset) != BIT_AND_EXPR
8924 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8925 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
8926 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8929 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8930 It must be NEGATE_EXPR. Then strip any more conversions. */
8931 offset = TREE_OPERAND (offset, 0);
8932 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8933 || TREE_CODE (offset) == NOP_EXPR
8934 || TREE_CODE (offset) == CONVERT_EXPR)
8935 offset = TREE_OPERAND (offset, 0);
8937 if (TREE_CODE (offset) != NEGATE_EXPR)
8940 offset = TREE_OPERAND (offset, 0);
8941 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8942 || TREE_CODE (offset) == NOP_EXPR
8943 || TREE_CODE (offset) == CONVERT_EXPR)
8944 offset = TREE_OPERAND (offset, 0);
8946 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
8947 whose type is the same as EXP. */
8948 return (TREE_CODE (offset) == ADDR_EXPR
8949 && (TREE_OPERAND (offset, 0) == exp
8950 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
8951 && (TREE_TYPE (TREE_OPERAND (offset, 0))
8952 == TREE_TYPE (exp)))));
8955 /* Return the tree node if ARG corresponds to a string constant, or zero
8956 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8957 in bytes within the string that ARG is accessing. The type of the
8958 offset will be `sizetype'. */
8961 string_constant (arg, ptr_offset)
8967 if (TREE_CODE (arg) == ADDR_EXPR
8968 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8970 *ptr_offset = size_zero_node;
8971 return TREE_OPERAND (arg, 0);
8973 else if (TREE_CODE (arg) == PLUS_EXPR)
8975 tree arg0 = TREE_OPERAND (arg, 0);
8976 tree arg1 = TREE_OPERAND (arg, 1);
8981 if (TREE_CODE (arg0) == ADDR_EXPR
8982 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
8984 *ptr_offset = convert (sizetype, arg1);
8985 return TREE_OPERAND (arg0, 0);
8987 else if (TREE_CODE (arg1) == ADDR_EXPR
8988 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
8990 *ptr_offset = convert (sizetype, arg0);
8991 return TREE_OPERAND (arg1, 0);
8998 /* Expand code for a post- or pre- increment or decrement
8999 and return the RTX for the result.
9000 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9003 expand_increment (exp, post, ignore)
9009 tree incremented = TREE_OPERAND (exp, 0);
9010 optab this_optab = add_optab;
9012 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9013 int op0_is_copy = 0;
9014 int single_insn = 0;
9015 /* 1 means we can't store into OP0 directly,
9016 because it is a subreg narrower than a word,
9017 and we don't dare clobber the rest of the word. */
9020 /* Stabilize any component ref that might need to be
9021 evaluated more than once below. */
9023 || TREE_CODE (incremented) == BIT_FIELD_REF
9024 || (TREE_CODE (incremented) == COMPONENT_REF
9025 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9026 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9027 incremented = stabilize_reference (incremented);
9028 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9029 ones into save exprs so that they don't accidentally get evaluated
9030 more than once by the code below. */
9031 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9032 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9033 incremented = save_expr (incremented);
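/* E.g. (a C++ illustration, not from the original source): in
   `++(++i)' the inner preincrement is forced into a SAVE_EXPR so
   that it is evaluated exactly once.  */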
9035 /* Compute the operands as RTX.
9036 Note whether OP0 is the actual lvalue or a copy of it:
9037 I believe it is a copy iff it is a register or subreg
9038 and insns were generated in computing it. */
9040 temp = get_last_insn ();
9041 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9043 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9044 in place but instead must do sign- or zero-extension during assignment,
9045 so we copy it into a new register and let the code below use it as
9048 Note that we can safely modify this SUBREG since it is known not to be
9049 shared (it was made by the expand_expr call above). */
9051 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9054 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9058 else if (GET_CODE (op0) == SUBREG
9059 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9061 /* We cannot increment this SUBREG in place. If we are
9062 post-incrementing, get a copy of the old value. Otherwise,
9063 just mark that we cannot increment in place. */
9065 op0 = copy_to_reg (op0);
9070 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9071 && temp != get_last_insn ());
9072 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9074 /* Decide whether incrementing or decrementing. */
9075 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9076 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9077 this_optab = sub_optab;
9079 /* Convert decrement by a constant into a negative increment. */
9080 if (this_optab == sub_optab
9081 && GET_CODE (op1) == CONST_INT)
9083 op1 = GEN_INT (-INTVAL (op1));
9084 this_optab = add_optab;
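/* Illustration only (not from the original source): a decrement
   such as `--x' is handled from here on as `x += -1', so only the
   add path needs to be considered below.  */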
9087 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9088 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9090 /* For a preincrement, see if we can do this with a single instruction. */
9093 icode = (int) this_optab->handlers[(int) mode].insn_code;
9094 if (icode != (int) CODE_FOR_nothing
9095 /* Make sure that OP0 is valid for operands 0 and 1
9096 of the insn we want to queue. */
9097 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9098 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9099 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9103 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9104 then we cannot just increment OP0. We must therefore contrive to
9105 increment the original value. Then, for postincrement, we can return
9106 OP0 since it is a copy of the old value. For preincrement, expand here
9107 unless we can do it with a single insn.
9109 Likewise if storing directly into OP0 would clobber high bits
9110 we need to preserve (bad_subreg). */
9111 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9113 /* This is the easiest way to increment the value wherever it is.
9114 Problems with multiple evaluation of INCREMENTED are prevented
9115 because either (1) it is a component_ref or preincrement,
9116 in which case it was stabilized above, or (2) it is an array_ref
9117 with constant index in an array in a register, which is
9118 safe to reevaluate. */
9119 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9120 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9121 ? MINUS_EXPR : PLUS_EXPR),
9124 TREE_OPERAND (exp, 1));
9126 while (TREE_CODE (incremented) == NOP_EXPR
9127 || TREE_CODE (incremented) == CONVERT_EXPR)
9129 newexp = convert (TREE_TYPE (incremented), newexp);
9130 incremented = TREE_OPERAND (incremented, 0);
9133 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9134 return post ? op0 : temp;
9139 /* We have a true reference to the value in OP0.
9140 If there is an insn to add or subtract in this mode, queue it.
9141 Queueing the increment insn avoids the register shuffling
9142 that often results if we must increment now and first save
9143 the old value for subsequent use. */
9145 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9146 op0 = stabilize (op0);
9149 icode = (int) this_optab->handlers[(int) mode].insn_code;
9150 if (icode != (int) CODE_FOR_nothing
9151 /* Make sure that OP0 is valid for operands 0 and 1
9152 of the insn we want to queue. */
9153 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9154 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9156 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9157 op1 = force_reg (mode, op1);
9159 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9161 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9163 rtx addr = (general_operand (XEXP (op0, 0), mode)
9164 ? force_reg (Pmode, XEXP (op0, 0))
9165 : copy_to_reg (XEXP (op0, 0)));
9168 op0 = replace_equiv_address (op0, addr);
9169 temp = force_reg (GET_MODE (op0), op0);
9170 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9171 op1 = force_reg (mode, op1);
9173 /* The increment queue is LIFO, thus we have to `queue'
9174 the instructions in reverse order. */
9175 enqueue_insn (op0, gen_move_insn (op0, temp));
9176 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9181 /* Preincrement, or we can't increment with one simple insn. */
9183 /* Save a copy of the value before inc or dec, to return it later. */
9184 temp = value = copy_to_reg (op0);
9186 /* Arrange to return the incremented value. */
9187 /* Copy the rtx because expand_binop will protect from the queue,
9188 and the results of that would be invalid for us to return
9189 if our caller does emit_queue before using our result. */
9190 temp = copy_rtx (value = op0);
9192 /* Increment however we can. */
9193 op1 = expand_binop (mode, this_optab, value, op1, op0,
9194 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9196 /* Make sure the value is stored into OP0. */
9198 emit_move_insn (op0, op1);
9203 /* At the start of a function, record that we have no previously-pushed
9204 arguments waiting to be popped. */
9207 init_pending_stack_adjust ()
9209 pending_stack_adjust = 0;
9212 /* When exiting from a function, if safe, clear out any pending stack adjust
9213 so the adjustment won't get done.
9215 Note, if the current function calls alloca, then it must have a
9216 frame pointer regardless of the value of flag_omit_frame_pointer. */
9219 clear_pending_stack_adjust ()
9221 #ifdef EXIT_IGNORE_STACK
9223 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9224 && EXIT_IGNORE_STACK
9225 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9226 && ! flag_inline_functions)
9228 stack_pointer_delta -= pending_stack_adjust,
9229 pending_stack_adjust = 0;
9234 /* Pop any previously-pushed arguments that have not been popped yet. */
9237 do_pending_stack_adjust ()
9239 if (inhibit_defer_pop == 0)
9241 if (pending_stack_adjust != 0)
9242 adjust_stack (GEN_INT (pending_stack_adjust));
9243 pending_stack_adjust = 0;
9247 /* Expand conditional expressions. */
9249 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9250 LABEL is an rtx of code CODE_LABEL, in this function and all the
9254 jumpifnot (exp, label)
9258 do_jump (exp, label, NULL_RTX);
9261 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9268 do_jump (exp, NULL_RTX, label);
9271 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9272 the result is zero, or IF_TRUE_LABEL if the result is one.
9273 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9274 meaning fall through in that case.
9276 do_jump always does any pending stack adjust except when it does not
9277 actually perform a jump. An example where there is no jump
9278 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9280 This function is responsible for optimizing cases such as
9281 &&, || and comparison operators in EXP. */
9284 do_jump (exp, if_false_label, if_true_label)
9286 rtx if_false_label, if_true_label;
9288 enum tree_code code = TREE_CODE (exp);
9289 /* Some cases need to create a label to jump to
9290 in order to properly fall through.
9291 These cases set DROP_THROUGH_LABEL nonzero. */
9292 rtx drop_through_label = 0;
9296 enum machine_mode mode;
9298 #ifdef MAX_INTEGER_COMPUTATION_MODE
9299 check_max_integer_computation_mode (exp);
9310 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9316 /* This is not true with #pragma weak */
9318 /* The address of something can never be zero. */
9320 emit_jump (if_true_label);
9325 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9326 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9327 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9328 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9331 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9333 if ((TYPE_PRECISION (TREE_TYPE (exp))
9334 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9336 case NON_LVALUE_EXPR:
9337 case REFERENCE_EXPR:
9342 /* These cannot change zero->non-zero or vice versa. */
9343 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9346 case WITH_RECORD_EXPR:
9347 /* Put the object on the placeholder list, recurse through our first
9348 operand, and pop the list. */
9349 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9351 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9352 placeholder_list = TREE_CHAIN (placeholder_list);
9356 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9357 a test, and can be longer if the test is eliminated.
9359 /* Reduce to minus. */
9360 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9361 TREE_OPERAND (exp, 0),
9362 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9363 TREE_OPERAND (exp, 1))));
9364 /* Process as MINUS. */
9368 /* Non-zero iff operands of minus differ. */
9369 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9370 TREE_OPERAND (exp, 0),
9371 TREE_OPERAND (exp, 1)),
9372 NE, NE, if_false_label, if_true_label);
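/* Illustration (not from the original source): `if (a - b)' jumps
   exactly when a != b; the PLUS_EXPR case above first rewrites
   `a + b' as `a - (-b)' so that it reaches this same test.  */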
9376 /* If we are AND'ing with a small constant, do this comparison in the
9377 smallest type that fits. If the machine doesn't have comparisons
9378 that small, it will be converted back to the wider comparison.
9379 This helps if we are testing the sign bit of a narrower object.
9380 combine can't do this for us because it can't know whether a
9381 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
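/* A hypothetical example (not from the original source): on a
   target with QImode compares, `if (x & 0x80)' with X an int can
   be tested as an 8-bit value, turning the test into a sign-bit
   check in the narrower mode.  */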
9383 if (! SLOW_BYTE_ACCESS
9384 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9385 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9386 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9387 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9388 && (type = type_for_mode (mode, 1)) != 0
9389 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9390 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9391 != CODE_FOR_nothing))
9393 do_jump (convert (type, exp), if_false_label, if_true_label);
9398 case TRUTH_NOT_EXPR:
9399 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9402 case TRUTH_ANDIF_EXPR:
9403 if (if_false_label == 0)
9404 if_false_label = drop_through_label = gen_label_rtx ();
9405 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9406 start_cleanup_deferral ();
9407 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9408 end_cleanup_deferral ();
9411 case TRUTH_ORIF_EXPR:
9412 if (if_true_label == 0)
9413 if_true_label = drop_through_label = gen_label_rtx ();
9414 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9415 start_cleanup_deferral ();
9416 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9417 end_cleanup_deferral ();
9422 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9423 preserve_temp_slots (NULL_RTX);
9427 do_pending_stack_adjust ();
9428 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9434 case ARRAY_RANGE_REF:
9436 HOST_WIDE_INT bitsize, bitpos;
9438 enum machine_mode mode;
9443 /* Get description of this reference. We don't actually care
9444 about the underlying object here. */
9445 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9446 &unsignedp, &volatilep);
9448 type = type_for_size (bitsize, unsignedp);
9449 if (! SLOW_BYTE_ACCESS
9450 && type != 0 && bitsize >= 0
9451 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9452 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9453 != CODE_FOR_nothing))
9455 do_jump (convert (type, exp), if_false_label, if_true_label);
9462 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9463 if (integer_onep (TREE_OPERAND (exp, 1))
9464 && integer_zerop (TREE_OPERAND (exp, 2)))
9465 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9467 else if (integer_zerop (TREE_OPERAND (exp, 1))
9468 && integer_onep (TREE_OPERAND (exp, 2)))
9469 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9473 rtx label1 = gen_label_rtx ();
9474 drop_through_label = gen_label_rtx ();
9476 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9478 start_cleanup_deferral ();
9479 /* Now the THEN-expression. */
9480 do_jump (TREE_OPERAND (exp, 1),
9481 if_false_label ? if_false_label : drop_through_label,
9482 if_true_label ? if_true_label : drop_through_label);
9483 /* In case the do_jump just above never jumps. */
9484 do_pending_stack_adjust ();
9485 emit_label (label1);
9487 /* Now the ELSE-expression. */
9488 do_jump (TREE_OPERAND (exp, 2),
9489 if_false_label ? if_false_label : drop_through_label,
9490 if_true_label ? if_true_label : drop_through_label);
9491 end_cleanup_deferral ();
9497 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
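/* For complex operands, X == Y is decomposed below into
   REALPART (X) == REALPART (Y) && IMAGPART (X) == IMAGPART (Y),
   with both operands wrapped in SAVE_EXPRs so each is expanded
   only once.  */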
9499 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9500 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9502 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9503 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9506 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9507 fold (build (EQ_EXPR, TREE_TYPE (exp),
9508 fold (build1 (REALPART_EXPR,
9509 TREE_TYPE (inner_type),
9511 fold (build1 (REALPART_EXPR,
9512 TREE_TYPE (inner_type),
9514 fold (build (EQ_EXPR, TREE_TYPE (exp),
9515 fold (build1 (IMAGPART_EXPR,
9516 TREE_TYPE (inner_type),
9518 fold (build1 (IMAGPART_EXPR,
9519 TREE_TYPE (inner_type),
9521 if_false_label, if_true_label);
9524 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9525 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9527 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9528 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9529 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9531 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9537 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9539 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9540 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9542 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9543 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9546 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9547 fold (build (NE_EXPR, TREE_TYPE (exp),
9548 fold (build1 (REALPART_EXPR,
9549 TREE_TYPE (inner_type),
9551 fold (build1 (REALPART_EXPR,
9552 TREE_TYPE (inner_type),
9554 fold (build (NE_EXPR, TREE_TYPE (exp),
9555 fold (build1 (IMAGPART_EXPR,
9556 TREE_TYPE (inner_type),
9558 fold (build1 (IMAGPART_EXPR,
9559 TREE_TYPE (inner_type),
9561 if_false_label, if_true_label);
9564 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9565 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9567 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9568 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9569 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9571 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9576 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9577 if (GET_MODE_CLASS (mode) == MODE_INT
9578 && ! can_compare_p (LT, mode, ccp_jump))
9579 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9581 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9585 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9586 if (GET_MODE_CLASS (mode) == MODE_INT
9587 && ! can_compare_p (LE, mode, ccp_jump))
9588 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9590 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9594 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9595 if (GET_MODE_CLASS (mode) == MODE_INT
9596 && ! can_compare_p (GT, mode, ccp_jump))
9597 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9599 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9603 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9604 if (GET_MODE_CLASS (mode) == MODE_INT
9605 && ! can_compare_p (GE, mode, ccp_jump))
9606 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9608 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9611 case UNORDERED_EXPR:
9614 enum rtx_code cmp, rcmp;
9617 if (code == UNORDERED_EXPR)
9618 cmp = UNORDERED, rcmp = ORDERED;
9620 cmp = ORDERED, rcmp = UNORDERED;
9621 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9624 if (! can_compare_p (cmp, mode, ccp_jump)
9625 && (can_compare_p (rcmp, mode, ccp_jump)
9626 /* If the target doesn't provide either UNORDERED or ORDERED
9627 comparisons, canonicalize on UNORDERED for the library. */
9628 || rcmp == UNORDERED))
9632 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9634 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9639 enum rtx_code rcode1;
9640 enum tree_code tcode2;
9664 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9665 if (can_compare_p (rcode1, mode, ccp_jump))
9666 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9670 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9671 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9674 /* If the target doesn't support combined unordered
9675 compares, decompose into UNORDERED + comparison. */
9676 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9677 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9678 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9679 do_jump (exp, if_false_label, if_true_label);
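/* An illustrative example (not from the original source): an
   UNLT_EXPR test of (a, b) is rewritten above as
   UNORDERED_EXPR (a, b) || LT_EXPR (a, b).  */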
9685 __builtin_expect (<test>, 0) and
9686 __builtin_expect (<test>, 1)
9688 We need to do this here, so that <test> is not converted to a SCC
9689 operation on machines that use condition code registers and COMPARE
9690 like the PowerPC, and then the jump is done based on whether the SCC
9691 operation produced a 1 or 0. */
9693 /* Check for a built-in function. */
9694 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9696 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9697 tree arglist = TREE_OPERAND (exp, 1);
9699 if (TREE_CODE (fndecl) == FUNCTION_DECL
9700 && DECL_BUILT_IN (fndecl)
9701 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9702 && arglist != NULL_TREE
9703 && TREE_CHAIN (arglist) != NULL_TREE)
9705 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9708 if (seq != NULL_RTX)
9715 /* fall through and generate the normal code. */
9719 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9721 /* This is not needed any more and causes poor code since it causes
9722 comparisons and tests from non-SI objects to have different code sequences. */
9724 /* Copy to register to avoid generating bad insns by cse
9725 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9726 if (!cse_not_expected && GET_CODE (temp) == MEM)
9727 temp = copy_to_reg (temp);
9729 do_pending_stack_adjust ();
9730 /* Do any postincrements in the expression that was tested. */
9733 if (GET_CODE (temp) == CONST_INT
9734 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9735 || GET_CODE (temp) == LABEL_REF)
9737 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9741 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9742 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9743 /* Note swapping the labels gives us not-equal. */
9744 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9745 else if (GET_MODE (temp) != VOIDmode)
9746 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9747 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9748 GET_MODE (temp), NULL_RTX,
9749 if_false_label, if_true_label);
9754 if (drop_through_label)
9756 /* If do_jump produces code that might be jumped around,
9757 do any stack adjusts from that code, before the place
9758 where control merges in. */
9759 do_pending_stack_adjust ();
9760 emit_label (drop_through_label);
9764 /* Given a comparison expression EXP for values too wide to be compared
9765 with one insn, test the comparison and jump to the appropriate label.
9766 The code of EXP is ignored; we always test GT if SWAP is 0,
9767 and LT if SWAP is 1. */
9770 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9773 rtx if_false_label, if_true_label;
9775 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9776 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9777 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9778 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9780 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9783 /* Compare OP0 with OP1, word at a time, in mode MODE.
9784 UNSIGNEDP says to do unsigned comparison.
9785 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
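/* A sketch of the emitted cascade (illustration only): for a
   two-word comparison OP0 > OP1, working from the high-order word
   down, we emit roughly

       if (op0.high > op1.high) goto if_true;    (signedness as requested)
       if (op0.high != op1.high) goto if_false;
       if (op0.low > op1.low) goto if_true;      (always unsigned)
       goto if_false;
 */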
9788 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9789 enum machine_mode mode;
9792 rtx if_false_label, if_true_label;
9794 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9795 rtx drop_through_label = 0;
9798 if (! if_true_label || ! if_false_label)
9799 drop_through_label = gen_label_rtx ();
9800 if (! if_true_label)
9801 if_true_label = drop_through_label;
9802 if (! if_false_label)
9803 if_false_label = drop_through_label;
9805 /* Compare a word at a time, high order first. */
9806 for (i = 0; i < nwords; i++)
9808 rtx op0_word, op1_word;
9810 if (WORDS_BIG_ENDIAN)
9812 op0_word = operand_subword_force (op0, i, mode);
9813 op1_word = operand_subword_force (op1, i, mode);
9817 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9818 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9821 /* All but the high-order word must be compared as unsigned. */
9822 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9823 (unsignedp || i > 0), word_mode, NULL_RTX,
9824 NULL_RTX, if_true_label);
9826 /* Consider lower words only if these are equal. */
9827 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9828 NULL_RTX, NULL_RTX, if_false_label);
9832 emit_jump (if_false_label);
9833 if (drop_through_label)
9834 emit_label (drop_through_label);
9837 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9838 with one insn, test the comparison and jump to the appropriate label. */
9841 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9843 rtx if_false_label, if_true_label;
9845 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9846 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9847 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9848 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9850 rtx drop_through_label = 0;
9852 if (! if_false_label)
9853 drop_through_label = if_false_label = gen_label_rtx ();
9855 for (i = 0; i < nwords; i++)
9856 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9857 operand_subword_force (op1, i, mode),
9858 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9859 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9862 emit_jump (if_true_label);
9863 if (drop_through_label)
9864 emit_label (drop_through_label);
9867 /* Jump according to whether OP0 is 0.
9868 We assume that OP0 has an integer mode that is too wide
9869 for the available compare insns. */
9872 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9874 rtx if_false_label, if_true_label;
9876 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9879 rtx drop_through_label = 0;
9881 /* The fastest way of doing this comparison on almost any machine is to
9882 "or" all the words and compare the result. If all have to be loaded
9883 from memory and this is a very wide item, it's possible this may
9884 be slower, but that's highly unlikely. */
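/* E.g. (illustration only): a four-word OP0 is tested as
   `(w0 | w1 | w2 | w3) == 0' with a single compare at the end.  */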
9886 part = gen_reg_rtx (word_mode);
9887 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9888 for (i = 1; i < nwords && part != 0; i++)
9889 part = expand_binop (word_mode, ior_optab, part,
9890 operand_subword_force (op0, i, GET_MODE (op0)),
9891 part, 1, OPTAB_WIDEN);
9895 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9896 NULL_RTX, if_false_label, if_true_label);
9901 /* If we couldn't do the "or" simply, do this with a series of compares. */
9902 if (! if_false_label)
9903 drop_through_label = if_false_label = gen_label_rtx ();
9905 for (i = 0; i < nwords; i++)
9906 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9907 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9908 if_false_label, NULL_RTX);
9911 emit_jump (if_true_label);
9913 if (drop_through_label)
9914 emit_label (drop_through_label);
9917 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
9918 (including code to compute the values to be compared)
9919 and set (CC0) according to the result.
9920 The decision as to signed or unsigned comparison must be made by the caller.
9922 We force a stack adjustment unless there are currently
9923 things pushed on the stack that aren't yet used.
9925 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
9929 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9933 enum machine_mode mode;
9938 /* If one operand is constant, make it the second one. Only do this
9939 if the other operand is not constant as well. */
9941 if (swap_commutative_operands_p (op0, op1))
9946 code = swap_condition (code);
9951 op0 = force_not_mem (op0);
9952 op1 = force_not_mem (op1);
9955 do_pending_stack_adjust ();
9957 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9958 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9962 /* There's no need to do this now that combine.c can eliminate lots of
9963 sign extensions. This can be less efficient in certain cases on other machines. */
9966 /* If this is a signed equality comparison, we can do it as an
9967 unsigned comparison since zero-extension is cheaper than sign
9968 extension and comparisons with zero are done as unsigned. This is
9969 the case even on machines that can do fast sign extension, since
9970 zero-extension is easier to combine with other operations than
9971 sign-extension is. If we are comparing against a constant, we must
9972 convert it to what it would look like unsigned. */
9973 if ((code == EQ || code == NE) && ! unsignedp
9974 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9976 if (GET_CODE (op1) == CONST_INT
9977 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9978 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9983 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
9985 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
9988 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
9989 The decision as to signed or unsigned comparison must be made by the caller.
9991 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared. */
9995 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
9996 if_false_label, if_true_label)
10000 enum machine_mode mode;
10002 rtx if_false_label, if_true_label;
10005 int dummy_true_label = 0;
10007 /* Reverse the comparison if that is safe and we want to jump if it is false. */
10009 if (! if_true_label && ! FLOAT_MODE_P (mode))
10011 if_true_label = if_false_label;
10012 if_false_label = 0;
10013 code = reverse_condition (code);
10016 /* If one operand is constant, make it the second one. Only do this
10017 if the other operand is not constant as well. */
10019 if (swap_commutative_operands_p (op0, op1))
10024 code = swap_condition (code);
10027 if (flag_force_mem)
10029 op0 = force_not_mem (op0);
10030 op1 = force_not_mem (op1);
10033 do_pending_stack_adjust ();
10035 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10036 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10038 if (tem == const_true_rtx)
10041 emit_jump (if_true_label);
10045 if (if_false_label)
10046 emit_jump (if_false_label);
10052 /* There's no need to do this now that combine.c can eliminate lots of
10053 sign extensions. This can be less efficient in certain cases on other machines. */
10056 /* If this is a signed equality comparison, we can do it as an
10057 unsigned comparison since zero-extension is cheaper than sign
10058 extension and comparisons with zero are done as unsigned. This is
10059 the case even on machines that can do fast sign extension, since
10060 zero-extension is easier to combine with other operations than
10061 sign-extension is. If we are comparing against a constant, we must
10062 convert it to what it would look like unsigned. */
10063 if ((code == EQ || code == NE) && ! unsignedp
10064 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10066 if (GET_CODE (op1) == CONST_INT
10067 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10068 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10073 if (! if_true_label)
10075 dummy_true_label = 1;
10076 if_true_label = gen_label_rtx ();
10079 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10082 if (if_false_label)
10083 emit_jump (if_false_label);
10084 if (dummy_true_label)
10085 emit_label (if_true_label);
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
          || (GET_MODE_BITSIZE (mode)
              > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
                                                                      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
         case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
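  /* Illustration of the case above (added comment, not in the original
     source): when operand 0 is an INTEGER_CST, expand_expr may have
     handed it back in a promoted, wider mode, so the constant's own type
     no longer describes the rtx; the (non-constant or narrower) second
     operand's type is the one that matches both rtx's.  */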
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
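/* Background sketch (added comment, not part of the original source): a
   store-flag ("scc") insn materializes the truth value of a comparison
   directly in a register, so that, e.g., "r = (a < b)" needs no branch
   on machines that provide a set-on-condition instruction.  */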
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR: code = EQ; break;
    case NE_EXPR: code = NE; break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR: code = UNORDERED; break;
    case ORDERED_EXPR: code = ORDERED; break;
    case UNLT_EXPR: code = UNLT; break;
    case UNLE_EXPR: code = UNLE; break;
    case UNGT_EXPR: code = UNGT; break;
    case UNGE_EXPR: code = UNGE; break;
    case UNEQ_EXPR: code = UNEQ; break;
    default:
      abort ();
    }
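  /* Examples of the conversions above (added comment): a signed "x < 1"
     becomes "x <= 0" and a signed "x > -1" becomes "x >= 0", so the
     tests below can treat both as comparisons with zero.  */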
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && bitnum < TYPE_PRECISION (type)
          && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
                                   bitnum - TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }
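      /* Worked example (added comment): "(x & 8) != 0" has bitnum == 3
         and becomes "(x >> 3) & 1"; for "((x >> 2) & 2) != 0" the
         adjustment above folds the shift into the bit number, giving
         bitnum == 1 + 2 == 3 on x itself.  */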
      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (! get_subtarget (subtarget)
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
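  /* Illustrative instance (added comment): a sign-bit test such as
     "(x & 0x80000000) != 0" in a 32-bit mode has
     bitnum == TYPE_PRECISION (type) - 1, so a logical (unsigned) right
     shift by 31 already yields 0 or 1 and the trailing AND is omitted;
     this is why the operations must be unsigned in that case.  */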
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
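/* Shape of the fallback emitted above (added comment): when no scc insn
   applies, do_store_flag generates a sequence of the form

     target = 1;                        (0 when INVERT is set)
     if (op0 <code> op1) goto L;
     target = 0;                        (1 when INVERT is set)
   L:

   which simulates the store-flag with a set/jump/set sequence.  */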
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
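/* Consequence (added note, an interpretation rather than original text):
   the switch-expansion code compares the number of case values against
   this threshold when deciding between a dispatch table and a tree of
   compare-and-branch insns; a casesi insn folds the bounds check into
   the dispatch, so the table becomes profitable one case sooner.  */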
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
            table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
                          index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (type_for_size (index_bits, 0),
                                index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();
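  /* Walk-through of the widening case above (added comment): with, say,
     a DImode INDEX_TYPE, the MINUS and the LTU bounds check are emitted
     in DImode, jumping to DEFAULT_LABEL whenever RANGE < INDEX; only an
     in-range index, which by then fits in 32 bits, reaches the
     truncation to SImode.  */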
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; the same concept as
   try_casesi above.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
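  /* Worked example (added comment): for a switch whose cases run from
     5 to 10, the caller passes INDEX = i - 5 and RANGE = 5.  If i were
     3, INDEX wraps around to a huge unsigned value and the GTU test
     above sends it to DEFAULT_LABEL, so the single comparison rejects
     both i < 5 and i > 10.  */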
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
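  /* Shape of the address built above (added comment): on a target where
     Pmode is SImode and the vector entries are 4 bytes wide this is

       (plus:SI (mult:SI index (const_int 4))
                (label_ref:SI table_label))

     i.e. table_label + index * entry_size.  */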
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (index_type, index_expr, minval, range,
               table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TREE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}