/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
   Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
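
/* Illustrative sketch (not part of the original file): the shape of
   constfun callback that store_by_pieces expects.  This hypothetical
   one describes a block of zeros, mirroring what clear_by_pieces
   uses internally.  */
#if 0
static rtx
example_constfun (void *data ATTRIBUTE_UNUSED,
                  HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                  enum machine_mode mode)
{
  /* Return the constant piece that should be stored at OFFSET,
     expressed in MODE.  */
  return CONST0_RTX (mode);
}
#endif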
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
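
/* Illustrative sketch (not part of the original file): how the
   *_BY_PIECES_P heuristics above are consulted.  The size and
   alignment values are hypothetical.  */
#if 0
static void
example_by_pieces_gate (rtx dst, rtx src)
{
  /* A 32-byte, word-aligned copy is inlined only if it decomposes
     into fewer than MOVE_RATIO move insns.  */
  if (MOVE_BY_PIECES_P (32, BITS_PER_WORD))
    move_by_pieces (dst, src, 32, BITS_PER_WORD, 0);
}
#endif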
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
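
/* Illustrative sketch (not part of the original file): once
   init_expr_once has run, the tables above answer simple queries
   like this one.  */
#if 0
static bool
example_mode_loads_directly (enum machine_mode mode)
{
  /* Nonzero when some hard register in MODE can be loaded straight
     from memory by a recognized insn.  */
  return direct_load[(int) mode] != 0;
}
#endif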
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
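
/* Illustrative sketch (not part of the original file): a typical
   call to convert_move, widening a 32-bit pseudo into a 64-bit one.
   The pseudos and modes here are hypothetical.  */
#if 0
static void
example_convert_move (void)
{
  rtx narrow = gen_reg_rtx (SImode);	/* 32-bit source.  */
  rtx wide = gen_reg_rtx (DImode);	/* 64-bit destination.  */

  /* UNSIGNEDP == 1 requests zero-extension; 0 would sign-extend.  */
  convert_move (wide, narrow, 1);
}
#endif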
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
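
/* Illustrative sketch (not part of the original file): unlike
   convert_move, convert_modes can often fold a constant without
   emitting any insns.  The constant here is hypothetical.  */
#if 0
static rtx
example_convert_modes (void)
{
  /* Narrowing the SImode constant 0x1234 to QImode emits nothing;
     gen_lowpart simply yields the low part, (const_int 0x34).  */
  return convert_modes (QImode, SImode, GEN_INT (0x1234), 1);
}
#endif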
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
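
/* Worked example (not part of the original file): the greedy
   decomposition performed above.  With 4-byte maximal pieces, an
   11-byte block needs 2 + 1 + 1 = 4 move insns.  */
#if 0
static unsigned int
example_ninsns (unsigned int l)
{
  unsigned int n = 0, size;

  for (size = 4; size >= 1; size /= 2)
    {
      n += l / size;	/* Largest pieces first...  */
      l %= size;	/* ...then the remainder in smaller modes.  */
    }
  return n;		/* example_ninsns (11) == 4.  */
}
#endif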
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
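
/* Illustrative sketch (not part of the original file): copying one
   BLKmode MEM to another through the public entry point.  The
   address pseudos here are hypothetical.  */
#if 0
static void
example_emit_block_move (rtx dst_addr, rtx src_addr)
{
  rtx x = gen_rtx_MEM (BLKmode, dst_addr);
  rtx y = gen_rtx_MEM (BLKmode, src_addr);

  /* Copy 64 bytes.  emit_block_move tries move_by_pieces, then a
     movmem pattern, then a memcpy libcall, then an explicit loop.  */
  emit_block_move (x, y, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif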
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
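
/* For reference (not part of the original file): the RTL emitted by
   emit_block_move_via_loop computes the equivalent of this C byte
   copy.  */
#if 0
static void
example_block_move_loop (unsigned char *x, const unsigned char *y,
			 unsigned long size)
{
  unsigned long iter;

  for (iter = 0; iter < size; iter++)
    x[iter] = y[iter];
}
#endif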
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
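
/* Illustrative sketch (not part of the original file): the kind of
   PARALLEL the group-move routines consume -- a 16-byte value split
   across two hypothetical 8-byte hard registers.  */
#if 0
static rtx
example_group_rtx (void)
{
  rtvec v = rtvec_alloc (2);

  /* Each element pairs a register with its byte offset in the
     overall block.  */
  RTVEC_ELT (v, 0) = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_REG (DImode, 0),
					const0_rtx);
  RTVEC_ELT (v, 1) = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_REG (DImode, 1),
					GEN_INT (8));
  return gen_rtx_PARALLEL (BLKmode, v);
}
#endif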
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = alloca (sizeof (rtx) * finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2051 /* Generate code to copy a BLKmode object of TYPE out of a
2052 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2053 is null, a stack temporary is created. TGTBLK is returned.
2055 The purpose of this routine is to handle functions that return
2056 BLKmode structures in registers. Some machines (the PA for example)
2057 want to return all small structures in registers regardless of the
2058 structure's alignment. */
2061 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2063 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2064 rtx src = NULL, dst = NULL;
2065 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2066 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2070 tgtblk = assign_temp (build_qualified_type (type,
2072 | TYPE_QUAL_CONST)),
2074 preserve_temp_slots (tgtblk);
2077 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2078 into a new pseudo which is a full word. */
2080 if (GET_MODE (srcreg) != BLKmode
2081 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2082 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2084 /* If the structure doesn't take up a whole number of words, see whether
2085 SRCREG is padded on the left or on the right. If it's on the left,
2086 set PADDING_CORRECTION to the number of bits to skip.
2088 In most ABIs, the structure will be returned at the least significant end of
2089 the register, which translates to right padding on little-endian
2090 targets and left padding on big-endian targets. The opposite
2091 holds if the structure is returned at the most significant
2092 end of the register. */
2093 if (bytes % UNITS_PER_WORD != 0
2094 && (targetm.calls.return_in_msb (type)
2096 : BYTES_BIG_ENDIAN))
2098 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
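/* Worked example (assuming a 32-bit big-endian target, so BITS_PER_WORD == 32
   and UNITS_PER_WORD == 4): a 2-byte structure returned in a register sits in
   the most significant half of that register, so PADDING_CORRECTION
   == 32 - 2 * 8 == 16, and the copy loop below starts extracting at bit 16
   of the source word.  */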
2100 /* Copy the structure BITSIZE bits at a time.
2102 We could probably emit more efficient code for machines which do not use
2103 strict alignment, but it doesn't seem worth the effort at the current time. */
2105 for (bitpos = 0, xbitpos = padding_correction;
2106 bitpos < bytes * BITS_PER_UNIT;
2107 bitpos += bitsize, xbitpos += bitsize)
2109 /* We need a new source operand each time xbitpos is on a
2110 word boundary and when xbitpos == padding_correction
2111 (the first time through). */
2112 if (xbitpos % BITS_PER_WORD == 0
2113 || xbitpos == padding_correction)
2114 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2117 /* We need a new destination operand each time bitpos is on a word boundary. */
2119 if (bitpos % BITS_PER_WORD == 0)
2120 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2122 /* Use xbitpos for the source extraction (right justified) and
2123 bitpos for the destination store (left justified). */
2124 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2125 extract_bit_field (src, bitsize,
2126 xbitpos % BITS_PER_WORD, 1,
2127 NULL_RTX, word_mode, word_mode));
2133 /* Add a USE expression for REG to the (possibly empty) list pointed
2134 to by CALL_FUSAGE. REG must denote a hard register. */
2137 use_reg (rtx *call_fusage, rtx reg)
2139 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2142 = gen_rtx_EXPR_LIST (VOIDmode,
2143 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2146 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2147 starting at REGNO. All of these registers must be hard registers. */
2150 use_regs (rtx *call_fusage, int regno, int nregs)
2154 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2156 for (i = 0; i < nregs; i++)
2157 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2160 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2161 PARALLEL REGS. This is for calls that pass values in multiple
2162 non-contiguous locations. The Irix 6 ABI has examples of this. */
2165 use_group_regs (rtx *call_fusage, rtx regs)
2169 for (i = 0; i < XVECLEN (regs, 0); i++)
2171 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2173 /* A NULL entry means the parameter goes both on the stack and in
2174 registers. This can also be a MEM for targets that pass values
2175 partially on the stack and partially in registers. */
2176 if (reg != 0 && REG_P (reg))
2177 use_reg (call_fusage, reg);
2182 /* Determine whether the LEN bytes generated by CONSTFUN can be
2183 stored to memory using several move instructions. CONSTFUNDATA is
2184 a pointer which will be passed as argument in every CONSTFUN call.
2185 ALIGN is maximum alignment we can assume. Return nonzero if a
2186 call to store_by_pieces should succeed. */
2189 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2190 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2191 void *constfundata, unsigned int align)
2193 unsigned HOST_WIDE_INT l;
2194 unsigned int max_size;
2195 HOST_WIDE_INT offset = 0;
2196 enum machine_mode mode, tmode;
2197 enum insn_code icode;
2204 if (! STORE_BY_PIECES_P (len, align))
2207 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2208 if (align >= GET_MODE_ALIGNMENT (tmode))
2209 align = GET_MODE_ALIGNMENT (tmode);
2212 enum machine_mode xmode;
2214 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2216 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2217 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2218 || SLOW_UNALIGNED_ACCESS (tmode, align))
2221 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2224 /* We would first store what we can in the largest integer mode, then go to
2225 successively smaller modes. */
2228 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2233 max_size = STORE_MAX_PIECES + 1;
2234 while (max_size > 1)
2236 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2237 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2238 if (GET_MODE_SIZE (tmode) < max_size)
2241 if (mode == VOIDmode)
2244 icode = mov_optab->handlers[(int) mode].insn_code;
2245 if (icode != CODE_FOR_nothing
2246 && align >= GET_MODE_ALIGNMENT (mode))
2248 unsigned int size = GET_MODE_SIZE (mode);
2255 cst = (*constfun) (constfundata, offset, mode);
2256 if (!LEGITIMATE_CONSTANT_P (cst))
2266 max_size = GET_MODE_SIZE (mode);
2269 /* The code above should have handled everything. */
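/* Sketch of the walk above (actual modes are target-dependent): with
   STORE_MAX_PIECES == 8, a 7-byte LEN is covered as a 4-byte SImode piece,
   then a 2-byte HImode piece, then a 1-byte QImode piece; each piece's
   constant, as returned by CONSTFUN, must satisfy LEGITIMATE_CONSTANT_P
   or we return 0.  */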
2276 /* Generate several move instructions to store LEN bytes generated by
2277 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2278 pointer which will be passed as argument in every CONSTFUN call.
2279 ALIGN is maximum alignment we can assume.
2280 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2281 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2285 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2286 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2287 void *constfundata, unsigned int align, int endp)
2289 struct store_by_pieces data;
2293 gcc_assert (endp != 2);
2297 gcc_assert (STORE_BY_PIECES_P (len, align));
2298 data.constfun = constfun;
2299 data.constfundata = constfundata;
2302 store_by_pieces_1 (&data, align);
2307 gcc_assert (!data.reverse);
2312 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2313 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2315 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2318 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2325 to1 = adjust_address (data.to, QImode, data.offset);
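/* A typical caller pattern, as in the builtin string expanders
   (illustrative; builtin_memcpy_read_str names the CONSTFUN callback those
   expanders use, reading bytes out of a host string):

     if (can_store_by_pieces (len, builtin_memcpy_read_str, str, align))
       dest = store_by_pieces (dest_mem, len, builtin_memcpy_read_str,
                               str, align, endp);  */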
2333 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2334 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2337 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2339 struct store_by_pieces data;
2344 data.constfun = clear_by_pieces_1;
2345 data.constfundata = NULL;
2348 store_by_pieces_1 (&data, align);
2351 /* Callback routine for clear_by_pieces.
2352 Return const0_rtx unconditionally. */
2355 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2356 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2357 enum machine_mode mode ATTRIBUTE_UNUSED)
2362 /* Subroutine of clear_by_pieces and store_by_pieces.
2363 Generate several move instructions to store LEN bytes of block TO. (A MEM
2364 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2367 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2368 unsigned int align ATTRIBUTE_UNUSED)
2370 rtx to_addr = XEXP (data->to, 0);
2371 unsigned int max_size = STORE_MAX_PIECES + 1;
2372 enum machine_mode mode = VOIDmode, tmode;
2373 enum insn_code icode;
2376 data->to_addr = to_addr;
2378 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2379 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2381 data->explicit_inc_to = 0;
2383 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2385 data->offset = data->len;
2387 /* If storing requires more than two move insns,
2388 copy addresses to registers (to make displacements shorter)
2389 and use post-increment if available. */
2390 if (!data->autinc_to
2391 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2393 /* Determine the main mode we'll be using. */
2394 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2395 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2396 if (GET_MODE_SIZE (tmode) < max_size)
2399 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2401 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2402 data->autinc_to = 1;
2403 data->explicit_inc_to = -1;
2406 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2407 && ! data->autinc_to)
2409 data->to_addr = copy_addr_to_reg (to_addr);
2410 data->autinc_to = 1;
2411 data->explicit_inc_to = 1;
2414 if (!data->autinc_to && CONSTANT_P (to_addr))
2415 data->to_addr = copy_addr_to_reg (to_addr);
2418 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2419 if (align >= GET_MODE_ALIGNMENT (tmode))
2420 align = GET_MODE_ALIGNMENT (tmode);
2423 enum machine_mode xmode;
2425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2427 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2428 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2429 || SLOW_UNALIGNED_ACCESS (tmode, align))
2432 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2435 /* First store what we can in the largest integer mode, then go to
2436 successively smaller modes. */
2438 while (max_size > 1)
2440 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2441 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2442 if (GET_MODE_SIZE (tmode) < max_size)
2445 if (mode == VOIDmode)
2448 icode = mov_optab->handlers[(int) mode].insn_code;
2449 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2450 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2452 max_size = GET_MODE_SIZE (mode);
2455 /* The code above should have handled everything. */
2456 gcc_assert (!data->len);
2459 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2460 with move instructions for mode MODE. GENFUN is the gen_... function
2461 to make a move insn for that mode. DATA has all the other info. */
2464 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2465 struct store_by_pieces *data)
2467 unsigned int size = GET_MODE_SIZE (mode);
2470 while (data->len >= size)
2473 data->offset -= size;
2475 if (data->autinc_to)
2476 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2479 to1 = adjust_address (data->to, mode, data->offset);
2481 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2482 emit_insn (gen_add2_insn (data->to_addr,
2483 GEN_INT (-(HOST_WIDE_INT) size)));
2485 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2486 emit_insn ((*genfun) (to1, cst));
2488 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2489 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2491 if (! data->reverse)
2492 data->offset += size;
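/* Roughly, for a 6-byte store where store_by_pieces_1 selects SImode and
   then HImode, the loop above emits (target permitting):
     (set (mem:SI (reg A)) (const_int ...))
     (set (mem:HI (plus (reg A) (const_int 4))) (const_int ...))
   with the address arithmetic folded into auto-increment forms when
   explicit_inc_to is nonzero.  */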
2498 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2499 its length in bytes. */
2502 clear_storage (rtx object, rtx size, enum block_op_methods method)
2504 enum machine_mode mode = GET_MODE (object);
2507 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2509 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2510 just move a zero. Otherwise, do this a piece at a time. */
2512 && GET_CODE (size) == CONST_INT
2513 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2515 rtx zero = CONST0_RTX (mode);
2518 emit_move_insn (object, zero);
2522 if (COMPLEX_MODE_P (mode))
2524 zero = CONST0_RTX (GET_MODE_INNER (mode));
2527 write_complex_part (object, zero, 0);
2528 write_complex_part (object, zero, 1);
2534 if (size == const0_rtx)
2537 align = MEM_ALIGN (object);
2539 if (GET_CODE (size) == CONST_INT
2540 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2541 clear_by_pieces (object, INTVAL (size), align);
2542 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2545 return clear_storage_via_libcall (object, size,
2546 method == BLOCK_OP_TAILCALL);
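/* Illustrative call: clearing a 32-byte BLKmode slot with

     clear_storage (slot, GEN_INT (32), BLOCK_OP_NORMAL);

   tries clear_by_pieces, then a setmem pattern, then the memset libcall,
   in that order.  */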
2551 /* A subroutine of clear_storage. Expand a call to memset.
2552 Return the return value of memset, 0 otherwise. */
2555 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2557 tree call_expr, arg_list, fn, object_tree, size_tree;
2558 enum machine_mode size_mode;
2561 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2562 place those pseudos into a VAR_DECL and use them later. */
2564 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2566 size_mode = TYPE_MODE (sizetype);
2567 size = convert_to_mode (size_mode, size, 1);
2568 size = copy_to_mode_reg (size_mode, size);
2570 /* It is incorrect to use the libcall calling conventions to call
2571 memset in this context. This could be a user call to memset and
2572 the user may wish to examine the return value from memset. For
2573 targets where libcalls and normal calls have different conventions
2574 for returning pointers, we could end up generating incorrect code. */
2576 object_tree = make_tree (ptr_type_node, object);
2577 size_tree = make_tree (sizetype, size);
2579 fn = clear_storage_libcall_fn (true);
2580 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2581 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2582 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2584 /* Now we have to build up the CALL_EXPR itself. */
2585 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2586 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2587 call_expr, arg_list, NULL_TREE);
2588 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2590 retval = expand_normal (call_expr);
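/* The CALL_EXPR built above is the tree equivalent of the C call
   memset (object, 0, size), expanded with the normal call conventions so
   that the return value can be examined by the caller.  */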
2595 /* A subroutine of clear_storage_via_libcall. Create the tree node
2596 for the function we use for block clears. The first time FOR_CALL
2597 is true, we call assemble_external. */
2599 static GTY(()) tree block_clear_fn;
2602 init_block_clear_fn (const char *asmspec)
2604 if (!block_clear_fn)
2608 fn = get_identifier ("memset");
2609 args = build_function_type_list (ptr_type_node, ptr_type_node,
2610 integer_type_node, sizetype,
2613 fn = build_decl (FUNCTION_DECL, fn, args);
2614 DECL_EXTERNAL (fn) = 1;
2615 TREE_PUBLIC (fn) = 1;
2616 DECL_ARTIFICIAL (fn) = 1;
2617 TREE_NOTHROW (fn) = 1;
2618 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2619 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2621 block_clear_fn = fn;
2625 set_user_assembler_name (block_clear_fn, asmspec);
2629 clear_storage_libcall_fn (int for_call)
2631 static bool emitted_extern;
2633 if (!block_clear_fn)
2634 init_block_clear_fn (NULL);
2636 if (for_call && !emitted_extern)
2638 emitted_extern = true;
2639 make_decl_rtl (block_clear_fn);
2640 assemble_external (block_clear_fn);
2643 return block_clear_fn;
2646 /* Expand a setmem pattern; return true if successful. */
2649 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2651 /* Try the most limited insn first, because there's no point
2652 including more than one in the machine description unless
2653 the more limited one has some advantage. */
2655 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2656 enum machine_mode mode;
2658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2659 mode = GET_MODE_WIDER_MODE (mode))
2661 enum insn_code code = setmem_optab[(int) mode];
2662 insn_operand_predicate_fn pred;
2664 if (code != CODE_FOR_nothing
2665 /* We don't need MODE to be narrower than
2666 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2667 the mode mask, as it is returned by the macro, it will
2668 definitely be less than the actual mode mask. */
2669 && ((GET_CODE (size) == CONST_INT
2670 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2671 <= (GET_MODE_MASK (mode) >> 1)))
2672 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2673 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2674 || (*pred) (object, BLKmode))
2675 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2676 || (*pred) (opalign, VOIDmode)))
2679 enum machine_mode char_mode;
2680 rtx last = get_last_insn ();
2683 opsize = convert_to_mode (mode, size, 1);
2684 pred = insn_data[(int) code].operand[1].predicate;
2685 if (pred != 0 && ! (*pred) (opsize, mode))
2686 opsize = copy_to_mode_reg (mode, opsize);
2689 char_mode = insn_data[(int) code].operand[2].mode;
2690 if (char_mode != VOIDmode)
2692 opchar = convert_to_mode (char_mode, opchar, 1);
2693 pred = insn_data[(int) code].operand[2].predicate;
2694 if (pred != 0 && ! (*pred) (opchar, char_mode))
2695 opchar = copy_to_mode_reg (char_mode, opchar);
2698 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2705 delete_insns_since (last);
2713 /* Write to one of the components of the complex value CPLX. Write VAL to
2714 the real part if IMAG_P is false, and the imaginary part if it's true. */
2717 write_complex_part (rtx cplx, rtx val, bool imag_p)
2719 enum machine_mode cmode;
2720 enum machine_mode imode;
2723 if (GET_CODE (cplx) == CONCAT)
2725 emit_move_insn (XEXP (cplx, imag_p), val);
2729 cmode = GET_MODE (cplx);
2730 imode = GET_MODE_INNER (cmode);
2731 ibitsize = GET_MODE_BITSIZE (imode);
2733 /* For MEMs simplify_gen_subreg may generate an invalid new address
2734 because, e.g., the original address is considered mode-dependent
2735 by the target, which restricts simplify_subreg from invoking
2736 adjust_address_nv. Instead of preparing fallback support for an
2737 invalid address, we call adjust_address_nv directly. */
2740 emit_move_insn (adjust_address_nv (cplx, imode,
2741 imag_p ? GET_MODE_SIZE (imode) : 0),
2746 /* If the sub-object is at least word sized, then we know that subregging
2747 will work. This special case is important, since store_bit_field
2748 wants to operate on integer modes, and there's rarely an OImode to
2749 correspond to TCmode. */
2750 if (ibitsize >= BITS_PER_WORD
2751 /* For hard regs we have exact predicates. Assume we can split
2752 the original object if it spans an even number of hard regs.
2753 This special case is important for SCmode on 64-bit platforms
2754 where the natural size of floating-point regs is 32-bit. */
2756 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2757 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2759 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2760 imag_p ? GET_MODE_SIZE (imode) : 0);
2763 emit_move_insn (part, val);
2767 /* simplify_gen_subreg may fail for sub-word MEMs. */
2768 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2771 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
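/* Example: writing the imaginary part of a DCmode pseudo on a 64-bit
   target has ibitsize == 64 >= BITS_PER_WORD, so the subreg path above
   reduces it to a DFmode move at byte offset 8; a sub-word SCmode MEM
   instead reaches the store_bit_field call here.  */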
2774 /* Extract one of the components of the complex value CPLX. Extract the
2775 real part if IMAG_P is false, and the imaginary part if it's true. */
2778 read_complex_part (rtx cplx, bool imag_p)
2780 enum machine_mode cmode, imode;
2783 if (GET_CODE (cplx) == CONCAT)
2784 return XEXP (cplx, imag_p);
2786 cmode = GET_MODE (cplx);
2787 imode = GET_MODE_INNER (cmode);
2788 ibitsize = GET_MODE_BITSIZE (imode);
2790 /* Special case reads from complex constants that got spilled to memory. */
2791 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2793 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2794 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2796 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2797 if (CONSTANT_CLASS_P (part))
2798 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2802 /* For MEMs simplify_gen_subreg may generate an invalid new address
2803 because, e.g., the original address is considered mode-dependent
2804 by the target, which restricts simplify_subreg from invoking
2805 adjust_address_nv. Instead of preparing fallback support for an
2806 invalid address, we call adjust_address_nv directly. */
2808 return adjust_address_nv (cplx, imode,
2809 imag_p ? GET_MODE_SIZE (imode) : 0);
2811 /* If the sub-object is at least word sized, then we know that subregging
2812 will work. This special case is important, since extract_bit_field
2813 wants to operate on integer modes, and there's rarely an OImode to
2814 correspond to TCmode. */
2815 if (ibitsize >= BITS_PER_WORD
2816 /* For hard regs we have exact predicates. Assume we can split
2817 the original object if it spans an even number of hard regs.
2818 This special case is important for SCmode on 64-bit platforms
2819 where the natural size of floating-point regs is 32-bit. */
2821 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2822 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2824 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2825 imag_p ? GET_MODE_SIZE (imode) : 0);
2829 /* simplify_gen_subreg may fail for sub-word MEMs. */
2830 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2833 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2834 true, NULL_RTX, imode, imode);
2837 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2838 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2839 represented in NEW_MODE. If FORCE is true, this will never happen, as
2840 we'll force-create a SUBREG if needed. */
2843 emit_move_change_mode (enum machine_mode new_mode,
2844 enum machine_mode old_mode, rtx x, bool force)
2850 /* We don't have to worry about changing the address since the
2851 size in bytes is supposed to be the same. */
2852 if (reload_in_progress)
2854 /* Copy the MEM to change the mode and move any
2855 substitutions from the old MEM to the new one. */
2856 ret = adjust_address_nv (x, new_mode, 0);
2857 copy_replacements (x, ret);
2860 ret = adjust_address (x, new_mode, 0);
2864 /* Note that we do want simplify_subreg's behavior of validating
2865 that the new mode is ok for a hard register. If we were to use
2866 simplify_gen_subreg, we would create the subreg, but would
2867 probably run into the target not being able to implement it. */
2868 /* Except, of course, when FORCE is true, when this is exactly what
2869 we want, which is needed for CCmodes on some targets. */
2871 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2873 ret = simplify_subreg (new_mode, x, old_mode, 0);
2879 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2880 an integer mode of the same size as MODE. Returns the instruction
2881 emitted, or NULL if such a move could not be generated. */
2884 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2886 enum machine_mode imode;
2887 enum insn_code code;
2889 /* There must exist a mode of the exact size we require. */
2890 imode = int_mode_for_mode (mode);
2891 if (imode == BLKmode)
2894 /* The target must support moves in this mode. */
2895 code = mov_optab->handlers[imode].insn_code;
2896 if (code == CODE_FOR_nothing)
2899 x = emit_move_change_mode (imode, mode, x, force);
2902 y = emit_move_change_mode (imode, mode, y, force);
2905 return emit_insn (GEN_FCN (code) (x, y));
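/* E.g. a DFmode move on a target lacking a movdf pattern (but having
   movdi) is rewritten here as a DImode move of the same bytes, assuming
   int_mode_for_mode (DFmode) yields DImode on that target.  */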
2908 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2909 Return an equivalent MEM that does not use an auto-increment. */
2912 emit_move_resolve_push (enum machine_mode mode, rtx x)
2914 enum rtx_code code = GET_CODE (XEXP (x, 0));
2915 HOST_WIDE_INT adjust;
2918 adjust = GET_MODE_SIZE (mode);
2919 #ifdef PUSH_ROUNDING
2920 adjust = PUSH_ROUNDING (adjust);
2922 if (code == PRE_DEC || code == POST_DEC)
2924 else if (code == PRE_MODIFY || code == POST_MODIFY)
2926 rtx expr = XEXP (XEXP (x, 0), 1);
2929 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2930 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2931 val = INTVAL (XEXP (expr, 1));
2932 if (GET_CODE (expr) == MINUS)
2934 gcc_assert (adjust == val || adjust == -val);
2938 /* Do not use anti_adjust_stack, since we don't want to update
2939 stack_pointer_delta. */
2940 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2941 GEN_INT (adjust), stack_pointer_rtx,
2942 0, OPTAB_LIB_WIDEN);
2943 if (temp != stack_pointer_rtx)
2944 emit_move_insn (stack_pointer_rtx, temp);
2951 temp = stack_pointer_rtx;
2956 temp = plus_constant (stack_pointer_rtx, -adjust);
2962 return replace_equiv_address (x, temp);
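/* Sketch: for (mem:SI (pre_dec (reg sp))) with PUSH_ROUNDING the identity,
   the code above emits an explicit sp -= 4 and returns (mem:SI (reg sp)),
   which an ordinary move can store through; the offset differs for the
   POST_* codes and when PUSH_ROUNDING pads the size.  */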
2965 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2966 X is known to satisfy push_operand, and MODE is known to be complex.
2967 Returns the last instruction emitted. */
2970 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2972 enum machine_mode submode = GET_MODE_INNER (mode);
2975 #ifdef PUSH_ROUNDING
2976 unsigned int submodesize = GET_MODE_SIZE (submode);
2978 /* In case we output to the stack, but the size is smaller than the
2979 machine can push exactly, we need to use move instructions. */
2980 if (PUSH_ROUNDING (submodesize) != submodesize)
2982 x = emit_move_resolve_push (mode, x);
2983 return emit_move_insn (x, y);
2987 /* Note that the real part always precedes the imag part in memory
2988 regardless of the machine's endianness. */
2989 switch (GET_CODE (XEXP (x, 0)))
3003 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004 read_complex_part (y, imag_first));
3005 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3006 read_complex_part (y, !imag_first));
3009 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3010 MODE is known to be complex. Returns the last instruction emitted. */
3013 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3017 /* Need to take special care for pushes, to maintain proper ordering
3018 of the data, and possibly extra padding. */
3019 if (push_operand (x, mode))
3020 return emit_move_complex_push (mode, x, y);
3022 /* See if we can coerce the target into moving both values at once. */
3024 /* Move floating point as parts. */
3025 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3026 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3028 /* Not possible if the values are inherently not adjacent. */
3029 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3031 /* Is possible if both are registers (or subregs of registers). */
3032 else if (register_operand (x, mode) && register_operand (y, mode))
3034 /* If one of the operands is a memory, and alignment constraints
3035 are friendly enough, we may be able to do combined memory operations.
3036 We do not attempt this if Y is a constant because that combination is
3037 usually better with the by-parts thing below. */
3038 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3039 && (!STRICT_ALIGNMENT
3040 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3049 /* For memory to memory moves, optimal behavior can be had with the
3050 existing block move logic. */
3051 if (MEM_P (x) && MEM_P (y))
3053 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3054 BLOCK_OP_NO_LIBCALL);
3055 return get_last_insn ();
3058 ret = emit_move_via_integer (mode, x, y, true);
3063 /* Show the output dies here. This is necessary for SUBREGs
3064 of pseudos since we cannot track their lifetimes correctly;
3065 hard regs shouldn't appear here except as return values. */
3066 if (!reload_completed && !reload_in_progress
3067 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3068 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3070 write_complex_part (x, read_complex_part (y, false), false);
3071 write_complex_part (x, read_complex_part (y, true), true);
3072 return get_last_insn ();
3075 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3076 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3079 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3083 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3086 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3087 if (code != CODE_FOR_nothing)
3089 x = emit_move_change_mode (CCmode, mode, x, true);
3090 y = emit_move_change_mode (CCmode, mode, y, true);
3091 return emit_insn (GEN_FCN (code) (x, y));
3095 /* Otherwise, find the MODE_INT mode of the same width. */
3096 ret = emit_move_via_integer (mode, x, y, false);
3097 gcc_assert (ret != NULL);
3101 /* Return true if word I of OP lies entirely in the
3102 undefined bits of a paradoxical subreg. */
3105 undefined_operand_subword_p (rtx op, int i)
3107 enum machine_mode innermode, innermostmode;
3109 if (GET_CODE (op) != SUBREG)
3111 innermode = GET_MODE (op);
3112 innermostmode = GET_MODE (SUBREG_REG (op));
3113 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3114 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3115 memory, except for a paradoxical subreg where we define
3116 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3118 if (SUBREG_BYTE (op) == 0
3119 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3121 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3122 if (WORDS_BIG_ENDIAN)
3123 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3124 if (BYTES_BIG_ENDIAN)
3125 offset += difference % UNITS_PER_WORD;
3127 if (offset >= GET_MODE_SIZE (innermostmode)
3128 || offset <= -GET_MODE_SIZE (word_mode))
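/* For instance, on a 32-bit little-endian target, word 1 of
   (subreg:DI (reg:SI ...) 0) computes offset == 4, which is past the
   4-byte inner mode, so the word is entirely undefined and no move need
   be emitted for it.  */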
3133 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3134 MODE is any multi-word or full-word mode that lacks a move_insn
3135 pattern. Note that you will get better code if you define such
3136 patterns, even if they must turn into multiple assembler instructions. */
3139 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3146 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3148 /* If X is a push on the stack, do the push now and replace
3149 X with a reference to the stack pointer. */
3150 if (push_operand (x, mode))
3151 x = emit_move_resolve_push (mode, x);
3153 /* If we are in reload, see if either operand is a MEM whose address
3154 is scheduled for replacement. */
3155 if (reload_in_progress && MEM_P (x)
3156 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3157 x = replace_equiv_address_nv (x, inner);
3158 if (reload_in_progress && MEM_P (y)
3159 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3160 y = replace_equiv_address_nv (y, inner);
3164 need_clobber = false;
3166 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3169 rtx xpart = operand_subword (x, i, 1, mode);
3172 /* Do not generate code for a move if it would come entirely
3173 from the undefined bits of a paradoxical subreg. */
3174 if (undefined_operand_subword_p (y, i))
3177 ypart = operand_subword (y, i, 1, mode);
3179 /* If we can't get a part of Y, put Y into memory if it is a
3180 constant. Otherwise, force it into a register. Then we must
3181 be able to get a part of Y. */
3182 if (ypart == 0 && CONSTANT_P (y))
3184 y = use_anchored_address (force_const_mem (mode, y));
3185 ypart = operand_subword (y, i, 1, mode);
3187 else if (ypart == 0)
3188 ypart = operand_subword_force (y, i, mode);
3190 gcc_assert (xpart && ypart);
3192 need_clobber |= (GET_CODE (xpart) == SUBREG);
3194 last_insn = emit_move_insn (xpart, ypart);
3200 /* Show the output dies here. This is necessary for SUBREGs
3201 of pseudos since we cannot track their lifetimes correctly;
3202 hard regs shouldn't appear here except as return values.
3203 We never want to emit such a clobber after reload. */
3205 && ! (reload_in_progress || reload_completed)
3206 && need_clobber != 0)
3207 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3214 /* Low level part of emit_move_insn.
3215 Called just like emit_move_insn, but assumes X and Y
3216 are basically valid. */
3219 emit_move_insn_1 (rtx x, rtx y)
3221 enum machine_mode mode = GET_MODE (x);
3222 enum insn_code code;
3224 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3226 code = mov_optab->handlers[mode].insn_code;
3227 if (code != CODE_FOR_nothing)
3228 return emit_insn (GEN_FCN (code) (x, y));
3230 /* Expand complex moves by moving real part and imag part. */
3231 if (COMPLEX_MODE_P (mode))
3232 return emit_move_complex (mode, x, y);
3234 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3236 rtx result = emit_move_via_integer (mode, x, y, true);
3238 /* If we can't find an integer mode, use multiple words. */
3242 return emit_move_multi_word (mode, x, y);
3245 if (GET_MODE_CLASS (mode) == MODE_CC)
3246 return emit_move_ccmode (mode, x, y);
3248 /* Try using a move pattern for the corresponding integer mode. This is
3249 only safe when simplify_subreg can convert MODE constants into integer
3250 constants. At present, it can only do this reliably if the value
3251 fits within a HOST_WIDE_INT. */
3252 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3254 rtx ret = emit_move_via_integer (mode, x, y, false);
3259 return emit_move_multi_word (mode, x, y);
3262 /* Generate code to copy Y into X.
3263 Both Y and X must have the same mode, except that
3264 Y can be a constant with VOIDmode.
3265 This mode cannot be BLKmode; use emit_block_move for that.
3267 Return the last instruction emitted. */
3270 emit_move_insn (rtx x, rtx y)
3272 enum machine_mode mode = GET_MODE (x);
3273 rtx y_cst = NULL_RTX;
3276 gcc_assert (mode != BLKmode
3277 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3282 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3283 && (last_insn = compress_float_constant (x, y)))
3288 if (!LEGITIMATE_CONSTANT_P (y))
3290 y = force_const_mem (mode, y);
3292 /* If the target's cannot_force_const_mem prevented the spill,
3293 assume that the target's move expanders will also take care
3294 of the non-legitimate constant. */
3298 y = use_anchored_address (y);
3302 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3305 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3306 && ! push_operand (x, GET_MODE (x)))
3308 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3309 x = validize_mem (x);
3312 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3314 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3315 y = validize_mem (y);
3317 gcc_assert (mode != BLKmode);
3319 last_insn = emit_move_insn_1 (x, y);
3321 if (y_cst && REG_P (x)
3322 && (set = single_set (last_insn)) != NULL_RTX
3323 && SET_DEST (set) == x
3324 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3325 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3330 /* If Y is representable exactly in a narrower mode, and the target can
3331 perform the extension directly from constant or memory, then emit the
3332 move as an extension. */
3335 compress_float_constant (rtx x, rtx y)
3337 enum machine_mode dstmode = GET_MODE (x);
3338 enum machine_mode orig_srcmode = GET_MODE (y);
3339 enum machine_mode srcmode;
3341 int oldcost, newcost;
3343 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3345 if (LEGITIMATE_CONSTANT_P (y))
3346 oldcost = rtx_cost (y, SET);
3348 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3350 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3351 srcmode != orig_srcmode;
3352 srcmode = GET_MODE_WIDER_MODE (srcmode))
3355 rtx trunc_y, last_insn;
3357 /* Skip if the target can't extend this way. */
3358 ic = can_extend_p (dstmode, srcmode, 0);
3359 if (ic == CODE_FOR_nothing)
3362 /* Skip if the narrowed value isn't exact. */
3363 if (! exact_real_truncate (srcmode, &r))
3366 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3368 if (LEGITIMATE_CONSTANT_P (trunc_y))
3370 /* Skip if the target needs extra instructions to perform the extension. */
3372 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3374 /* This is valid, but may not be cheaper than the original. */
3375 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3376 if (oldcost < newcost)
3379 else if (float_extend_from_mem[dstmode][srcmode])
3381 trunc_y = force_const_mem (srcmode, trunc_y);
3382 /* This is valid, but may not be cheaper than the original. */
3383 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3384 if (oldcost < newcost)
3386 trunc_y = validize_mem (trunc_y);
3391 /* For CSE's benefit, force the compressed constant pool entry
3392 into a new pseudo. This constant may be used in different modes,
3393 and if not, combine will put things back together for us. */
3394 trunc_y = force_reg (srcmode, trunc_y);
3395 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3396 last_insn = get_last_insn ();
3399 set_unique_reg_note (last_insn, REG_EQUAL, y);
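/* Example: loading the DFmode constant 1.0 on a target with extendsfdf2
   becomes an SFmode constant load plus a float-extend when rtx_cost says
   that is cheaper; 0.1 is not compressed because its SFmode truncation is
   inexact.  */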
3407 /* Pushing data onto the stack. */
3409 /* Push a block of length SIZE (perhaps variable)
3410 and return an rtx to address the beginning of the block.
3411 The value may be virtual_outgoing_args_rtx.
3413 EXTRA is the number of bytes of padding to push in addition to SIZE.
3414 BELOW nonzero means this padding comes at low addresses;
3415 otherwise, the padding comes at high addresses. */
3418 push_block (rtx size, int extra, int below)
3422 size = convert_modes (Pmode, ptr_mode, size, 1);
3423 if (CONSTANT_P (size))
3424 anti_adjust_stack (plus_constant (size, extra));
3425 else if (REG_P (size) && extra == 0)
3426 anti_adjust_stack (size);
3429 temp = copy_to_mode_reg (Pmode, size);
3431 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3432 temp, 0, OPTAB_LIB_WIDEN);
3433 anti_adjust_stack (temp);
3436 #ifndef STACK_GROWS_DOWNWARD
3442 temp = virtual_outgoing_args_rtx;
3443 if (extra != 0 && below)
3444 temp = plus_constant (temp, extra);
3448 if (GET_CODE (size) == CONST_INT)
3449 temp = plus_constant (virtual_outgoing_args_rtx,
3450 -INTVAL (size) - (below ? 0 : extra));
3451 else if (extra != 0 && !below)
3452 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3453 negate_rtx (Pmode, plus_constant (size, extra)));
3455 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3456 negate_rtx (Pmode, size));
3459 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3462 #ifdef PUSH_ROUNDING
3464 /* Emit a single push insn. */
3467 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3470 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3472 enum insn_code icode;
3473 insn_operand_predicate_fn pred;
3475 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3476 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3477 a MEM representing the push operation to the move expander. */
3478 icode = push_optab->handlers[(int) mode].insn_code;
3479 if (icode != CODE_FOR_nothing)
3481 if (((pred = insn_data[(int) icode].operand[0].predicate)
3482 && !((*pred) (x, mode))))
3483 x = force_reg (mode, x);
3484 emit_insn (GEN_FCN (icode) (x));
3487 if (GET_MODE_SIZE (mode) == rounded_size)
3488 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3489 /* If we are to pad downward, adjust the stack pointer first and
3490 then store X into the stack location using an offset. This is
3491 because emit_move_insn does not know how to pad; it does not have access to type. */
3493 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3495 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3496 HOST_WIDE_INT offset;
3498 emit_move_insn (stack_pointer_rtx,
3499 expand_binop (Pmode,
3500 #ifdef STACK_GROWS_DOWNWARD
3506 GEN_INT (rounded_size),
3507 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3509 offset = (HOST_WIDE_INT) padding_size;
3510 #ifdef STACK_GROWS_DOWNWARD
3511 if (STACK_PUSH_CODE == POST_DEC)
3512 /* We have already decremented the stack pointer, so get the previous value. */
3514 offset += (HOST_WIDE_INT) rounded_size;
3516 if (STACK_PUSH_CODE == POST_INC)
3517 /* We have already incremented the stack pointer, so get the previous value. */
3519 offset -= (HOST_WIDE_INT) rounded_size;
3521 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3525 #ifdef STACK_GROWS_DOWNWARD
3526 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3527 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3528 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3530 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3531 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3532 GEN_INT (rounded_size));
3534 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3537 dest = gen_rtx_MEM (mode, dest_addr);
3541 set_mem_attributes (dest, type, 1);
3543 if (flag_optimize_sibling_calls)
3544 /* Function incoming arguments may overlap with sibling call
3545 outgoing arguments and we cannot allow reordering of reads
3546 from function arguments with stores to outgoing arguments
3547 of sibling calls. */
3548 set_mem_alias_set (dest, 0);
3550 emit_move_insn (dest, x);
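/* On a STACK_GROWS_DOWNWARD target with PRE_DEC pushes and no push
   pattern, an SImode push thus reaches the move expander as
     (set (mem:SI (pre_dec (reg sp))) x)
   which the target's movsi pattern turns into an actual push insn.  */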
3554 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3556 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3558 SIZE is an rtx for the size of data to be copied (in bytes),
3559 needed only if X is BLKmode.
3561 ALIGN (in bits) is maximum alignment we can assume.
3563 If PARTIAL and REG are both nonzero, then copy that many of the first
3564 bytes of X into registers starting with REG, and push the rest of X.
3565 The amount of space pushed is decreased by PARTIAL bytes.
3566 REG must be a hard register in this case.
3567 If REG is zero but PARTIAL is not, take all other actions for an
3568 argument partially in registers, but do not actually load any registers.
3571 EXTRA is the amount in bytes of extra space to leave next to this arg.
3572 This is ignored if an argument block has already been allocated.
3574 On a machine that lacks real push insns, ARGS_ADDR is the address of
3575 the bottom of the argument block for this call. We use indexing off there
3576 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3577 argument block has not been preallocated.
3579 ARGS_SO_FAR is the size of args previously pushed for this call.
3581 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3582 for arguments passed in registers. If nonzero, it will be the number
3583 of bytes required. */
3586 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3587 unsigned int align, int partial, rtx reg, int extra,
3588 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3592 enum direction stack_direction
3593 #ifdef STACK_GROWS_DOWNWARD
3599 /* Decide where to pad the argument: `downward' for below,
3600 `upward' for above, or `none' for don't pad it.
3601 Default is below for small data on big-endian machines; else above. */
3602 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3604 /* Invert direction if stack is post-decrement. FIXME: why? */
3606 if (STACK_PUSH_CODE == POST_DEC)
3607 if (where_pad != none)
3608 where_pad = (where_pad == downward ? upward : downward);
3612 if (mode == BLKmode)
3614 /* Copy a block into the stack, entirely or partially. */
3621 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3622 used = partial - offset;
3626 /* USED is now the # of bytes we need not copy to the stack
3627 because registers will take care of them. */
3630 xinner = adjust_address (xinner, BLKmode, used);
3632 /* If the partial register-part of the arg counts in its stack size,
3633 skip the part of stack space corresponding to the registers.
3634 Otherwise, start copying to the beginning of the stack space,
3635 by setting SKIP to 0. */
3636 skip = (reg_parm_stack_space == 0) ? 0 : used;
3638 #ifdef PUSH_ROUNDING
3639 /* Do it with several push insns if that doesn't take lots of insns
3640 and if there is no difficulty with push insns that skip bytes
3641 on the stack for alignment purposes. */
3644 && GET_CODE (size) == CONST_INT
3646 && MEM_ALIGN (xinner) >= align
3647 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3648 /* Here we avoid the case of a structure whose weak alignment
3649 forces many pushes of a small amount of data,
3650 and such small pushes do rounding that causes trouble. */
3651 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3652 || align >= BIGGEST_ALIGNMENT
3653 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3654 == (align / BITS_PER_UNIT)))
3655 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3657 /* Push padding now if padding above and stack grows down,
3658 or if padding below and stack grows up.
3659 But if space already allocated, this has already been done. */
3660 if (extra && args_addr == 0
3661 && where_pad != none && where_pad != stack_direction)
3662 anti_adjust_stack (GEN_INT (extra));
3664 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3667 #endif /* PUSH_ROUNDING */
3671 /* Otherwise make space on the stack and copy the data
3672 to the address of that space. */
3674 /* Deduct words put into registers from the size we must copy. */
3677 if (GET_CODE (size) == CONST_INT)
3678 size = GEN_INT (INTVAL (size) - used);
3680 size = expand_binop (GET_MODE (size), sub_optab, size,
3681 GEN_INT (used), NULL_RTX, 0,
3685 /* Get the address of the stack space.
3686 In this case, we do not deal with EXTRA separately.
3687 A single stack adjust will do. */
3690 temp = push_block (size, extra, where_pad == downward);
3693 else if (GET_CODE (args_so_far) == CONST_INT)
3694 temp = memory_address (BLKmode,
3695 plus_constant (args_addr,
3696 skip + INTVAL (args_so_far)));
3698 temp = memory_address (BLKmode,
3699 plus_constant (gen_rtx_PLUS (Pmode,
3704 if (!ACCUMULATE_OUTGOING_ARGS)
3706 /* If the source is referenced relative to the stack pointer,
3707 copy it to another register to stabilize it. We do not need
3708 to do this if we know that we won't be changing sp. */
3710 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3711 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3712 temp = copy_to_reg (temp);
3715 target = gen_rtx_MEM (BLKmode, temp);
3717 /* We do *not* set_mem_attributes here, because incoming arguments
3718 may overlap with sibling call outgoing arguments and we cannot
3719 allow reordering of reads from function arguments with stores
3720 to outgoing arguments of sibling calls. We do, however, want
3721 to record the alignment of the stack slot. */
3722 /* ALIGN may well be better aligned than TYPE, e.g. due to
3723 PARM_BOUNDARY. Assume the caller isn't lying. */
3724 set_mem_align (target, align);
3726 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3729 else if (partial > 0)
3731 /* Scalar partly in registers. */
3733 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3736 /* # bytes of start of argument
3737 that we must make space for but need not store. */
3738 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3739 int args_offset = INTVAL (args_so_far);
3742 /* Push padding now if padding above and stack grows down,
3743 or if padding below and stack grows up.
3744 But if space already allocated, this has already been done. */
3745 if (extra && args_addr == 0
3746 && where_pad != none && where_pad != stack_direction)
3747 anti_adjust_stack (GEN_INT (extra));
3749 /* If we make space by pushing it, we might as well push
3750 the real data. Otherwise, we can leave OFFSET nonzero
3751 and leave the space uninitialized. */
3755 /* Now NOT_STACK gets the number of words that we don't need to
3756 allocate on the stack. Convert OFFSET to words too. */
3757 not_stack = (partial - offset) / UNITS_PER_WORD;
3758 offset /= UNITS_PER_WORD;
3760 /* If the partial register-part of the arg counts in its stack size,
3761 skip the part of stack space corresponding to the registers.
3762 Otherwise, start copying to the beginning of the stack space,
3763 by setting SKIP to 0. */
3764 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3766 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3767 x = validize_mem (force_const_mem (mode, x));
3769 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3770 SUBREGs of such registers are not allowed. */
3771 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3772 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3773 x = copy_to_reg (x);
3775 /* Loop over all the words allocated on the stack for this arg. */
3776 /* We can do it by words, because any scalar bigger than a word
3777 has a size a multiple of a word. */
3778 #ifndef PUSH_ARGS_REVERSED
3779 for (i = not_stack; i < size; i++)
3781 for (i = size - 1; i >= not_stack; i--)
3783 if (i >= not_stack + offset)
3784 emit_push_insn (operand_subword_force (x, i, mode),
3785 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3787 GEN_INT (args_offset + ((i - not_stack + skip)
3789 reg_parm_stack_space, alignment_pad);
3796 /* Push padding now if padding above and stack grows down,
3797 or if padding below and stack grows up.
3798 But if space already allocated, this has already been done. */
3799 if (extra && args_addr == 0
3800 && where_pad != none && where_pad != stack_direction)
3801 anti_adjust_stack (GEN_INT (extra));
3803 #ifdef PUSH_ROUNDING
3804 if (args_addr == 0 && PUSH_ARGS)
3805 emit_single_push_insn (mode, x, type);
3809 if (GET_CODE (args_so_far) == CONST_INT)
3811 = memory_address (mode,
3812 plus_constant (args_addr,
3813 INTVAL (args_so_far)));
3815 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3817 dest = gen_rtx_MEM (mode, addr);
3819 /* We do *not* set_mem_attributes here, because incoming arguments
3820 may overlap with sibling call outgoing arguments and we cannot
3821 allow reordering of reads from function arguments with stores
3822 to outgoing arguments of sibling calls. We do, however, want
3823 to record the alignment of the stack slot. */
3824 /* ALIGN may well be better aligned than TYPE, e.g. due to
3825 PARM_BOUNDARY. Assume the caller isn't lying. */
3826 set_mem_align (dest, align);
3828 emit_move_insn (dest, x);
3832 /* If part should go in registers, copy that part
3833 into the appropriate registers. Do this now, at the end,
3834 since mem-to-mem copies above may do function calls. */
3835 if (partial > 0 && reg != 0)
3837 /* Handle calls that pass values in multiple non-contiguous locations.
3838 The Irix 6 ABI has examples of this. */
3839 if (GET_CODE (reg) == PARALLEL)
3840 emit_group_load (reg, x, type, -1);
3843 gcc_assert (partial % UNITS_PER_WORD == 0);
3844 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3848 if (extra && args_addr == 0 && where_pad == stack_direction)
3849 anti_adjust_stack (GEN_INT (extra));
3851 if (alignment_pad && args_addr == 0)
3852 anti_adjust_stack (alignment_pad);
3855 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3859 get_subtarget (rtx x)
3863 /* Only registers can be subtargets. */
3865 /* Don't use hard regs to avoid extending their life. */
3866 || REGNO (x) < FIRST_PSEUDO_REGISTER
3870 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3871 FIELD is a bitfield. Returns true if the optimization was successful,
3872 and there's nothing else to do. */
3875 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3876 unsigned HOST_WIDE_INT bitpos,
3877 enum machine_mode mode1, rtx str_rtx,
3880 enum machine_mode str_mode = GET_MODE (str_rtx);
3881 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3886 if (mode1 != VOIDmode
3887 || bitsize >= BITS_PER_WORD
3888 || str_bitsize > BITS_PER_WORD
3889 || TREE_SIDE_EFFECTS (to)
3890 || TREE_THIS_VOLATILE (to))
3894 if (!BINARY_CLASS_P (src)
3895 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3898 op0 = TREE_OPERAND (src, 0);
3899 op1 = TREE_OPERAND (src, 1);
3902 if (!operand_equal_p (to, op0, 0))
3905 if (MEM_P (str_rtx))
3907 unsigned HOST_WIDE_INT offset1;
3909 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3910 str_mode = word_mode;
3911 str_mode = get_best_mode (bitsize, bitpos,
3912 MEM_ALIGN (str_rtx), str_mode, 0);
3913 if (str_mode == VOIDmode)
3915 str_bitsize = GET_MODE_BITSIZE (str_mode);
3918 bitpos %= str_bitsize;
3919 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3920 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3922 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3925 /* If the bit field covers the whole REG/MEM, store_field
3926 will likely generate better code. */
3927 if (bitsize >= str_bitsize)
3930 /* We can't handle fields split across multiple entities. */
3931 if (bitpos + bitsize > str_bitsize)
3934 if (BYTES_BIG_ENDIAN)
3935 bitpos = str_bitsize - bitpos - bitsize;
3937 switch (TREE_CODE (src))
3941 /* For now, just optimize the case of the topmost bitfield
3942 where we don't need to do any masking and also
3943 1 bit bitfields where xor can be used.
3944 We might win by one instruction for the other bitfields
3945 too if insv/extv instructions aren't used, so that
3946 can be added later. */
3947 if (bitpos + bitsize != str_bitsize
3948 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3951 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3952 value = convert_modes (str_mode,
3953 TYPE_MODE (TREE_TYPE (op1)), value,
3954 TYPE_UNSIGNED (TREE_TYPE (op1)));
3956 /* We may be accessing data outside the field, which means
3957 we can alias adjacent data. */
3958 if (MEM_P (str_rtx))
3960 str_rtx = shallow_copy_rtx (str_rtx);
3961 set_mem_alias_set (str_rtx, 0);
3962 set_mem_expr (str_rtx, 0);
3965 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3966 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3968 value = expand_and (str_mode, value, const1_rtx, NULL);
3971 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3972 build_int_cst (NULL_TREE, bitpos),
3974 result = expand_binop (str_mode, binop, str_rtx,
3975 value, str_rtx, 1, OPTAB_WIDEN);
3976 if (result != str_rtx)
3977 emit_move_insn (str_rtx, result);
3982 if (TREE_CODE (op1) != INTEGER_CST)
3984 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3985 value = convert_modes (GET_MODE (str_rtx),
3986 TYPE_MODE (TREE_TYPE (op1)), value,
3987 TYPE_UNSIGNED (TREE_TYPE (op1)));
3989 /* We may be accessing data outside the field, which means
3990 we can alias adjacent data. */
3991 if (MEM_P (str_rtx))
3993 str_rtx = shallow_copy_rtx (str_rtx);
3994 set_mem_alias_set (str_rtx, 0);
3995 set_mem_expr (str_rtx, 0);
3998 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3999 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4001 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4003 value = expand_and (GET_MODE (str_rtx), value, mask,
4006 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4007 build_int_cst (NULL_TREE, bitpos),
4009 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4010 value, str_rtx, 1, OPTAB_WIDEN);
4011 if (result != str_rtx)
4012 emit_move_insn (str_rtx, result);
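/* Rough sketch of the transformation: for
     struct { unsigned f : 1; } *p;  ...  p->f |= 1;
   the code above ORs the shifted constant directly into the word
   containing the field, avoiding a separate extract/modify/insert
   sequence.  */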
4023 /* Expand an assignment that stores the value of FROM into TO. */
4026 expand_assignment (tree to, tree from)
4031 /* Don't crash if the lhs of the assignment was erroneous. */
4032 if (TREE_CODE (to) == ERROR_MARK)
4034 result = expand_normal (from);
4038 /* Optimize away no-op moves without side-effects. */
4039 if (operand_equal_p (to, from, 0))
4042 /* Assignment of a structure component needs special treatment
4043 if the structure component's rtx is not simply a MEM.
4044 Assignment of an array element at a constant index, and assignment of
4045 an array element in an unaligned packed structure field, has the same problem. */
4047 if (handled_component_p (to)
4048 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4050 enum machine_mode mode1;
4051 HOST_WIDE_INT bitsize, bitpos;
4058 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4059 &unsignedp, &volatilep, true);
4061 /* If we are going to use store_bit_field and extract_bit_field,
4062 make sure to_rtx will be safe for multiple use. */
4064 to_rtx = expand_normal (tem);
4070 if (!MEM_P (to_rtx))
4072 /* We can get constant negative offsets into arrays with broken
4073 user code. Translate this to a trap instead of ICEing. */
4074 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4075 expand_builtin_trap ();
4076 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4079 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4080 #ifdef POINTERS_EXTEND_UNSIGNED
4081 if (GET_MODE (offset_rtx) != Pmode)
4082 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4084 if (GET_MODE (offset_rtx) != ptr_mode)
4085 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4088 /* A constant address in TO_RTX can have VOIDmode, we must not try
4089 to call force_reg for that case. Avoid that case. */
4091 && GET_MODE (to_rtx) == BLKmode
4092 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4094 && (bitpos % bitsize) == 0
4095 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4096 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4098 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4102 to_rtx = offset_address (to_rtx, offset_rtx,
4103 highest_pow2_factor_for_target (to,
4107 /* Handle expand_expr of a complex value returning a CONCAT. */
4108 if (GET_CODE (to_rtx) == CONCAT)
4110 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4112 gcc_assert (bitpos == 0);
4113 result = store_expr (from, to_rtx, false);
4117 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4118 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4125 /* If the field is at offset zero, we could have been given the
4126 DECL_RTX of the parent struct. Don't munge it. */
4127 to_rtx = shallow_copy_rtx (to_rtx);
4129 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4131 /* Deal with volatile and readonly fields. The former is only
4132 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4134 MEM_VOLATILE_P (to_rtx) = 1;
4135 if (component_uses_parent_alias_set (to))
4136 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4139 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4143 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4144 TREE_TYPE (tem), get_alias_set (to));
4148 preserve_temp_slots (result);
4154 /* If the rhs is a function call and its value is not an aggregate,
4155 call the function before we start to compute the lhs.
4156 This is needed for correct code for cases such as
4157 val = setjmp (buf) on machines where reference to val
4158 requires loading up part of an address in a separate insn.
4160 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4161 since it might be a promoted variable where the zero- or sign- extension
4162 needs to be done. Handling this in the normal way is safe because no
4163 computation is done before the call. */
4164 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4165 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4166 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4167 && REG_P (DECL_RTL (to))))
4172 value = expand_normal (from);
4174 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4176 /* Handle calls that return values in multiple non-contiguous locations.
4177 The Irix 6 ABI has examples of this. */
4178 if (GET_CODE (to_rtx) == PARALLEL)
4179 emit_group_load (to_rtx, value, TREE_TYPE (from),
4180 int_size_in_bytes (TREE_TYPE (from)));
4181 else if (GET_MODE (to_rtx) == BLKmode)
4182 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4185 if (POINTER_TYPE_P (TREE_TYPE (to)))
4186 value = convert_memory_address (GET_MODE (to_rtx), value);
4187 emit_move_insn (to_rtx, value);
4189 preserve_temp_slots (to_rtx);
4195 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4196 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4199 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4201 /* Don't move directly into a return register. */
4202 if (TREE_CODE (to) == RESULT_DECL
4203 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4208 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4210 if (GET_CODE (to_rtx) == PARALLEL)
4211 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4212 int_size_in_bytes (TREE_TYPE (from)));
4214 emit_move_insn (to_rtx, temp);
4216 preserve_temp_slots (to_rtx);
4222 /* In case we are returning the contents of an object which overlaps
4223 the place the value is being stored, use a safe function when copying
4224 a value through a pointer into a structure value return block. */
4225 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4226 && current_function_returns_struct
4227 && !current_function_returns_pcc_struct)
4232 size = expr_size (from);
4233 from_rtx = expand_normal (from);
4235 emit_library_call (memmove_libfunc, LCT_NORMAL,
4236 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4237 XEXP (from_rtx, 0), Pmode,
4238 convert_to_mode (TYPE_MODE (sizetype),
4239 size, TYPE_UNSIGNED (sizetype)),
4240 TYPE_MODE (sizetype));
4242 preserve_temp_slots (to_rtx);
4248 /* Compute FROM and store the value in the rtx we got. */
4251 result = store_expr (from, to_rtx, 0);
4252 preserve_temp_slots (result);
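/* Illustrative sketch (added commentary, not part of the original
   source).  expand_assignment above is the RTL entry point for stores.
   A component store such as:

       struct point { int x, y; } p;
       p.y = 42;

   takes the handled_component_p path: get_inner_reference decomposes
   `p.y' into the containing object plus BITSIZE/BITPOS, and the value
   is committed with store_field (or, for |=/^= of a bit-field, the
   bitfield assignment fast path above).  */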
4258 /* Generate code for computing expression EXP,
4259 and storing the value into TARGET.
4261 If the mode is BLKmode then we may return TARGET itself.
4262 It turns out that in BLKmode it doesn't cause a problem,
4263 because C has no operators that could combine two different
4264 assignments into the same BLKmode object with different values
4265 with no sequence point. Will other languages need this to be more thorough?
4268 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4269 stack, and block moves may need to be treated specially. */
4272 store_expr (tree exp, rtx target, int call_param_p)
4275 rtx alt_rtl = NULL_RTX;
4276 int dont_return_target = 0;
4278 if (VOID_TYPE_P (TREE_TYPE (exp)))
4280 /* C++ can generate ?: expressions with a throw expression in one
4281 branch and an rvalue in the other. Here, we resolve attempts to
4282 store the throw expression's nonexistent result. */
4283 gcc_assert (!call_param_p);
4284 expand_expr (exp, const0_rtx, VOIDmode, 0);
4287 if (TREE_CODE (exp) == COMPOUND_EXPR)
4289 /* Perform first part of compound expression, then assign from second part. */
4291 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4292 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4293 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4295 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4297 /* For a conditional expression, get a safe form of the target. Then
4298 test the condition, doing the appropriate assignment on either
4299 side. This avoids the creation of unnecessary temporaries.
4300 For non-BLKmode, it is more efficient not to do this. */
4302 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4304 do_pending_stack_adjust ();
4306 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4307 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4308 emit_jump_insn (gen_jump (lab2));
4311 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4317 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4318 /* If this is a scalar in a register that is stored in a wider mode
4319 than the declared mode, compute the result into its declared mode
4320 and then convert to the wider mode. Our value is the computed expression. */
4323 rtx inner_target = 0;
4325 /* We can do the conversion inside EXP, which will often result
4326 in some optimizations. Do the conversion in two steps: first
4327 change the signedness, if needed, then the extend. But don't
4328 do this if the type of EXP is a subtype of something else
4329 since then the conversion might involve more than just
4330 converting modes. */
4331 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4332 && TREE_TYPE (TREE_TYPE (exp)) == 0
4333 && (!lang_hooks.reduce_bit_field_operations
4334 || (GET_MODE_PRECISION (GET_MODE (target))
4335 == TYPE_PRECISION (TREE_TYPE (exp)))))
4337 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4338 != SUBREG_PROMOTED_UNSIGNED_P (target))
4340 (lang_hooks.types.signed_or_unsigned_type
4341 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4343 exp = fold_convert (lang_hooks.types.type_for_mode
4344 (GET_MODE (SUBREG_REG (target)),
4345 SUBREG_PROMOTED_UNSIGNED_P (target)),
4348 inner_target = SUBREG_REG (target);
4351 temp = expand_expr (exp, inner_target, VOIDmode,
4352 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4354 /* If TEMP is a VOIDmode constant, use convert_modes to make
4355 sure that we properly convert it. */
4356 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4358 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4359 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4360 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4361 GET_MODE (target), temp,
4362 SUBREG_PROMOTED_UNSIGNED_P (target));
4365 convert_move (SUBREG_REG (target), temp,
4366 SUBREG_PROMOTED_UNSIGNED_P (target));
4372 temp = expand_expr_real (exp, target, GET_MODE (target),
4374 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4376 /* Return TARGET if it's a specified hardware register.
4377 If TARGET is a volatile mem ref, either return TARGET
4378 or return a reg copied *from* TARGET; ANSI requires this.
4380 Otherwise, if TEMP is not TARGET, return TEMP
4381 if it is constant (for efficiency),
4382 or if we really want the correct value. */
4383 if (!(target && REG_P (target)
4384 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4385 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4386 && ! rtx_equal_p (temp, target)
4387 && CONSTANT_P (temp))
4388 dont_return_target = 1;
4391 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4392 the same as that of TARGET, adjust the constant. This is needed, for
4393 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4395 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4396 && TREE_CODE (exp) != ERROR_MARK
4397 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4398 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4399 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4401 /* If value was not generated in the target, store it there.
4402 Convert the value to TARGET's type first if necessary and emit the
4403 pending incrementations that have been queued when expanding EXP.
4404 Note that we cannot emit the whole queue blindly because this will
4405 effectively disable the POST_INC optimization later.
4407 If TEMP and TARGET compare equal according to rtx_equal_p, but
4408 one or both of them are volatile memory refs, we have to distinguish
4410 - expand_expr has used TARGET. In this case, we must not generate
4411 another copy. This can be detected by TARGET being equal according to ==.
4413 - expand_expr has not used TARGET - that means that the source just
4414 happens to have the same RTX form. Since temp will have been created
4415 by expand_expr, it will compare unequal according to == .
4416 We must generate a copy in this case, to reach the correct number
4417 of volatile memory references. */
4419 if ((! rtx_equal_p (temp, target)
4420 || (temp != target && (side_effects_p (temp)
4421 || side_effects_p (target))))
4422 && TREE_CODE (exp) != ERROR_MARK
4423 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4424 but TARGET is not a valid memory reference, TEMP will differ
4425 from TARGET although it is really the same location. */
4426 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4427 /* If there's nothing to copy, don't bother. Don't call
4428 expr_size unless necessary, because for some front-ends (C++) the
4429 expr_size hook must not be given objects that are not
4430 supposed to be bit-copied or bit-initialized. */
4431 && expr_size (exp) != const0_rtx)
4433 if (GET_MODE (temp) != GET_MODE (target)
4434 && GET_MODE (temp) != VOIDmode)
4436 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4437 if (dont_return_target)
4439 /* In this case, we will return TEMP,
4440 so make sure it has the proper mode.
4441 But don't forget to store the value into TARGET. */
4442 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4443 emit_move_insn (target, temp);
4445 else if (GET_MODE (target) == BLKmode)
4446 emit_block_move (target, temp, expr_size (exp),
4448 ? BLOCK_OP_CALL_PARM
4449 : BLOCK_OP_NORMAL));
4451 convert_move (target, temp, unsignedp);
4454 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4456 /* Handle copying a string constant into an array. The string
4457 constant may be shorter than the array. So copy just the string's
4458 actual length, and clear the rest. First get the size of the data
4459 type of the string, which is actually the size of the target. */
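/* Illustrative example (added commentary, not part of the original
   source): for

       char buf[16] = "hi";

   the STRING_CST has TREE_STRING_LENGTH == 3 (the characters plus the
   terminating NUL), so the code below block-copies 3 bytes and clears
   the remaining 13, instead of emitting a 16-byte copy from a padded
   constant.  */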
4460 rtx size = expr_size (exp);
4462 if (GET_CODE (size) == CONST_INT
4463 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4464 emit_block_move (target, temp, size,
4466 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4469 /* Compute the size of the data to copy from the string. */
4471 = size_binop (MIN_EXPR,
4472 make_tree (sizetype, size),
4473 size_int (TREE_STRING_LENGTH (exp)));
4475 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4477 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4480 /* Copy that much. */
4481 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4482 TYPE_UNSIGNED (sizetype));
4483 emit_block_move (target, temp, copy_size_rtx,
4485 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4487 /* Figure out how much is left in TARGET that we have to clear.
4488 Do all calculations in ptr_mode. */
4489 if (GET_CODE (copy_size_rtx) == CONST_INT)
4491 size = plus_constant (size, -INTVAL (copy_size_rtx));
4492 target = adjust_address (target, BLKmode,
4493 INTVAL (copy_size_rtx));
4497 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4498 copy_size_rtx, NULL_RTX, 0,
4501 #ifdef POINTERS_EXTEND_UNSIGNED
4502 if (GET_MODE (copy_size_rtx) != Pmode)
4503 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4504 TYPE_UNSIGNED (sizetype));
4507 target = offset_address (target, copy_size_rtx,
4508 highest_pow2_factor (copy_size));
4509 label = gen_label_rtx ();
4510 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4511 GET_MODE (size), 0, label);
4514 if (size != const0_rtx)
4515 clear_storage (target, size, BLOCK_OP_NORMAL);
4521 /* Handle calls that return values in multiple non-contiguous locations.
4522 The Irix 6 ABI has examples of this. */
4523 else if (GET_CODE (target) == PARALLEL)
4524 emit_group_load (target, temp, TREE_TYPE (exp),
4525 int_size_in_bytes (TREE_TYPE (exp)));
4526 else if (GET_MODE (temp) == BLKmode)
4527 emit_block_move (target, temp, expr_size (exp),
4529 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4532 temp = force_operand (temp, target);
4534 emit_move_insn (target, temp);
4541 /* Helper for categorize_ctor_elements. Identical interface. */
4544 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4545 HOST_WIDE_INT *p_elt_count,
4548 unsigned HOST_WIDE_INT idx;
4549 HOST_WIDE_INT nz_elts, elt_count;
4550 tree value, purpose;
4552 /* Whether CTOR is a valid constant initializer, in accordance with what
4553 initializer_constant_valid_p does. If inferred from the constructor
4554 elements, true until proven otherwise. */
4555 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4556 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4561 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4566 if (TREE_CODE (purpose) == RANGE_EXPR)
4568 tree lo_index = TREE_OPERAND (purpose, 0);
4569 tree hi_index = TREE_OPERAND (purpose, 1);
4571 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4572 mult = (tree_low_cst (hi_index, 1)
4573 - tree_low_cst (lo_index, 1) + 1);
4576 switch (TREE_CODE (value))
4580 HOST_WIDE_INT nz = 0, ic = 0;
4583 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4585 nz_elts += mult * nz;
4586 elt_count += mult * ic;
4588 if (const_from_elts_p && const_p)
4589 const_p = const_elt_p;
4595 if (!initializer_zerop (value))
4601 nz_elts += mult * TREE_STRING_LENGTH (value);
4602 elt_count += mult * TREE_STRING_LENGTH (value);
4606 if (!initializer_zerop (TREE_REALPART (value)))
4608 if (!initializer_zerop (TREE_IMAGPART (value)))
4616 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4618 if (!initializer_zerop (TREE_VALUE (v)))
4629 if (const_from_elts_p && const_p)
4630 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4637 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4638 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4641 bool clear_this = true;
4643 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4645 /* We don't expect more than one element of the union to be
4646 initialized. Not sure what we should do otherwise... */
4647 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4650 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4651 CONSTRUCTOR_ELTS (ctor),
4654 /* ??? We could look at each element of the union, and find the
4655 largest element, which would avoid comparing the size of the
4656 initialized element against any tail padding in the union.
4657 Doesn't seem worth the effort... */
4658 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4659 TYPE_SIZE (init_sub_type)) == 1)
4661 /* And now we have to find out if the element itself is fully
4662 constructed. E.g. for union { struct { int a, b; } s; } u
4663 = { .s = { .a = 1 } }. */
4664 if (elt_count == count_type_elements (init_sub_type, false))
4669 *p_must_clear = clear_this;
4672 *p_nz_elts += nz_elts;
4673 *p_elt_count += elt_count;
4678 /* Examine CTOR to discover:
4679 * how many scalar fields are set to nonzero values,
4680 and place it in *P_NZ_ELTS;
4681 * how many scalar fields in total are in CTOR,
4682 and place it in *P_ELT_COUNT.
4683 * if a type is a union, and the initializer from the constructor
4684 is not the largest element in the union, then set *p_must_clear.
4686 Return whether or not CTOR is a valid static constant initializer, the same
4687 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4690 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4691 HOST_WIDE_INT *p_elt_count,
4696 *p_must_clear = false;
4699 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
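/* Illustrative example (added commentary, not part of the original
   source): for

       struct { int a, b, c; } s = { 1, 0, 7 };

   categorize_ctor_elements sets *P_NZ_ELTS to 2 and *P_ELT_COUNT to 3,
   and returns true, since every element is a valid static constant.  */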
4702 /* Count the number of scalars in TYPE. Return -1 on overflow or if
4703 TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4704 array member at the end of the structure. */
4707 count_type_elements (tree type, bool allow_flexarr)
4709 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4710 switch (TREE_CODE (type))
4714 tree telts = array_type_nelts (type);
4715 if (telts && host_integerp (telts, 1))
4717 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4718 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4721 else if (max / n > m)
4729 HOST_WIDE_INT n = 0, t;
4732 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4733 if (TREE_CODE (f) == FIELD_DECL)
4735 t = count_type_elements (TREE_TYPE (f), false);
4738 /* Check for structures with flexible array member. */
4739 tree tf = TREE_TYPE (f);
4741 && TREE_CHAIN (f) == NULL
4742 && TREE_CODE (tf) == ARRAY_TYPE
4744 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4745 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4746 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4747 && int_size_in_bytes (type) >= 0)
4759 case QUAL_UNION_TYPE:
4766 return TYPE_VECTOR_SUBPARTS (type);
4773 /* APPLE LOCAL radar 5732232 - blocks */
4774 case BLOCK_POINTER_TYPE:
4776 case REFERENCE_TYPE:
4788 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4791 mostly_zeros_p (tree exp)
4793 if (TREE_CODE (exp) == CONSTRUCTOR)
4796 HOST_WIDE_INT nz_elts, count, elts;
4799 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4803 elts = count_type_elements (TREE_TYPE (exp), false);
4805 return nz_elts < elts / 4;
4808 return initializer_zerop (exp);
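/* Illustrative example (added commentary, not part of the original
   source): for

       int v[8] = { 0, 0, 5, 0, 0, 0, 0, 0 };

   only 1 of 8 elements is nonzero, so mostly_zeros_p returns 1; the
   caller can then clear the whole object and store just the nonzero
   element.  */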
4811 /* Return 1 if EXP contains all zeros. */
4814 all_zeros_p (tree exp)
4816 if (TREE_CODE (exp) == CONSTRUCTOR)
4819 HOST_WIDE_INT nz_elts, count;
4822 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4823 return nz_elts == 0;
4826 return initializer_zerop (exp);
4829 /* Helper function for store_constructor.
4830 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4831 TYPE is the type of the CONSTRUCTOR, not the element type.
4832 CLEARED is as for store_constructor.
4833 ALIAS_SET is the alias set to use for any stores.
4835 This provides a recursive shortcut back to store_constructor when it isn't
4836 necessary to go through store_field. This is so that we can pass through
4837 the cleared field to let store_constructor know that we may not have to
4838 clear a substructure if the outer structure has already been cleared. */
4841 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4842 HOST_WIDE_INT bitpos, enum machine_mode mode,
4843 tree exp, tree type, int cleared, int alias_set)
4845 if (TREE_CODE (exp) == CONSTRUCTOR
4846 /* We can only call store_constructor recursively if the size and
4847 bit position are on a byte boundary. */
4848 && bitpos % BITS_PER_UNIT == 0
4849 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4850 /* If we have a nonzero bitpos for a register target, then we just
4851 let store_field do the bitfield handling. This is unlikely to
4852 generate unnecessary clear instructions anyways. */
4853 && (bitpos == 0 || MEM_P (target)))
4857 = adjust_address (target,
4858 GET_MODE (target) == BLKmode
4860 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4861 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4864 /* Update the alias set, if required. */
4865 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4866 && MEM_ALIAS_SET (target) != 0)
4868 target = copy_rtx (target);
4869 set_mem_alias_set (target, alias_set);
4872 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4875 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4878 /* Store the value of constructor EXP into the rtx TARGET.
4879 TARGET is either a REG or a MEM; we know it cannot conflict, since
4880 safe_from_p has been called.
4881 CLEARED is true if TARGET is known to have been zero'd.
4882 SIZE is the number of bytes of TARGET we are allowed to modify: this
4883 may not be the same as the size of EXP if we are assigning to a field
4884 which has been packed to exclude padding bits. */
4887 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4889 tree type = TREE_TYPE (exp);
4890 #ifdef WORD_REGISTER_OPERATIONS
4891 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4894 switch (TREE_CODE (type))
4898 case QUAL_UNION_TYPE:
4900 unsigned HOST_WIDE_INT idx;
4903 /* If size is zero or the target is already cleared, do nothing. */
4904 if (size == 0 || cleared)
4906 /* We either clear the aggregate or indicate the value is dead. */
4907 else if ((TREE_CODE (type) == UNION_TYPE
4908 || TREE_CODE (type) == QUAL_UNION_TYPE)
4909 && ! CONSTRUCTOR_ELTS (exp))
4910 /* If the constructor is empty, clear the union. */
4912 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4916 /* If we are building a static constructor into a register,
4917 set the initial value as zero so we can fold the value into
4918 a constant. But if more than one register is involved,
4919 this probably loses. */
4920 else if (REG_P (target) && TREE_STATIC (exp)
4921 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4923 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4927 /* If the constructor has fewer fields than the structure or
4928 if we are initializing the structure to mostly zeros, clear
4929 the whole structure first. Don't do this if TARGET is a
4930 register whose mode size isn't equal to SIZE since
4931 clear_storage can't handle this case. */
4933 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4934 != fields_length (type))
4935 || mostly_zeros_p (exp))
4937 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4940 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4945 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4947 /* Store each element of the constructor into the
4948 corresponding field of TARGET. */
4949 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4951 enum machine_mode mode;
4952 HOST_WIDE_INT bitsize;
4953 HOST_WIDE_INT bitpos = 0;
4955 rtx to_rtx = target;
4957 /* Just ignore missing fields. We cleared the whole
4958 structure, above, if any fields are missing. */
4962 if (cleared && initializer_zerop (value))
4965 if (host_integerp (DECL_SIZE (field), 1))
4966 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4970 mode = DECL_MODE (field);
4971 if (DECL_BIT_FIELD (field))
4974 offset = DECL_FIELD_OFFSET (field);
4975 if (host_integerp (offset, 0)
4976 && host_integerp (bit_position (field), 0))
4978 bitpos = int_bit_position (field);
4982 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4989 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4990 make_tree (TREE_TYPE (exp),
4993 offset_rtx = expand_normal (offset);
4994 gcc_assert (MEM_P (to_rtx));
4996 #ifdef POINTERS_EXTEND_UNSIGNED
4997 if (GET_MODE (offset_rtx) != Pmode)
4998 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5000 if (GET_MODE (offset_rtx) != ptr_mode)
5001 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5004 to_rtx = offset_address (to_rtx, offset_rtx,
5005 highest_pow2_factor (offset));
5008 #ifdef WORD_REGISTER_OPERATIONS
5009 /* If this initializes a field that is smaller than a
5010 word, at the start of a word, try to widen it to a full
5011 word. This special case allows us to output C++ member
5012 function initializations in a form that the optimizers can understand. */
5015 && bitsize < BITS_PER_WORD
5016 && bitpos % BITS_PER_WORD == 0
5017 && GET_MODE_CLASS (mode) == MODE_INT
5018 && TREE_CODE (value) == INTEGER_CST
5020 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5022 tree type = TREE_TYPE (value);
5024 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5026 type = lang_hooks.types.type_for_size
5027 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5028 value = fold_convert (type, value);
5031 if (BYTES_BIG_ENDIAN)
5033 = fold_build2 (LSHIFT_EXPR, type, value,
5034 build_int_cst (type,
5035 BITS_PER_WORD - bitsize));
5036 bitsize = BITS_PER_WORD;
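/* Illustrative example (added commentary, not part of the original
   source): given

       struct S { char tag; char pad[3]; };
       struct S s = { 7 };

   the 8-bit store of `7' at the start of a word is widened to a full
   BITS_PER_WORD integer store (pre-shifted on big-endian targets),
   which the word-oriented RTL optimizers handle far better than a
   QImode bit-field store.  */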
5041 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5042 && DECL_NONADDRESSABLE_P (field))
5044 to_rtx = copy_rtx (to_rtx);
5045 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5048 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5049 value, type, cleared,
5050 get_alias_set (TREE_TYPE (field)));
5057 unsigned HOST_WIDE_INT i;
5060 tree elttype = TREE_TYPE (type);
5062 HOST_WIDE_INT minelt = 0;
5063 HOST_WIDE_INT maxelt = 0;
5065 domain = TYPE_DOMAIN (type);
5066 const_bounds_p = (TYPE_MIN_VALUE (domain)
5067 && TYPE_MAX_VALUE (domain)
5068 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5069 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5071 /* If we have constant bounds for the range of the type, get them. */
5074 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5075 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5078 /* If the constructor has fewer elements than the array, clear
5079 the whole array first. Similarly if this is a static
5080 constructor of a non-BLKmode object. */
5083 else if (REG_P (target) && TREE_STATIC (exp))
5087 unsigned HOST_WIDE_INT idx;
5089 HOST_WIDE_INT count = 0, zero_count = 0;
5090 need_to_clear = ! const_bounds_p;
5092 /* This loop is a more accurate version of the loop in
5093 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5094 is also needed to check for missing elements. */
5095 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5097 HOST_WIDE_INT this_node_count;
5102 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5104 tree lo_index = TREE_OPERAND (index, 0);
5105 tree hi_index = TREE_OPERAND (index, 1);
5107 if (! host_integerp (lo_index, 1)
5108 || ! host_integerp (hi_index, 1))
5114 this_node_count = (tree_low_cst (hi_index, 1)
5115 - tree_low_cst (lo_index, 1) + 1);
5118 this_node_count = 1;
5120 count += this_node_count;
5121 if (mostly_zeros_p (value))
5122 zero_count += this_node_count;
5125 /* Clear the entire array first if there are any missing
5126 elements, or if the incidence of zero elements is >= 75%. */
5129 && (count < maxelt - minelt + 1
5130 || 4 * zero_count >= 3 * count))
5134 if (need_to_clear && size > 0)
5137 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5139 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5143 if (!cleared && REG_P (target))
5144 /* Inform later passes that the old value is dead. */
5145 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5147 /* Store each element of the constructor into the
5148 corresponding element of TARGET, determined by counting the elements. */
5150 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5152 enum machine_mode mode;
5153 HOST_WIDE_INT bitsize;
5154 HOST_WIDE_INT bitpos;
5156 rtx xtarget = target;
5158 if (cleared && initializer_zerop (value))
5161 unsignedp = TYPE_UNSIGNED (elttype);
5162 mode = TYPE_MODE (elttype);
5163 if (mode == BLKmode)
5164 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5165 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5168 bitsize = GET_MODE_BITSIZE (mode);
5170 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5172 tree lo_index = TREE_OPERAND (index, 0);
5173 tree hi_index = TREE_OPERAND (index, 1);
5174 rtx index_r, pos_rtx;
5175 HOST_WIDE_INT lo, hi, count;
5178 /* If the range is constant and "small", unroll the loop. */
5180 && host_integerp (lo_index, 0)
5181 && host_integerp (hi_index, 0)
5182 && (lo = tree_low_cst (lo_index, 0),
5183 hi = tree_low_cst (hi_index, 0),
5184 count = hi - lo + 1,
5187 || (host_integerp (TYPE_SIZE (elttype), 1)
5188 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5191 lo -= minelt; hi -= minelt;
5192 for (; lo <= hi; lo++)
5194 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5197 && !MEM_KEEP_ALIAS_SET_P (target)
5198 && TREE_CODE (type) == ARRAY_TYPE
5199 && TYPE_NONALIASED_COMPONENT (type))
5201 target = copy_rtx (target);
5202 MEM_KEEP_ALIAS_SET_P (target) = 1;
5205 store_constructor_field
5206 (target, bitsize, bitpos, mode, value, type, cleared,
5207 get_alias_set (elttype));
5212 rtx loop_start = gen_label_rtx ();
5213 rtx loop_end = gen_label_rtx ();
5216 expand_normal (hi_index);
5217 unsignedp = TYPE_UNSIGNED (domain);
5219 index = build_decl (VAR_DECL, NULL_TREE, domain);
5222 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5224 SET_DECL_RTL (index, index_r);
5225 store_expr (lo_index, index_r, 0);
5227 /* Build the head of the loop. */
5228 do_pending_stack_adjust ();
5229 emit_label (loop_start);
5231 /* Assign value to element index. */
5233 fold_convert (ssizetype,
5234 fold_build2 (MINUS_EXPR,
5237 TYPE_MIN_VALUE (domain)));
5240 size_binop (MULT_EXPR, position,
5241 fold_convert (ssizetype,
5242 TYPE_SIZE_UNIT (elttype)));
5244 pos_rtx = expand_normal (position);
5245 xtarget = offset_address (target, pos_rtx,
5246 highest_pow2_factor (position));
5247 xtarget = adjust_address (xtarget, mode, 0);
5248 if (TREE_CODE (value) == CONSTRUCTOR)
5249 store_constructor (value, xtarget, cleared,
5250 bitsize / BITS_PER_UNIT);
5252 store_expr (value, xtarget, 0);
5254 /* Generate a conditional jump to exit the loop. */
5255 exit_cond = build2 (LT_EXPR, integer_type_node,
5257 jumpif (exit_cond, loop_end);
5259 /* Update the loop counter, and jump to the head of the loop. */
5261 expand_assignment (index,
5262 build2 (PLUS_EXPR, TREE_TYPE (index),
5263 index, integer_one_node));
5265 emit_jump (loop_start);
5267 /* Build the end of the loop. */
5268 emit_label (loop_end);
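/* Illustrative example (added commentary, not part of the original
   source): a GNU C designated range initializer whose bounds are not
   constant and "small", e.g.

       int t[1000] = { [2 ... 997] = x };

   cannot be unrolled, so the code above materializes the index in a
   register and emits an explicit store/increment/compare loop at the
   RTL level instead of 996 individual stores.  */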
5271 else if ((index != 0 && ! host_integerp (index, 0))
5272 || ! host_integerp (TYPE_SIZE (elttype), 1))
5277 index = ssize_int (1);
5280 index = fold_convert (ssizetype,
5281 fold_build2 (MINUS_EXPR,
5284 TYPE_MIN_VALUE (domain)));
5287 size_binop (MULT_EXPR, index,
5288 fold_convert (ssizetype,
5289 TYPE_SIZE_UNIT (elttype)));
5290 xtarget = offset_address (target,
5291 expand_normal (position),
5292 highest_pow2_factor (position));
5293 xtarget = adjust_address (xtarget, mode, 0);
5294 store_expr (value, xtarget, 0);
5299 bitpos = ((tree_low_cst (index, 0) - minelt)
5300 * tree_low_cst (TYPE_SIZE (elttype), 1));
5302 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5304 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5305 && TREE_CODE (type) == ARRAY_TYPE
5306 && TYPE_NONALIASED_COMPONENT (type))
5308 target = copy_rtx (target);
5309 MEM_KEEP_ALIAS_SET_P (target) = 1;
5311 store_constructor_field (target, bitsize, bitpos, mode, value,
5312 type, cleared, get_alias_set (elttype));
5320 unsigned HOST_WIDE_INT idx;
5321 constructor_elt *ce;
5325 tree elttype = TREE_TYPE (type);
5326 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5327 enum machine_mode eltmode = TYPE_MODE (elttype);
5328 HOST_WIDE_INT bitsize;
5329 HOST_WIDE_INT bitpos;
5330 rtvec vector = NULL;
5333 gcc_assert (eltmode != BLKmode);
5335 n_elts = TYPE_VECTOR_SUBPARTS (type);
5336 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5338 enum machine_mode mode = GET_MODE (target);
5340 icode = (int) vec_init_optab->handlers[mode].insn_code;
5341 if (icode != CODE_FOR_nothing)
5345 vector = rtvec_alloc (n_elts);
5346 for (i = 0; i < n_elts; i++)
5347 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5351 /* If the constructor has fewer elements than the vector,
5352 clear the whole vector first. Similarly if this is a static
5353 constructor of a non-BLKmode object. */
5356 else if (REG_P (target) && TREE_STATIC (exp))
5360 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5363 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5365 int n_elts_here = tree_low_cst
5366 (int_const_binop (TRUNC_DIV_EXPR,
5367 TYPE_SIZE (TREE_TYPE (value)),
5368 TYPE_SIZE (elttype), 0), 1);
5370 count += n_elts_here;
5371 if (mostly_zeros_p (value))
5372 zero_count += n_elts_here;
5375 /* Clear the entire vector first if there are any missing elements,
5376 or if the incidence of zero elements is >= 75%. */
5377 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5380 if (need_to_clear && size > 0 && !vector)
5383 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5385 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5389 /* Inform later passes that the old value is dead. */
5390 if (!cleared && !vector && REG_P (target))
5391 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5393 /* Store each element of the constructor into the corresponding
5394 element of TARGET, determined by counting the elements. */
5395 for (idx = 0, i = 0;
5396 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5397 idx++, i += bitsize / elt_size)
5399 HOST_WIDE_INT eltpos;
5400 tree value = ce->value;
5402 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5403 if (cleared && initializer_zerop (value))
5407 eltpos = tree_low_cst (ce->index, 1);
5413 /* Vector CONSTRUCTORs should only be built from smaller
5414 vectors in the case of BLKmode vectors. */
5415 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5416 RTVEC_ELT (vector, eltpos)
5417 = expand_normal (value);
5421 enum machine_mode value_mode =
5422 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5423 ? TYPE_MODE (TREE_TYPE (value))
5425 bitpos = eltpos * elt_size;
5426 store_constructor_field (target, bitsize, bitpos,
5427 value_mode, value, type,
5428 cleared, get_alias_set (elttype));
5433 emit_insn (GEN_FCN (icode)
5435 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5444 /* Store the value of EXP (an expression tree)
5445 into a subfield of TARGET which has mode MODE and occupies
5446 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5447 If MODE is VOIDmode, it means that we are storing into a bit-field.
5449 Always return const0_rtx unless we have something particular to return.
5452 TYPE is the type of the underlying object,
5454 ALIAS_SET is the alias set for the destination. This value will
5455 (in general) be different from that for TARGET, since TARGET is a
5456 reference to the containing structure. */
5459 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5460 enum machine_mode mode, tree exp, tree type, int alias_set)
5462 HOST_WIDE_INT width_mask = 0;
5464 if (TREE_CODE (exp) == ERROR_MARK)
5467 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5470 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5471 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5472 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5474 /* If we are storing into an unaligned field of an aligned union that is
5475 in a register, we may have the mode of TARGET being an integer mode but
5476 MODE == BLKmode. In that case, get an aligned object whose size and
5477 alignment are the same as TARGET and store TARGET into it (we can avoid
5478 the store if the field being stored is the entire width of TARGET). Then
5479 call ourselves recursively to store the field into a BLKmode version of
5480 that object. Finally, load from the object into TARGET. This is not
5481 very efficient in general, but should only be slightly more expensive
5482 than the otherwise-required unaligned accesses. Perhaps this can be
5483 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5484 twice, once with emit_move_insn and once via store_field. */
5487 && (REG_P (target) || GET_CODE (target) == SUBREG))
5489 rtx object = assign_temp (type, 0, 1, 1);
5490 rtx blk_object = adjust_address (object, BLKmode, 0);
5492 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5493 emit_move_insn (object, target);
5495 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5497 emit_move_insn (target, object);
5499 /* We want to return the BLKmode version of the data. */
5503 if (GET_CODE (target) == CONCAT)
5505 /* We're storing into a struct containing a single __complex. */
5507 gcc_assert (!bitpos);
5508 return store_expr (exp, target, 0);
5511 /* If the structure is in a register or if the component
5512 is a bit field, we cannot use addressing to access it.
5513 Use bit-field techniques or SUBREG to store in it. */
5515 if (mode == VOIDmode
5516 || (mode != BLKmode && ! direct_store[(int) mode]
5517 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5518 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5520 || GET_CODE (target) == SUBREG
5521 /* If the field isn't aligned enough to store as an ordinary memref,
5522 store it as a bit field. */
5524 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5525 || bitpos % GET_MODE_ALIGNMENT (mode))
5526 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5527 || (bitpos % BITS_PER_UNIT != 0)))
5528 /* If the RHS and field are a constant size and the size of the
5529 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5532 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5533 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5537 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5538 implies a mask operation. If the precision is the same size as
5539 the field we're storing into, that mask is redundant. This is
5540 particularly common with bit field assignments generated by the C front end. */
5542 if (TREE_CODE (exp) == NOP_EXPR)
5544 tree type = TREE_TYPE (exp);
5545 if (INTEGRAL_TYPE_P (type)
5546 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5547 && bitsize == TYPE_PRECISION (type))
5549 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5550 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5551 exp = TREE_OPERAND (exp, 0);
5555 temp = expand_normal (exp);
5557 /* If BITSIZE is narrower than the size of the type of EXP
5558 we will be narrowing TEMP. Normally, what's wanted are the
5559 low-order bits. However, if EXP's type is a record and this is
5560 a big-endian machine, we want the upper BITSIZE bits. */
5561 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5562 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5563 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5564 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5565 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5569 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5571 if (mode != VOIDmode && mode != BLKmode
5572 && mode != TYPE_MODE (TREE_TYPE (exp)))
5573 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5575 /* If the modes of TARGET and TEMP are both BLKmode, both
5576 must be in memory and BITPOS must be aligned on a byte
5577 boundary. If so, we simply do a block copy. */
5578 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5580 gcc_assert (MEM_P (target) && MEM_P (temp)
5581 && !(bitpos % BITS_PER_UNIT));
5583 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5584 emit_block_move (target, temp,
5585 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5592 /* Store the value in the bitfield. */
5593 store_bit_field (target, bitsize, bitpos, mode, temp);
5599 /* Now build a reference to just the desired component. */
5600 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5602 if (to_rtx == target)
5603 to_rtx = copy_rtx (to_rtx);
5605 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5606 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5607 set_mem_alias_set (to_rtx, alias_set);
5609 return store_expr (exp, to_rtx, 0);
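/* Illustrative example (added commentary, not part of the original
   source): a store to a bit-field member, e.g.

       struct s { unsigned a : 3, b : 5; } x;
       x.b = v;

   reaches store_field with BITSIZE == 5 and a layout-dependent BITPOS;
   the field cannot be addressed directly, so the expanded value is
   committed with store_bit_field rather than a plain move.  */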
5613 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5614 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5615 codes and find the ultimate containing object, which we return.
5617 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5618 bit position, and *PUNSIGNEDP to the signedness of the field.
5619 If the position of the field is variable, we store a tree
5620 giving the variable offset (in units) in *POFFSET.
5621 This offset is in addition to the bit position.
5622 If the position is not variable, we store 0 in *POFFSET.
5624 If any of the extraction expressions is volatile,
5625 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5627 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5628 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5631 If the field describes a variable-sized object, *PMODE is set to
5632 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5633 this case, but the address of the object can be found.
5635 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5636 look through nodes that serve as markers of a greater alignment than
5637 the one that can be deduced from the expression. These nodes make it
5638 possible for front-ends to prevent temporaries from being created by
5639 the middle-end on alignment considerations. For that purpose, the
5640 normal operating mode at high-level is to always pass FALSE so that
5641 the ultimate containing object is really returned; moreover, the
5642 associated predicate handled_component_p will always return TRUE
5643 on these nodes, thus indicating that they are essentially handled
5644 by get_inner_reference. TRUE should only be passed when the caller
5645 is scanning the expression in order to build another representation
5646 and specifically knows how to handle these nodes; as such, this is
5647 the normal operating mode in the RTL expanders. */
5650 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5651 HOST_WIDE_INT *pbitpos, tree *poffset,
5652 enum machine_mode *pmode, int *punsignedp,
5653 int *pvolatilep, bool keep_aligning)
5656 enum machine_mode mode = VOIDmode;
5657 tree offset = size_zero_node;
5658 tree bit_offset = bitsize_zero_node;
5660 /* First get the mode, signedness, and size. We do this from just the
5661 outermost expression. */
5662 if (TREE_CODE (exp) == COMPONENT_REF)
5664 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5665 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5666 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5668 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5670 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5672 size_tree = TREE_OPERAND (exp, 1);
5673 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5677 mode = TYPE_MODE (TREE_TYPE (exp));
5678 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5680 if (mode == BLKmode)
5681 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5683 *pbitsize = GET_MODE_BITSIZE (mode);
5688 if (! host_integerp (size_tree, 1))
5689 mode = BLKmode, *pbitsize = -1;
5691 *pbitsize = tree_low_cst (size_tree, 1);
5696 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5697 and find the ultimate containing object. */
5700 switch (TREE_CODE (exp))
5703 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5704 TREE_OPERAND (exp, 2));
5709 tree field = TREE_OPERAND (exp, 1);
5710 tree this_offset = component_ref_field_offset (exp);
5712 /* If this field hasn't been filled in yet, don't go past it.
5713 This should only happen when folding expressions made during
5714 type construction. */
5715 if (this_offset == 0)
5718 offset = size_binop (PLUS_EXPR, offset, this_offset);
5719 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5720 DECL_FIELD_BIT_OFFSET (field));
5722 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5727 case ARRAY_RANGE_REF:
5729 tree index = TREE_OPERAND (exp, 1);
5730 tree low_bound = array_ref_low_bound (exp);
5731 tree unit_size = array_ref_element_size (exp);
5733 /* We assume all arrays have sizes that are a multiple of a byte.
5734 First subtract the lower bound, if any, in the type of the
5735 index, then convert to sizetype and multiply by the size of
5736 the array element. */
5737 if (! integer_zerop (low_bound))
5738 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5741 offset = size_binop (PLUS_EXPR, offset,
5742 size_binop (MULT_EXPR,
5743 fold_convert (sizetype, index),
5752 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5753 bitsize_int (*pbitsize));
5756 case VIEW_CONVERT_EXPR:
5757 if (keep_aligning && STRICT_ALIGNMENT
5758 && (TYPE_ALIGN (TREE_TYPE (exp))
5759 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5760 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5761 < BIGGEST_ALIGNMENT)
5762 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5763 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5771 /* If any reference in the chain is volatile, the effect is volatile. */
5772 if (TREE_THIS_VOLATILE (exp))
5775 exp = TREE_OPERAND (exp, 0);
5779 /* If OFFSET is constant, see if we can return the whole thing as a
5780 constant bit position. Make sure to handle overflow during this conversion. */
5782 if (host_integerp (offset, 0))
5784 double_int tem = double_int_mul (tree_to_double_int (offset),
5785 uhwi_to_double_int (BITS_PER_UNIT));
5786 tem = double_int_add (tem, tree_to_double_int (bit_offset));
5787 if (double_int_fits_in_shwi_p (tem))
5789 *pbitpos = double_int_to_shwi (tem);
5790 *poffset = NULL_TREE;
5795 /* Otherwise, split it up. */
5796 *pbitpos = tree_low_cst (bit_offset, 0);
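/* Usage sketch (added commentary, not part of the original source; the
   local names below are hypothetical).  A typical caller decomposes a
   reference such as `a.b.c[i]' like this; the constant part of the
   displacement comes back in BITPOS and the variable part (from the
   index `i') as a byte-offset tree in *POFFSET:  */
#if 0
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                   &mode, &unsignedp, &volatilep, false);
#endif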
5802 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5803 look for whether EXP or any nested component-refs within EXP is marked as PACKED. */
5807 contains_packed_reference (tree exp)
5809 bool packed_p = false;
5813 switch (TREE_CODE (exp))
5817 tree field = TREE_OPERAND (exp, 1);
5818 packed_p = DECL_PACKED (field)
5819 || TYPE_PACKED (TREE_TYPE (field))
5820 || TYPE_PACKED (TREE_TYPE (exp));
5828 case ARRAY_RANGE_REF:
5831 case VIEW_CONVERT_EXPR:
5837 exp = TREE_OPERAND (exp, 0);
5843 /* Return a tree of sizetype representing the size, in bytes, of the element
5844 of EXP, an ARRAY_REF. */
5847 array_ref_element_size (tree exp)
5849 tree aligned_size = TREE_OPERAND (exp, 3);
5850 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5852 /* If a size was specified in the ARRAY_REF, it's the size measured
5853 in alignment units of the element type. So multiply by that value. */
5856 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5857 sizetype from another type of the same width and signedness. */
5858 if (TREE_TYPE (aligned_size) != sizetype)
5859 aligned_size = fold_convert (sizetype, aligned_size);
5860 return size_binop (MULT_EXPR, aligned_size,
5861 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5864 /* Otherwise, take the size from that of the element type. Substitute
5865 any PLACEHOLDER_EXPR that we have. */
5867 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5870 /* Return a tree representing the lower bound of the array mentioned in
5871 EXP, an ARRAY_REF. */
5874 array_ref_low_bound (tree exp)
5876 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5878 /* If a lower bound is specified in EXP, use it. */
5879 if (TREE_OPERAND (exp, 2))
5880 return TREE_OPERAND (exp, 2);
5882 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5883 substituting for a PLACEHOLDER_EXPR as needed. */
5884 if (domain_type && TYPE_MIN_VALUE (domain_type))
5885 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5887 /* Otherwise, return a zero of the appropriate type. */
5888 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5891 /* Return a tree representing the upper bound of the array mentioned in
5892 EXP, an ARRAY_REF. */
5895 array_ref_up_bound (tree exp)
5897 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5899 /* If there is a domain type and it has an upper bound, use it, substituting
5900 for a PLACEHOLDER_EXPR as needed. */
5901 if (domain_type && TYPE_MAX_VALUE (domain_type))
5902 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5904 /* Otherwise fail. */
5908 /* Return a tree representing the offset, in bytes, of the field referenced
5909 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5912 component_ref_field_offset (tree exp)
5914 tree aligned_offset = TREE_OPERAND (exp, 2);
5915 tree field = TREE_OPERAND (exp, 1);
5917 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5918 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
5922 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5923 sizetype from another type of the same width and signedness. */
5924 if (TREE_TYPE (aligned_offset) != sizetype)
5925 aligned_offset = fold_convert (sizetype, aligned_offset);
5926 return size_binop (MULT_EXPR, aligned_offset,
5927 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5930 /* Otherwise, take the offset from that of the field. Substitute
5931 any PLACEHOLDER_EXPR that we have. */
5933 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5936 /* Return 1 if T is an expression that get_inner_reference handles. */
5939 handled_component_p (tree t)
5941 switch (TREE_CODE (t))
5946 case ARRAY_RANGE_REF:
5947 case VIEW_CONVERT_EXPR:
5957 /* Given an rtx VALUE that may contain additions and multiplications, return
5958 an equivalent value that just refers to a register, memory, or constant.
5959 This is done by generating instructions to perform the arithmetic and
5960 returning a pseudo-register containing the value.
5962 The returned value may be a REG, SUBREG, MEM or constant. */
5965 force_operand (rtx value, rtx target)
5968 /* Use subtarget as the target for operand 0 of a binary operation. */
5969 rtx subtarget = get_subtarget (target);
5970 enum rtx_code code = GET_CODE (value);
5972 /* Check for a subreg applied to an expression produced by the loop optimizer. */
5974 && !REG_P (SUBREG_REG (value))
5975 && !MEM_P (SUBREG_REG (value)))
5977 value = simplify_gen_subreg (GET_MODE (value),
5978 force_reg (GET_MODE (SUBREG_REG (value)),
5979 force_operand (SUBREG_REG (value),
5981 GET_MODE (SUBREG_REG (value)),
5982 SUBREG_BYTE (value));
5983 code = GET_CODE (value);
5986 /* Check for a PIC address load. */
5987 if ((code == PLUS || code == MINUS)
5988 && XEXP (value, 0) == pic_offset_table_rtx
5989 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5990 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5991 || GET_CODE (XEXP (value, 1)) == CONST))
5994 subtarget = gen_reg_rtx (GET_MODE (value));
5995 emit_move_insn (subtarget, value);
5999 if (ARITHMETIC_P (value))
6001 op2 = XEXP (value, 1);
6002 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6004 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6007 op2 = negate_rtx (GET_MODE (value), op2);
6010 /* Check for an addition with OP2 a constant integer and our first
6011 operand a PLUS of a virtual register and something else. In that
6012 case, we want to emit the sum of the virtual register and the
6013 constant first and then add the other value. This allows virtual
6014 register instantiation to simply modify the constant rather than
6015 creating another one around this addition. */
6016 if (code == PLUS && GET_CODE (op2) == CONST_INT
6017 && GET_CODE (XEXP (value, 0)) == PLUS
6018 && REG_P (XEXP (XEXP (value, 0), 0))
6019 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6020 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6022 rtx temp = expand_simple_binop (GET_MODE (value), code,
6023 XEXP (XEXP (value, 0), 0), op2,
6024 subtarget, 0, OPTAB_LIB_WIDEN);
6025 return expand_simple_binop (GET_MODE (value), code, temp,
6026 force_operand (XEXP (XEXP (value,
6028 target, 0, OPTAB_LIB_WIDEN);
6031 op1 = force_operand (XEXP (value, 0), subtarget);
6032 op2 = force_operand (op2, NULL_RTX);
6036 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6038 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6039 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6040 target, 1, OPTAB_LIB_WIDEN);
6042 return expand_divmod (0,
6043 FLOAT_MODE_P (GET_MODE (value))
6044 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6045 GET_MODE (value), op1, op2, target, 0);
6048 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6052 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6056 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6060 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6061 target, 0, OPTAB_LIB_WIDEN);
6064 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6065 target, 1, OPTAB_LIB_WIDEN);
6068 if (UNARY_P (value))
6071 target = gen_reg_rtx (GET_MODE (value));
6072 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6079 case FLOAT_TRUNCATE:
6080 convert_move (target, op1, code == ZERO_EXTEND);
6085 expand_fix (target, op1, code == UNSIGNED_FIX);
6089 case UNSIGNED_FLOAT:
6090 expand_float (target, op1, code == UNSIGNED_FLOAT);
6094 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6098 #ifdef INSN_SCHEDULING
6099 /* On machines that have insn scheduling, we want all memory references to be
6100 explicit, so we need to deal with such paradoxical SUBREGs. */
6101 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6102 && (GET_MODE_SIZE (GET_MODE (value))
6103 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6105 = simplify_gen_subreg (GET_MODE (value),
6106 force_reg (GET_MODE (SUBREG_REG (value)),
6107 force_operand (SUBREG_REG (value),
6109 GET_MODE (SUBREG_REG (value)),
6110 SUBREG_BYTE (value));
6116 /* Subroutine of expand_expr: return nonzero iff there is no way that
6117 EXP can reference X, which is being modified. TOP_P is nonzero if this
6118 call is going to be used to determine whether we need a temporary
6119 for EXP, as opposed to a recursive call to this function.
6121 It is always safe for this routine to return zero since it merely
6122 searches for optimization opportunities. */
6125 safe_from_p (rtx x, tree exp, int top_p)
6131 /* If EXP has varying size, we MUST use a target since we currently
6132 have no way of allocating temporaries of variable size
6133 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6134 So we assume here that something at a higher level has prevented a
6135 clash. This is somewhat bogus, but the best we can do. Only
6136 do this when X is BLKmode and when we are at the top level. */
6137 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6138 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6139 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6140 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6141 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6143 && GET_MODE (x) == BLKmode)
6144 /* If X is in the outgoing argument area, it is always safe. */
6146 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6147 || (GET_CODE (XEXP (x, 0)) == PLUS
6148 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6151 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6152 find the underlying pseudo. */
6153 if (GET_CODE (x) == SUBREG)
6156 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6160 /* Now look at our tree code and possibly recurse. */
6161 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6163 case tcc_declaration:
6164 exp_rtl = DECL_RTL_IF_SET (exp);
6170 case tcc_exceptional:
6171 if (TREE_CODE (exp) == TREE_LIST)
6175 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6177 exp = TREE_CHAIN (exp);
6180 if (TREE_CODE (exp) != TREE_LIST)
6181 return safe_from_p (x, exp, 0);
6184 else if (TREE_CODE (exp) == CONSTRUCTOR)
6186 constructor_elt *ce;
6187 unsigned HOST_WIDE_INT idx;
6190 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6192 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6193 || !safe_from_p (x, ce->value, 0))
6197 else if (TREE_CODE (exp) == ERROR_MARK)
6198 return 1; /* An already-visited SAVE_EXPR? */
6203 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
6205 return (TREE_CODE (exp) != DECL_EXPR
6206 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6207 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6208 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6211 case tcc_comparison:
6212 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6217 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6219 case tcc_expression:
6221 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6222 the expression. If it is set, we conflict iff we are that rtx or
6223 both are in memory. Otherwise, we check all operands of the
6224 expression recursively. */
6226 switch (TREE_CODE (exp))
6229 /* If the operand is static or we are static, we can't conflict.
6230 Likewise if we don't conflict with the operand at all. */
6231 if (staticp (TREE_OPERAND (exp, 0))
6232 || TREE_STATIC (exp)
6233 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6236 /* Otherwise, the only way this can conflict is if we are taking
6237 the address of a DECL and that address is part of X, which is very rare. */
6239 exp = TREE_OPERAND (exp, 0);
6242 if (!DECL_RTL_SET_P (exp)
6243 || !MEM_P (DECL_RTL (exp)))
6246 exp_rtl = XEXP (DECL_RTL (exp), 0);
6250 case MISALIGNED_INDIRECT_REF:
6251 case ALIGN_INDIRECT_REF:
6254 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6255 get_alias_set (exp)))
6260 /* Assume that the call will clobber all hard registers and all of memory. */
6262 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6267 case WITH_CLEANUP_EXPR:
6268 case CLEANUP_POINT_EXPR:
6269 /* Lowered by gimplify.c. */
6273 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6279 /* If we have an rtx, we do not need to scan our operands. */
6283 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6284 for (i = 0; i < nops; i++)
6285 if (TREE_OPERAND (exp, i) != 0
6286 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6289 /* If this is a language-specific tree code, it may require
6290 special handling. */
6291 if ((unsigned int) TREE_CODE (exp)
6292 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6293 && !lang_hooks.safe_from_p (x, exp))
6298 /* Should never get a type here. */
6302 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6306 if (GET_CODE (exp_rtl) == SUBREG)
6308 exp_rtl = SUBREG_REG (exp_rtl);
6310 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6314 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6315 are memory and they conflict. */
6316 return ! (rtx_equal_p (x, exp_rtl)
6317 || (MEM_P (x) && MEM_P (exp_rtl)
6318 && true_dependence (exp_rtl, VOIDmode, x,
6319 rtx_addr_varies_p)));
6322 /* If we reach here, it is safe. */
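
/* Illustrative sketch (not part of GCC): what goes wrong when a target
   is reused for an expression that still references it. The names are
   hypothetical; "scratch" plays the role of X and eval_sum the role of
   expanding EXP into X. */
#if 0
#include <assert.h>

static int scratch;             /* the target, X */

static int
eval_sum (int *a)               /* expands "*a + 1 + *a" into X */
{
  scratch = *a + 1;             /* store the partial result in X... */
  return scratch + *a;          /* ...clobbering *a if it aliases X */
}

int
main (void)
{
  scratch = 5;
  /* Unsafe reuse: EXP references the target, so the answer is off.  */
  assert (eval_sum (&scratch) == 12 && 12 != 5 + 1 + 5);
  return 0;
}
#endif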
6327 /* Return the highest power of two that EXP is known to be a multiple of.
6328 This is used in updating alignment of MEMs in array references. */
6330 unsigned HOST_WIDE_INT
6331 highest_pow2_factor (tree exp)
6333 unsigned HOST_WIDE_INT c0, c1;
6335 switch (TREE_CODE (exp))
6338 /* We can find the lowest bit that's a one. If the low
6339 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6340 We need to handle this case since we can find it in a COND_EXPR,
6341 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6342 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6344 if (TREE_CONSTANT_OVERFLOW (exp))
6345 return BIGGEST_ALIGNMENT;
6348 /* Note: tree_low_cst is intentionally not used here;
6349 we don't care about the upper bits. */
6350 c0 = TREE_INT_CST_LOW (exp);
6352 return c0 ? c0 : BIGGEST_ALIGNMENT;
6356 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6357 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6358 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6359 return MIN (c0, c1);
6362 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6363 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6366 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6368 if (integer_pow2p (TREE_OPERAND (exp, 1))
6369 && host_integerp (TREE_OPERAND (exp, 1), 1))
6371 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6372 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6373 return MAX (1, c0 / c1);
6377 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6379 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6382 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6385 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6386 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6387 return MIN (c0, c1);
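
/* Illustrative sketch (not part of GCC): the INTEGER_CST case above
   amounts to isolating the lowest set bit, since c & -c is the largest
   power of two dividing c; PLUS_EXPR then combines operands with MIN. */
#if 0
#include <assert.h>

static unsigned long
highest_pow2 (unsigned long c)
{
  return c & -c;                /* lowest set bit */
}

int
main (void)
{
  assert (highest_pow2 (24) == 8);   /* 24 = 8 * 3 */
  assert (highest_pow2 (40) == 8);   /* 40 = 8 * 5 */
  /* For 24 + 40 the code above returns MIN (8, 8) = 8, a safe
     underestimate: the actual sum, 64, happens to be divisible by 64. */
  assert (highest_pow2 (24 + 40) == 64);
  return 0;
}
#endif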
6396 /* Similar, except that the alignment requirements of TARGET are
6397 taken into account. Assume it is at least as aligned as its
6398 type, unless it is a COMPONENT_REF in which case the layout of
6399 the structure gives the alignment. */
6401 static unsigned HOST_WIDE_INT
6402 highest_pow2_factor_for_target (tree target, tree exp)
6404 unsigned HOST_WIDE_INT target_align, factor;
6406 factor = highest_pow2_factor (exp);
6407 if (TREE_CODE (target) == COMPONENT_REF)
6408 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6410 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6411 return MAX (factor, target_align);
6414 /* Expands variable VAR. */
6417 expand_var (tree var)
6419 if (DECL_EXTERNAL (var))
6422 if (TREE_STATIC (var))
6423 /* If this is an inlined copy of a static local variable,
6424 look up the original decl. */
6425 var = DECL_ORIGIN (var);
6427 if (TREE_STATIC (var)
6428 ? !TREE_ASM_WRITTEN (var)
6429 : !DECL_RTL_SET_P (var))
6431 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6432 /* Should be ignored. */;
6433 else if (lang_hooks.expand_decl (var))
6435 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6437 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6438 rest_of_decl_compilation (var, 0, 0);
6440 /* No expansion needed. */
6441 gcc_assert (TREE_CODE (var) == TYPE_DECL
6442 || TREE_CODE (var) == CONST_DECL
6443 || TREE_CODE (var) == FUNCTION_DECL
6444 || TREE_CODE (var) == LABEL_DECL);
6448 /* Subroutine of expand_expr. Expand the two operands of a binary
6449 expression EXP0 and EXP1, placing the results in OP0 and OP1.
6450 The value may be stored in TARGET if TARGET is nonzero. The
6451 MODIFIER argument is as documented by expand_expr. */
6454 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6455 enum expand_modifier modifier)
6457 if (! safe_from_p (target, exp1, 1))
6459 if (operand_equal_p (exp0, exp1, 0))
6461 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6462 *op1 = copy_rtx (*op0);
6466 /* If we need to preserve evaluation order, copy exp0 into its own
6467 temporary variable so that it can't be clobbered by exp1. */
6468 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6469 exp0 = save_expr (exp0);
6470 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6471 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
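
/* Illustrative sketch (not part of GCC): why operand 0 is saved when
   operand 1 has side effects and left-to-right evaluation must be
   preserved. Hypothetical names. */
#if 0
static int x = 1;

static int
set_x (int v)
{
  x = v;
  return v;
}

static int
ordered_sum (void)
{
  int lhs = x;                  /* the save_expr snapshot of operand 0 */
  int rhs = set_x (10);         /* operand 1 clobbers x as a side effect */
  return lhs + rhs;             /* 11, not the 20 an unsaved x would give */
}
#endif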
6476 /* Return a MEM that contains constant EXP. DEFER is as for
6477 output_constant_def and MODIFIER is as for expand_expr. */
6480 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6484 mem = output_constant_def (exp, defer);
6485 if (modifier != EXPAND_INITIALIZER)
6486 mem = use_anchored_address (mem);
6490 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6491 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6494 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6495 enum expand_modifier modifier)
6497 rtx result, subtarget;
6499 HOST_WIDE_INT bitsize, bitpos;
6500 int volatilep, unsignedp;
6501 enum machine_mode mode1;
6503 /* If we are taking the address of a constant and are at the top level,
6504 we have to use output_constant_def since we can't call force_const_mem yet. */
6506 /* ??? This should be considered a front-end bug. We should not be
6507 generating ADDR_EXPR of something that isn't an LVALUE. The only
6508 exception here is STRING_CST. */
6509 if (TREE_CODE (exp) == CONSTRUCTOR
6510 || CONSTANT_CLASS_P (exp))
6511 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6513 /* Everything must be something allowed by is_gimple_addressable. */
6514 switch (TREE_CODE (exp))
6517 /* This case will happen via recursion for &a->b. */
6518 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6521 /* Recurse and make the output_constant_def clause above handle this. */
6522 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6526 /* The real part of the complex number is always first, therefore
6527 the address is the same as the address of the parent object. */
6530 inner = TREE_OPERAND (exp, 0);
6534 /* The imaginary part of the complex number is always second.
6535 The expression is therefore always offset by the size of the scalar type. */
6538 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6539 inner = TREE_OPERAND (exp, 0);
6543 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6545 expand_expr, as that can have various side effects; LABEL_DECLs, for
6545 example, may not have their DECL_RTL set yet. Assume language
6546 specific tree nodes can be expanded in some interesting way. */
6548 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6550 result = expand_expr (exp, target, tmode,
6551 modifier == EXPAND_INITIALIZER
6552 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6554 /* If the DECL isn't in memory, then the DECL wasn't properly
6555 marked TREE_ADDRESSABLE, which will be either a front-end
6556 or a tree optimizer bug. */
6557 gcc_assert (MEM_P (result));
6558 result = XEXP (result, 0);
6560 /* ??? Is this needed anymore? */
6561 if (DECL_P (exp) && !TREE_USED (exp))
6563 assemble_external (exp);
6564 TREE_USED (exp) = 1;
6567 if (modifier != EXPAND_INITIALIZER
6568 && modifier != EXPAND_CONST_ADDRESS)
6569 result = force_operand (result, target);
6573 /* Pass FALSE as the last argument to get_inner_reference although
6574 we are expanding to RTL. The rationale is that we know how to
6575 handle "aligning nodes" here: we can just bypass them because
6576 they won't change the final object whose address will be returned
6577 (they actually exist only for that purpose). */
6578 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6579 &mode1, &unsignedp, &volatilep, false);
6583 /* We must have made progress. */
6584 gcc_assert (inner != exp);
6586 subtarget = offset || bitpos ? NULL_RTX : target;
6587 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6593 if (modifier != EXPAND_NORMAL)
6594 result = force_operand (result, NULL);
6595 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6597 result = convert_memory_address (tmode, result);
6598 tmp = convert_memory_address (tmode, tmp);
6600 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6601 result = gen_rtx_PLUS (tmode, result, tmp);
6604 subtarget = bitpos ? NULL_RTX : target;
6605 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6606 1, OPTAB_LIB_WIDEN);
6612 /* Someone beforehand should have rejected taking the address
6613 of such an object. */
6614 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6616 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6617 if (modifier < EXPAND_SUM)
6618 result = force_operand (result, target);
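
/* Illustrative sketch (not part of GCC): the address arithmetic above
   in plain C. get_inner_reference splits an access into a base, a
   variable byte offset, and a constant bit position; the final address
   is base + offset + bitpos / BITS_PER_UNIT. */
#if 0
#include <stddef.h>
#include <limits.h>

struct s { char pad[3]; int field; };

static char *
field_address (struct s *base)
{
  size_t bitpos = offsetof (struct s, field) * CHAR_BIT; /* constant part */
  return (char *) base + bitpos / CHAR_BIT;              /* plus_constant */
}
#endif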
6624 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6625 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6628 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6629 enum expand_modifier modifier)
6631 enum machine_mode rmode;
6634 /* Target mode of VOIDmode says "whatever's natural". */
6635 if (tmode == VOIDmode)
6636 tmode = TYPE_MODE (TREE_TYPE (exp));
6638 /* We can get called with some Weird Things if the user does silliness
6639 like "(short) &a". In that case, convert_memory_address won't do
6640 the right thing, so ignore the given target mode. */
6641 if (tmode != Pmode && tmode != ptr_mode)
6644 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6647 /* Despite expand_expr's claim that TMODE may be ignored when not
6648 strictly convenient, things break if we don't honor it. Note
6649 that combined with the above, we only do this for pointer modes. */
6650 rmode = GET_MODE (result);
6651 if (rmode == VOIDmode)
6654 result = convert_memory_address (tmode, result);
6660 /* expand_expr: generate code for computing expression EXP.
6661 An rtx for the computed value is returned. The value is never null.
6662 In the case of a void EXP, const0_rtx is returned.
6664 The value may be stored in TARGET if TARGET is nonzero.
6665 TARGET is just a suggestion; callers must assume that
6666 the rtx returned may not be the same as TARGET.
6668 If TARGET is CONST0_RTX, it means that the value will be ignored.
6670 If TMODE is not VOIDmode, it suggests generating the
6671 result in mode TMODE. But this is done only when convenient.
6672 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6673 TMODE is just a suggestion; callers must assume that
6674 the rtx returned may not have mode TMODE.
6676 Note that TARGET may have neither TMODE nor MODE. In that case, it
6677 probably will not be used.
6679 If MODIFIER is EXPAND_SUM then when EXP is an addition
6680 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6681 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6682 products as above, or REG or MEM, or constant.
6683 Ordinarily in such cases we would output mul or add instructions
6684 and then return a pseudo reg containing the sum.
6686 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6687 it also marks a label as absolutely required (it can't be dead).
6688 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6689 This is used for outputting expressions used in initializers.
6691 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6692 with a constant address even if that address is not normally legitimate.
6693 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6695 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6696 a call parameter. Such targets require special care as we haven't yet
6697 marked TARGET so that it's safe from being trashed by libcalls. We
6698 don't want to use TARGET for anything but the final result;
6699 intermediate values must go elsewhere. Additionally, calls to
6700 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6702 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6703 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6704 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6705 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively. */
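
/* Illustrative sketch (not from GCC's sources as written): the calling
   convention the comment above describes. TARGET and TMODE are hints
   only; correct callers always use the returned rtx. */
#if 0
static rtx
expand_rhs (tree exp)
{
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  /* Never assume val equals the suggested target, or that it has the
     suggested mode; both are mere preferences.  */
  return val;
}
#endif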
6708 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6709 enum expand_modifier, rtx *);
6712 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6713 enum expand_modifier modifier, rtx *alt_rtl)
6716 rtx ret, last = NULL;
6718 /* Handle ERROR_MARK before anybody tries to access its type. */
6719 if (TREE_CODE (exp) == ERROR_MARK
6720 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6722 ret = CONST0_RTX (tmode);
6723 return ret ? ret : const0_rtx;
6726 if (flag_non_call_exceptions)
6728 rn = lookup_stmt_eh_region (exp);
6729 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6731 last = get_last_insn ();
6734 /* If this is an expression of some kind and it has an associated line
6735 number, then emit the line number before expanding the expression.
6737 We need to save and restore the file and line information so that
6738 errors discovered during expansion are emitted with the right
6739 information. It would be better if the diagnostic routines
6740 used the file/line information embedded in the tree nodes rather than globals. */
6742 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6744 location_t saved_location = input_location;
6745 input_location = EXPR_LOCATION (exp);
6746 emit_line_note (input_location);
6748 /* Record where the insns produced belong. */
6749 record_block_change (TREE_BLOCK (exp));
6751 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6753 input_location = saved_location;
6757 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6760 /* If using non-call exceptions, mark all insns that may trap.
6761 expand_call() will mark CALL_INSNs before we get to this code,
6762 but it doesn't handle libcalls, and these may trap. */
6766 for (insn = next_real_insn (last); insn;
6767 insn = next_real_insn (insn))
6769 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6770 /* If we want exceptions for non-call insns, any
6771 may_trap_p instruction may throw. */
6772 && GET_CODE (PATTERN (insn)) != CLOBBER
6773 && GET_CODE (PATTERN (insn)) != USE
6774 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6776 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6786 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6787 enum expand_modifier modifier, rtx *alt_rtl)
6789 rtx op0, op1, temp, decl_rtl;
6790 tree type = TREE_TYPE (exp);
6792 enum machine_mode mode;
6793 enum tree_code code = TREE_CODE (exp);
6795 rtx subtarget, original_target;
6797 tree context, subexp0, subexp1;
6798 bool reduce_bit_field = false;
6799 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6800 ? reduce_to_bit_field_precision ((expr), \
6805 mode = TYPE_MODE (type);
6806 unsignedp = TYPE_UNSIGNED (type);
6807 if (lang_hooks.reduce_bit_field_operations
6808 && TREE_CODE (type) == INTEGER_TYPE
6809 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6811 /* An operation in what may be a bit-field type needs the
6812 result to be reduced to the precision of the bit-field type,
6813 which is narrower than that of the type's mode. */
6814 reduce_bit_field = true;
6815 if (modifier == EXPAND_STACK_PARM)
6819 /* Use subtarget as the target for operand 0 of a binary operation. */
6820 subtarget = get_subtarget (target);
6821 original_target = target;
6822 ignore = (target == const0_rtx
6823 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6824 || code == CONVERT_EXPR || code == COND_EXPR
6825 || code == VIEW_CONVERT_EXPR)
6826 && TREE_CODE (type) == VOID_TYPE));
6828 /* If we are going to ignore this result, we need only do something
6829 if there is a side-effect somewhere in the expression. If there
6830 is, short-circuit the most common cases here. Note that we must
6831 not call expand_expr with anything but const0_rtx in case this
6832 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6836 if (! TREE_SIDE_EFFECTS (exp))
6839 /* Ensure we reference a volatile object even if the value is ignored, but
6840 don't do this if all we are doing is taking its address. */
6841 if (TREE_THIS_VOLATILE (exp)
6842 && TREE_CODE (exp) != FUNCTION_DECL
6843 && mode != VOIDmode && mode != BLKmode
6844 && modifier != EXPAND_CONST_ADDRESS)
6846 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6848 temp = copy_to_reg (temp);
6852 if (TREE_CODE_CLASS (code) == tcc_unary
6853 || code == COMPONENT_REF || code == INDIRECT_REF)
6854 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6857 else if (TREE_CODE_CLASS (code) == tcc_binary
6858 || TREE_CODE_CLASS (code) == tcc_comparison
6859 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6861 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6862 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6865 else if (code == BIT_FIELD_REF)
6867 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6868 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6869 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6881 tree function = decl_function_context (exp);
6883 temp = label_rtx (exp);
6884 temp = gen_rtx_LABEL_REF (Pmode, temp);
6886 if (function != current_function_decl
6888 LABEL_REF_NONLOCAL_P (temp) = 1;
6890 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6895 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6900 /* If a static var's type was incomplete when the decl was written,
6901 but the type is complete now, lay out the decl now. */
6902 if (DECL_SIZE (exp) == 0
6903 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6904 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6905 layout_decl (exp, 0);
6907 /* ... fall through ... */
6911 decl_rtl = DECL_RTL (exp);
6912 gcc_assert (decl_rtl);
6914 /* Ensure the variable is marked as used even if it doesn't go through
6915 a parser. If it hasn't been used yet, write out an external definition. */
6917 if (! TREE_USED (exp))
6919 assemble_external (exp);
6920 TREE_USED (exp) = 1;
6923 /* Show we haven't gotten RTL for this yet. */
6926 /* Variables inherited from containing functions should have
6927 been lowered by this point. */
6928 context = decl_function_context (exp);
6929 gcc_assert (!context
6930 || context == current_function_decl
6931 || TREE_STATIC (exp)
6932 /* ??? C++ creates functions that are not TREE_STATIC. */
6933 || TREE_CODE (exp) == FUNCTION_DECL);
6935 /* This is the case of an array whose size is to be determined
6936 from its initializer, while the initializer is still being parsed.
6939 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6940 temp = validize_mem (decl_rtl);
6942 /* If DECL_RTL is memory, we are in the normal case; if either
6943 the address is not valid or it is not a register and -fforce-addr
6944 is specified, get the address into a register. */
6946 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6949 *alt_rtl = decl_rtl;
6950 decl_rtl = use_anchored_address (decl_rtl);
6951 if (modifier != EXPAND_CONST_ADDRESS
6952 && modifier != EXPAND_SUM
6953 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6954 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6955 temp = replace_equiv_address (decl_rtl,
6956 copy_rtx (XEXP (decl_rtl, 0)));
6959 /* If we got something, return it. But first, set the alignment
6960 if the address is a register. */
6963 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6964 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6969 /* If the mode of DECL_RTL does not match that of the decl, it
6970 must be a promoted value. We return a SUBREG of the wanted mode,
6971 but mark it so that we know that it was already extended. */
6973 if (REG_P (decl_rtl)
6974 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6976 enum machine_mode pmode;
6978 /* Get the signedness used for this variable. Ensure we get the
6979 same mode we got when the variable was declared. */
6980 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6981 (TREE_CODE (exp) == RESULT_DECL
6982 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6983 gcc_assert (GET_MODE (decl_rtl) == pmode);
6985 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6986 SUBREG_PROMOTED_VAR_P (temp) = 1;
6987 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6994 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6995 TREE_INT_CST_HIGH (exp), mode);
6997 /* ??? If overflow is set, fold will have done an incomplete job,
6998 which can result in (plus xx (const_int 0)), which can get
6999 simplified by validate_replace_rtx during virtual register
7000 instantiation, which can result in unrecognizable insns.
7001 Avoid this by forcing all overflows into registers. */
7002 if (TREE_CONSTANT_OVERFLOW (exp)
7003 && modifier != EXPAND_INITIALIZER)
7004 temp = force_reg (mode, temp);
7010 tree tmp = NULL_TREE;
7011 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7012 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7013 return const_vector_from_tree (exp);
7014 if (GET_MODE_CLASS (mode) == MODE_INT)
7016 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7018 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7021 tmp = build_constructor_from_list (type,
7022 TREE_VECTOR_CST_ELTS (exp));
7023 return expand_expr (tmp, ignore ? const0_rtx : target,
7028 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7031 /* If optimized, generate immediate CONST_DOUBLE
7032 which will be turned into memory by reload if necessary.
7034 We used to force a register so that loop.c could see it. But
7035 this does not allow gen_* patterns to perform optimizations with
7036 the constants. It also produces two insns in cases like "x = 1.0;".
7037 On most machines, floating-point constants are not permitted in
7038 many insns, so we'd end up copying it to a register in any case.
7040 Now, we do the copying in expand_binop, if appropriate. */
7041 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7042 TYPE_MODE (TREE_TYPE (exp)));
7045 /* Handle evaluating a complex constant in a CONCAT target. */
7046 if (original_target && GET_CODE (original_target) == CONCAT)
7048 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7051 rtarg = XEXP (original_target, 0);
7052 itarg = XEXP (original_target, 1);
7054 /* Move the real and imaginary parts separately. */
7055 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7056 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7059 emit_move_insn (rtarg, op0);
7061 emit_move_insn (itarg, op1);
7063 return original_target;
7066 /* ... fall through ... */
7069 temp = expand_expr_constant (exp, 1, modifier);
7071 /* temp contains a constant address.
7072 On RISC machines where a constant address isn't valid,
7073 make some insns to get that address into a register. */
7074 if (modifier != EXPAND_CONST_ADDRESS
7075 && modifier != EXPAND_INITIALIZER
7076 && modifier != EXPAND_SUM
7077 && (! memory_address_p (mode, XEXP (temp, 0))
7078 || flag_force_addr))
7079 return replace_equiv_address (temp,
7080 copy_rtx (XEXP (temp, 0)));
7085 tree val = TREE_OPERAND (exp, 0);
7086 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7088 if (!SAVE_EXPR_RESOLVED_P (exp))
7090 /* We can indeed still hit this case, typically via builtin
7091 expanders calling save_expr immediately before expanding
7092 something. Assume this means that we only have to deal
7093 with non-BLKmode values. */
7094 gcc_assert (GET_MODE (ret) != BLKmode);
7096 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7097 DECL_ARTIFICIAL (val) = 1;
7098 DECL_IGNORED_P (val) = 1;
7099 TREE_OPERAND (exp, 0) = val;
7100 SAVE_EXPR_RESOLVED_P (exp) = 1;
7102 if (!CONSTANT_P (ret))
7103 ret = copy_to_reg (ret);
7104 SET_DECL_RTL (val, ret);
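
/* Illustrative sketch (not part of GCC): the source-level meaning of
   SAVE_EXPR. The first expansion computes and pins the value; later
   uses read the pinned copy instead of re-evaluating. */
#if 0
static int calls;

static int
side_effecting (void)
{
  return ++calls;
}

static int
use_twice (void)
{
  int saved = side_effecting ();  /* evaluated exactly once */
  return saved + saved;           /* both uses read the copy; calls == 1 */
}
#endif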
7111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7112 expand_goto (TREE_OPERAND (exp, 0));
7114 expand_computed_goto (TREE_OPERAND (exp, 0));
7118 /* If we don't need the result, just ensure we evaluate any subexpressions. */
7122 unsigned HOST_WIDE_INT idx;
7125 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7126 expand_expr (value, const0_rtx, VOIDmode, 0);
7131 /* Try to avoid creating a temporary at all. This is possible
7132 if all of the initializer is zero.
7133 FIXME: try to handle all [0..255] initializers we can handle with memset. */
7135 else if (TREE_STATIC (exp)
7136 && !TREE_ADDRESSABLE (exp)
7137 && target != 0 && mode == BLKmode
7138 && all_zeros_p (exp))
7140 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7144 /* All elts simple constants => refer to a constant in memory. But
7145 if this is a non-BLKmode mode, let it store a field at a time
7146 since that should make a CONST_INT or CONST_DOUBLE when we
7147 fold. Likewise, if we have a target we can use, it is best to
7148 store directly into the target unless the type is large enough
7149 that memcpy will be used. If we are making an initializer and
7150 all operands are constant, put it in memory as well.
7152 FIXME: Avoid trying to fill vector constructors piece-meal.
7153 Output them with output_constant_def below unless we're sure
7154 they're zeros. This should go away when vector initializers
7155 are treated like VECTOR_CST instead of arrays.
7157 else if ((TREE_STATIC (exp)
7158 && ((mode == BLKmode
7159 && ! (target != 0 && safe_from_p (target, exp, 1)))
7160 || TREE_ADDRESSABLE (exp)
7161 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7162 && (! MOVE_BY_PIECES_P
7163 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7165 && ! mostly_zeros_p (exp))))
7166 || ((modifier == EXPAND_INITIALIZER
7167 || modifier == EXPAND_CONST_ADDRESS)
7168 && TREE_CONSTANT (exp)))
7170 rtx constructor = expand_expr_constant (exp, 1, modifier);
7172 if (modifier != EXPAND_CONST_ADDRESS
7173 && modifier != EXPAND_INITIALIZER
7174 && modifier != EXPAND_SUM)
7175 constructor = validize_mem (constructor);
7181 /* Handle calls that pass values in multiple non-contiguous
7182 locations. The Irix 6 ABI has examples of this. */
7183 if (target == 0 || ! safe_from_p (target, exp, 1)
7184 || GET_CODE (target) == PARALLEL
7185 || modifier == EXPAND_STACK_PARM)
7187 = assign_temp (build_qualified_type (type,
7189 | (TREE_READONLY (exp)
7190 * TYPE_QUAL_CONST))),
7191 0, TREE_ADDRESSABLE (exp), 1);
7193 store_constructor (exp, target, 0, int_expr_size (exp));
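
/* Illustrative sketch (not part of GCC): the all-zero fast path above.
   A constructor that is entirely zero becomes a single block clear
   rather than one store per field. */
#if 0
#include <string.h>

struct big { int a[64]; };

static void
init_zero (struct big *t)
{
  memset (t, 0, sizeof *t);     /* the clear_storage analogue */
}
#endif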
7197 case MISALIGNED_INDIRECT_REF:
7198 case ALIGN_INDIRECT_REF:
7201 tree exp1 = TREE_OPERAND (exp, 0);
7203 if (modifier != EXPAND_WRITE)
7207 t = fold_read_from_constant_string (exp);
7209 return expand_expr (t, target, tmode, modifier);
7212 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7213 op0 = memory_address (mode, op0);
7215 if (code == ALIGN_INDIRECT_REF)
7217 int align = TYPE_ALIGN_UNIT (type);
7218 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7219 op0 = memory_address (mode, op0);
7222 temp = gen_rtx_MEM (mode, op0);
7224 set_mem_attributes (temp, exp, 0);
7226 /* Resolve the misalignment now, so that we don't have to remember
7227 to resolve it later. Of course, this only works for reads. */
7228 /* ??? When we get around to supporting writes, we'll have to handle
7229 this in store_expr directly. The vectorizer isn't generating
7230 those yet, however. */
7231 if (code == MISALIGNED_INDIRECT_REF)
7236 gcc_assert (modifier == EXPAND_NORMAL
7237 || modifier == EXPAND_STACK_PARM);
7239 /* The vectorizer should have already checked the mode. */
7240 icode = movmisalign_optab->handlers[mode].insn_code;
7241 gcc_assert (icode != CODE_FOR_nothing);
7243 /* We've already validated the memory, and we're creating a
7244 new pseudo destination. The predicates really can't fail. */
7245 reg = gen_reg_rtx (mode);
7247 /* Nor can the insn generator. */
7248 insn = GEN_FCN (icode) (reg, temp);
7257 case TARGET_MEM_REF:
7259 struct mem_address addr;
7261 get_address_description (exp, &addr);
7262 op0 = addr_for_mem_ref (&addr, true);
7263 op0 = memory_address (mode, op0);
7264 temp = gen_rtx_MEM (mode, op0);
7265 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7272 tree array = TREE_OPERAND (exp, 0);
7273 tree index = TREE_OPERAND (exp, 1);
7275 /* Fold an expression like: "foo"[2].
7276 This is not done in fold so it won't happen inside &.
7277 Don't fold if this is for wide characters since it's too
7278 difficult to do correctly and this is a very rare case. */
7280 if (modifier != EXPAND_CONST_ADDRESS
7281 && modifier != EXPAND_INITIALIZER
7282 && modifier != EXPAND_MEMORY)
7284 tree t = fold_read_from_constant_string (exp);
7287 return expand_expr (t, target, tmode, modifier);
7290 /* If this is a constant index into a constant array,
7291 just get the value from the array. Handle both the case where
7292 we have an explicit constructor and the case where our operand is a variable
7293 that was declared const. */
7295 if (modifier != EXPAND_CONST_ADDRESS
7296 && modifier != EXPAND_INITIALIZER
7297 && modifier != EXPAND_MEMORY
7298 && TREE_CODE (array) == CONSTRUCTOR
7299 && ! TREE_SIDE_EFFECTS (array)
7300 && TREE_CODE (index) == INTEGER_CST)
7302 unsigned HOST_WIDE_INT ix;
7305 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7307 if (tree_int_cst_equal (field, index))
7309 if (!TREE_SIDE_EFFECTS (value))
7310 return expand_expr (fold (value), target, tmode, modifier);
7315 else if (optimize >= 1
7316 && modifier != EXPAND_CONST_ADDRESS
7317 && modifier != EXPAND_INITIALIZER
7318 && modifier != EXPAND_MEMORY
7319 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7320 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7321 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7322 && targetm.binds_local_p (array))
7324 if (TREE_CODE (index) == INTEGER_CST)
7326 tree init = DECL_INITIAL (array);
7328 if (TREE_CODE (init) == CONSTRUCTOR)
7330 unsigned HOST_WIDE_INT ix;
7333 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7335 if (tree_int_cst_equal (field, index))
7337 if (!TREE_SIDE_EFFECTS (value))
7338 return expand_expr (fold (value), target, tmode,
7343 else if (TREE_CODE (init) == STRING_CST)
7345 tree index1 = index;
7346 tree low_bound = array_ref_low_bound (exp);
7347 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7349 /* Optimize the special-case of a zero lower bound.
7351 We convert the low_bound to sizetype to avoid some problems
7352 with constant folding. (E.g. suppose the lower bound is 1,
7353 and its mode is QI. Without the conversion, (ARRAY
7354 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7355 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7357 if (! integer_zerop (low_bound))
7358 index1 = size_diffop (index1, fold_convert (sizetype,
7361 if (0 > compare_tree_int (index1,
7362 TREE_STRING_LENGTH (init)))
7364 tree type = TREE_TYPE (TREE_TYPE (init));
7365 enum machine_mode mode = TYPE_MODE (type);
7367 if (GET_MODE_CLASS (mode) == MODE_INT
7368 && GET_MODE_SIZE (mode) == 1)
7369 return gen_int_mode (TREE_STRING_POINTER (init)
7370 [TREE_INT_CST_LOW (index1)],
7377 goto normal_inner_ref;
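
/* Illustrative sketch (not part of GCC): what the constant-index
   folding above buys. A read such as "foo"[2], or a constant index
   into a const-initialized array, is replaced at expand time by the
   element value, after subtracting the low bound in sizetype as
   described. */
#if 0
static const char foo[] = "foo";

static int
folded_read (void)
{
  return foo[2];                /* expands to the constant 'o' */
}
#endif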
7380 /* If the operand is a CONSTRUCTOR, we can just extract the
7381 appropriate field if it is present. */
7382 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7384 unsigned HOST_WIDE_INT idx;
7387 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7389 if (field == TREE_OPERAND (exp, 1)
7390 /* We can normally use the value of the field in the
7391 CONSTRUCTOR. However, if this is a bitfield in
7392 an integral mode that we can fit in a HOST_WIDE_INT,
7393 we must mask only the number of bits in the bitfield,
7394 since this is done implicitly by the constructor. If
7395 the bitfield does not meet either of those conditions,
7396 we can't do this optimization. */
7397 && (! DECL_BIT_FIELD (field)
7398 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7399 && (GET_MODE_BITSIZE (DECL_MODE (field))
7400 <= HOST_BITS_PER_WIDE_INT))))
7402 if (DECL_BIT_FIELD (field)
7403 && modifier == EXPAND_STACK_PARM)
7405 op0 = expand_expr (value, target, tmode, modifier);
7406 if (DECL_BIT_FIELD (field))
7408 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7409 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7411 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7413 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7414 op0 = expand_and (imode, op0, op1, target);
7419 = build_int_cst (NULL_TREE,
7420 GET_MODE_BITSIZE (imode) - bitsize);
7422 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7424 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7432 goto normal_inner_ref;
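
/* Illustrative sketch (not part of GCC): the mask-or-shift adjustment
   above for a BITSIZE-bit field pulled out of a constructor, shown on a
   plain unsigned/int value; assumes 0 < bitsize < bits in unsigned. The
   signed case uses an xor trick equivalent to the shift pair above. */
#if 0
static unsigned
field_mask (unsigned value, int bitsize)       /* unsigned field */
{
  return value & (((unsigned) 1 << bitsize) - 1);
}

static int
field_extend (int value, int bitsize)          /* signed field */
{
  unsigned u = (unsigned) value & (((unsigned) 1 << bitsize) - 1);
  unsigned sign = (unsigned) 1 << (bitsize - 1);
  return (int) ((u ^ sign) - sign);            /* portable sign extension */
}
#endif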
7435 case ARRAY_RANGE_REF:
7438 enum machine_mode mode1;
7439 HOST_WIDE_INT bitsize, bitpos;
7442 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7443 &mode1, &unsignedp, &volatilep, true);
7446 /* If we got back the original object, something is wrong. Perhaps
7447 we are evaluating an expression too early. In any event, don't
7448 infinitely recurse. */
7449 gcc_assert (tem != exp);
7451 /* If TEM's type is a union of variable size, pass TARGET to the inner
7452 computation, since it will need a temporary and TARGET is known
7453 to suffice. This occurs in unchecked conversion in Ada. */
7457 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7458 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7460 && modifier != EXPAND_STACK_PARM
7461 ? target : NULL_RTX),
7463 (modifier == EXPAND_INITIALIZER
7464 || modifier == EXPAND_CONST_ADDRESS
7465 || modifier == EXPAND_STACK_PARM)
7466 ? modifier : EXPAND_NORMAL);
7468 /* If this is a constant, put it into a register if it is a legitimate
7469 constant, OFFSET is 0, and we won't try to extract outside the
7470 register (in case we were passed a partially uninitialized object
7471 or a view_conversion to a larger size). Force the constant to
7472 memory otherwise. */
7473 if (CONSTANT_P (op0))
7475 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7476 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7478 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7479 op0 = force_reg (mode, op0);
7481 op0 = validize_mem (force_const_mem (mode, op0));
7484 /* Otherwise, if this object is not in memory and we either have an
7485 offset, a BLKmode result, or a reference outside the object, put it
7486 there. Such cases can occur in Ada if we have unchecked conversion
7487 of an expression from a scalar type to an array or record type or
7488 for an ARRAY_RANGE_REF whose type is BLKmode. */
7489 else if (!MEM_P (op0)
7491 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7492 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7494 tree nt = build_qualified_type (TREE_TYPE (tem),
7495 (TYPE_QUALS (TREE_TYPE (tem))
7496 | TYPE_QUAL_CONST));
7497 rtx memloc = assign_temp (nt, 1, 1, 1);
7499 emit_move_insn (memloc, op0);
7505 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7508 gcc_assert (MEM_P (op0));
7510 #ifdef POINTERS_EXTEND_UNSIGNED
7511 if (GET_MODE (offset_rtx) != Pmode)
7512 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7514 if (GET_MODE (offset_rtx) != ptr_mode)
7515 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7518 if (GET_MODE (op0) == BLKmode
7519 /* A constant address in OP0 can have VOIDmode; we must
7520 not try to call force_reg in that case. */
7521 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7523 && (bitpos % bitsize) == 0
7524 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7525 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7527 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7531 op0 = offset_address (op0, offset_rtx,
7532 highest_pow2_factor (offset));
7535 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7536 record its alignment as BIGGEST_ALIGNMENT. */
7537 if (MEM_P (op0) && bitpos == 0 && offset != 0
7538 && is_aligning_offset (offset, tem))
7539 set_mem_align (op0, BIGGEST_ALIGNMENT);
7541 /* Don't forget about volatility even if this is a bitfield. */
7542 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7544 if (op0 == orig_op0)
7545 op0 = copy_rtx (op0);
7547 MEM_VOLATILE_P (op0) = 1;
7550 /* The following code doesn't handle CONCAT.
7551 Assume only bitpos == 0 can be used for CONCAT, due to
7552 one-element arrays having the same mode as their element. */
7553 if (GET_CODE (op0) == CONCAT)
7555 gcc_assert (bitpos == 0
7556 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7560 /* In cases where an aligned union has an unaligned object
7561 as a field, we might be extracting a BLKmode value from
7562 an integer-mode (e.g., SImode) object. Handle this case
7563 by doing the extract into an object as wide as the field
7564 (which we know to be the width of a basic mode), then
7565 storing into memory, and changing the mode to BLKmode. */
7566 if (mode1 == VOIDmode
7567 || REG_P (op0) || GET_CODE (op0) == SUBREG
7568 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7569 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7570 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7571 && modifier != EXPAND_CONST_ADDRESS
7572 && modifier != EXPAND_INITIALIZER)
7573 /* If the field isn't aligned enough to fetch as a memref,
7574 fetch it as a bit field. */
7575 || (mode1 != BLKmode
7576 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7577 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7579 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7580 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7581 && ((modifier == EXPAND_CONST_ADDRESS
7582 || modifier == EXPAND_INITIALIZER)
7584 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7585 || (bitpos % BITS_PER_UNIT != 0)))
7586 /* If the type and the field are a constant size and the
7587 size of the type isn't the same size as the bitfield,
7588 we must use bitfield operations. */
7590 && TYPE_SIZE (TREE_TYPE (exp))
7591 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7592 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7595 enum machine_mode ext_mode = mode;
7597 if (ext_mode == BLKmode
7598 && ! (target != 0 && MEM_P (op0)
7600 && bitpos % BITS_PER_UNIT == 0))
7601 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7603 if (ext_mode == BLKmode)
7606 target = assign_temp (type, 0, 1, 1);
7611 /* In this case, BITPOS must start at a byte boundary and
7612 TARGET, if specified, must be a MEM. */
7613 gcc_assert (MEM_P (op0)
7614 && (!target || MEM_P (target))
7615 && !(bitpos % BITS_PER_UNIT));
7617 emit_block_move (target,
7618 adjust_address (op0, VOIDmode,
7619 bitpos / BITS_PER_UNIT),
7620 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7622 (modifier == EXPAND_STACK_PARM
7623 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7628 op0 = validize_mem (op0);
7630 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7631 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7633 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7634 (modifier == EXPAND_STACK_PARM
7635 ? NULL_RTX : target),
7636 ext_mode, ext_mode);
7638 /* If the result is a record type and BITSIZE is narrower than
7639 the mode of OP0, an integral mode, and this is a big endian
7640 machine, we must put the field into the high-order bits. */
7641 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7642 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7643 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7644 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7645 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7649 /* If the result type is BLKmode, store the data into a temporary
7650 of the appropriate type, but with the mode corresponding to the
7651 mode for the data we have (op0's mode). It's tempting to make
7652 this a constant type, since we know it's only being stored once,
7653 but that can cause problems if we are taking the address of this
7654 COMPONENT_REF because the MEM of any reference via that address
7655 will have flags corresponding to the type, which will not
7656 necessarily be constant. */
7657 if (mode == BLKmode)
7660 = assign_stack_temp_for_type
7661 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7663 emit_move_insn (new, op0);
7664 op0 = copy_rtx (new);
7665 PUT_MODE (op0, BLKmode);
7666 set_mem_attributes (op0, exp, 1);
7672 /* If the result is BLKmode, use that to access the object now as well. */
7674 if (mode == BLKmode)
7677 /* Get a reference to just this component. */
7678 if (modifier == EXPAND_CONST_ADDRESS
7679 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7680 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7682 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7684 if (op0 == orig_op0)
7685 op0 = copy_rtx (op0);
7687 set_mem_attributes (op0, exp, 0);
7688 if (REG_P (XEXP (op0, 0)))
7689 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7691 MEM_VOLATILE_P (op0) |= volatilep;
7692 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7693 || modifier == EXPAND_CONST_ADDRESS
7694 || modifier == EXPAND_INITIALIZER)
7696 else if (target == 0)
7697 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7699 convert_move (target, op0, unsignedp);
7704 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7707 /* Check for a built-in function. */
7708 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7709 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7711 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7713 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7714 == BUILT_IN_FRONTEND)
7715 return lang_hooks.expand_expr (exp, original_target,
7719 return expand_builtin (exp, target, subtarget, tmode, ignore);
7722 return expand_call (exp, target, ignore);
7724 case NON_LVALUE_EXPR:
7727 if (TREE_OPERAND (exp, 0) == error_mark_node)
7730 if (TREE_CODE (type) == UNION_TYPE)
7732 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7734 /* If both input and output are BLKmode, this conversion isn't doing
7735 anything except possibly changing memory attributes. */
7736 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7738 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7741 result = copy_rtx (result);
7742 set_mem_attributes (result, exp, 0);
7748 if (TYPE_MODE (type) != BLKmode)
7749 target = gen_reg_rtx (TYPE_MODE (type));
7751 target = assign_temp (type, 0, 1, 1);
7755 /* Store data into beginning of memory target. */
7756 store_expr (TREE_OPERAND (exp, 0),
7757 adjust_address (target, TYPE_MODE (valtype), 0),
7758 modifier == EXPAND_STACK_PARM);
7762 gcc_assert (REG_P (target));
7764 /* Store this field into a union of the proper type. */
7765 store_field (target,
7766 MIN ((int_size_in_bytes (TREE_TYPE
7767 (TREE_OPERAND (exp, 0)))
7769 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7770 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7774 /* Return the entire union. */
7778 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7780 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7783 /* If the signedness of the conversion differs and OP0 is
7784 a promoted SUBREG, clear that indication since we now
7785 have to do the proper extension. */
7786 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7787 && GET_CODE (op0) == SUBREG)
7788 SUBREG_PROMOTED_VAR_P (op0) = 0;
7790 return REDUCE_BIT_FIELD (op0);
7793 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7794 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7795 if (GET_MODE (op0) == mode)
7798 /* If OP0 is a constant, just convert it into the proper mode. */
7799 else if (CONSTANT_P (op0))
7801 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7802 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7804 if (modifier == EXPAND_INITIALIZER)
7805 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7806 subreg_lowpart_offset (mode,
7809 op0 = convert_modes (mode, inner_mode, op0,
7810 TYPE_UNSIGNED (inner_type));
7813 else if (modifier == EXPAND_INITIALIZER)
7814 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7816 else if (target == 0)
7817 op0 = convert_to_mode (mode, op0,
7818 TYPE_UNSIGNED (TREE_TYPE
7819 (TREE_OPERAND (exp, 0))));
7822 convert_move (target, op0,
7823 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7827 return REDUCE_BIT_FIELD (op0);
7829 case VIEW_CONVERT_EXPR:
7830 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7832 /* If the input and output modes are both the same, we are done. */
7833 if (TYPE_MODE (type) == GET_MODE (op0))
7835 /* If neither mode is BLKmode and both modes are the same size,
7836 then we can use gen_lowpart. */
7837 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7838 && GET_MODE_SIZE (TYPE_MODE (type))
7839 == GET_MODE_SIZE (GET_MODE (op0)))
7841 if (GET_CODE (op0) == SUBREG)
7842 op0 = force_reg (GET_MODE (op0), op0);
7843 op0 = gen_lowpart (TYPE_MODE (type), op0);
7845 /* If both modes are integral, then we can convert from one to the other. */
7847 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7848 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7849 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7850 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7851 /* As a last resort, spill op0 to memory, and reload it in a different mode. */
7853 else if (!MEM_P (op0))
7855 /* If the operand is not a MEM, force it into memory. Since we
7856 are going to be changing the mode of the MEM, don't call
7857 force_const_mem for constants because we don't allow pool
7858 constants to change mode. */
7859 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7861 gcc_assert (!TREE_ADDRESSABLE (exp));
7863 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7865 = assign_stack_temp_for_type
7866 (TYPE_MODE (inner_type),
7867 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7869 emit_move_insn (target, op0);
7873 /* At this point, OP0 is in the correct mode. If the output type is such
7874 that the operand is known to be aligned, indicate that it is.
7875 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7879 op0 = copy_rtx (op0);
7881 if (TYPE_ALIGN_OK (type))
7882 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7883 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7884 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7886 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7887 HOST_WIDE_INT temp_size
7888 = MAX (int_size_in_bytes (inner_type),
7889 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7890 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7891 temp_size, 0, type);
7892 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7894 gcc_assert (!TREE_ADDRESSABLE (exp));
7896 if (GET_MODE (op0) == BLKmode)
7897 emit_block_move (new_with_op0_mode, op0,
7898 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7899 (modifier == EXPAND_STACK_PARM
7900 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7902 emit_move_insn (new_with_op0_mode, op0);
7907 op0 = adjust_address (op0, TYPE_MODE (type), 0);
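
/* Illustrative sketch (not part of GCC): the memory fallback above is
   the RTL-level version of a type pun through memory -- spill in one
   mode, reload in the other. Assumes matching sizes, which the
   VIEW_CONVERT check guarantees at the tree level. */
#if 0
#include <string.h>

static unsigned
view_convert_float (float f)
{
  unsigned u;
  memcpy (&u, &f, sizeof u);    /* spill as float, reload as unsigned */
  return u;
}
#endif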
7913 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7914 something else, make sure we add the register to the constant and
7915 then to the other thing. This case can occur during strength
7916 reduction and doing it this way will produce better code if the
7917 frame pointer or argument pointer is eliminated.
7919 fold-const.c will ensure that the constant is always in the inner
7920 PLUS_EXPR, so the only case we need to do anything about is if
7921 sp, ap, or fp is our second argument, in which case we must swap
7922 the innermost first argument and our second argument. */
7924 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7925 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7926 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7927 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7928 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7929 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7931 tree t = TREE_OPERAND (exp, 1);
7933 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7934 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7937 /* If the result is to be ptr_mode and we are adding an integer to
7938 something, we might be forming a constant. So try to use
7939 plus_constant. If it produces a sum and we can't accept it,
7940 use force_operand. This allows P = &ARR[const] to generate
7941 efficient code on machines where a SYMBOL_REF is not a valid address.
7944 If this is an EXPAND_SUM call, always return the sum. */
7945 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7946 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7948 if (modifier == EXPAND_STACK_PARM)
7950 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7951 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7952 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7956 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7958 /* Use immed_double_const to ensure that the constant is
7959 truncated according to the mode of OP1, then sign extended
7960 to a HOST_WIDE_INT. Using the constant directly can result
7961 in non-canonical RTL in a 64x32 cross compile. */
7963 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7965 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7966 op1 = plus_constant (op1, INTVAL (constant_part));
7967 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7968 op1 = force_operand (op1, target);
7969 return REDUCE_BIT_FIELD (op1);
7972 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7973 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7974 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7978 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7979 (modifier == EXPAND_INITIALIZER
7980 ? EXPAND_INITIALIZER : EXPAND_SUM));
7981 if (! CONSTANT_P (op0))
7983 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7984 VOIDmode, modifier);
7985 /* Return a PLUS if modifier says it's OK. */
7986 if (modifier == EXPAND_SUM
7987 || modifier == EXPAND_INITIALIZER)
7988 return simplify_gen_binary (PLUS, mode, op0, op1);
7991 /* Use immed_double_const to ensure that the constant is
7992 truncated according to the mode of OP1, then sign extended
7993 to a HOST_WIDE_INT. Using the constant directly can result
7994 in non-canonical RTL in a 64x32 cross compile. */
7996 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7998 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7999 op0 = plus_constant (op0, INTVAL (constant_part));
8000 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8001 op0 = force_operand (op0, target);
8002 return REDUCE_BIT_FIELD (op0);
8006 /* No sense saving up arithmetic to be done
8007 if it's all in the wrong mode to form part of an address.
8008 And force_operand won't know whether to sign-extend or zero-extend. */
8010 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8011 || mode != ptr_mode)
8013 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8014 subtarget, &op0, &op1, 0);
8015 if (op0 == const0_rtx)
8017 if (op1 == const0_rtx)
8022 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8023 subtarget, &op0, &op1, modifier);
8024 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
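
/* Illustrative sketch (not part of GCC): why the PLUS case tries
   plus_constant. An address such as &arr[10] should fold into a single
   symbol-plus-offset operand rather than a runtime addition
   (the offset is 40 assuming a 4-byte int). */
#if 0
static int arr[32];

static int *
addr_of_elt (void)
{
  return &arr[10];              /* one SYMBOL_REF plus 40, no add insn */
}
#endif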
8027 /* For initializers, we are allowed to return a MINUS of two
8028 symbolic constants. Here we handle all cases when both operands are constant. */
8030 /* Handle difference of two symbolic constants,
8031 for the sake of an initializer. */
8032 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8033 && really_constant_p (TREE_OPERAND (exp, 0))
8034 && really_constant_p (TREE_OPERAND (exp, 1)))
8036 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8037 NULL_RTX, &op0, &op1, modifier);
8039 /* If the last operand is a CONST_INT, use plus_constant of
8040 the negated constant. Else make the MINUS. */
8041 if (GET_CODE (op1) == CONST_INT)
8042 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8044 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8047 /* No sense saving up arithmetic to be done
8048 if it's all in the wrong mode to form part of an address.
8049 And force_operand won't know whether to sign-extend or zero-extend. */
8051 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8052 || mode != ptr_mode)
8055 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8056 subtarget, &op0, &op1, modifier);
8058 /* Convert A - const to A + (-const). */
8059 if (GET_CODE (op1) == CONST_INT)
8061 op1 = negate_rtx (mode, op1);
8062 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8068 /* If the first operand is constant, swap the two operands.
8069 Thus the following special case checks need only
8070 check the second operand. */
8071 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8073 tree t1 = TREE_OPERAND (exp, 0);
8074 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8075 TREE_OPERAND (exp, 1) = t1;
8078 /* Attempt to return something suitable for generating an
8079 indexed address, for machines that support that. */
8081 if (modifier == EXPAND_SUM && mode == ptr_mode
8082 && host_integerp (TREE_OPERAND (exp, 1), 0))
8084 tree exp1 = TREE_OPERAND (exp, 1);
8086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8090 op0 = force_operand (op0, NULL_RTX);
8092 op0 = copy_to_mode_reg (mode, op0);
8094 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8095 gen_int_mode (tree_low_cst (exp1, 0),
8096 TYPE_MODE (TREE_TYPE (exp1)))));
8099 if (modifier == EXPAND_STACK_PARM)
8102 /* Check for multiplying things that have been extended
8103 from a narrower type. If this machine supports multiplying
8104 in that narrower type with a result in the desired type,
8105 do it that way, and avoid the explicit type-conversion. */
8107 subexp0 = TREE_OPERAND (exp, 0);
8108 subexp1 = TREE_OPERAND (exp, 1);
8109 /* First, check if we have a multiplication of one signed and one
8110 unsigned operand. */
8111 if (TREE_CODE (subexp0) == NOP_EXPR
8112 && TREE_CODE (subexp1) == NOP_EXPR
8113 && TREE_CODE (type) == INTEGER_TYPE
8114 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8115 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8116 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8117 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8118 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8119 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8121 enum machine_mode innermode
8122 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8123 this_optab = usmul_widen_optab;
8124 if (mode == GET_MODE_WIDER_MODE (innermode))
8126 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8128 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8129 expand_operands (TREE_OPERAND (subexp0, 0),
8130 TREE_OPERAND (subexp1, 0),
8131 NULL_RTX, &op0, &op1, 0);
8133 expand_operands (TREE_OPERAND (subexp0, 0),
8134 TREE_OPERAND (subexp1, 0),
8135 NULL_RTX, &op1, &op0, 0);
8141 /* Check for a multiplication with matching signedness. */
8142 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8143 && TREE_CODE (type) == INTEGER_TYPE
8144 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8145 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8146 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8147 && int_fits_type_p (TREE_OPERAND (exp, 1),
8148 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8149 /* Don't use a widening multiply if a shift will do. */
8150 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8151 > HOST_BITS_PER_WIDE_INT)
8152 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
		   || (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
		       && (TYPE_PRECISION (TREE_TYPE
					   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
			   == TYPE_PRECISION (TREE_TYPE
					      (TREE_OPERAND
					       (TREE_OPERAND (exp, 0), 0))))
		       /* If both operands are extended, they must either both
			  be zero-extended or both be sign-extended.  */
		       && (TYPE_UNSIGNED (TREE_TYPE
					  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
			   == TYPE_UNSIGNED (TREE_TYPE
					     (TREE_OPERAND
					      (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8169 enum machine_mode innermode = TYPE_MODE (op0type);
8170 bool zextend_p = TYPE_UNSIGNED (op0type);
8171 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8172 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  goto binop3;
		}
	      else if (other_optab->handlers[(int) mode].insn_code
		       != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (TREE_OPERAND (exp, 1)),
					 unsignedp);
		  else
		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
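      /* The adjustment above accounts for the difference between a signed
	 and an unsigned widening multiply; roughly, for word-mode x and y,
	   (x * y)_signed_high = (x * y)_unsigned_high
				 - (x < 0 ? y : 0) - (y < 0 ? x : 0),
	 and symmetrically the other way, which is the correction
	 expand_mult_highpart_adjust applies to the high half.  */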
8211 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8212 subtarget, &op0, &op1, 0);
8213 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8215 case TRUNC_DIV_EXPR:
8216 case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
8219 case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8222 /* Possible optimization: compute the dividend with EXPAND_SUM
8223 then if the divisor is constant can optimize the case
8224 where some terms of the dividend have coeffs divisible by it. */
8225 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8226 subtarget, &op0, &op1, 0);
8227 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
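      /* The division codes differ only in how the quotient is rounded:
	 e.g. -7 / 2 is -3 for TRUNC_DIV_EXPR but -4 for FLOOR_DIV_EXPR,
	 while EXACT_DIV_EXPR promises there is no remainder and so
	 permits cheaper code.  */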
8232 case TRUNC_MOD_EXPR:
8233 case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8238 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8239 subtarget, &op0, &op1, 0);
8240 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8242 case FIX_ROUND_EXPR:
8243 case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();			/* Not used for C.  */
8247 case FIX_TRUNC_EXPR:
8248 op0 = expand_normal (TREE_OPERAND (exp, 0));
8249 if (target == 0 || modifier == EXPAND_STACK_PARM)
8250 target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
8255 op0 = expand_normal (TREE_OPERAND (exp, 0));
8256 if (target == 0 || modifier == EXPAND_STACK_PARM)
8257 target = gen_reg_rtx (mode);
8258 /* expand_float can't figure out what to do if FROM has VOIDmode.
8259 So give it the correct mode. With -O, cse will optimize this. */
8260 if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
8263 expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
8268 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8271 temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type),
			  op0, target, 0);
      gcc_assert (temp);
8275 return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8282 /* ABS_EXPR is not valid for complex arguments. */
8283 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8284 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8286 /* Unsigned abs is simply the operand. Testing here means we don't
8287 risk generating incorrect code below. */
      if (TYPE_UNSIGNED (type))
	return op0;
8291 return expand_abs (mode, op0, target, unsignedp,
8292 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8303 target = gen_reg_rtx (mode);
8304 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8305 target, &op0, &op1, 0);
8307 /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
8310 this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
8319 if (! REG_P (target))
8320 target = gen_reg_rtx (mode);
8322 /* If op1 was placed in target, swap op0 and op1. */
      if (target != op0 && target == op1)
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
8330 /* We generate better code and avoid problems with op1 mentioning
8331 target by forcing op1 into a pseudo if it isn't a constant. */
8332 if (! CONSTANT_P (op1))
8333 op1 = force_reg (mode, op1);
      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;
8339 if (code == MAX_EXPR)
8340 comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;
8344 /* Canonicalize to comparisons against 0. */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
8363 #ifdef HAVE_conditional_move
8364 /* Use a conditional move if possible. */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;
8369 /* ??? Same problem as in expmed.c: emit_conditional_move
8370 forces a stack adjustment via compare_from_rtx, and we
8371 lose the stack adjustment if the sequence we are about
8372 to create is discarded. */
	    do_pending_stack_adjust ();

	    start_sequence ();
8377 /* Try to emit the conditional move. */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn != NULL_RTX)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
8399 emit_move_insn (target, op0);
8401 temp = gen_label_rtx ();
8402 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8403 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
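	/* In outline, the fallback emitted here is
	       target = op0;
	       if (target <comparison_code> cmpop1) goto temp;
	       target = op1;
	     temp:
	   so op0 is kept whenever the comparison already holds.  */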
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;

    case BIT_NOT_EXPR:
8410 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8411 if (modifier == EXPAND_STACK_PARM)
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
8417 /* ??? Can optimize bitwise operations with one arg constant.
8418 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8419 and (a bitwise1 b) bitwise2 b (etc)
8420 but that is probably not worth while. */
8422 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8423 boolean values when we want in all cases to compute both of them. In
8424 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8425 as actual zero-or-1 values and then bitwise anding. In cases where
8426 there cannot be any side effects, better code would be made by
8427 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8428 how to recognize those cases. */
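      /* E.g. with zero-or-one operands, "a && b" expanded as TRUTH_AND_EXPR
	 becomes a single AND instruction, whereas TRUTH_ANDIF_EXPR would
	 have to branch around the evaluation of b when a is zero.  */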
8430 case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;
8440 case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8453 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
8457 /* Could determine the answer when only additive constants differ. Also,
8458 the addition of one can be handled by changing the condition. */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
8473 temp = do_store_flag (exp,
8474 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
8479 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
8483 && (GET_MODE (original_target)
8484 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);
8489 /* If temp is constant, we can just compute the result. */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }
	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }
8509 op1 = gen_label_rtx ();
8510 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8511 GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
8517 /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
8524 || ! safe_from_p (target, exp, 1)
8525 /* Make sure we don't have a hard reg (such as function's return
8526 value) live across basic blocks, if not optimizing. */
8527 || (!optimize && REG_P (target)
8528 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8529 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      if (target)
	emit_move_insn (target, const0_rtx);
8534 op1 = gen_label_rtx ();
8535 jumpifnot (exp, op1);
      if (target)
	emit_move_insn (target, const1_rtx);
      emit_label (op1);
      return ignore ? const0_rtx : target;
8543 case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8546 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8547 /* The parser is careful to generate TRUTH_NOT_EXPR
8548 only with operands that are always zero or one. */
8549 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
8554 case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;
8558 gcc_assert (ignore);
8560 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
8566 /* A COND_EXPR with its type being VOID_TYPE represents a
8567 conditional jump and is handled in
8568 expand_gimple_cond_expr. */
8569 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8571 /* Note that COND_EXPRs whose type is a structure or union
8572 are required to be constructed to contain assignments of
8573 a temporary variable, so that we can evaluate them here
8574 for side effect only. If type is void, we must do likewise. */
      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8579 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8581 /* If we are not to produce a result, we have no target. Otherwise,
8582 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);
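      /* What follows emits the usual diamond:
	     if (!cond) goto op0;
	     temp = <then-arm>;  goto op1;
	   op0:
	     temp = <else-arm>;
	   op1:  */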
      do_pending_stack_adjust ();
      NO_DEFER_POP;
8601 op0 = gen_label_rtx ();
8602 op1 = gen_label_rtx ();
8603 jumpifnot (TREE_OPERAND (exp, 0), op0);
8604 store_expr (TREE_OPERAND (exp, 1), temp,
8605 modifier == EXPAND_STACK_PARM);
      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
8610 store_expr (TREE_OPERAND (exp, 2), temp,
8611 modifier == EXPAND_STACK_PARM);
      emit_label (op1);
      OK_DEFER_POP;
      return temp;

    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
8623 tree lhs = TREE_OPERAND (exp, 0);
8624 tree rhs = TREE_OPERAND (exp, 1);
8626 gcc_assert (ignore);
8628 /* Check for |= or &= of a bitfield of size one into another bitfield
8629 of size 1. In this case, (unless we need the result of the
8630 assignment) we can do this more efficiently with a
8631 test followed by an assignment, if necessary.
8633 ??? At this point, we can't get a BIT_FIELD_REF here. But if
	   things change so we do, this code should be enhanced to
	   support it.  */
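	/* For example, "s.a |= s.b" on one-bit bitfields is emitted as
	   "if (s.b) s.a = 1;" and "s.a &= s.b" as "if (!s.b) s.a = 0;",
	   avoiding a read-modify-write of the destination field.  */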
8636 if (TREE_CODE (lhs) == COMPONENT_REF
8637 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8638 || TREE_CODE (rhs) == BIT_AND_EXPR)
8639 && TREE_OPERAND (rhs, 0) == lhs
8640 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8641 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8642 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
8645 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label);
8649 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);
	return const0_rtx;
      }

    case RETURN_EXPR:
8661 if (!TREE_OPERAND (exp, 0))
8662 expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
8668 return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
8672 op0 = expand_normal (TREE_OPERAND (exp, 0));
8673 op1 = expand_normal (TREE_OPERAND (exp, 1));
      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8678 /* Move the real (op0) and imaginary (op1) parts to their location. */
8679 write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
8685 op0 = expand_normal (TREE_OPERAND (exp, 0));
8686 return read_complex_part (op0, false);
    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
8690 return read_complex_part (op0, true);
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;
    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();
8703 case WITH_CLEANUP_EXPR:
8704 case CLEANUP_POINT_EXPR:
8706 case CASE_LABEL_EXPR:
8712 case PREINCREMENT_EXPR:
8713 case PREDECREMENT_EXPR:
8714 case POSTINCREMENT_EXPR:
8715 case POSTDECREMENT_EXPR:
8718 case TRUTH_ANDIF_EXPR:
8719 case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
8746 case WITH_SIZE_EXPR:
8747 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8748 have pulled out the size to use in whatever context it needed. */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);
8752 case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
8755 tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;
8759 this_optab = optab_for_tree_code (code, type);
8760 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8761 op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
8771 tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;
8775 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8776 op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
8785 tree oprnd1 = TREE_OPERAND (exp, 1);
8787 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }
8793 case REDUC_MAX_EXPR:
8794 case REDUC_MIN_EXPR:
8795 case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
8798 this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
8804 case VEC_LSHIFT_EXPR:
8805 case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
8821 this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
8825 temp = expand_binop (mode, this_optab, op0, op1, target,
8826 unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
8830 #undef REDUCE_BIT_FIELD
8832 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8833 signedness of TYPE), possibly returning the result in TARGET. */
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
8837 HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
8840 /* For constant values, reduce using build_int_cst_type. */
  if (GET_CODE (exp) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
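/* For instance, reducing a value to a 5-bit unsigned type in SImode masks
   with 0x1f, while the signed case shifts left and then arithmetically
   right by 32 - 5 = 27 bits, replicating bit 4 into the upper bits.  */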
8869 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8870 when applied to the address of EXP produces an address known to be
8871 aligned more than BIGGEST_ALIGNMENT. */
static int
is_aligning_offset (tree offset, tree exp)
{
8876 /* Strip off any conversions. */
8877 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8878 || TREE_CODE (offset) == NOP_EXPR
8879 || TREE_CODE (offset) == CONVERT_EXPR)
8880 offset = TREE_OPERAND (offset, 0);
8882 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8883 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8884 if (TREE_CODE (offset) != BIT_AND_EXPR
8885 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8886 || compare_tree_int (TREE_OPERAND (offset, 1),
8887 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;
8891 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8892 It must be NEGATE_EXPR. Then strip any more conversions. */
8893 offset = TREE_OPERAND (offset, 0);
8894 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8895 || TREE_CODE (offset) == NOP_EXPR
8896 || TREE_CODE (offset) == CONVERT_EXPR)
8897 offset = TREE_OPERAND (offset, 0);
  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;
8902 offset = TREE_OPERAND (offset, 0);
8903 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8904 || TREE_CODE (offset) == NOP_EXPR
8905 || TREE_CODE (offset) == CONVERT_EXPR)
8906 offset = TREE_OPERAND (offset, 0);
8908 /* This must now be the address of EXP. */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
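/* Such an offset typically comes from manual alignment code of the form

     p + ((- (intptr_t) p) & (N - 1))

   which rounds the address of EXP up to the next multiple of N.  */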
8912 /* Return the tree node if an ARG corresponds to a string constant or zero
8913 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8914 in bytes within the string that ARG is accessing. The type of the
8915 offset will be `sizetype'. */
static tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
8954 if (TREE_CODE (arg0) == ADDR_EXPR
8955 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8956 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
8961 else if (TREE_CODE (arg1) == ADDR_EXPR
8962 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8963 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;
  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;
8983 /* Variables initialized to string literals can be handled too. */
8984 if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;
      /* The array must be read-only, free of side effects, and bind
	 locally.  */
8989 if (! TREE_READONLY (array)
8990 || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;
8994 /* Avoid const char foo[4] = "abcde"; */
8995 if (DECL_SIZE_UNIT (array) == NULL_TREE
8996 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8997 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;
9001 /* If variable is bigger than the string literal, OFFSET must be constant
9002 and inside of the bounds of the string literal. */
9003 offset = fold_convert (sizetype, offset);
9004 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9005 && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;
9009 *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
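/* For example, given

     static const char msg[] = "hello";

   the argument msg + 1 yields the STRING_CST "hello" with *PTR_OFFSET set
   to 1, so callers can fold reads of the literal directly.  */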
9016 /* Generate code to calculate EXP using a store-flag instruction
9017 and return an rtx for the result. EXP is either a comparison
9018 or a TRUTH_NOT_EXPR whose operand is a comparison.
9020 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.
9025 Return zero if there is no suitable set-flag instruction
9026 available on this machine.
9028 Once expand_expr has been called on the arguments of the comparison,
9029 we are committed to doing the store flag, since it is not safe to
9030 re-evaluate the expression. We emit the store-flag insn by calling
9031 emit_store_flag, but only expand the arguments if we have a reason
9032 to believe that emit_store_flag will be successful. If we think that
9033 it will, but it isn't, we have to simulate the store-flag with a
9034 set/jump/set sequence. */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
9050 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9051 result at the end. We can't simply invert the test since it would
9052 have already been inverted if it were valid. This case occurs for
9053 some floating-point comparisons. */
  invert = 0;
  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9056 invert = 1, exp = TREE_OPERAND (exp, 0);
9058 arg0 = TREE_OPERAND (exp, 0);
9059 arg1 = TREE_OPERAND (exp, 1);
9061 /* Don't crash if the comparison was erroneous. */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;
9065 type = TREE_TYPE (arg0);
9066 operand_mode = TYPE_MODE (type);
9067 unsignedp = TYPE_UNSIGNED (type);
9069 /* We won't bother with BLKmode store-flag operations because it would mean
9070 passing a lot of information to emit_store_flag. */
  if (operand_mode == BLKmode)
    return 0;
9074 /* We won't bother with store-flag operations involving function pointers
9075 when function pointers must be canonicalized before comparisons. */
9076 #ifdef HAVE_canonicalize_funcptr_for_compare
9077 if (HAVE_canonicalize_funcptr_for_compare
9078 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
9081 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9082 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	      == FUNCTION_TYPE))))
    return 0;
#endif
9090 /* Get the rtx comparison code to use. We know that EXP is a comparison
9091 operation of some type. Some comparisons against 1 and -1 can be
9092 converted to comparisons with zero. Do so here so that the tests
9093 below will be aware that we have a comparison with zero. These
9094 tests will not catch constants in the first operand, but constants
9095 are rarely passed as the first operand. */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
9159 /* Put a constant second. */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
9166 /* If this is an equality or inequality test of a single bit, we can
9167 do this by shifting the bit being tested to the low-order bit and
9168 masking the result with the constant 1. If the condition was EQ,
9169 we xor it with 1. This does not require an scc insn and is faster
9170 than an scc insn even if we have it.
9172 The code to make this transformation was moved into fold_single_bit_test,
9173 so we just call into the folder and expand its result. */
9175 if ((code == NE || code == EQ)
9176 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9177 && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
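  /* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and the EQ variant is the
     same shift-and-mask followed by an XOR with 1.  */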
9185 /* Now see if we are likely to be able to do this. Return if not. */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;
9189 icode = setcc_gen_code[(int) code];
9190 if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
9193 /* We can only do this if it is one of the special cases that
9194 can be handled without an scc insn. */
9195 if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
9198 else if (! only_cheap && (code == NE || code == EQ)
9199 && TREE_CODE (type) != REAL_TYPE
9200 && ((abs_optab->handlers[(int) operand_mode].insn_code
9201 != CODE_FOR_nothing)
9202 || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
9209 if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;
9213 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
  if (target == 0)
    target = gen_reg_rtx (mode);
9218 result = emit_store_flag (target, code, op0, op1,
9219 operand_mode, unsignedp, 1);
  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
9229 /* If this failed, we have to do this with set/compare/jump/set code. */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9232 target = gen_reg_rtx (GET_MODE (target));
9234 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
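  /* The set/compare/jump/set sequence built here has the shape

	 target = 1;   (0 when inverting)
	 if (op0 <code> op1) goto label;
	 target = 0;   (1 when inverting)
       label:

     which is why TARGET must not be mentioned in OP0 or OP1.  */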
9235 result = compare_from_rtx (op0, op1, code, unsignedp,
9236 operand_mode, NULL_RTX);
9237 if (GET_CODE (result) == CONST_INT)
9238 return (((result == const0_rtx && ! invert)
9239 || (result != const0_rtx && invert))
9240 ? const0_rtx : const1_rtx);
9242 /* The code of RESULT may not match CODE if compare_from_rtx
9243 decided to swap its operands and reverse the original code.
9245 We know that compare_from_rtx returns either a CONST_INT or
9246 a new comparison code, so it is safe to just extract the
9247 code from RESULT. */
9248 code = GET_CODE (result);
9250 label = gen_label_rtx ();
9251 gcc_assert (bcc_gen_fctn[(int) code]);
9253 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
9268 /* If the machine does not have a case insn that compares the bounds,
9269 this means extra overhead for dispatch tables, which raises the
9270 threshold for using them. */
9271 #ifndef CASE_VALUES_THRESHOLD
9272 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9273 #endif /* CASE_VALUES_THRESHOLD */
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
9281 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9282 0 otherwise (i.e. if there is no casesi instruction). */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9285 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
9288 int index_bits = GET_MODE_BITSIZE (index_mode);
9289 rtx op1, op2, index;
9290 enum machine_mode op_mode;
  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
9296 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
9299 rtx rangertx = expand_normal (range);
9301 /* We must handle the endpoints in the original mode. */
9302 index_expr = build2 (MINUS_EXPR, index_type,
9303 index_expr, minval);
9304 minval = integer_zero_node;
9305 index = expand_normal (index_expr);
9306 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9307 omode, 1, default_label);
9308 /* Now we can safely truncate. */
9309 index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }
9322 do_pending_stack_adjust ();
9324 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);
9329 op1 = expand_normal (minval);
9331 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9332 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9333 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);
9338 op2 = expand_normal (range);
9340 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9341 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9342 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);
9347 emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
9352 /* Attempt to generate a tablejump instruction; same concept. */
9353 #ifndef HAVE_tablejump
9354 #define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
9358 /* Subroutine of the next function.
9360 INDEX is the value being switched on, with the lowest value
9361 in the table already subtracted.
9362 MODE is its expected mode (needed if INDEX is constant).
9363 RANGE is the length of the jump table.
9364 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9366 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9367 index value is out of range. */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;
9375 if (INTVAL (range) > cfun->max_jumptable_ents)
9376 cfun->max_jumptable_ents = INTVAL (range);
9378 /* Do an unsigned comparison (in the proper mode) between the index
9379 expression and the value which represents the length of the range.
9380 Since we just finished subtracting the lower bound of the range
9381 from the index expression, this comparison allows us to simultaneously
9382 check that the original index expression value is both greater than
9383 or equal to the minimum value of the range and less than or equal to
9384 the maximum value of the range. */
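  /* For instance, for case values 3..10 the caller has rewritten the index
     as i - 3 with RANGE 7; then (unsigned) (i - 3) > 7 holds both for
     i < 3, where the subtraction wraps to a huge value, and for i > 10,
     so a single unsigned comparison replaces two signed ones.  */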
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
9389 /* If index is in range, it must fit in Pmode.
9390 Convert to Pmode so we can index with it. */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
9394 /* Don't let a MEM slip through, because then INDEX that comes
9395 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9396 and break_out_memory_refs will go to work on it and mess it up. */
9397 #ifdef PIC_CASE_VECTOR_ADDRESS
9398 if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
9402 /* If flag_force_addr were to affect this address
9403 it could interfere with the tricky assumptions made
9404 about addresses that contain label-refs,
9405 which may be valid only very near the tablejump itself. */
9406 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9407 GET_MODE_SIZE, because this indicates how large insns are. The other
9408 uses should all be Pmode, because they are addresses. This code
9409 could fail if addresses and insns are not the same size. */
9410 index = gen_rtx_PLUS (Pmode,
9411 gen_rtx_MULT (Pmode, index,
9412 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9413 gen_rtx_LABEL_REF (Pmode, table_label));
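  /* E.g. with a four-byte CASE_VECTOR_MODE, entry I of the table lives at
     table_label + 4*I, exactly the (plus (mult index 4) (label_ref ...))
     form built above.  */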
9414 #ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
9420 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9421 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9422 convert_move (temp, vector, 0);
9424 emit_jump_insn (gen_tablejump (temp, table_label));
9426 /* If we are generating PIC code or if the table is PC-relative, the
9427 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9434 rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;
9441 index_expr = fold_build2 (MINUS_EXPR, index_type,
9442 fold_convert (index_type, index_expr),
9443 fold_convert (index_type, minval));
9444 index = expand_normal (index_expr);
9445 do_pending_stack_adjust ();
9447 do_tablejump (index, TYPE_MODE (index_type),
9448 convert_modes (TYPE_MODE (index_type),
9449 TYPE_MODE (TREE_TYPE (range)),
9450 expand_normal (range),
9451 TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
9456 /* Nonzero if the mode is a valid vector mode for this architecture.
9457 This returns nonzero even if there is no hardware support for the
9458 vector mode, but we can emulate with narrower modes. */
int
vector_mode_valid_p (enum machine_mode mode)
{
9463 enum mode_class class = GET_MODE_CLASS (mode);
9464 enum machine_mode innermode;
9466 /* Doh! What's going on? */
9467 if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;
9471 /* Hardware support. Woo hoo! */
  if (targetm.vector_mode_supported_p (mode))
    return 1;
9475 innermode = GET_MODE_INNER (mode);
9477 /* We should probably return 1 if requesting V4DI and we have no DI,
9478 but we have V2DI, but this is probably very unlikely. */
9480 /* If we have support for the inner mode, we can safely emulate it.
   We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
9485 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;
9494 mode = TYPE_MODE (TREE_TYPE (exp));
9496 if (initializer_zerop (exp))
9497 return CONST0_RTX (mode);
9499 units = GET_MODE_NUNITS (mode);
9500 inner = GET_MODE_INNER (mode);
9502 v = rtvec_alloc (units);
9504 link = TREE_VECTOR_CST_ELTS (exp);
9505 for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);
9509 if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }
9518 /* Initialize remaining elements to 0. */
9519 for (; i < units; ++i)
9520 RTVEC_ELT (v, i) = CONST0_RTX (inner);
  return gen_rtx_CONST_VECTOR (mode, v);
}
9524 #include "gt-expr.h"