1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3    2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
25 #include "coretypes.h"
33 #include "hard-reg-set.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
45 #include "typeclass.h"
48 #include "langhooks.h"
51 #include "tree-iterator.h"
52 #include "tree-pass.h"
53 #include "tree-flow.h"
57 /* Decide whether a function's arguments should be processed
58 from first to last or from last to first.
60 They should if the stack and args grow in opposite directions, but
61 only if we have push insns. */
65 #ifndef PUSH_ARGS_REVERSED
66 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
67 #define PUSH_ARGS_REVERSED /* If it's last to first. */
73 #ifndef STACK_PUSH_CODE
74 #ifdef STACK_GROWS_DOWNWARD
75 #define STACK_PUSH_CODE PRE_DEC
77 #define STACK_PUSH_CODE PRE_INC
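/* Editor's illustration (not part of the original source): on a target
   where the stack grows downward, STACK_PUSH_CODE is PRE_DEC, so a
   single-word push is emitted roughly as

     (set (mem:SI (pre_dec (reg sp))) (reg:SI X))

   i.e. the stack pointer is adjusted before the store; PRE_INC is the
   mirror image for upward-growing stacks.  */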
82 /* If this is nonzero, we do not bother generating VOLATILE
83 around volatile memory references, and we are willing to
84 output indirect addresses. If cse is to follow, we reject
85 indirect addresses so a useful potential cse is generated;
86 if it is used only once, instruction combination will produce
87 the same indirect address eventually. */
90 /* This structure is used by move_by_pieces to describe the move to be performed. */
101 int explicit_inc_from;
102 unsigned HOST_WIDE_INT len;
103 HOST_WIDE_INT offset;
107 /* This structure is used by store_by_pieces to describe the clear to be performed. */
110 struct store_by_pieces
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
123 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
126 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
127 struct move_by_pieces *);
128 static bool block_move_libcall_safe_for_call_parm (void);
129 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
130 static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
138 static rtx clear_storage_via_libcall (rtx, rtx, bool);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
151 static int is_aligning_offset (tree, tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179 < (unsigned int) MOVE_RATIO)
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) CLEAR_RATIO)
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO)
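/* Editor's illustration (hypothetical numbers, not from the original
   source): on a target with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 3,
   a 16-byte copy of word-aligned operands needs two DImode moves, so
   MOVE_BY_PIECES_P (16, 64) is true and the inline move is chosen over
   a library call.  */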
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202 /* This array records the insn_code of insns to perform block sets. */
203 enum insn_code setmem_optab[NUM_MACHINE_MODES];
205 /* These arrays record the insn_code of three different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
209 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
211 /* Synchronization primitives. */
212 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
213 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
214 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
215 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
216 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
217 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
218 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
231 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
232 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
233 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
235 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
237 #ifndef SLOW_UNALIGNED_ACCESS
238 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
241 /* This is run once per compilation to set up which modes can be used
242 directly in memory and to initialize the block move optab. */
245 init_expr_once (void)
248 enum machine_mode mode;
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
256 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
257 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
259 /* A scratch register we can modify in-place below to avoid
260 useless RTL allocations. */
261 reg = gen_rtx_REG (VOIDmode, -1);
263 insn = rtx_alloc (INSN);
264 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
265 PATTERN (insn) = pat;
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
272 direct_load[(int) mode] = direct_store[(int) mode] = 0;
273 PUT_MODE (mem, mode);
274 PUT_MODE (mem1, mode);
275 PUT_MODE (reg, mode);
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
285 if (! HARD_REGNO_MODE_OK (regno, mode))
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
312 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
315 mode = GET_MODE_WIDER_MODE (mode))
317 enum machine_mode srcmode;
318 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
319 srcmode = GET_MODE_WIDER_MODE (srcmode))
323 ic = can_extend_p (mode, srcmode, 0);
324 if (ic == CODE_FOR_nothing)
327 PUT_MODE (mem, srcmode);
329 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
330 float_extend_from_mem[mode][srcmode] = true;
335 /* This is run at the start of compiling a function. */
340 cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
343 /* Copy data from FROM to TO, where the machine modes are not the same.
344 Both modes may be integer, or both may be floating.
345 UNSIGNEDP should be nonzero if FROM is an unsigned type.
346 This causes zero-extension instead of sign-extension. */
349 convert_move (rtx to, rtx from, int unsignedp)
351 enum machine_mode to_mode = GET_MODE (to);
352 enum machine_mode from_mode = GET_MODE (from);
353 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
354 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
358 /* rtx code for making an equivalent value. */
359 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
360 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
363 gcc_assert (to_real == from_real);
365 /* If the source and destination are already the same, then there's nothing to do. */
370 /* If FROM is a SUBREG that indicates that we have already done at least
371    the required extension, strip it. We don't handle such SUBREGs as TO here. */
374 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
375 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
376 >= GET_MODE_SIZE (to_mode))
377 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
378 from = gen_lowpart (to_mode, from), from_mode = to_mode;
380 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
382 if (to_mode == from_mode
383 || (from_mode == VOIDmode && CONSTANT_P (from)))
385 emit_move_insn (to, from);
389 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
391 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
393 if (VECTOR_MODE_P (to_mode))
394 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
396 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
398 emit_move_insn (to, from);
402 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
404 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
405 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
414 gcc_assert ((GET_MODE_PRECISION (from_mode)
415 != GET_MODE_PRECISION (to_mode))
416 || (DECIMAL_FLOAT_MODE_P (from_mode)
417 != DECIMAL_FLOAT_MODE_P (to_mode)));
419 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
420 /* Conversion between decimal float and binary float, same size. */
421 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
422 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
427 /* Try converting directly if the insn is supported. */
429 code = tab->handlers[to_mode][from_mode].insn_code;
430 if (code != CODE_FOR_nothing)
432 emit_unop_insn (code, to, from,
433 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
437 /* Otherwise use a libcall. */
438 libcall = tab->handlers[to_mode][from_mode].libfunc;
440 /* Is this conversion implemented yet? */
441 gcc_assert (libcall);
444 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
446 insns = get_insns ();
448 emit_libcall_block (insns, to, value,
449 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
451 : gen_rtx_FLOAT_EXTEND (to_mode, from));
455 /* Handle pointer conversion. */ /* SPEE 900220. */
456 /* Targets are expected to provide conversion insns between PxImode and
457 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
458 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
460 enum machine_mode full_mode
461 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
463 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
464 != CODE_FOR_nothing);
466 if (full_mode != from_mode)
467 from = convert_to_mode (full_mode, from, unsignedp);
468 emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
472 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
475 enum machine_mode full_mode
476 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
478 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
479 != CODE_FOR_nothing);
481 if (to_mode == full_mode)
483 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
488 new_from = gen_reg_rtx (full_mode);
489 emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
490 new_from, from, UNKNOWN);
492 /* else proceed to integer conversions below. */
493 from_mode = full_mode;
497 /* Now both modes are integers. */
499 /* Handle expanding beyond a word. */
500 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
501 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
508 enum machine_mode lowpart_mode;
509 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
511 /* Try converting directly if the insn is supported. */
512 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
515 /* If FROM is a SUBREG, put it into a register. Do this
516 so that we always generate the same set of insns for
517 better cse'ing; if an intermediate assignment occurred,
518 we won't be doing the operation directly on the SUBREG. */
519 if (optimize > 0 && GET_CODE (from) == SUBREG)
520 from = force_reg (from_mode, from);
521 emit_unop_insn (code, to, from, equiv_code);
524 /* Next, try converting via full word. */
525 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
526 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
527 != CODE_FOR_nothing))
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
535 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
536 emit_unop_insn (code, to,
537 gen_lowpart (word_mode, to), equiv_code);
541 /* No special multiword conversion insn; do it by hand. */
544 /* Since we will turn this into a no conflict block, we must ensure
545 that the source does not overlap the target. */
547 if (reg_overlap_mentioned_p (to, from))
548 from = force_reg (from_mode, from);
550 /* Get a copy of FROM widened to a word, if necessary. */
551 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
552 lowpart_mode = word_mode;
554 lowpart_mode = from_mode;
556 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
558 lowpart = gen_lowpart (lowpart_mode, to);
559 emit_move_insn (lowpart, lowfrom);
561 /* Compute the value to put in each remaining word. */
563 fill_value = const0_rtx;
568 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
569 && STORE_FLAG_VALUE == -1)
571 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
573 fill_value = gen_reg_rtx (word_mode);
574 emit_insn (gen_slt (fill_value));
580 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
581 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
583 fill_value = convert_to_mode (word_mode, fill_value, 1);
587 /* Fill the remaining words. */
588 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
590 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
591 rtx subword = operand_subword (to, index, 1, to_mode);
593 gcc_assert (subword);
595 if (fill_value != subword)
596 emit_move_insn (subword, fill_value);
599 insns = get_insns ();
602 emit_no_conflict_block (insns, to, from, NULL_RTX,
603 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
607 /* Truncating multi-word to a word or less. */
608 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
609 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
612 && ! MEM_VOLATILE_P (from)
613 && direct_load[(int) to_mode]
614 && ! mode_dependent_address_p (XEXP (from, 0)))
616 || GET_CODE (from) == SUBREG))
617 from = force_reg (from_mode, from);
618 convert_move (to, gen_lowpart (word_mode, from), 0);
622 /* Now follow all the conversions between integers
623 no more than a word long. */
625 /* For truncation, usually we can just refer to FROM in a narrower mode. */
626 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (from_mode)))
631 && ! MEM_VOLATILE_P (from)
632 && direct_load[(int) to_mode]
633 && ! mode_dependent_address_p (XEXP (from, 0)))
635 || GET_CODE (from) == SUBREG))
636 from = force_reg (from_mode, from);
637 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
638 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
639 from = copy_to_reg (from);
640 emit_move_insn (to, gen_lowpart (to_mode, from));
644 /* Handle extension. */
645 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
647 /* Convert directly if that works. */
648 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
651 emit_unop_insn (code, to, from, equiv_code);
656 enum machine_mode intermediate;
660 /* Search for a mode to convert via. */
661 for (intermediate = from_mode; intermediate != VOIDmode;
662 intermediate = GET_MODE_WIDER_MODE (intermediate))
663 if (((can_extend_p (to_mode, intermediate, unsignedp)
665 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
666 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
667 GET_MODE_BITSIZE (intermediate))))
668 && (can_extend_p (intermediate, from_mode, unsignedp)
669 != CODE_FOR_nothing))
671 convert_move (to, convert_to_mode (intermediate, from,
672 unsignedp), unsignedp);
676 /* No suitable intermediate mode.
677 Generate what we need with shifts. */
678 shift_amount = build_int_cst (NULL_TREE,
679 GET_MODE_BITSIZE (to_mode)
680 - GET_MODE_BITSIZE (from_mode));
681 from = gen_lowpart (to_mode, force_reg (from_mode, from));
682 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
684 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
687 emit_move_insn (to, tmp);
692 /* Support special truncate insns for certain modes. */
693 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
695 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
700 /* Handle truncation of volatile memrefs, and so on;
701 the things that couldn't be truncated directly,
702 and for which there was no special instruction.
704 ??? Code above formerly short-circuited this, for most integer
705 mode pairs, with a force_reg in from_mode followed by a recursive
706 call to this routine. Appears always to have been wrong. */
707 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
709 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
710 emit_move_insn (to, temp);
714 /* Mode combination is not recognized. */
718 /* Return an rtx for a value that would result
719 from converting X to mode MODE.
720 Both X and MODE may be floating, or both integer.
721 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
723 or by copying to a new temporary with conversion. */
726 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
728 return convert_modes (mode, VOIDmode, x, unsignedp);
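/* Editor's example (illustrative, not part of the original source):
   widening an SImode value X to DImode with sign extension would be
   written as

     rtx wide = convert_to_mode (DImode, x, 0);

   passing 1 for UNSIGNEDP requests zero extension instead.  */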
731 /* Return an rtx for a value that would result
732 from converting X from mode OLDMODE to mode MODE.
733 Both modes may be floating, or both integer.
734 UNSIGNEDP is nonzero if X is an unsigned value.
736 This can be done by referring to a part of X in place
737 or by copying to a new temporary with conversion.
739 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
742 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
746 /* If FROM is a SUBREG that indicates that we have already done at least
747 the required extension, strip it. */
749 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
750 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
751 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
752 x = gen_lowpart (mode, x);
754 if (GET_MODE (x) != VOIDmode)
755 oldmode = GET_MODE (x);
760 /* There is one case that we must handle specially: If we are converting
761 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
762 we are to interpret the constant as unsigned, gen_lowpart will do
763    the wrong thing if the constant appears negative. What we want to do is
764 make the high-order word of the constant zero, not all ones. */
766 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
767 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
768 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
770 HOST_WIDE_INT val = INTVAL (x);
772 if (oldmode != VOIDmode
773 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We need to zero extend VAL. */
778 val &= ((HOST_WIDE_INT) 1 << width) - 1;
781 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
784 /* We can do this with a gen_lowpart if both desired and current modes
785 are integer, and this is either a constant integer, a register, or a
786 non-volatile MEM. Except for the constant case where MODE is no
787 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
789 if ((GET_CODE (x) == CONST_INT
790 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
791 || (GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_CLASS (oldmode) == MODE_INT
793 && (GET_CODE (x) == CONST_DOUBLE
794 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
795 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
796 && direct_load[(int) mode])
798 && (! HARD_REGISTER_P (x)
799 || HARD_REGNO_MODE_OK (REGNO (x), mode))
800 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
801 GET_MODE_BITSIZE (GET_MODE (x)))))))))
803 /* ??? If we don't know OLDMODE, we have to assume here that
804 X does not need sign- or zero-extension. This may not be
805 the case, but it's the best we can do. */
806 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
807 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
809 HOST_WIDE_INT val = INTVAL (x);
810 int width = GET_MODE_BITSIZE (oldmode);
812 /* We must sign or zero-extend in this case. Start by
813 zero-extending, then sign extend if we need to. */
814 val &= ((HOST_WIDE_INT) 1 << width) - 1;
816 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
817 val |= (HOST_WIDE_INT) (-1) << width;
819 return gen_int_mode (val, mode);
822 return gen_lowpart (mode, x);
825 /* Converting from integer constant into mode is always equivalent to a subreg operation. */
827 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
829 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
830 return simplify_gen_subreg (mode, x, oldmode, 0);
833 temp = gen_reg_rtx (mode);
834 convert_move (temp, x, unsignedp);
838 /* STORE_MAX_PIECES is the number of bytes at a time that we can
839 store efficiently. Due to internal GCC limitations, this is
840 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
841 for an immediate constant. */
843 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
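/* Editor's note (worked example, not in the original source): on a
   host with a 64-bit HOST_WIDE_INT, 2 * sizeof (HOST_WIDE_INT) == 16,
   so STORE_MAX_PIECES is at most 16 bytes regardless of how large
   MOVE_MAX_PIECES is.  */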
845 /* Determine whether the LEN bytes can be moved by using several move
846    instructions. Return nonzero if a call to move_by_pieces should succeed. */
850 can_move_by_pieces (unsigned HOST_WIDE_INT len,
851 unsigned int align ATTRIBUTE_UNUSED)
853 return MOVE_BY_PIECES_P (len, align);
856 /* Generate several move instructions to copy LEN bytes from block FROM to
857 block TO. (These are MEM rtx's with BLKmode).
859 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
860 used to push FROM to the stack.
862 ALIGN is maximum stack alignment we can assume.
864 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
865    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
869 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
870 unsigned int align, int endp)
872 struct move_by_pieces data;
873 rtx to_addr, from_addr = XEXP (from, 0);
874 unsigned int max_size = MOVE_MAX_PIECES + 1;
875 enum machine_mode mode = VOIDmode, tmode;
876 enum insn_code icode;
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
881 data.from_addr = from_addr;
884 to_addr = XEXP (to, 0);
887 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
888 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
890 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
897 #ifdef STACK_GROWS_DOWNWARD
903 data.to_addr = to_addr;
906 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
907 || GET_CODE (from_addr) == POST_INC
908 || GET_CODE (from_addr) == POST_DEC);
910 data.explicit_inc_from = 0;
911 data.explicit_inc_to = 0;
912 if (data.reverse) data.offset = len;
915 /* If copying requires more than two move insns,
916 copy addresses to registers (to make displacements shorter)
917 and use post-increment if available. */
918 if (!(data.autinc_from && data.autinc_to)
919 && move_by_pieces_ninsns (len, align, max_size) > 2)
921 /* Find the mode of the largest move... */
922 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
923 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
924 if (GET_MODE_SIZE (tmode) < max_size)
927 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
929 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
930 data.autinc_from = 1;
931 data.explicit_inc_from = -1;
933 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
935 data.from_addr = copy_addr_to_reg (from_addr);
936 data.autinc_from = 1;
937 data.explicit_inc_from = 1;
939 if (!data.autinc_from && CONSTANT_P (from_addr))
940 data.from_addr = copy_addr_to_reg (from_addr);
941 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
943 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
945 data.explicit_inc_to = -1;
947 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
949 data.to_addr = copy_addr_to_reg (to_addr);
951 data.explicit_inc_to = 1;
953 if (!data.autinc_to && CONSTANT_P (to_addr))
954 data.to_addr = copy_addr_to_reg (to_addr);
957 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
958 if (align >= GET_MODE_ALIGNMENT (tmode))
959 align = GET_MODE_ALIGNMENT (tmode);
962 enum machine_mode xmode;
964 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
966 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
967 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
968 || SLOW_UNALIGNED_ACCESS (tmode, align))
971 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
979 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
980 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
981 if (GET_MODE_SIZE (tmode) < max_size)
984 if (mode == VOIDmode)
987 icode = mov_optab->handlers[(int) mode].insn_code;
988 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
989 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
991 max_size = GET_MODE_SIZE (mode);
994 /* The code above should have handled everything. */
995 gcc_assert (!data.len);
1001 gcc_assert (!data.reverse);
1006 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1007 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1009 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1012 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1019 to1 = adjust_address (data.to, QImode, data.offset);
1027 /* Return number of insns required to move L bytes by pieces.
1028 ALIGN (in bits) is maximum alignment we can assume. */
1030 static unsigned HOST_WIDE_INT
1031 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1032 unsigned int max_size)
1034 unsigned HOST_WIDE_INT n_insns = 0;
1035 enum machine_mode tmode;
1037 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1038 if (align >= GET_MODE_ALIGNMENT (tmode))
1039 align = GET_MODE_ALIGNMENT (tmode);
1042 enum machine_mode tmode, xmode;
1044 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1046 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1047 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1048 || SLOW_UNALIGNED_ACCESS (tmode, align))
1051 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1054 while (max_size > 1)
1056 enum machine_mode mode = VOIDmode;
1057 enum insn_code icode;
1059 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1060 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1061 if (GET_MODE_SIZE (tmode) < max_size)
1064 if (mode == VOIDmode)
1067 icode = mov_optab->handlers[(int) mode].insn_code;
1068 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1069 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1071 max_size = GET_MODE_SIZE (mode);
1078 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1079 with move instructions for mode MODE. GENFUN is the gen_... function
1080 to make a move insn for that mode. DATA has all the other info. */
1083 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1084 struct move_by_pieces *data)
1086 unsigned int size = GET_MODE_SIZE (mode);
1087 rtx to1 = NULL_RTX, from1;
1089 while (data->len >= size)
1092 data->offset -= size;
1096 if (data->autinc_to)
1097 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1100 to1 = adjust_address (data->to, mode, data->offset);
1103 if (data->autinc_from)
1104 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1107 from1 = adjust_address (data->from, mode, data->offset);
1109 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 GEN_INT (-(HOST_WIDE_INT)size)));
1112 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1113 emit_insn (gen_add2_insn (data->from_addr,
1114 GEN_INT (-(HOST_WIDE_INT)size)));
1117 emit_insn ((*genfun) (to1, from1));
1120 #ifdef PUSH_ROUNDING
1121 emit_single_push_insn (mode, from1, NULL);
1127 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1128 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1129 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1130 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1132 if (! data->reverse)
1133 data->offset += size;
1139 /* Emit code to move a block Y to a block X. This may be done with
1140 string-move instructions, with multiple scalar move instructions,
1141 or with a library call.
1143 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1144 SIZE is an rtx that says how long they are.
1145 ALIGN is the maximum alignment we can assume they have.
1146 METHOD describes what kind of copy this is, and what mechanisms may be used.
1148    Return the address of the new block, if memcpy is called and returns it, 0 otherwise. */
1152 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1160 case BLOCK_OP_NORMAL:
1161 case BLOCK_OP_TAILCALL:
1162 may_use_call = true;
1165 case BLOCK_OP_CALL_PARM:
1166 may_use_call = block_move_libcall_safe_for_call_parm ();
1168 /* Make inhibit_defer_pop nonzero around the library call
1169 to force it to pop the arguments right away. */
1173 case BLOCK_OP_NO_LIBCALL:
1174 may_use_call = false;
1181 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1183 gcc_assert (MEM_P (x));
1184 gcc_assert (MEM_P (y));
1187 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1188 block copy is more efficient for other large modes, e.g. DCmode. */
1189 x = adjust_address (x, BLKmode, 0);
1190 y = adjust_address (y, BLKmode, 0);
1192 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1193 can be incorrect is coming from __builtin_memcpy. */
1194 if (GET_CODE (size) == CONST_INT)
1196 if (INTVAL (size) == 0)
1199 x = shallow_copy_rtx (x);
1200 y = shallow_copy_rtx (y);
1201 set_mem_size (x, size);
1202 set_mem_size (y, size);
1205 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1206 move_by_pieces (x, y, INTVAL (size), align, 0);
1207 else if (emit_block_move_via_movmem (x, y, size, align))
1209 else if (may_use_call)
1210 retval = emit_block_move_via_libcall (x, y, size,
1211 method == BLOCK_OP_TAILCALL);
1213 emit_block_move_via_loop (x, y, size, align);
1215 if (method == BLOCK_OP_CALL_PARM)
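/* Editor's example (illustrative, not part of the original source):
   a typical call copies N constant bytes between two BLKmode MEMs:

     retval = emit_block_move (x, y, GEN_INT (n), BLOCK_OP_NORMAL);

   where a zero return value means no memcpy libcall was emitted.  */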
1221 /* A subroutine of emit_block_move. Returns true if calling the
1222 block move libcall will not clobber any parameters which may have
1223 already been placed on the stack. */
1226 block_move_libcall_safe_for_call_parm (void)
1228 /* If arguments are pushed on the stack, then they're safe. */
1232 /* If registers go on the stack anyway, any argument is sure to clobber
1233 an outgoing argument. */
1234 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1236 tree fn = emit_block_move_libcall_fn (false);
1238 if (REG_PARM_STACK_SPACE (fn) != 0)
1243   /* If any argument goes in memory, then it might clobber an outgoing argument. */
1246 CUMULATIVE_ARGS args_so_far;
1249 fn = emit_block_move_libcall_fn (false);
1250 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1252 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1253 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1255 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1256 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1257 if (!tmp || !REG_P (tmp))
1259 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1261 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1267 /* A subroutine of emit_block_move. Expand a movmem pattern;
1268 return true if successful. */
1271 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1273 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1274 int save_volatile_ok = volatile_ok;
1275 enum machine_mode mode;
1277 /* Since this is a move insn, we don't care about volatility. */
1280 /* Try the most limited insn first, because there's no point
1281 including more than one in the machine description unless
1282 the more limited one has some advantage. */
1284 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1285 mode = GET_MODE_WIDER_MODE (mode))
1287 enum insn_code code = movmem_optab[(int) mode];
1288 insn_operand_predicate_fn pred;
1290 if (code != CODE_FOR_nothing
1291 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1292 here because if SIZE is less than the mode mask, as it is
1293 returned by the macro, it will definitely be less than the
1294 actual mode mask. */
1295 && ((GET_CODE (size) == CONST_INT
1296 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1297 <= (GET_MODE_MASK (mode) >> 1)))
1298 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1299 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1300 || (*pred) (x, BLKmode))
1301 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1302 || (*pred) (y, BLKmode))
1303 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1304 || (*pred) (opalign, VOIDmode)))
1307 rtx last = get_last_insn ();
1310 op2 = convert_to_mode (mode, size, 1);
1311 pred = insn_data[(int) code].operand[2].predicate;
1312 if (pred != 0 && ! (*pred) (op2, mode))
1313 op2 = copy_to_mode_reg (mode, op2);
1315 /* ??? When called via emit_block_move_for_call, it'd be
1316 nice if there were some way to inform the backend, so
1317 that it doesn't fail the expansion because it thinks
1318 emitting the libcall would be more efficient. */
1320 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1324 volatile_ok = save_volatile_ok;
1328 delete_insns_since (last);
1332 volatile_ok = save_volatile_ok;
1336 /* A subroutine of emit_block_move. Expand a call to memcpy.
1337 Return the return value from memcpy, 0 otherwise. */
1340 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1342 rtx dst_addr, src_addr;
1343 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1344 enum machine_mode size_mode;
1347 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1348      pseudos. We can then place those new pseudos into a VAR_DECL and use them later. */
1351 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1352 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1354 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1355 src_addr = convert_memory_address (ptr_mode, src_addr);
1357 dst_tree = make_tree (ptr_type_node, dst_addr);
1358 src_tree = make_tree (ptr_type_node, src_addr);
1360 size_mode = TYPE_MODE (sizetype);
1362 size = convert_to_mode (size_mode, size, 1);
1363 size = copy_to_mode_reg (size_mode, size);
1365 /* It is incorrect to use the libcall calling conventions to call
1366 memcpy in this context. This could be a user call to memcpy and
1367 the user may wish to examine the return value from memcpy. For
1368 targets where libcalls and normal calls have different conventions
1369 for returning pointers, we could end up generating incorrect code. */
1371 size_tree = make_tree (sizetype, size);
1373 fn = emit_block_move_libcall_fn (true);
1374 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1375 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1376 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1378 /* Now we have to build up the CALL_EXPR itself. */
1379 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1380 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1381 call_expr, arg_list, NULL_TREE);
1382 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1384 retval = expand_normal (call_expr);
1389 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1390 for the function we use for block copies. The first time FOR_CALL
1391 is true, we call assemble_external. */
1393 static GTY(()) tree block_move_fn;
1396 init_block_move_fn (const char *asmspec)
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1407 fn = build_decl (FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
1412 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1413 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1419 set_user_assembler_name (block_move_fn, asmspec);
1423 emit_block_move_libcall_fn (int for_call)
1425 static bool emitted_extern;
1428 init_block_move_fn (NULL);
1430 if (for_call && !emitted_extern)
1432 emitted_extern = true;
1433 make_decl_rtl (block_move_fn);
1434 assemble_external (block_move_fn);
1437 return block_move_fn;
1440 /* A subroutine of emit_block_move. Copy the data via an explicit
1441 loop. This is used only when libcalls are forbidden. */
1442 /* ??? It'd be nice to copy in hunks larger than QImode. */
1445 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1446 unsigned int align ATTRIBUTE_UNUSED)
1448 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1449 enum machine_mode iter_mode;
1451 iter_mode = GET_MODE (size);
1452 if (iter_mode == VOIDmode)
1453 iter_mode = word_mode;
1455 top_label = gen_label_rtx ();
1456 cmp_label = gen_label_rtx ();
1457 iter = gen_reg_rtx (iter_mode);
1459 emit_move_insn (iter, const0_rtx);
1461 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1462 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1463 do_pending_stack_adjust ();
1465 emit_jump (cmp_label);
1466 emit_label (top_label);
1468 tmp = convert_modes (Pmode, iter_mode, iter, true);
1469 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1470 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1471 x = change_address (x, QImode, x_addr);
1472 y = change_address (y, QImode, y_addr);
1474 emit_move_insn (x, y);
1476 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1477 true, OPTAB_LIB_WIDEN);
1479 emit_move_insn (iter, tmp);
1481 emit_label (cmp_label);
1483 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
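/* Editor's sketch (illustrative, not in the original source): the loop
   emitted above has this shape:

       iter = 0; goto cmp;
     top:
       *(x + iter) = *(y + iter);    -- one QImode move per iteration
       iter += 1;
     cmp:
       if (iter < size) goto top;

   which is why this path is used only when libcalls are forbidden.  */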
1487 /* Copy all or part of a value X into registers starting at REGNO.
1488 The number of registers to be filled is NREGS. */
1491 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1494 #ifdef HAVE_load_multiple
1502 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1503 x = validize_mem (force_const_mem (mode, x));
1505 /* See if the machine can do this with a load multiple insn. */
1506 #ifdef HAVE_load_multiple
1507 if (HAVE_load_multiple)
1509 last = get_last_insn ();
1510 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1518 delete_insns_since (last);
1522 for (i = 0; i < nregs; i++)
1523 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1524 operand_subword_force (x, i, mode));
1527 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1528 The number of registers to be filled is NREGS. */
1531 move_block_from_reg (int regno, rtx x, int nregs)
1538 /* See if the machine can do this with a store multiple insn. */
1539 #ifdef HAVE_store_multiple
1540 if (HAVE_store_multiple)
1542 rtx last = get_last_insn ();
1543 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1551 delete_insns_since (last);
1555 for (i = 0; i < nregs; i++)
1557 rtx tem = operand_subword (x, i, 1, BLKmode);
1561 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1565 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1566 ORIG, where ORIG is a non-consecutive group of registers represented by
1567 a PARALLEL. The clone is identical to the original except in that the
1568 original set of registers is replaced by a new set of pseudo registers.
1569 The new set has the same modes as the original set. */
1572 gen_group_rtx (rtx orig)
1577 gcc_assert (GET_CODE (orig) == PARALLEL);
1579 length = XVECLEN (orig, 0);
1580 tmps = alloca (sizeof (rtx) * length);
1582 /* Skip a NULL entry in first slot. */
1583 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1588 for (; i < length; i++)
1590 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1591 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1593 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1596 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
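/* Editor's illustration (hypothetical registers, not in the original
   source): a group PARALLEL as built here pairs each register with its
   byte offset into the value, e.g.

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])  */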
1599 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1600 except that values are placed in TMPS[i], and must later be moved
1601 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1604 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1608 enum machine_mode m = GET_MODE (orig_src);
1610 gcc_assert (GET_CODE (dst) == PARALLEL);
1613 && !SCALAR_INT_MODE_P (m)
1614 && !MEM_P (orig_src)
1615 && GET_CODE (orig_src) != CONCAT)
1617 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1618 if (imode == BLKmode)
1619 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1621 src = gen_reg_rtx (imode);
1622 if (imode != BLKmode)
1623 src = gen_lowpart (GET_MODE (orig_src), src);
1624 emit_move_insn (src, orig_src);
1625 /* ...and back again. */
1626 if (imode != BLKmode)
1627 src = gen_lowpart (imode, src);
1628 emit_group_load_1 (tmps, dst, src, type, ssize);
1632 /* Check for a NULL entry, used to indicate that the parameter goes
1633 both on the stack and in registers. */
1634 if (XEXP (XVECEXP (dst, 0, 0), 0))
1639 /* Process the pieces. */
1640 for (i = start; i < XVECLEN (dst, 0); i++)
1642 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1643 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1644 unsigned int bytelen = GET_MODE_SIZE (mode);
1647 /* Handle trailing fragments that run over the size of the struct. */
1648 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1650 /* Arrange to shift the fragment to where it belongs.
1651 extract_bit_field loads to the lsb of the reg. */
1653 #ifdef BLOCK_REG_PADDING
1654 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1655 == (BYTES_BIG_ENDIAN ? upward : downward)
1660 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1661 bytelen = ssize - bytepos;
1662 gcc_assert (bytelen > 0);
1665 /* If we won't be loading directly from memory, protect the real source
1666 from strange tricks we might play; but make sure that the source can
1667 be loaded directly into the destination. */
1669 if (!MEM_P (orig_src)
1670 && (!CONSTANT_P (orig_src)
1671 || (GET_MODE (orig_src) != mode
1672 && GET_MODE (orig_src) != VOIDmode)))
1674 if (GET_MODE (orig_src) == VOIDmode)
1675 src = gen_reg_rtx (mode);
1677 src = gen_reg_rtx (GET_MODE (orig_src));
1679 emit_move_insn (src, orig_src);
1682 /* Optimize the access just a bit. */
1684 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1685 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1686 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1687 && bytelen == GET_MODE_SIZE (mode))
1689 tmps[i] = gen_reg_rtx (mode);
1690 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1692 else if (COMPLEX_MODE_P (mode)
1693 && GET_MODE (src) == mode
1694 && bytelen == GET_MODE_SIZE (mode))
1695 /* Let emit_move_complex do the bulk of the work. */
1697 else if (GET_CODE (src) == CONCAT)
1699 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1700 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1702 if ((bytepos == 0 && bytelen == slen0)
1703 || (bytepos != 0 && bytepos + bytelen <= slen))
1705 /* The following assumes that the concatenated objects all
1706 have the same size. In this case, a simple calculation
1707      can be used to determine the object and the bit field to be extracted. */
1709 tmps[i] = XEXP (src, bytepos / slen0);
1710 if (! CONSTANT_P (tmps[i])
1711 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1712 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1713 (bytepos % slen0) * BITS_PER_UNIT,
1714 1, NULL_RTX, mode, mode);
1720 gcc_assert (!bytepos);
1721 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1722 emit_move_insn (mem, src);
1723 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1724 0, 1, NULL_RTX, mode, mode);
1727 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1728 SIMD register, which is currently broken. While we get GCC
1729 to emit proper RTL for these cases, let's dump to memory. */
1730 else if (VECTOR_MODE_P (GET_MODE (dst))
1733 int slen = GET_MODE_SIZE (GET_MODE (src));
1736 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1737 emit_move_insn (mem, src);
1738 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1740 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1741 && XVECLEN (dst, 0) > 1)
1742 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1743 else if (CONSTANT_P (src)
1744 || (REG_P (src) && GET_MODE (src) == mode))
1747 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1748 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1752 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1753 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1757 /* Emit code to move a block SRC of type TYPE to a block DST,
1758 where DST is non-consecutive registers represented by a PARALLEL.
1759    SSIZE represents the total size of block ORIG_SRC in bytes, or -1 if not known. */
1763 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1768 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1769 emit_group_load_1 (tmps, dst, src, type, ssize);
1771 /* Copy the extracted pieces into the proper (probable) hard regs. */
1772 for (i = 0; i < XVECLEN (dst, 0); i++)
1774 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1777 emit_move_insn (d, tmps[i]);
1781 /* Similar, but load SRC into new pseudos in a format that looks like
1782 PARALLEL. This can later be fed to emit_group_move to get things
1783 in the right place. */
1786 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1791 vec = rtvec_alloc (XVECLEN (parallel, 0));
1792 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1794 /* Convert the vector to look just like the original PARALLEL, except
1795 with the computed values. */
1796 for (i = 0; i < XVECLEN (parallel, 0); i++)
1798 rtx e = XVECEXP (parallel, 0, i);
1799 rtx d = XEXP (e, 0);
1803 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1804 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1806 RTVEC_ELT (vec, i) = e;
1809 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1812 /* Emit code to move a block SRC to block DST, where SRC and DST are
1813 non-consecutive groups of registers, each represented by a PARALLEL. */
1816 emit_group_move (rtx dst, rtx src)
1820 gcc_assert (GET_CODE (src) == PARALLEL
1821 && GET_CODE (dst) == PARALLEL
1822 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1824 /* Skip first entry if NULL. */
1825 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1826 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1827 XEXP (XVECEXP (src, 0, i), 0));
1830 /* Move a group of registers represented by a PARALLEL into pseudos. */
1833 emit_group_move_into_temps (rtx src)
1835 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1838 for (i = 0; i < XVECLEN (src, 0); i++)
1840 rtx e = XVECEXP (src, 0, i);
1841 rtx d = XEXP (e, 0);
1844 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1845 RTVEC_ELT (vec, i) = e;
1848 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1851 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1852 where SRC is non-consecutive registers represented by a PARALLEL.
1853    SSIZE represents the total size of block ORIG_DST, or -1 if not known. */
1857 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1860 int start, finish, i;
1861 enum machine_mode m = GET_MODE (orig_dst);
1863 gcc_assert (GET_CODE (src) == PARALLEL);
1865 if (!SCALAR_INT_MODE_P (m)
1866 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1868 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1869 if (imode == BLKmode)
1870 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1872 dst = gen_reg_rtx (imode);
1873 emit_group_store (dst, src, type, ssize);
1874 if (imode != BLKmode)
1875 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1876 emit_move_insn (orig_dst, dst);
1880 /* Check for a NULL entry, used to indicate that the parameter goes
1881 both on the stack and in registers. */
1882 if (XEXP (XVECEXP (src, 0, 0), 0))
1886 finish = XVECLEN (src, 0);
1888 tmps = alloca (sizeof (rtx) * finish);
1890 /* Copy the (probable) hard regs into pseudos. */
1891 for (i = start; i < finish; i++)
1893 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1894 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1896 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1897 emit_move_insn (tmps[i], reg);
1903 /* If we won't be storing directly into memory, protect the real destination
1904 from strange tricks we might play. */
1906 if (GET_CODE (dst) == PARALLEL)
1910 /* We can get a PARALLEL dst if there is a conditional expression in
1911 a return statement. In that case, the dst and src are the same,
1912 so no action is necessary. */
1913 if (rtx_equal_p (dst, src))
1916 /* It is unclear if we can ever reach here, but we may as well handle
1917      it. Allocate a temporary, and split this into a store/load to/from the temporary. */
1920 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1921 emit_group_store (temp, src, type, ssize);
1922 emit_group_load (dst, temp, type, ssize);
1925 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1927 enum machine_mode outer = GET_MODE (dst);
1928 enum machine_mode inner;
1929 HOST_WIDE_INT bytepos;
1933 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1934 dst = gen_reg_rtx (outer);
1936 /* Make life a bit easier for combine. */
1937 /* If the first element of the vector is the low part
1938 of the destination mode, use a paradoxical subreg to
1939 initialize the destination. */
1942 inner = GET_MODE (tmps[start]);
1943 bytepos = subreg_lowpart_offset (inner, outer);
1944 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1946 temp = simplify_gen_subreg (outer, tmps[start],
1950 emit_move_insn (dst, temp);
1957 /* If the first element wasn't the low part, try the last. */
1959 && start < finish - 1)
1961 inner = GET_MODE (tmps[finish - 1]);
1962 bytepos = subreg_lowpart_offset (inner, outer);
1963 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1965 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1969 emit_move_insn (dst, temp);
1976 /* Otherwise, simply initialize the result to zero. */
1978 emit_move_insn (dst, CONST0_RTX (outer));
1981 /* Process the pieces. */
1982 for (i = start; i < finish; i++)
1984 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1985 enum machine_mode mode = GET_MODE (tmps[i]);
1986 unsigned int bytelen = GET_MODE_SIZE (mode);
1989 /* Handle trailing fragments that run over the size of the struct. */
1990 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1992 /* store_bit_field always takes its value from the lsb.
1993 Move the fragment to the lsb if it's not already there. */
1995 #ifdef BLOCK_REG_PADDING
1996 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1997 == (BYTES_BIG_ENDIAN ? upward : downward)
2003 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2004 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2005 build_int_cst (NULL_TREE, shift),
2008 bytelen = ssize - bytepos;
2011 if (GET_CODE (dst) == CONCAT)
2013 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2022 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2023 dest = assign_stack_temp (GET_MODE (dest),
2024 GET_MODE_SIZE (GET_MODE (dest)), 0);
2025 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2032 /* Optimize the access just a bit. */
2034 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2035 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2036 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2037 && bytelen == GET_MODE_SIZE (mode))
2038 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2040 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2044 /* Copy from the pseudo into the (probable) hard reg. */
2045 if (orig_dst != dst)
2046 emit_move_insn (orig_dst, dst);
2049 /* Generate code to copy a BLKmode object of TYPE out of a
2050 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2051 is null, a stack temporary is created. TGTBLK is returned.
2053 The purpose of this routine is to handle functions that return
2054 BLKmode structures in registers. Some machines (the PA for example)
2055 want to return all small structures in registers regardless of the
2056 structure's alignment. */
2059 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2061 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2062 rtx src = NULL, dst = NULL;
2063 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2064 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2068 tgtblk = assign_temp (build_qualified_type (type,
2070 | TYPE_QUAL_CONST)),
2072 preserve_temp_slots (tgtblk);
2075 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2076 into a new pseudo which is a full word. */
2078 if (GET_MODE (srcreg) != BLKmode
2079 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2080 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2082 /* If the structure doesn't take up a whole number of words, see whether
2083 SRCREG is padded on the left or on the right. If it's on the left,
2084 set PADDING_CORRECTION to the number of bits to skip.
2086 In most ABIs, the structure will be returned at the least significant end of
2087 the register, which translates to right padding on little-endian
2088 targets and left padding on big-endian targets. The opposite
2089 holds if the structure is returned at the most significant
2090 end of the register. */
2091 if (bytes % UNITS_PER_WORD != 0
2092 && (targetm.calls.return_in_msb (type)
2094 : BYTES_BIG_ENDIAN))
2096 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2098 /* Copy the structure BITSIZE bits at a time.
2100 We could probably emit more efficient code for machines which do not use
2101 strict alignment, but it doesn't seem worth the effort at the current
2102 time.  */
2103 for (bitpos = 0, xbitpos = padding_correction;
2104 bitpos < bytes * BITS_PER_UNIT;
2105 bitpos += bitsize, xbitpos += bitsize)
2107 /* We need a new source operand each time xbitpos is on a
2108 word boundary and when xbitpos == padding_correction
2109 (the first time through). */
2110 if (xbitpos % BITS_PER_WORD == 0
2111 || xbitpos == padding_correction)
2112 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2115 /* We need a new destination operand each time bitpos is on
2116 a word boundary.  */
2117 if (bitpos % BITS_PER_WORD == 0)
2118 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2120 /* Use xbitpos for the source extraction (right justified) and
2121 bitpos for the destination store (left justified).  */
2122 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2123 extract_bit_field (src, bitsize,
2124 xbitpos % BITS_PER_WORD, 1,
2125 NULL_RTX, word_mode, word_mode));
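/* A worked example of the padding correction above (a sketch, assuming
   a big-endian target with BITS_PER_WORD == 32 that returns structures
   at the least significant end of the register): for a 6-byte struct,
   bytes % UNITS_PER_WORD == 2, so

     padding_correction = 32 - (6 % 4) * 8 == 16;

   and the loop then reads source bits starting at xbitpos == 16 while
   storing to the target block starting at bitpos == 0.  */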
2131 /* Add a USE expression for REG to the (possibly empty) list pointed
2132 to by CALL_FUSAGE. REG must denote a hard register. */
2135 use_reg (rtx *call_fusage, rtx reg)
2137 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2140 = gen_rtx_EXPR_LIST (VOIDmode,
2141 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2144 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2145 starting at REGNO. All of these registers must be hard registers. */
2148 use_regs (rtx *call_fusage, int regno, int nregs)
2152 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2154 for (i = 0; i < nregs; i++)
2155 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2158 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2159 PARALLEL REGS. This is for calls that pass values in multiple
2160 non-contiguous locations. The Irix 6 ABI has examples of this. */
2163 use_group_regs (rtx *call_fusage, rtx regs)
2167 for (i = 0; i < XVECLEN (regs, 0); i++)
2169 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2171 /* A NULL entry means the parameter goes both on the stack and in
2172 registers. This can also be a MEM for targets that pass values
2173 partially on the stack and partially in registers. */
2174 if (reg != 0 && REG_P (reg))
2175 use_reg (call_fusage, reg);
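/* A minimal usage sketch of the routines above, assuming a hypothetical
   target where an argument occupies the two consecutive hard registers
   starting at ARG_REGNO (a made-up register number):

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, ARG_REGNO, 2);

   CALL_FUSAGE would then be attached to the call insn so that data-flow
   analysis sees both registers as used by the call.  */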
2180 /* Determine whether the LEN bytes generated by CONSTFUN can be
2181 stored to memory using several move instructions. CONSTFUNDATA is
2182 a pointer which will be passed as argument in every CONSTFUN call.
2183 ALIGN is maximum alignment we can assume. Return nonzero if a
2184 call to store_by_pieces should succeed. */
2187 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2188 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2189 void *constfundata, unsigned int align)
2191 unsigned HOST_WIDE_INT l;
2192 unsigned int max_size;
2193 HOST_WIDE_INT offset = 0;
2194 enum machine_mode mode, tmode;
2195 enum insn_code icode;
2202 if (! STORE_BY_PIECES_P (len, align))
2205 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2206 if (align >= GET_MODE_ALIGNMENT (tmode))
2207 align = GET_MODE_ALIGNMENT (tmode);
2210 enum machine_mode xmode;
2212 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2214 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2215 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2216 || SLOW_UNALIGNED_ACCESS (tmode, align))
2219 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2222 /* We would first store what we can in the largest integer mode, then go to
2223 successively smaller modes. */
2226 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2231 max_size = STORE_MAX_PIECES + 1;
2232 while (max_size > 1)
2234 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2235 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2236 if (GET_MODE_SIZE (tmode) < max_size)
2239 if (mode == VOIDmode)
2242 icode = mov_optab->handlers[(int) mode].insn_code;
2243 if (icode != CODE_FOR_nothing
2244 && align >= GET_MODE_ALIGNMENT (mode))
2246 unsigned int size = GET_MODE_SIZE (mode);
2253 cst = (*constfun) (constfundata, offset, mode);
2254 if (!LEGITIMATE_CONSTANT_P (cst))
2264 max_size = GET_MODE_SIZE (mode);
2267 /* The code above should have handled everything. */
2274 /* Generate several move instructions to store LEN bytes generated by
2275 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2276 pointer which will be passed as argument in every CONSTFUN call.
2277 ALIGN is maximum alignment we can assume.
2278 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2279 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2280 stpcpy.  */
2283 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2284 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2285 void *constfundata, unsigned int align, int endp)
2287 struct store_by_pieces data;
2291 gcc_assert (endp != 2);
2295 gcc_assert (STORE_BY_PIECES_P (len, align));
2296 data.constfun = constfun;
2297 data.constfundata = constfundata;
2300 store_by_pieces_1 (&data, align);
2305 gcc_assert (!data.reverse);
2310 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2311 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2313 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2316 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2323 to1 = adjust_address (data.to, QImode, data.offset);
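/* A minimal sketch of the constfun protocol above, assuming MEM is some
   BLKmode MEM rtx of at least 8 bytes.  The callback mirrors
   clear_by_pieces_1 below:

     static rtx
     zero_piece (void *data ATTRIBUTE_UNUSED,
                 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                 enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (8, zero_piece, NULL, MEM_ALIGN (mem)))
       store_by_pieces (mem, 8, zero_piece, NULL, MEM_ALIGN (mem), 0);  */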
2331 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2332 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2335 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2337 struct store_by_pieces data;
2342 data.constfun = clear_by_pieces_1;
2343 data.constfundata = NULL;
2346 store_by_pieces_1 (&data, align);
2349 /* Callback routine for clear_by_pieces.
2350 Return const0_rtx unconditionally. */
2353 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2354 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2355 enum machine_mode mode ATTRIBUTE_UNUSED)
2360 /* Subroutine of clear_by_pieces and store_by_pieces.
2361 Generate several move instructions to store LEN bytes of block TO. (A MEM
2362 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2365 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2366 unsigned int align ATTRIBUTE_UNUSED)
2368 rtx to_addr = XEXP (data->to, 0);
2369 unsigned int max_size = STORE_MAX_PIECES + 1;
2370 enum machine_mode mode = VOIDmode, tmode;
2371 enum insn_code icode;
2374 data->to_addr = to_addr;
2376 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2377 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2379 data->explicit_inc_to = 0;
2381 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2383 data->offset = data->len;
2385 /* If storing requires more than two move insns,
2386 copy addresses to registers (to make displacements shorter)
2387 and use post-increment if available. */
2388 if (!data->autinc_to
2389 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2391 /* Determine the main mode we'll be using. */
2392 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2393 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2394 if (GET_MODE_SIZE (tmode) < max_size)
2397 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2399 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2400 data->autinc_to = 1;
2401 data->explicit_inc_to = -1;
2404 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2405 && ! data->autinc_to)
2407 data->to_addr = copy_addr_to_reg (to_addr);
2408 data->autinc_to = 1;
2409 data->explicit_inc_to = 1;
2412 if ( !data->autinc_to && CONSTANT_P (to_addr))
2413 data->to_addr = copy_addr_to_reg (to_addr);
2416 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2417 if (align >= GET_MODE_ALIGNMENT (tmode))
2418 align = GET_MODE_ALIGNMENT (tmode);
2421 enum machine_mode xmode;
2423 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2425 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2426 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2427 || SLOW_UNALIGNED_ACCESS (tmode, align))
2430 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2433 /* First store what we can in the largest integer mode, then go to
2434 successively smaller modes. */
2436 while (max_size > 1)
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2443 if (mode == VOIDmode)
2446 icode = mov_optab->handlers[(int) mode].insn_code;
2447 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2448 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2450 max_size = GET_MODE_SIZE (mode);
2453 /* The code above should have handled everything. */
2454 gcc_assert (!data->len);
2457 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2458 with move instructions for mode MODE. GENFUN is the gen_... function
2459 to make a move insn for that mode. DATA has all the other info. */
2462 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2463 struct store_by_pieces *data)
2465 unsigned int size = GET_MODE_SIZE (mode);
2468 while (data->len >= size)
2471 data->offset -= size;
2473 if (data->autinc_to)
2474 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2477 to1 = adjust_address (data->to, mode, data->offset);
2479 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2480 emit_insn (gen_add2_insn (data->to_addr,
2481 GEN_INT (-(HOST_WIDE_INT) size)));
2483 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2484 emit_insn ((*genfun) (to1, cst));
2486 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2487 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2489 if (! data->reverse)
2490 data->offset += size;
2496 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2497 its length in bytes. */
2500 clear_storage (rtx object, rtx size, enum block_op_methods method)
2502 enum machine_mode mode = GET_MODE (object);
2505 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2507 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2508 just move a zero. Otherwise, do this a piece at a time. */
2510 && GET_CODE (size) == CONST_INT
2511 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2513 rtx zero = CONST0_RTX (mode);
2516 emit_move_insn (object, zero);
2520 if (COMPLEX_MODE_P (mode))
2522 zero = CONST0_RTX (GET_MODE_INNER (mode));
2525 write_complex_part (object, zero, 0);
2526 write_complex_part (object, zero, 1);
2532 if (size == const0_rtx)
2535 align = MEM_ALIGN (object);
2537 if (GET_CODE (size) == CONST_INT
2538 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2539 clear_by_pieces (object, INTVAL (size), align);
2540 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2543 return clear_storage_via_libcall (object, size,
2544 method == BLOCK_OP_TAILCALL);
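/* A minimal usage sketch, assuming OBJ is a BLKmode MEM and LEN its
   length in bytes:

     clear_storage (obj, GEN_INT (len), BLOCK_OP_NORMAL);

   The routine above then picks clear_by_pieces, a setmem pattern, or
   the memset libcall, depending on size, alignment and target
   support.  */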
2549 /* A subroutine of clear_storage. Expand a call to memset.
2550 Return the return value of memset, 0 otherwise. */
2553 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2555 tree call_expr, arg_list, fn, object_tree, size_tree;
2556 enum machine_mode size_mode;
2559 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2560 place those new pseudos into a VAR_DECL and use them later.  */
2562 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2564 size_mode = TYPE_MODE (sizetype);
2565 size = convert_to_mode (size_mode, size, 1);
2566 size = copy_to_mode_reg (size_mode, size);
2568 /* It is incorrect to use the libcall calling conventions to call
2569 memset in this context. This could be a user call to memset and
2570 the user may wish to examine the return value from memset. For
2571 targets where libcalls and normal calls have different conventions
2572 for returning pointers, we could end up generating incorrect code. */
2574 object_tree = make_tree (ptr_type_node, object);
2575 size_tree = make_tree (sizetype, size);
2577 fn = clear_storage_libcall_fn (true);
2578 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2579 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2580 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2582 /* Now we have to build up the CALL_EXPR itself. */
2583 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2584 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2585 call_expr, arg_list, NULL_TREE);
2586 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2588 retval = expand_normal (call_expr);
2593 /* A subroutine of clear_storage_via_libcall. Create the tree node
2594 for the function we use for block clears. The first time FOR_CALL
2595 is true, we call assemble_external. */
2597 static GTY(()) tree block_clear_fn;
2600 init_block_clear_fn (const char *asmspec)
2602 if (!block_clear_fn)
2606 fn = get_identifier ("memset");
2607 args = build_function_type_list (ptr_type_node, ptr_type_node,
2608 integer_type_node, sizetype,
2611 fn = build_decl (FUNCTION_DECL, fn, args);
2612 DECL_EXTERNAL (fn) = 1;
2613 TREE_PUBLIC (fn) = 1;
2614 DECL_ARTIFICIAL (fn) = 1;
2615 TREE_NOTHROW (fn) = 1;
2616 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2617 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2619 block_clear_fn = fn;
2623 set_user_assembler_name (block_clear_fn, asmspec);
2627 clear_storage_libcall_fn (int for_call)
2629 static bool emitted_extern;
2631 if (!block_clear_fn)
2632 init_block_clear_fn (NULL);
2634 if (for_call && !emitted_extern)
2636 emitted_extern = true;
2637 make_decl_rtl (block_clear_fn);
2638 assemble_external (block_clear_fn);
2641 return block_clear_fn;
2644 /* Expand a setmem pattern; return true if successful. */
2647 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2649 /* Try the most limited insn first, because there's no point
2650 including more than one in the machine description unless
2651 the more limited one has some advantage. */
2653 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2654 enum machine_mode mode;
2656 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2657 mode = GET_MODE_WIDER_MODE (mode))
2659 enum insn_code code = setmem_optab[(int) mode];
2660 insn_operand_predicate_fn pred;
2662 if (code != CODE_FOR_nothing
2663 /* We don't need MODE to be narrower than
2664 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2665 the mode mask, as it is returned by the macro, it will
2666 definitely be less than the actual mode mask. */
2667 && ((GET_CODE (size) == CONST_INT
2668 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2669 <= (GET_MODE_MASK (mode) >> 1)))
2670 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2671 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2672 || (*pred) (object, BLKmode))
2673 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2674 || (*pred) (opalign, VOIDmode)))
2677 enum machine_mode char_mode;
2678 rtx last = get_last_insn ();
2681 opsize = convert_to_mode (mode, size, 1);
2682 pred = insn_data[(int) code].operand[1].predicate;
2683 if (pred != 0 && ! (*pred) (opsize, mode))
2684 opsize = copy_to_mode_reg (mode, opsize);
2687 char_mode = insn_data[(int) code].operand[2].mode;
2688 if (char_mode != VOIDmode)
2690 opchar = convert_to_mode (char_mode, opchar, 1);
2691 pred = insn_data[(int) code].operand[2].predicate;
2692 if (pred != 0 && ! (*pred) (opchar, char_mode))
2693 opchar = copy_to_mode_reg (char_mode, opchar);
2696 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2703 delete_insns_since (last);
2711 /* Write to one of the components of the complex value CPLX. Write VAL to
2712 the real part if IMAG_P is false, and the imaginary part if it's true.  */
2715 write_complex_part (rtx cplx, rtx val, bool imag_p)
2717 enum machine_mode cmode;
2718 enum machine_mode imode;
2721 if (GET_CODE (cplx) == CONCAT)
2723 emit_move_insn (XEXP (cplx, imag_p), val);
2727 cmode = GET_MODE (cplx);
2728 imode = GET_MODE_INNER (cmode);
2729 ibitsize = GET_MODE_BITSIZE (imode);
2731 /* For MEMs simplify_gen_subreg may generate an invalid new address
2732 because, e.g., the original address is considered mode-dependent
2733 by the target, which restricts simplify_subreg from invoking
2734 adjust_address_nv. Instead of preparing fallback support for an
2735 invalid address, we call adjust_address_nv directly. */
2738 emit_move_insn (adjust_address_nv (cplx, imode,
2739 imag_p ? GET_MODE_SIZE (imode) : 0),
2744 /* If the sub-object is at least word sized, then we know that subregging
2745 will work. This special case is important, since store_bit_field
2746 wants to operate on integer modes, and there's rarely an OImode to
2747 correspond to TCmode. */
2748 if (ibitsize >= BITS_PER_WORD
2749 /* For hard regs we have exact predicates. Assume we can split
2750 the original object if it spans an even number of hard regs.
2751 This special case is important for SCmode on 64-bit platforms
2752 where the natural size of floating-point regs is 32-bit. */
2754 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2755 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2757 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2758 imag_p ? GET_MODE_SIZE (imode) : 0);
2761 emit_move_insn (part, val);
2765 /* simplify_gen_subreg may fail for sub-word MEMs. */
2766 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2769 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2772 /* Extract one of the components of the complex value CPLX. Extract the
2773 real part if IMAG_P is false, and the imaginary part if it's true. */
2776 read_complex_part (rtx cplx, bool imag_p)
2778 enum machine_mode cmode, imode;
2781 if (GET_CODE (cplx) == CONCAT)
2782 return XEXP (cplx, imag_p);
2784 cmode = GET_MODE (cplx);
2785 imode = GET_MODE_INNER (cmode);
2786 ibitsize = GET_MODE_BITSIZE (imode);
2788 /* Special case reads from complex constants that got spilled to memory. */
2789 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2791 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2792 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2794 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2795 if (CONSTANT_CLASS_P (part))
2796 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2800 /* For MEMs simplify_gen_subreg may generate an invalid new address
2801 because, e.g., the original address is considered mode-dependent
2802 by the target, which restricts simplify_subreg from invoking
2803 adjust_address_nv. Instead of preparing fallback support for an
2804 invalid address, we call adjust_address_nv directly. */
2806 return adjust_address_nv (cplx, imode,
2807 imag_p ? GET_MODE_SIZE (imode) : 0);
2809 /* If the sub-object is at least word sized, then we know that subregging
2810 will work. This special case is important, since extract_bit_field
2811 wants to operate on integer modes, and there's rarely an OImode to
2812 correspond to TCmode. */
2813 if (ibitsize >= BITS_PER_WORD
2814 /* For hard regs we have exact predicates. Assume we can split
2815 the original object if it spans an even number of hard regs.
2816 This special case is important for SCmode on 64-bit platforms
2817 where the natural size of floating-point regs is 32-bit. */
2819 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2820 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2822 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2823 imag_p ? GET_MODE_SIZE (imode) : 0);
2827 /* simplify_gen_subreg may fail for sub-word MEMs. */
2828 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2831 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2832 true, NULL_RTX, imode, imode);
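/* Together the two routines above let a complex value be moved
   piecewise, e.g.

     write_complex_part (x, read_complex_part (y, false), false);
     write_complex_part (x, read_complex_part (y, true), true);

   which is exactly the by-parts fallback used by emit_move_complex
   below.  */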
2835 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2836 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2837 represented in NEW_MODE. If FORCE is true, this will never happen, as
2838 we'll force-create a SUBREG if needed. */
2841 emit_move_change_mode (enum machine_mode new_mode,
2842 enum machine_mode old_mode, rtx x, bool force)
2848 /* We don't have to worry about changing the address since the
2849 size in bytes is supposed to be the same. */
2850 if (reload_in_progress)
2852 /* Copy the MEM to change the mode and move any
2853 substitutions from the old MEM to the new one. */
2854 ret = adjust_address_nv (x, new_mode, 0);
2855 copy_replacements (x, ret);
2858 ret = adjust_address (x, new_mode, 0);
2862 /* Note that we do want simplify_subreg's behavior of validating
2863 that the new mode is ok for a hard register. If we were to use
2864 simplify_gen_subreg, we would create the subreg, but would
2865 probably run into the target not being able to implement it. */
2866 /* Except, of course, when FORCE is true, when this is exactly what
2867 we want. Which is needed for CCmodes on some targets. */
2869 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2871 ret = simplify_subreg (new_mode, x, old_mode, 0);
2877 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2878 an integer mode of the same size as MODE. Returns the instruction
2879 emitted, or NULL if such a move could not be generated. */
2882 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2884 enum machine_mode imode;
2885 enum insn_code code;
2887 /* There must exist a mode of the exact size we require. */
2888 imode = int_mode_for_mode (mode);
2889 if (imode == BLKmode)
2892 /* The target must support moves in this mode. */
2893 code = mov_optab->handlers[imode].insn_code;
2894 if (code == CODE_FOR_nothing)
2897 x = emit_move_change_mode (imode, mode, x, force);
2900 y = emit_move_change_mode (imode, mode, y, force);
2903 return emit_insn (GEN_FCN (code) (x, y));
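/* A sketch of the case this handles, assuming a target where SFmode is
   32 bits wide but has no movsf pattern: int_mode_for_mode (SFmode)
   yields SImode, both operands are recast as SImode by
   emit_move_change_mode, and the move is emitted through the movsi
   pattern instead.  */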
2906 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2907 Return an equivalent MEM that does not use an auto-increment. */
2910 emit_move_resolve_push (enum machine_mode mode, rtx x)
2912 enum rtx_code code = GET_CODE (XEXP (x, 0));
2913 HOST_WIDE_INT adjust;
2916 adjust = GET_MODE_SIZE (mode);
2917 #ifdef PUSH_ROUNDING
2918 adjust = PUSH_ROUNDING (adjust);
2920 if (code == PRE_DEC || code == POST_DEC)
2922 else if (code == PRE_MODIFY || code == POST_MODIFY)
2924 rtx expr = XEXP (XEXP (x, 0), 1);
2927 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2928 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2929 val = INTVAL (XEXP (expr, 1));
2930 if (GET_CODE (expr) == MINUS)
2932 gcc_assert (adjust == val || adjust == -val);
2936 /* Do not use anti_adjust_stack, since we don't want to update
2937 stack_pointer_delta. */
2938 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2939 GEN_INT (adjust), stack_pointer_rtx,
2940 0, OPTAB_LIB_WIDEN);
2941 if (temp != stack_pointer_rtx)
2942 emit_move_insn (stack_pointer_rtx, temp);
2949 temp = stack_pointer_rtx;
2954 temp = plus_constant (stack_pointer_rtx, -adjust);
2960 return replace_equiv_address (x, temp);
2963 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2964 X is known to satisfy push_operand, and MODE is known to be complex.
2965 Returns the last instruction emitted. */
2968 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2970 enum machine_mode submode = GET_MODE_INNER (mode);
2973 #ifdef PUSH_ROUNDING
2974 unsigned int submodesize = GET_MODE_SIZE (submode);
2976 /* In case we output to the stack, but the size is smaller than the
2977 machine can push exactly, we need to use move instructions. */
2978 if (PUSH_ROUNDING (submodesize) != submodesize)
2980 x = emit_move_resolve_push (mode, x);
2981 return emit_move_insn (x, y);
2985 /* Note that the real part always precedes the imag part in memory
2986 regardless of machine's endianness. */
2987 switch (GET_CODE (XEXP (x, 0)))
3001 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3002 read_complex_part (y, imag_first));
3003 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3004 read_complex_part (y, !imag_first));
3007 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3008 MODE is known to be complex. Returns the last instruction emitted. */
3011 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3015 /* Need to take special care for pushes, to maintain proper ordering
3016 of the data, and possibly extra padding. */
3017 if (push_operand (x, mode))
3018 return emit_move_complex_push (mode, x, y);
3020 /* See if we can coerce the target into moving both values at once. */
3022 /* Move floating point as parts. */
3023 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3024 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3026 /* Not possible if the values are inherently not adjacent. */
3027 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3029 /* Is possible if both are registers (or subregs of registers). */
3030 else if (register_operand (x, mode) && register_operand (y, mode))
3032 /* If one of the operands is a memory, and alignment constraints
3033 are friendly enough, we may be able to do combined memory operations.
3034 We do not attempt this if Y is a constant because that combination is
3035 usually better with the by-parts thing below. */
3036 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3037 && (!STRICT_ALIGNMENT
3038 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3047 /* For memory to memory moves, optimal behavior can be had with the
3048 existing block move logic. */
3049 if (MEM_P (x) && MEM_P (y))
3051 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3052 BLOCK_OP_NO_LIBCALL);
3053 return get_last_insn ();
3056 ret = emit_move_via_integer (mode, x, y, true);
3061 /* Show the output dies here. This is necessary for SUBREGs
3062 of pseudos since we cannot track their lifetimes correctly;
3063 hard regs shouldn't appear here except as return values. */
3064 if (!reload_completed && !reload_in_progress
3065 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3066 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3068 write_complex_part (x, read_complex_part (y, false), false);
3069 write_complex_part (x, read_complex_part (y, true), true);
3070 return get_last_insn ();
3073 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3074 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3077 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3081 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3084 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3085 if (code != CODE_FOR_nothing)
3087 x = emit_move_change_mode (CCmode, mode, x, true);
3088 y = emit_move_change_mode (CCmode, mode, y, true);
3089 return emit_insn (GEN_FCN (code) (x, y));
3093 /* Otherwise, find the MODE_INT mode of the same width. */
3094 ret = emit_move_via_integer (mode, x, y, false);
3095 gcc_assert (ret != NULL);
3099 /* Return true if word I of OP lies entirely in the
3100 undefined bits of a paradoxical subreg. */
3103 undefined_operand_subword_p (rtx op, int i)
3105 enum machine_mode innermode, innermostmode;
3107 if (GET_CODE (op) != SUBREG)
3109 innermode = GET_MODE (op);
3110 innermostmode = GET_MODE (SUBREG_REG (op));
3111 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3112 /* The SUBREG_BYTE represents offset, as if the value were stored in
3113 memory, except for a paradoxical subreg where we define
3114 SUBREG_BYTE to be 0; undo this exception as in
3115 simplify_subreg.  */
3116 if (SUBREG_BYTE (op) == 0
3117 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3119 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3120 if (WORDS_BIG_ENDIAN)
3121 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3122 if (BYTES_BIG_ENDIAN)
3123 offset += difference % UNITS_PER_WORD;
3125 if (offset >= GET_MODE_SIZE (innermostmode)
3126 || offset <= -GET_MODE_SIZE (word_mode))
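/* A sketch of the test above, assuming a little-endian 32-bit target:
   for (subreg:DI (reg:SI R) 0), word 0 overlaps R, but word 1 lies
   entirely in the undefined bits of the paradoxical subreg, so this
   returns true for I == 1 and emit_move_multi_word below can skip
   that word's move altogether.  */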
3131 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3132 MODE is any multi-word or full-word mode that lacks a move_insn
3133 pattern. Note that you will get better code if you define such
3134 patterns, even if they must turn into multiple assembler instructions. */
3137 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3144 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3146 /* If X is a push on the stack, do the push now and replace
3147 X with a reference to the stack pointer. */
3148 if (push_operand (x, mode))
3149 x = emit_move_resolve_push (mode, x);
3151 /* If we are in reload, see if either operand is a MEM whose address
3152 is scheduled for replacement. */
3153 if (reload_in_progress && MEM_P (x)
3154 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3155 x = replace_equiv_address_nv (x, inner);
3156 if (reload_in_progress && MEM_P (y)
3157 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3158 y = replace_equiv_address_nv (y, inner);
3162 need_clobber = false;
3164 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3167 rtx xpart = operand_subword (x, i, 1, mode);
3170 /* Do not generate code for a move if it would come entirely
3171 from the undefined bits of a paradoxical subreg. */
3172 if (undefined_operand_subword_p (y, i))
3175 ypart = operand_subword (y, i, 1, mode);
3177 /* If we can't get a part of Y, put Y into memory if it is a
3178 constant. Otherwise, force it into a register. Then we must
3179 be able to get a part of Y. */
3180 if (ypart == 0 && CONSTANT_P (y))
3182 y = use_anchored_address (force_const_mem (mode, y));
3183 ypart = operand_subword (y, i, 1, mode);
3185 else if (ypart == 0)
3186 ypart = operand_subword_force (y, i, mode);
3188 gcc_assert (xpart && ypart);
3190 need_clobber |= (GET_CODE (xpart) == SUBREG);
3192 last_insn = emit_move_insn (xpart, ypart);
3198 /* Show the output dies here. This is necessary for SUBREGs
3199 of pseudos since we cannot track their lifetimes correctly;
3200 hard regs shouldn't appear here except as return values.
3201 We never want to emit such a clobber after reload. */
3203 && ! (reload_in_progress || reload_completed)
3204 && need_clobber != 0)
3205 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
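/* A sketch of the word-by-word fallback above, assuming a 32-bit
   target with no movdi pattern: a DImode move decomposes into

     emit_move_insn (operand_subword (x, 0, 1, DImode),
                     operand_subword (y, 0, 1, DImode));
     emit_move_insn (operand_subword (x, 1, 1, DImode),
                     operand_subword (y, 1, 1, DImode));

   i.e. one word_mode move per 32-bit half.  */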
3212 /* Low level part of emit_move_insn.
3213 Called just like emit_move_insn, but assumes X and Y
3214 are basically valid. */
3217 emit_move_insn_1 (rtx x, rtx y)
3219 enum machine_mode mode = GET_MODE (x);
3220 enum insn_code code;
3222 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3224 code = mov_optab->handlers[mode].insn_code;
3225 if (code != CODE_FOR_nothing)
3226 return emit_insn (GEN_FCN (code) (x, y));
3228 /* Expand complex moves by moving real part and imag part. */
3229 if (COMPLEX_MODE_P (mode))
3230 return emit_move_complex (mode, x, y);
3232 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3234 rtx result = emit_move_via_integer (mode, x, y, true);
3236 /* If we can't find an integer mode, use multi words. */
3240 return emit_move_multi_word (mode, x, y);
3243 if (GET_MODE_CLASS (mode) == MODE_CC)
3244 return emit_move_ccmode (mode, x, y);
3246 /* Try using a move pattern for the corresponding integer mode. This is
3247 only safe when simplify_subreg can convert MODE constants into integer
3248 constants. At present, it can only do this reliably if the value
3249 fits within a HOST_WIDE_INT. */
3250 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3252 rtx ret = emit_move_via_integer (mode, x, y, false);
3257 return emit_move_multi_word (mode, x, y);
3260 /* Generate code to copy Y into X.
3261 Both Y and X must have the same mode, except that
3262 Y can be a constant with VOIDmode.
3263 This mode cannot be BLKmode; use emit_block_move for that.
3265 Return the last instruction emitted. */
3268 emit_move_insn (rtx x, rtx y)
3270 enum machine_mode mode = GET_MODE (x);
3271 rtx y_cst = NULL_RTX;
3274 gcc_assert (mode != BLKmode
3275 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3280 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3281 && (last_insn = compress_float_constant (x, y)))
3286 if (!LEGITIMATE_CONSTANT_P (y))
3288 y = force_const_mem (mode, y);
3290 /* If the target's cannot_force_const_mem prevented the spill,
3291 assume that the target's move expanders will also take care
3292 of the non-legitimate constant. */
3296 y = use_anchored_address (y);
3300 /* If X or Y are memory references, verify that their addresses are valid
3301 for the machine.  */
3303 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3304 && ! push_operand (x, GET_MODE (x)))
3306 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3307 x = validize_mem (x);
3310 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3312 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3313 y = validize_mem (y);
3315 gcc_assert (mode != BLKmode);
3317 last_insn = emit_move_insn_1 (x, y);
3319 if (y_cst && REG_P (x)
3320 && (set = single_set (last_insn)) != NULL_RTX
3321 && SET_DEST (set) == x
3322 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3323 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3328 /* If Y is representable exactly in a narrower mode, and the target can
3329 perform the extension directly from constant or memory, then emit the
3330 move as an extension. */
3333 compress_float_constant (rtx x, rtx y)
3335 enum machine_mode dstmode = GET_MODE (x);
3336 enum machine_mode orig_srcmode = GET_MODE (y);
3337 enum machine_mode srcmode;
3339 int oldcost, newcost;
3341 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3343 if (LEGITIMATE_CONSTANT_P (y))
3344 oldcost = rtx_cost (y, SET);
3346 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3348 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3349 srcmode != orig_srcmode;
3350 srcmode = GET_MODE_WIDER_MODE (srcmode))
3353 rtx trunc_y, last_insn;
3355 /* Skip if the target can't extend this way. */
3356 ic = can_extend_p (dstmode, srcmode, 0);
3357 if (ic == CODE_FOR_nothing)
3360 /* Skip if the narrowed value isn't exact. */
3361 if (! exact_real_truncate (srcmode, &r))
3364 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3366 if (LEGITIMATE_CONSTANT_P (trunc_y))
3368 /* Skip if the target needs extra instructions to perform
3369 the extension.  */
3370 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3372 /* This is valid, but may not be cheaper than the original. */
3373 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3374 if (oldcost < newcost)
3377 else if (float_extend_from_mem[dstmode][srcmode])
3379 trunc_y = force_const_mem (srcmode, trunc_y);
3380 /* This is valid, but may not be cheaper than the original. */
3381 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3382 if (oldcost < newcost)
3384 trunc_y = validize_mem (trunc_y);
3389 /* For CSE's benefit, force the compressed constant pool entry
3390 into a new pseudo. This constant may be used in different modes,
3391 and if not, combine will put things back together for us. */
3392 trunc_y = force_reg (srcmode, trunc_y);
3393 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3394 last_insn = get_last_insn ();
3397 set_unique_reg_note (last_insn, REG_EQUAL, y);
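/* A sketch of the transformation above, assuming a target that
   provides extendsfdf2: storing the DFmode constant 1.0 can be done by
   loading the SFmode constant 1.0f and extending it, because the
   truncation of 1.0 to SFmode is exact; the rtx_cost checks above
   ensure this is only done when it is actually cheaper.  */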
3405 /* Pushing data onto the stack. */
3407 /* Push a block of length SIZE (perhaps variable)
3408 and return an rtx to address the beginning of the block.
3409 The value may be virtual_outgoing_args_rtx.
3411 EXTRA is the number of bytes of padding to push in addition to SIZE.
3412 BELOW nonzero means this padding comes at low addresses;
3413 otherwise, the padding comes at high addresses. */
3416 push_block (rtx size, int extra, int below)
3420 size = convert_modes (Pmode, ptr_mode, size, 1);
3421 if (CONSTANT_P (size))
3422 anti_adjust_stack (plus_constant (size, extra));
3423 else if (REG_P (size) && extra == 0)
3424 anti_adjust_stack (size);
3427 temp = copy_to_mode_reg (Pmode, size);
3429 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3430 temp, 0, OPTAB_LIB_WIDEN);
3431 anti_adjust_stack (temp);
3434 #ifndef STACK_GROWS_DOWNWARD
3440 temp = virtual_outgoing_args_rtx;
3441 if (extra != 0 && below)
3442 temp = plus_constant (temp, extra);
3446 if (GET_CODE (size) == CONST_INT)
3447 temp = plus_constant (virtual_outgoing_args_rtx,
3448 -INTVAL (size) - (below ? 0 : extra));
3449 else if (extra != 0 && !below)
3450 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3451 negate_rtx (Pmode, plus_constant (size, extra)));
3453 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3454 negate_rtx (Pmode, size));
3457 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3460 #ifdef PUSH_ROUNDING
3462 /* Emit single push insn. */
3465 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3468 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3470 enum insn_code icode;
3471 insn_operand_predicate_fn pred;
3473 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3474 /* If there is a push pattern, use it.  Otherwise try the old way of
3475 throwing a MEM representing the push operation to the move expander.  */
3476 icode = push_optab->handlers[(int) mode].insn_code;
3477 if (icode != CODE_FOR_nothing)
3479 if (((pred = insn_data[(int) icode].operand[0].predicate)
3480 && !((*pred) (x, mode))))
3481 x = force_reg (mode, x);
3482 emit_insn (GEN_FCN (icode) (x));
3485 if (GET_MODE_SIZE (mode) == rounded_size)
3486 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3487 /* If we are to pad downward, adjust the stack pointer first and
3488 then store X into the stack location using an offset. This is
3489 because emit_move_insn does not know how to pad; it does not have
3490 access to type.  */
3491 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3493 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3494 HOST_WIDE_INT offset;
3496 emit_move_insn (stack_pointer_rtx,
3497 expand_binop (Pmode,
3498 #ifdef STACK_GROWS_DOWNWARD
3504 GEN_INT (rounded_size),
3505 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3507 offset = (HOST_WIDE_INT) padding_size;
3508 #ifdef STACK_GROWS_DOWNWARD
3509 if (STACK_PUSH_CODE == POST_DEC)
3510 /* We have already decremented the stack pointer, so get the
3511 previous value.  */
3512 offset += (HOST_WIDE_INT) rounded_size;
3514 if (STACK_PUSH_CODE == POST_INC)
3515 /* We have already incremented the stack pointer, so get the
3516 previous value.  */
3517 offset -= (HOST_WIDE_INT) rounded_size;
3519 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3523 #ifdef STACK_GROWS_DOWNWARD
3524 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3525 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3526 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3528 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3529 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3530 GEN_INT (rounded_size));
3532 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3535 dest = gen_rtx_MEM (mode, dest_addr);
3539 set_mem_attributes (dest, type, 1);
3541 if (flag_optimize_sibling_calls)
3542 /* Function incoming arguments may overlap with sibling call
3543 outgoing arguments and we cannot allow reordering of reads
3544 from function arguments with stores to outgoing arguments
3545 of sibling calls. */
3546 set_mem_alias_set (dest, 0);
3548 emit_move_insn (dest, x);
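/* A worked example of the downward-padding path above, assuming
   PUSH_ROUNDING rounds the 2 bytes of an HImode push up to 4 and the
   stack grows downward: padding_size == 4 - 2 == 2, the stack pointer
   is first moved by the full rounded_size of 4, and the value is then
   stored at offset 2 from the new stack pointer (with offset further
   adjusted by rounded_size when STACK_PUSH_CODE is POST_DEC).  */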
3552 /* Generate code to push X onto the stack, assuming it has mode MODE and
3553 type TYPE.
3554 MODE is redundant except when X is a CONST_INT (since they don't
3555 carry mode info).
3556 SIZE is an rtx for the size of data to be copied (in bytes),
3557 needed only if X is BLKmode.
3559 ALIGN (in bits) is maximum alignment we can assume.
3561 If PARTIAL and REG are both nonzero, then copy that many of the first
3562 bytes of X into registers starting with REG, and push the rest of X.
3563 The amount of space pushed is decreased by PARTIAL bytes.
3564 REG must be a hard register in this case.
3565 If REG is zero but PARTIAL is not, take all other actions for an
3566 argument partially in registers, but do not actually load any
3567 registers.
3569 EXTRA is the amount in bytes of extra space to leave next to this arg.
3570 This is ignored if an argument block has already been allocated.
3572 On a machine that lacks real push insns, ARGS_ADDR is the address of
3573 the bottom of the argument block for this call. We use indexing off there
3574 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3575 argument block has not been preallocated.
3577 ARGS_SO_FAR is the size of args previously pushed for this call.
3579 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3580 for arguments passed in registers. If nonzero, it will be the number
3581 of bytes required. */
3584 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3585 unsigned int align, int partial, rtx reg, int extra,
3586 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3590 enum direction stack_direction
3591 #ifdef STACK_GROWS_DOWNWARD
3597 /* Decide where to pad the argument: `downward' for below,
3598 `upward' for above, or `none' for don't pad it.
3599 Default is below for small data on big-endian machines; else above. */
3600 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3602 /* Invert direction if stack is post-decrement.
3604 if (STACK_PUSH_CODE == POST_DEC)
3605 if (where_pad != none)
3606 where_pad = (where_pad == downward ? upward : downward);
3610 if (mode == BLKmode)
3612 /* Copy a block into the stack, entirely or partially. */
3619 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3620 used = partial - offset;
3624 /* USED is now the # of bytes we need not copy to the stack
3625 because registers will take care of them. */
3628 xinner = adjust_address (xinner, BLKmode, used);
3630 /* If the partial register-part of the arg counts in its stack size,
3631 skip the part of stack space corresponding to the registers.
3632 Otherwise, start copying to the beginning of the stack space,
3633 by setting SKIP to 0. */
3634 skip = (reg_parm_stack_space == 0) ? 0 : used;
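/* A worked example of the computation above (a sketch, assuming
   PARM_BOUNDARY == 32 and PARTIAL == 6): offset == 6 % 4 == 2 and
   used == 4, so the first four argument bytes are left entirely to
   the registers, while the two register-passed bytes that do not fill
   out a parm boundary are copied to the stack anyway so that the
   stored portion starts on a parm boundary.  */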
3636 #ifdef PUSH_ROUNDING
3637 /* Do it with several push insns if that doesn't take lots of insns
3638 and if there is no difficulty with push insns that skip bytes
3639 on the stack for alignment purposes. */
3642 && GET_CODE (size) == CONST_INT
3644 && MEM_ALIGN (xinner) >= align
3645 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3646 /* Here we avoid the case of a structure whose weak alignment
3647 forces many pushes of a small amount of data,
3648 and such small pushes do rounding that causes trouble. */
3649 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3650 || align >= BIGGEST_ALIGNMENT
3651 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3652 == (align / BITS_PER_UNIT)))
3653 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3655 /* Push padding now if padding above and stack grows down,
3656 or if padding below and stack grows up.
3657 But if space already allocated, this has already been done. */
3658 if (extra && args_addr == 0
3659 && where_pad != none && where_pad != stack_direction)
3660 anti_adjust_stack (GEN_INT (extra));
3662 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3665 #endif /* PUSH_ROUNDING */
3669 /* Otherwise make space on the stack and copy the data
3670 to the address of that space. */
3672 /* Deduct words put into registers from the size we must copy. */
3675 if (GET_CODE (size) == CONST_INT)
3676 size = GEN_INT (INTVAL (size) - used);
3678 size = expand_binop (GET_MODE (size), sub_optab, size,
3679 GEN_INT (used), NULL_RTX, 0,
3683 /* Get the address of the stack space.
3684 In this case, we do not deal with EXTRA separately.
3685 A single stack adjust will do. */
3688 temp = push_block (size, extra, where_pad == downward);
3691 else if (GET_CODE (args_so_far) == CONST_INT)
3692 temp = memory_address (BLKmode,
3693 plus_constant (args_addr,
3694 skip + INTVAL (args_so_far)));
3696 temp = memory_address (BLKmode,
3697 plus_constant (gen_rtx_PLUS (Pmode,
3702 if (!ACCUMULATE_OUTGOING_ARGS)
3704 /* If the source is referenced relative to the stack pointer,
3705 copy it to another register to stabilize it. We do not need
3706 to do this if we know that we won't be changing sp. */
3708 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3709 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3710 temp = copy_to_reg (temp);
3713 target = gen_rtx_MEM (BLKmode, temp);
3715 /* We do *not* set_mem_attributes here, because incoming arguments
3716 may overlap with sibling call outgoing arguments and we cannot
3717 allow reordering of reads from function arguments with stores
3718 to outgoing arguments of sibling calls. We do, however, want
3719 to record the alignment of the stack slot. */
3720 /* ALIGN may well be better aligned than TYPE, e.g. due to
3721 PARM_BOUNDARY. Assume the caller isn't lying. */
3722 set_mem_align (target, align);
3724 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3727 else if (partial > 0)
3729 /* Scalar partly in registers. */
3731 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3734 /* # bytes of start of argument
3735 that we must make space for but need not store. */
3736 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3737 int args_offset = INTVAL (args_so_far);
3740 /* Push padding now if padding above and stack grows down,
3741 or if padding below and stack grows up.
3742 But if space already allocated, this has already been done. */
3743 if (extra && args_addr == 0
3744 && where_pad != none && where_pad != stack_direction)
3745 anti_adjust_stack (GEN_INT (extra));
3747 /* If we make space by pushing it, we might as well push
3748 the real data. Otherwise, we can leave OFFSET nonzero
3749 and leave the space uninitialized. */
3753 /* Now NOT_STACK gets the number of words that we don't need to
3754 allocate on the stack. Convert OFFSET to words too. */
3755 not_stack = (partial - offset) / UNITS_PER_WORD;
3756 offset /= UNITS_PER_WORD;
3758 /* If the partial register-part of the arg counts in its stack size,
3759 skip the part of stack space corresponding to the registers.
3760 Otherwise, start copying to the beginning of the stack space,
3761 by setting SKIP to 0. */
3762 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3764 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3765 x = validize_mem (force_const_mem (mode, x));
3767 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3768 SUBREGs of such registers are not allowed. */
3769 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3770 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3771 x = copy_to_reg (x);
3773 /* Loop over all the words allocated on the stack for this arg. */
3774 /* We can do it by words, because any scalar bigger than a word
3775 has a size a multiple of a word. */
3776 #ifndef PUSH_ARGS_REVERSED
3777 for (i = not_stack; i < size; i++)
3779 for (i = size - 1; i >= not_stack; i--)
3781 if (i >= not_stack + offset)
3782 emit_push_insn (operand_subword_force (x, i, mode),
3783 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3785 GEN_INT (args_offset + ((i - not_stack + skip)
3787 reg_parm_stack_space, alignment_pad);
3794 /* Push padding now if padding above and stack grows down,
3795 or if padding below and stack grows up.
3796 But if space already allocated, this has already been done. */
3797 if (extra && args_addr == 0
3798 && where_pad != none && where_pad != stack_direction)
3799 anti_adjust_stack (GEN_INT (extra));
3801 #ifdef PUSH_ROUNDING
3802 if (args_addr == 0 && PUSH_ARGS)
3803 emit_single_push_insn (mode, x, type);
3807 if (GET_CODE (args_so_far) == CONST_INT)
3809 = memory_address (mode,
3810 plus_constant (args_addr,
3811 INTVAL (args_so_far)));
3813 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3815 dest = gen_rtx_MEM (mode, addr);
3817 /* We do *not* set_mem_attributes here, because incoming arguments
3818 may overlap with sibling call outgoing arguments and we cannot
3819 allow reordering of reads from function arguments with stores
3820 to outgoing arguments of sibling calls. We do, however, want
3821 to record the alignment of the stack slot. */
3822 /* ALIGN may well be better aligned than TYPE, e.g. due to
3823 PARM_BOUNDARY. Assume the caller isn't lying. */
3824 set_mem_align (dest, align);
3826 emit_move_insn (dest, x);
3830 /* If part should go in registers, copy that part
3831 into the appropriate registers. Do this now, at the end,
3832 since mem-to-mem copies above may do function calls. */
3833 if (partial > 0 && reg != 0)
3835 /* Handle calls that pass values in multiple non-contiguous locations.
3836 The Irix 6 ABI has examples of this. */
3837 if (GET_CODE (reg) == PARALLEL)
3838 emit_group_load (reg, x, type, -1);
3841 gcc_assert (partial % UNITS_PER_WORD == 0);
3842 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3846 if (extra && args_addr == 0 && where_pad == stack_direction)
3847 anti_adjust_stack (GEN_INT (extra));
3849 if (alignment_pad && args_addr == 0)
3850 anti_adjust_stack (alignment_pad);
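/* A minimal usage sketch, assuming a push-insn target and an argument X
   already in word_mode with no partial-register portion (all rtx
   arguments here are illustrative):

     emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
                     BITS_PER_WORD, 0, NULL_RTX, 0,
                     NULL_RTX, const0_rtx, 0, NULL_RTX);

   With ARGS_ADDR == 0 and PUSH_ARGS nonzero this reduces to a single
   emit_single_push_insn call.  */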
3853 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3854 operations.  */
3857 get_subtarget (rtx x)
3861 /* Only registers can be subtargets. */
3863 /* Don't use hard regs to avoid extending their life. */
3864 || REGNO (x) < FIRST_PSEUDO_REGISTER
3868 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3869 FIELD is a bitfield. Returns true if the optimization was successful,
3870 and there's nothing else to do. */
3873 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3874 unsigned HOST_WIDE_INT bitpos,
3875 enum machine_mode mode1, rtx str_rtx,
3878 enum machine_mode str_mode = GET_MODE (str_rtx);
3879 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3884 if (mode1 != VOIDmode
3885 || bitsize >= BITS_PER_WORD
3886 || str_bitsize > BITS_PER_WORD
3887 || TREE_SIDE_EFFECTS (to)
3888 || TREE_THIS_VOLATILE (to))
3892 if (!BINARY_CLASS_P (src)
3893 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3896 op0 = TREE_OPERAND (src, 0);
3897 op1 = TREE_OPERAND (src, 1);
3900 if (!operand_equal_p (to, op0, 0))
3903 if (MEM_P (str_rtx))
3905 unsigned HOST_WIDE_INT offset1;
3907 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3908 str_mode = word_mode;
3909 str_mode = get_best_mode (bitsize, bitpos,
3910 MEM_ALIGN (str_rtx), str_mode, 0);
3911 if (str_mode == VOIDmode)
3913 str_bitsize = GET_MODE_BITSIZE (str_mode);
3916 bitpos %= str_bitsize;
3917 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3918 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3920 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3923 /* If the bit field covers the whole REG/MEM, store_field
3924 will likely generate better code. */
3925 if (bitsize >= str_bitsize)
3928 /* We can't handle fields split across multiple entities. */
3929 if (bitpos + bitsize > str_bitsize)
3932 if (BYTES_BIG_ENDIAN)
3933 bitpos = str_bitsize - bitpos - bitsize;
3935 switch (TREE_CODE (src))
3939 /* For now, just optimize the case of the topmost bitfield
3940 where we don't need to do any masking and also
3941 1 bit bitfields where xor can be used.
3942 We might win by one instruction for the other bitfields
3943 too if insv/extv instructions aren't used, so that
3944 can be added later. */
3945 if (bitpos + bitsize != str_bitsize
3946 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3949 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3950 value = convert_modes (str_mode,
3951 TYPE_MODE (TREE_TYPE (op1)), value,
3952 TYPE_UNSIGNED (TREE_TYPE (op1)));
3954 /* We may be accessing data outside the field, which means
3955 we can alias adjacent data. */
3956 if (MEM_P (str_rtx))
3958 str_rtx = shallow_copy_rtx (str_rtx);
3959 set_mem_alias_set (str_rtx, 0);
3960 set_mem_expr (str_rtx, 0);
3963 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3964 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3966 value = expand_and (str_mode, value, const1_rtx, NULL);
3969 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3970 build_int_cst (NULL_TREE, bitpos),
3972 result = expand_binop (str_mode, binop, str_rtx,
3973 value, str_rtx, 1, OPTAB_WIDEN);
3974 if (result != str_rtx)
3975 emit_move_insn (str_rtx, result);
3980 if (TREE_CODE (op1) != INTEGER_CST)
3982 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3983 value = convert_modes (GET_MODE (str_rtx),
3984 TYPE_MODE (TREE_TYPE (op1)), value,
3985 TYPE_UNSIGNED (TREE_TYPE (op1)));
3987 /* We may be accessing data outside the field, which means
3988 we can alias adjacent data. */
3989 if (MEM_P (str_rtx))
3991 str_rtx = shallow_copy_rtx (str_rtx);
3992 set_mem_alias_set (str_rtx, 0);
3993 set_mem_expr (str_rtx, 0);
3996 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3997 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3999 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4001 value = expand_and (GET_MODE (str_rtx), value, mask,
4004 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4005 build_int_cst (NULL_TREE, bitpos),
4007 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4008 value, str_rtx, 1, OPTAB_WIDEN);
4009 if (result != str_rtx)
4010 emit_move_insn (str_rtx, result);
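/* A sketch of the source-level patterns the routine above targets
   (illustrative, not from this file):

     struct S { unsigned f : 1; } s;
     s.f ^= 1;   becomes a single xor on the containing word
     s.f |= 1;   likewise a single ior

   instead of the usual extract, modify and re-insert sequence.  */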
4021 /* Expand an assignment that stores the value of FROM into TO. */
4024 expand_assignment (tree to, tree from)
4029 /* Don't crash if the lhs of the assignment was erroneous. */
4030 if (TREE_CODE (to) == ERROR_MARK)
4032 result = expand_normal (from);
4036 /* Optimize away no-op moves without side-effects. */
4037 if (operand_equal_p (to, from, 0))
4040 /* Assignment of a structure component needs special treatment
4041 if the structure component's rtx is not simply a MEM.
4042 Assignment of an array element at a constant index, and assignment of
4043 an array element in an unaligned packed structure field, has the same
4044 problem.  */
4045 if (handled_component_p (to)
4046 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4048 enum machine_mode mode1;
4049 HOST_WIDE_INT bitsize, bitpos;
4056 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4057 &unsignedp, &volatilep, true);
4059 /* If we are going to use store_bit_field and extract_bit_field,
4060 make sure to_rtx will be safe for multiple use. */
4062 to_rtx = expand_normal (tem);
4068 if (!MEM_P (to_rtx))
4070 /* We can get constant negative offsets into arrays with broken
4071 user code. Translate this to a trap instead of ICEing. */
4072 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4073 expand_builtin_trap ();
4074 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4077 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4078 #ifdef POINTERS_EXTEND_UNSIGNED
4079 if (GET_MODE (offset_rtx) != Pmode)
4080 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4082 if (GET_MODE (offset_rtx) != ptr_mode)
4083 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4086 /* A constant address in TO_RTX can have VOIDmode; we must not try
4087 to call force_reg for that case, so avoid it.  */
4089 && GET_MODE (to_rtx) == BLKmode
4090 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4092 && (bitpos % bitsize) == 0
4093 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4094 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4096 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4100 to_rtx = offset_address (to_rtx, offset_rtx,
4101 highest_pow2_factor_for_target (to,
4105 /* Handle expand_expr of a complex value returning a CONCAT. */
4106 if (GET_CODE (to_rtx) == CONCAT)
4108 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4110 gcc_assert (bitpos == 0);
4111 result = store_expr (from, to_rtx, false);
4115 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4116 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4123 /* If the field is at offset zero, we could have been given the
4124 DECL_RTX of the parent struct. Don't munge it. */
4125 to_rtx = shallow_copy_rtx (to_rtx);
4127 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4129 /* Deal with volatile and readonly fields. The former is only
4130 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4132 MEM_VOLATILE_P (to_rtx) = 1;
4133 if (component_uses_parent_alias_set (to))
4134 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4137 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4141 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4142 TREE_TYPE (tem), get_alias_set (to));
4146 preserve_temp_slots (result);
4152 /* If the rhs is a function call and its value is not an aggregate,
4153 call the function before we start to compute the lhs.
4154 This is needed for correct code for cases such as
4155 val = setjmp (buf) on machines where reference to val
4156 requires loading up part of an address in a separate insn.
4158 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4159 since it might be a promoted variable where the zero- or sign- extension
4160 needs to be done. Handling this in the normal way is safe because no
4161 computation is done before the call. */
4162 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4163 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4164 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4165 && REG_P (DECL_RTL (to))))
4170 value = expand_normal (from);
4172 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4174 /* Handle calls that return values in multiple non-contiguous locations.
4175 The Irix 6 ABI has examples of this. */
4176 if (GET_CODE (to_rtx) == PARALLEL)
4177 emit_group_load (to_rtx, value, TREE_TYPE (from),
4178 int_size_in_bytes (TREE_TYPE (from)));
4179 else if (GET_MODE (to_rtx) == BLKmode)
4180 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4183 if (POINTER_TYPE_P (TREE_TYPE (to)))
4184 value = convert_memory_address (GET_MODE (to_rtx), value);
4185 emit_move_insn (to_rtx, value);
4187 preserve_temp_slots (to_rtx);
4193 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4194 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4197 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4199 /* Don't move directly into a return register. */
4200 if (TREE_CODE (to) == RESULT_DECL
4201 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4206 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4208 if (GET_CODE (to_rtx) == PARALLEL)
4209 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4210 int_size_in_bytes (TREE_TYPE (from)));
4212 emit_move_insn (to_rtx, temp);
4214 preserve_temp_slots (to_rtx);
4220 /* In case we are returning the contents of an object which overlaps
4221 the place the value is being stored, use a safe function when copying
4222 a value through a pointer into a structure value return block. */
4223 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4224 && current_function_returns_struct
4225 && !current_function_returns_pcc_struct)
4230 size = expr_size (from);
4231 from_rtx = expand_normal (from);
4233 emit_library_call (memmove_libfunc, LCT_NORMAL,
4234 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4235 XEXP (from_rtx, 0), Pmode,
4236 convert_to_mode (TYPE_MODE (sizetype),
4237 size, TYPE_UNSIGNED (sizetype)),
4238 TYPE_MODE (sizetype));
4240 preserve_temp_slots (to_rtx);
4246 /* Compute FROM and store the value in the rtx we got. */
4249 result = store_expr (from, to_rtx, 0);
4250 preserve_temp_slots (result);
4256 /* Generate code for computing expression EXP,
4257 and storing the value into TARGET.
4259 If the mode is BLKmode then we may return TARGET itself.
4260 It turns out that in BLKmode it doesn't cause a problem.
4261 because C has no operators that could combine two different
4262 assignments into the same BLKmode object with different values
4263 with no sequence point. Will other languages need this to
4266 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4267 stack, and block moves may need to be treated specially. */
4270 store_expr (tree exp, rtx target, int call_param_p)
4273 rtx alt_rtl = NULL_RTX;
4274 int dont_return_target = 0;
4276 if (VOID_TYPE_P (TREE_TYPE (exp)))
4278 /* C++ can generate ?: expressions with a throw expression in one
4279 branch and an rvalue in the other. Here, we resolve attempts to
4280 store the throw expression's nonexistent result. */
4281 gcc_assert (!call_param_p);
4282 expand_expr (exp, const0_rtx, VOIDmode, 0);
4285 if (TREE_CODE (exp) == COMPOUND_EXPR)
4287 /* Perform first part of compound expression, then assign from second
4289 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4290 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4291 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4293 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4295 /* For conditional expression, get safe form of the target. Then
4296 test the condition, doing the appropriate assignment on either
4297 side. This avoids the creation of unnecessary temporaries.
4298 For non-BLKmode, it is more efficient not to do this. */
4300 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4302 do_pending_stack_adjust ();
4304 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4305 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4306 emit_jump_insn (gen_jump (lab2));
4309 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4315 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4316 /* If this is a scalar in a register that is stored in a wider mode
4317 than the declared mode, compute the result into its declared mode
4318 and then convert to the wider mode. Our value is the computed
4321 rtx inner_target = 0;
4323 /* We can do the conversion inside EXP, which will often result
4324 in some optimizations. Do the conversion in two steps: first
4325 change the signedness, if needed, then the extend. But don't
4326 do this if the type of EXP is a subtype of something else
4327 since then the conversion might involve more than just
4328 converting modes. */
4329 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4330 && TREE_TYPE (TREE_TYPE (exp)) == 0
4331 && (!lang_hooks.reduce_bit_field_operations
4332 || (GET_MODE_PRECISION (GET_MODE (target))
4333 == TYPE_PRECISION (TREE_TYPE (exp)))))
4335 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4336 != SUBREG_PROMOTED_UNSIGNED_P (target))
4338 (lang_hooks.types.signed_or_unsigned_type
4339 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4341 exp = fold_convert (lang_hooks.types.type_for_mode
4342 (GET_MODE (SUBREG_REG (target)),
4343 SUBREG_PROMOTED_UNSIGNED_P (target)),
4346 inner_target = SUBREG_REG (target);
4349 temp = expand_expr (exp, inner_target, VOIDmode,
4350 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4352 /* If TEMP is a VOIDmode constant, use convert_modes to make
4353 sure that we properly convert it. */
4354 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4356 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4357 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4358 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4359 GET_MODE (target), temp,
4360 SUBREG_PROMOTED_UNSIGNED_P (target));
4363 convert_move (SUBREG_REG (target), temp,
4364 SUBREG_PROMOTED_UNSIGNED_P (target));
4370 temp = expand_expr_real (exp, target, GET_MODE (target),
4372 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4374 /* Return TARGET if it's a specified hardware register.
4375 If TARGET is a volatile mem ref, either return TARGET
4376 or return a reg copied *from* TARGET; ANSI requires this.
4378 Otherwise, if TEMP is not TARGET, return TEMP
4379 if it is constant (for efficiency),
4380 or if we really want the correct value. */
4381 if (!(target && REG_P (target)
4382 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4383 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4384 && ! rtx_equal_p (temp, target)
4385 && CONSTANT_P (temp))
4386 dont_return_target = 1;
4389 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4390 the same as that of TARGET, adjust the constant. This is needed, for
4391 example, in case it is a CONST_DOUBLE and we want only a word-sized
4393 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4394 && TREE_CODE (exp) != ERROR_MARK
4395 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4396 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4397 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4399 /* If value was not generated in the target, store it there.
4400 Convert the value to TARGET's type first if necessary and emit the
4401 pending incrementations that have been queued when expanding EXP.
4402 Note that we cannot emit the whole queue blindly because this will
4403 effectively disable the POST_INC optimization later.
4405 If TEMP and TARGET compare equal according to rtx_equal_p, but
4406 one or both of them are volatile memory refs, we have to distinguish
4408 - expand_expr has used TARGET. In this case, we must not generate
4409 another copy. This can be detected by TARGET being equal according
4411 - expand_expr has not used TARGET - that means that the source just
4412 happens to have the same RTX form. Since temp will have been created
4413 by expand_expr, it will compare unequal according to == .
4414 We must generate a copy in this case, to reach the correct number
4415 of volatile memory references. */
4417 if ((! rtx_equal_p (temp, target)
4418 || (temp != target && (side_effects_p (temp)
4419 || side_effects_p (target))))
4420 && TREE_CODE (exp) != ERROR_MARK
4421 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4422 but TARGET is not valid memory reference, TEMP will differ
4423 from TARGET although it is really the same location. */
4424 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4425 /* If there's nothing to copy, don't bother. Don't call
4426 expr_size unless necessary, because some front-ends (C++)
4427 expr_size-hook must not be given objects that are not
4428 supposed to be bit-copied or bit-initialized. */
4429 && expr_size (exp) != const0_rtx)
4431 if (GET_MODE (temp) != GET_MODE (target)
4432 && GET_MODE (temp) != VOIDmode)
4434 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4435 if (dont_return_target)
4437 /* In this case, we will return TEMP,
4438 so make sure it has the proper mode.
4439 But don't forget to store the value into TARGET. */
4440 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4441 emit_move_insn (target, temp);
4444 convert_move (target, temp, unsignedp);
4447 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4449 /* Handle copying a string constant into an array. The string
4450 constant may be shorter than the array. So copy just the string's
4451 actual length, and clear the rest. First get the size of the data
4452 type of the string, which is actually the size of the target. */
4453 rtx size = expr_size (exp);
4455 if (GET_CODE (size) == CONST_INT
4456 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4457 emit_block_move (target, temp, size,
4459 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4462 /* Compute the size of the data to copy from the string. */
4464 = size_binop (MIN_EXPR,
4465 make_tree (sizetype, size),
4466 size_int (TREE_STRING_LENGTH (exp)));
4468 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4470 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4473 /* Copy that much. */
4474 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4475 TYPE_UNSIGNED (sizetype));
4476 emit_block_move (target, temp, copy_size_rtx,
4478 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4480 /* Figure out how much is left in TARGET that we have to clear.
4481 Do all calculations in ptr_mode. */
4482 if (GET_CODE (copy_size_rtx) == CONST_INT)
4484 size = plus_constant (size, -INTVAL (copy_size_rtx));
4485 target = adjust_address (target, BLKmode,
4486 INTVAL (copy_size_rtx));
4490 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4491 copy_size_rtx, NULL_RTX, 0,
4494 #ifdef POINTERS_EXTEND_UNSIGNED
4495 if (GET_MODE (copy_size_rtx) != Pmode)
4496 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4497 TYPE_UNSIGNED (sizetype));
4500 target = offset_address (target, copy_size_rtx,
4501 highest_pow2_factor (copy_size));
4502 label = gen_label_rtx ();
4503 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4504 GET_MODE (size), 0, label);
4507 if (size != const0_rtx)
4508 clear_storage (target, size, BLOCK_OP_NORMAL);
4514 /* Handle calls that return values in multiple non-contiguous locations.
4515 The Irix 6 ABI has examples of this. */
4516 else if (GET_CODE (target) == PARALLEL)
4517 emit_group_load (target, temp, TREE_TYPE (exp),
4518 int_size_in_bytes (TREE_TYPE (exp)));
4519 else if (GET_MODE (temp) == BLKmode)
4520 emit_block_move (target, temp, expr_size (exp),
4522 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4525 temp = force_operand (temp, target);
4527 emit_move_insn (target, temp);
4534 /* Helper for categorize_ctor_elements. Identical interface. */
4537 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4538 HOST_WIDE_INT *p_elt_count,
4541 unsigned HOST_WIDE_INT idx;
4542 HOST_WIDE_INT nz_elts, elt_count;
4543 tree value, purpose;
4545 /* Whether CTOR is a valid constant initializer, in accordance with what
4546 initializer_constant_valid_p does. If inferred from the constructor
4547 elements, true until proven otherwise. */
4548 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4549 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4554 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4559 if (TREE_CODE (purpose) == RANGE_EXPR)
4561 tree lo_index = TREE_OPERAND (purpose, 0);
4562 tree hi_index = TREE_OPERAND (purpose, 1);
4564 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4565 mult = (tree_low_cst (hi_index, 1)
4566 - tree_low_cst (lo_index, 1) + 1);
4569 switch (TREE_CODE (value))
4573 HOST_WIDE_INT nz = 0, ic = 0;
4576 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4578 nz_elts += mult * nz;
4579 elt_count += mult * ic;
4581 if (const_from_elts_p && const_p)
4582 const_p = const_elt_p;
4588 if (!initializer_zerop (value))
4594 nz_elts += mult * TREE_STRING_LENGTH (value);
4595 elt_count += mult * TREE_STRING_LENGTH (value);
4599 if (!initializer_zerop (TREE_REALPART (value)))
4601 if (!initializer_zerop (TREE_IMAGPART (value)))
4609 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4611 if (!initializer_zerop (TREE_VALUE (v)))
4622 if (const_from_elts_p && const_p)
4623 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4630 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4631 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4634 bool clear_this = true;
4636 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4638 /* We don't expect more than one element of the union to be
4639 initialized. Not sure what we should do otherwise... */
4640 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4643 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4644 CONSTRUCTOR_ELTS (ctor),
4647 /* ??? We could look at each element of the union, and find the
4648 largest element. Which would avoid comparing the size of the
4649 initialized element against any tail padding in the union.
4650 Doesn't seem worth the effort... */
4651 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4652 TYPE_SIZE (init_sub_type)) == 1)
4654 /* And now we have to find out if the element itself is fully
4655 constructed. E.g. for union { struct { int a, b; } s; } u
4656 = { .s = { .a = 1 } }. */
4657 if (elt_count == count_type_elements (init_sub_type, false))
4662 *p_must_clear = clear_this;
4665 *p_nz_elts += nz_elts;
4666 *p_elt_count += elt_count;
4671 /* Examine CTOR to discover:
4672 * how many scalar fields are set to nonzero values,
4673 and place it in *P_NZ_ELTS;
4674 * how many scalar fields in total are in CTOR,
4675 and place it in *P_ELT_COUNT.
4676 * if a type is a union, and the initializer from the constructor
4677 is not the largest element in the union, then set *p_must_clear.
4679 Return whether or not CTOR is a valid static constant initializer, the same
4680 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4683 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4684 HOST_WIDE_INT *p_elt_count,
4689 *p_must_clear = false;
4692 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4695 /* Count the number of scalars in TYPE. Return -1 on overflow or
4696 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
4697 array member at the end of the structure. */
4700 count_type_elements (tree type, bool allow_flexarr)
4702 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4703 switch (TREE_CODE (type))
4707 tree telts = array_type_nelts (type);
4708 if (telts && host_integerp (telts, 1))
4710 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4711 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4714 else if (max / n > m)
4722 HOST_WIDE_INT n = 0, t;
4725 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4726 if (TREE_CODE (f) == FIELD_DECL)
4728 t = count_type_elements (TREE_TYPE (f), false);
4731 /* Check for structures with flexible array member. */
4732 tree tf = TREE_TYPE (f);
4734 && TREE_CHAIN (f) == NULL
4735 && TREE_CODE (tf) == ARRAY_TYPE
4737 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4738 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4739 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4740 && int_size_in_bytes (type) >= 0)
4752 case QUAL_UNION_TYPE:
4759 return TYPE_VECTOR_SUBPARTS (type);
4767 case REFERENCE_TYPE:
4779 /* Return 1 if EXP contains mostly (3/4) zeros. */
4782 mostly_zeros_p (tree exp)
4784 if (TREE_CODE (exp) == CONSTRUCTOR)
4787 HOST_WIDE_INT nz_elts, count, elts;
4790 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4794 elts = count_type_elements (TREE_TYPE (exp), false);
4796 return nz_elts < elts / 4;
4799 return initializer_zerop (exp);
4802 /* Return 1 if EXP contains all zeros. */
4805 all_zeros_p (tree exp)
4807 if (TREE_CODE (exp) == CONSTRUCTOR)
4810 HOST_WIDE_INT nz_elts, count;
4813 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4814 return nz_elts == 0;
4817 return initializer_zerop (exp);
4820 /* Helper function for store_constructor.
4821 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4822 TYPE is the type of the CONSTRUCTOR, not the element type.
4823 CLEARED is as for store_constructor.
4824 ALIAS_SET is the alias set to use for any stores.
4826 This provides a recursive shortcut back to store_constructor when it isn't
4827 necessary to go through store_field. This is so that we can pass through
4828 the cleared field to let store_constructor know that we may not have to
4829 clear a substructure if the outer structure has already been cleared. */
4832 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4833 HOST_WIDE_INT bitpos, enum machine_mode mode,
4834 tree exp, tree type, int cleared, int alias_set)
4836 if (TREE_CODE (exp) == CONSTRUCTOR
4837 /* We can only call store_constructor recursively if the size and
4838 bit position are on a byte boundary. */
4839 && bitpos % BITS_PER_UNIT == 0
4840 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4841 /* If we have a nonzero bitpos for a register target, then we just
4842 let store_field do the bitfield handling. This is unlikely to
4843 generate unnecessary clear instructions anyways. */
4844 && (bitpos == 0 || MEM_P (target)))
4848 = adjust_address (target,
4849 GET_MODE (target) == BLKmode
4851 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4852 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4855 /* Update the alias set, if required. */
4856 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4857 && MEM_ALIAS_SET (target) != 0)
4859 target = copy_rtx (target);
4860 set_mem_alias_set (target, alias_set);
4863 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4866 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4869 /* Store the value of constructor EXP into the rtx TARGET.
4870 TARGET is either a REG or a MEM; we know it cannot conflict, since
4871 safe_from_p has been called.
4872 CLEARED is true if TARGET is known to have been zero'd.
4873 SIZE is the number of bytes of TARGET we are allowed to modify: this
4874 may not be the same as the size of EXP if we are assigning to a field
4875 which has been packed to exclude padding bits. */
4878 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4880 tree type = TREE_TYPE (exp);
4881 #ifdef WORD_REGISTER_OPERATIONS
4882 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4885 switch (TREE_CODE (type))
4889 case QUAL_UNION_TYPE:
4891 unsigned HOST_WIDE_INT idx;
4894 /* If size is zero or the target is already cleared, do nothing. */
4895 if (size == 0 || cleared)
4897 /* We either clear the aggregate or indicate the value is dead. */
4898 else if ((TREE_CODE (type) == UNION_TYPE
4899 || TREE_CODE (type) == QUAL_UNION_TYPE)
4900 && ! CONSTRUCTOR_ELTS (exp))
4901 /* If the constructor is empty, clear the union. */
4903 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4907 /* If we are building a static constructor into a register,
4908 set the initial value as zero so we can fold the value into
4909 a constant. But if more than one register is involved,
4910 this probably loses. */
4911 else if (REG_P (target) && TREE_STATIC (exp)
4912 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4914 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4918 /* If the constructor has fewer fields than the structure or
4919 if we are initializing the structure to mostly zeros, clear
4920 the whole structure first. Don't do this if TARGET is a
4921 register whose mode size isn't equal to SIZE since
4922 clear_storage can't handle this case. */
4924 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4925 != fields_length (type))
4926 || mostly_zeros_p (exp))
4928 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4931 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4936 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4938 /* Store each element of the constructor into the
4939 corresponding field of TARGET. */
4940 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4942 enum machine_mode mode;
4943 HOST_WIDE_INT bitsize;
4944 HOST_WIDE_INT bitpos = 0;
4946 rtx to_rtx = target;
4948 /* Just ignore missing fields. We cleared the whole
4949 structure, above, if any fields are missing. */
4953 if (cleared && initializer_zerop (value))
4956 if (host_integerp (DECL_SIZE (field), 1))
4957 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4961 mode = DECL_MODE (field);
4962 if (DECL_BIT_FIELD (field))
4965 offset = DECL_FIELD_OFFSET (field);
4966 if (host_integerp (offset, 0)
4967 && host_integerp (bit_position (field), 0))
4969 bitpos = int_bit_position (field);
4973 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4980 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4981 make_tree (TREE_TYPE (exp),
4984 offset_rtx = expand_normal (offset);
4985 gcc_assert (MEM_P (to_rtx));
4987 #ifdef POINTERS_EXTEND_UNSIGNED
4988 if (GET_MODE (offset_rtx) != Pmode)
4989 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4991 if (GET_MODE (offset_rtx) != ptr_mode)
4992 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4995 to_rtx = offset_address (to_rtx, offset_rtx,
4996 highest_pow2_factor (offset));
4999 #ifdef WORD_REGISTER_OPERATIONS
5000 /* If this initializes a field that is smaller than a
5001 word, at the start of a word, try to widen it to a full
5002 word. This special case allows us to output C++ member
5003 function initializations in a form that the optimizers
5006 && bitsize < BITS_PER_WORD
5007 && bitpos % BITS_PER_WORD == 0
5008 && GET_MODE_CLASS (mode) == MODE_INT
5009 && TREE_CODE (value) == INTEGER_CST
5011 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5013 tree type = TREE_TYPE (value);
5015 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5017 type = lang_hooks.types.type_for_size
5018 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5019 value = fold_convert (type, value);
5022 if (BYTES_BIG_ENDIAN)
5024 = fold_build2 (LSHIFT_EXPR, type, value,
5025 build_int_cst (type,
5026 BITS_PER_WORD - bitsize));
5027 bitsize = BITS_PER_WORD;
5032 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5033 && DECL_NONADDRESSABLE_P (field))
5035 to_rtx = copy_rtx (to_rtx);
5036 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5039 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5040 value, type, cleared,
5041 get_alias_set (TREE_TYPE (field)));
5048 unsigned HOST_WIDE_INT i;
5051 tree elttype = TREE_TYPE (type);
5053 HOST_WIDE_INT minelt = 0;
5054 HOST_WIDE_INT maxelt = 0;
5056 domain = TYPE_DOMAIN (type);
5057 const_bounds_p = (TYPE_MIN_VALUE (domain)
5058 && TYPE_MAX_VALUE (domain)
5059 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5060 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5062 /* If we have constant bounds for the range of the type, get them. */
5065 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5066 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5069 /* If the constructor has fewer elements than the array, clear
5070 the whole array first. Similarly if this is static
5071 constructor of a non-BLKmode object. */
5074 else if (REG_P (target) && TREE_STATIC (exp))
5078 unsigned HOST_WIDE_INT idx;
5080 HOST_WIDE_INT count = 0, zero_count = 0;
5081 need_to_clear = ! const_bounds_p;
5083 /* This loop is a more accurate version of the loop in
5084 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5085 is also needed to check for missing elements. */
5086 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5088 HOST_WIDE_INT this_node_count;
5093 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5095 tree lo_index = TREE_OPERAND (index, 0);
5096 tree hi_index = TREE_OPERAND (index, 1);
5098 if (! host_integerp (lo_index, 1)
5099 || ! host_integerp (hi_index, 1))
5105 this_node_count = (tree_low_cst (hi_index, 1)
5106 - tree_low_cst (lo_index, 1) + 1);
5109 this_node_count = 1;
5111 count += this_node_count;
5112 if (mostly_zeros_p (value))
5113 zero_count += this_node_count;
5116 /* Clear the entire array first if there are any missing
5117 elements, or if the incidence of zero elements is >=
5120 && (count < maxelt - minelt + 1
5121 || 4 * zero_count >= 3 * count))
5125 if (need_to_clear && size > 0)
5128 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5130 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5134 if (!cleared && REG_P (target))
5135 /* Inform later passes that the old value is dead. */
5136 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5138 /* Store each element of the constructor into the
5139 corresponding element of TARGET, determined by counting the
5141 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5143 enum machine_mode mode;
5144 HOST_WIDE_INT bitsize;
5145 HOST_WIDE_INT bitpos;
5147 rtx xtarget = target;
5149 if (cleared && initializer_zerop (value))
5152 unsignedp = TYPE_UNSIGNED (elttype);
5153 mode = TYPE_MODE (elttype);
5154 if (mode == BLKmode)
5155 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5156 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5159 bitsize = GET_MODE_BITSIZE (mode);
5161 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5163 tree lo_index = TREE_OPERAND (index, 0);
5164 tree hi_index = TREE_OPERAND (index, 1);
5165 rtx index_r, pos_rtx;
5166 HOST_WIDE_INT lo, hi, count;
5169 /* If the range is constant and "small", unroll the loop. */
5171 && host_integerp (lo_index, 0)
5172 && host_integerp (hi_index, 0)
5173 && (lo = tree_low_cst (lo_index, 0),
5174 hi = tree_low_cst (hi_index, 0),
5175 count = hi - lo + 1,
5178 || (host_integerp (TYPE_SIZE (elttype), 1)
5179 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5182 lo -= minelt; hi -= minelt;
5183 for (; lo <= hi; lo++)
5185 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5188 && !MEM_KEEP_ALIAS_SET_P (target)
5189 && TREE_CODE (type) == ARRAY_TYPE
5190 && TYPE_NONALIASED_COMPONENT (type))
5192 target = copy_rtx (target);
5193 MEM_KEEP_ALIAS_SET_P (target) = 1;
5196 store_constructor_field
5197 (target, bitsize, bitpos, mode, value, type, cleared,
5198 get_alias_set (elttype));
5203 rtx loop_start = gen_label_rtx ();
5204 rtx loop_end = gen_label_rtx ();
5207 expand_normal (hi_index);
5208 unsignedp = TYPE_UNSIGNED (domain);
5210 index = build_decl (VAR_DECL, NULL_TREE, domain);
5213 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5215 SET_DECL_RTL (index, index_r);
5216 store_expr (lo_index, index_r, 0);
5218 /* Build the head of the loop. */
5219 do_pending_stack_adjust ();
5220 emit_label (loop_start);
5222 /* Assign value to element index. */
5224 fold_convert (ssizetype,
5225 fold_build2 (MINUS_EXPR,
5228 TYPE_MIN_VALUE (domain)));
5231 size_binop (MULT_EXPR, position,
5232 fold_convert (ssizetype,
5233 TYPE_SIZE_UNIT (elttype)));
5235 pos_rtx = expand_normal (position);
5236 xtarget = offset_address (target, pos_rtx,
5237 highest_pow2_factor (position));
5238 xtarget = adjust_address (xtarget, mode, 0);
5239 if (TREE_CODE (value) == CONSTRUCTOR)
5240 store_constructor (value, xtarget, cleared,
5241 bitsize / BITS_PER_UNIT);
5243 store_expr (value, xtarget, 0);
5245 /* Generate a conditional jump to exit the loop. */
5246 exit_cond = build2 (LT_EXPR, integer_type_node,
5248 jumpif (exit_cond, loop_end);
5250 /* Update the loop counter, and jump to the head of
5252 expand_assignment (index,
5253 build2 (PLUS_EXPR, TREE_TYPE (index),
5254 index, integer_one_node));
5256 emit_jump (loop_start);
5258 /* Build the end of the loop. */
5259 emit_label (loop_end);
5262 else if ((index != 0 && ! host_integerp (index, 0))
5263 || ! host_integerp (TYPE_SIZE (elttype), 1))
5268 index = ssize_int (1);
5271 index = fold_convert (ssizetype,
5272 fold_build2 (MINUS_EXPR,
5275 TYPE_MIN_VALUE (domain)));
5278 size_binop (MULT_EXPR, index,
5279 fold_convert (ssizetype,
5280 TYPE_SIZE_UNIT (elttype)));
5281 xtarget = offset_address (target,
5282 expand_normal (position),
5283 highest_pow2_factor (position));
5284 xtarget = adjust_address (xtarget, mode, 0);
5285 store_expr (value, xtarget, 0);
5290 bitpos = ((tree_low_cst (index, 0) - minelt)
5291 * tree_low_cst (TYPE_SIZE (elttype), 1));
5293 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5295 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5296 && TREE_CODE (type) == ARRAY_TYPE
5297 && TYPE_NONALIASED_COMPONENT (type))
5299 target = copy_rtx (target);
5300 MEM_KEEP_ALIAS_SET_P (target) = 1;
5302 store_constructor_field (target, bitsize, bitpos, mode, value,
5303 type, cleared, get_alias_set (elttype));
5311 unsigned HOST_WIDE_INT idx;
5312 constructor_elt *ce;
5316 tree elttype = TREE_TYPE (type);
5317 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5318 enum machine_mode eltmode = TYPE_MODE (elttype);
5319 HOST_WIDE_INT bitsize;
5320 HOST_WIDE_INT bitpos;
5321 rtvec vector = NULL;
5324 gcc_assert (eltmode != BLKmode);
5326 n_elts = TYPE_VECTOR_SUBPARTS (type);
5327 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5329 enum machine_mode mode = GET_MODE (target);
5331 icode = (int) vec_init_optab->handlers[mode].insn_code;
5332 if (icode != CODE_FOR_nothing)
5336 vector = rtvec_alloc (n_elts);
5337 for (i = 0; i < n_elts; i++)
5338 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5342 /* If the constructor has fewer elements than the vector,
5343 clear the whole array first. Similarly if this is static
5344 constructor of a non-BLKmode object. */
5347 else if (REG_P (target) && TREE_STATIC (exp))
5351 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5354 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5356 int n_elts_here = tree_low_cst
5357 (int_const_binop (TRUNC_DIV_EXPR,
5358 TYPE_SIZE (TREE_TYPE (value)),
5359 TYPE_SIZE (elttype), 0), 1);
5361 count += n_elts_here;
5362 if (mostly_zeros_p (value))
5363 zero_count += n_elts_here;
5366 /* Clear the entire vector first if there are any missing elements,
5367 or if the incidence of zero elements is >= 75%. */
5368 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5371 if (need_to_clear && size > 0 && !vector)
5374 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5376 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5380 /* Inform later passes that the old value is dead. */
5381 if (!cleared && !vector && REG_P (target))
5382 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5384 /* Store each element of the constructor into the corresponding
5385 element of TARGET, determined by counting the elements. */
5386 for (idx = 0, i = 0;
5387 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5388 idx++, i += bitsize / elt_size)
5390 HOST_WIDE_INT eltpos;
5391 tree value = ce->value;
5393 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5394 if (cleared && initializer_zerop (value))
5398 eltpos = tree_low_cst (ce->index, 1);
5404 /* Vector CONSTRUCTORs should only be built from smaller
5405 vectors in the case of BLKmode vectors. */
5406 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5407 RTVEC_ELT (vector, eltpos)
5408 = expand_normal (value);
5412 enum machine_mode value_mode =
5413 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5414 ? TYPE_MODE (TREE_TYPE (value))
5416 bitpos = eltpos * elt_size;
5417 store_constructor_field (target, bitsize, bitpos,
5418 value_mode, value, type,
5419 cleared, get_alias_set (elttype));
5424 emit_insn (GEN_FCN (icode)
5426 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5435 /* Store the value of EXP (an expression tree)
5436 into a subfield of TARGET which has mode MODE and occupies
5437 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5438 If MODE is VOIDmode, it means that we are storing into a bit-field.
5440 Always return const0_rtx unless we have something particular to
5443 TYPE is the type of the underlying object,
5445 ALIAS_SET is the alias set for the destination. This value will
5446 (in general) be different from that for TARGET, since TARGET is a
5447 reference to the containing structure. */
5450 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5451 enum machine_mode mode, tree exp, tree type, int alias_set)
5453 HOST_WIDE_INT width_mask = 0;
5455 if (TREE_CODE (exp) == ERROR_MARK)
5458 /* If we have nothing to store, do nothing unless the expression has
5461 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5462 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5463 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5465 /* If we are storing into an unaligned field of an aligned union that is
5466 in a register, we may have the mode of TARGET being an integer mode but
5467 MODE == BLKmode. In that case, get an aligned object whose size and
5468 alignment are the same as TARGET and store TARGET into it (we can avoid
5469 the store if the field being stored is the entire width of TARGET). Then
5470 call ourselves recursively to store the field into a BLKmode version of
5471 that object. Finally, load from the object into TARGET. This is not
5472 very efficient in general, but should only be slightly more expensive
5473 than the otherwise-required unaligned accesses. Perhaps this can be
5474 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5475 twice, once with emit_move_insn and once via store_field. */
5478 && (REG_P (target) || GET_CODE (target) == SUBREG))
5480 rtx object = assign_temp (type, 0, 1, 1);
5481 rtx blk_object = adjust_address (object, BLKmode, 0);
5483 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5484 emit_move_insn (object, target);
5486 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5488 emit_move_insn (target, object);
5490 /* We want to return the BLKmode version of the data. */
5494 if (GET_CODE (target) == CONCAT)
5496 /* We're storing into a struct containing a single __complex. */
5498 gcc_assert (!bitpos);
5499 return store_expr (exp, target, 0);
5502 /* If the structure is in a register or if the component
5503 is a bit field, we cannot use addressing to access it.
5504 Use bit-field techniques or SUBREG to store in it. */
5506 if (mode == VOIDmode
5507 || (mode != BLKmode && ! direct_store[(int) mode]
5508 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5509 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5511 || GET_CODE (target) == SUBREG
5512 /* If the field isn't aligned enough to store as an ordinary memref,
5513 store it as a bit field. */
5515 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5516 || bitpos % GET_MODE_ALIGNMENT (mode))
5517 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5518 || (bitpos % BITS_PER_UNIT != 0)))
5519 /* If the RHS and field are a constant size and the size of the
5520 RHS isn't the same size as the bitfield, we must use bitfield
5523 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5524 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5528 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5529 implies a mask operation. If the precision is the same size as
5530 the field we're storing into, that mask is redundant. This is
5531 particularly common with bit field assignments generated by the
5533 if (TREE_CODE (exp) == NOP_EXPR)
5535 tree type = TREE_TYPE (exp);
5536 if (INTEGRAL_TYPE_P (type)
5537 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5538 && bitsize == TYPE_PRECISION (type))
5540 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5541 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5542 exp = TREE_OPERAND (exp, 0);
5546 temp = expand_normal (exp);
5548 /* If BITSIZE is narrower than the size of the type of EXP
5549 we will be narrowing TEMP. Normally, what's wanted are the
5550 low-order bits. However, if EXP's type is a record and this is
5551 big-endian machine, we want the upper BITSIZE bits. */
5552 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5553 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5554 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5555 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5556 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5560 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5562 if (mode != VOIDmode && mode != BLKmode
5563 && mode != TYPE_MODE (TREE_TYPE (exp)))
5564 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5566 /* If the modes of TARGET and TEMP are both BLKmode, both
5567 must be in memory and BITPOS must be aligned on a byte
5568 boundary. If so, we simply do a block copy. */
5569 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5571 gcc_assert (MEM_P (target) && MEM_P (temp)
5572 && !(bitpos % BITS_PER_UNIT));
5574 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5575 emit_block_move (target, temp,
5576 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5583 /* Store the value in the bitfield. */
5584 store_bit_field (target, bitsize, bitpos, mode, temp);
5590 /* Now build a reference to just the desired component. */
5591 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5593 if (to_rtx == target)
5594 to_rtx = copy_rtx (to_rtx);
5596 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5597 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5598 set_mem_alias_set (to_rtx, alias_set);
5600 return store_expr (exp, to_rtx, 0);
5604 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5605 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5606 codes and find the ultimate containing object, which we return.
5608 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5609 bit position, and *PUNSIGNEDP to the signedness of the field.
5610 If the position of the field is variable, we store a tree
5611 giving the variable offset (in units) in *POFFSET.
5612 This offset is in addition to the bit position.
5613 If the position is not variable, we store 0 in *POFFSET.
5615 If any of the extraction expressions is volatile,
5616 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5618 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5619 is a mode that can be used to access the field. In that case, *PBITSIZE
5622 If the field describes a variable-sized object, *PMODE is set to
5623 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5624 this case, but the address of the object can be found.
5626 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5627 look through nodes that serve as markers of a greater alignment than
5628 the one that can be deduced from the expression. These nodes make it
5629 possible for front-ends to prevent temporaries from being created by
5630 the middle-end on alignment considerations. For that purpose, the
5631 normal operating mode at high-level is to always pass FALSE so that
5632 the ultimate containing object is really returned; moreover, the
5633 associated predicate handled_component_p will always return TRUE
5634 on these nodes, thus indicating that they are essentially handled
5635 by get_inner_reference. TRUE should only be passed when the caller
5636 is scanning the expression in order to build another representation
5637 and specifically knows how to handle these nodes; as such, this is
5638 the normal operating mode in the RTL expanders. */
5641 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5642 HOST_WIDE_INT *pbitpos, tree *poffset,
5643 enum machine_mode *pmode, int *punsignedp,
5644 int *pvolatilep, bool keep_aligning)
5647 enum machine_mode mode = VOIDmode;
5648 tree offset = size_zero_node;
5649 tree bit_offset = bitsize_zero_node;
5651 /* First get the mode, signedness, and size. We do this from just the
5652 outermost expression. */
5653 if (TREE_CODE (exp) == COMPONENT_REF)
5655 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5656 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5657 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5659 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5661 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5663 size_tree = TREE_OPERAND (exp, 1);
5664 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5668 mode = TYPE_MODE (TREE_TYPE (exp));
5669 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5671 if (mode == BLKmode)
5672 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5674 *pbitsize = GET_MODE_BITSIZE (mode);
5679 if (! host_integerp (size_tree, 1))
5680 mode = BLKmode, *pbitsize = -1;
5682 *pbitsize = tree_low_cst (size_tree, 1);
5687 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5688 and find the ultimate containing object. */
5691 switch (TREE_CODE (exp))
5694 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5695 TREE_OPERAND (exp, 2));
5700 tree field = TREE_OPERAND (exp, 1);
5701 tree this_offset = component_ref_field_offset (exp);
5703 /* If this field hasn't been filled in yet, don't go past it.
5704 This should only happen when folding expressions made during
5705 type construction. */
5706 if (this_offset == 0)
5709 offset = size_binop (PLUS_EXPR, offset, this_offset);
5710 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5711 DECL_FIELD_BIT_OFFSET (field));
5713 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5718 case ARRAY_RANGE_REF:
5720 tree index = TREE_OPERAND (exp, 1);
5721 tree low_bound = array_ref_low_bound (exp);
5722 tree unit_size = array_ref_element_size (exp);
5724 /* We assume all arrays have sizes that are a multiple of a byte.
5725 First subtract the lower bound, if any, in the type of the
5726 index, then convert to sizetype and multiply by the size of
5727 the array element. */
5728 if (! integer_zerop (low_bound))
5729 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5732 offset = size_binop (PLUS_EXPR, offset,
5733 size_binop (MULT_EXPR,
5734 fold_convert (sizetype, index),
5743 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5744 bitsize_int (*pbitsize));
5747 case VIEW_CONVERT_EXPR:
5748 if (keep_aligning && STRICT_ALIGNMENT
5749 && (TYPE_ALIGN (TREE_TYPE (exp))
5750 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5751 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5752 < BIGGEST_ALIGNMENT)
5753 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5754 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5762 /* If any reference in the chain is volatile, the effect is volatile. */
5763 if (TREE_THIS_VOLATILE (exp))
5766 exp = TREE_OPERAND (exp, 0);
5770 /* If OFFSET is constant, see if we can return the whole thing as a
5771 constant bit position. Make sure to handle overflow during
5773 if (host_integerp (offset, 0))
5775 double_int tem = double_int_mul (tree_to_double_int (offset),
5776 uhwi_to_double_int (BITS_PER_UNIT));
5777 tem = double_int_add (tem, tree_to_double_int (bit_offset));
5778 if (double_int_fits_in_shwi_p (tem))
5780 *pbitpos = double_int_to_shwi (tem);
5781 *poffset = NULL_TREE;
5786 /* Otherwise, split it up. */
5787 *pbitpos = tree_low_cst (bit_offset, 0);
5793 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5794 look for whether EXP or any nested component-refs within EXP is marked
5798 contains_packed_reference (tree exp)
5800 bool packed_p = false;
5804 switch (TREE_CODE (exp))
5808 tree field = TREE_OPERAND (exp, 1);
5809 packed_p = DECL_PACKED (field)
5810 || TYPE_PACKED (TREE_TYPE (field))
5811 || TYPE_PACKED (TREE_TYPE (exp));
5819 case ARRAY_RANGE_REF:
5822 case VIEW_CONVERT_EXPR:
5828 exp = TREE_OPERAND (exp, 0);
5834 /* Return a tree of sizetype representing the size, in bytes, of the element
5835 of EXP, an ARRAY_REF. */
5838 array_ref_element_size (tree exp)
5840 tree aligned_size = TREE_OPERAND (exp, 3);
5841 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5843 /* If a size was specified in the ARRAY_REF, it's the size measured
5844 in alignment units of the element type. So multiply by that value. */
5847 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5848 sizetype from another type of the same width and signedness. */
5849 if (TREE_TYPE (aligned_size) != sizetype)
5850 aligned_size = fold_convert (sizetype, aligned_size);
5851 return size_binop (MULT_EXPR, aligned_size,
5852 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5855 /* Otherwise, take the size from that of the element type. Substitute
5856 any PLACEHOLDER_EXPR that we have. */
5858 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5861 /* Return a tree representing the lower bound of the array mentioned in
5862 EXP, an ARRAY_REF. */
5865 array_ref_low_bound (tree exp)
5867 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5869 /* If a lower bound is specified in EXP, use it. */
5870 if (TREE_OPERAND (exp, 2))
5871 return TREE_OPERAND (exp, 2);
5873 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5874 substituting for a PLACEHOLDER_EXPR as needed. */
5875 if (domain_type && TYPE_MIN_VALUE (domain_type))
5876 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5878 /* Otherwise, return a zero of the appropriate type. */
5879 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5882 /* Return a tree representing the upper bound of the array mentioned in
5883 EXP, an ARRAY_REF. */
5886 array_ref_up_bound (tree exp)
5888 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5890 /* If there is a domain type and it has an upper bound, use it, substituting
5891 for a PLACEHOLDER_EXPR as needed. */
5892 if (domain_type && TYPE_MAX_VALUE (domain_type))
5893 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5895 /* Otherwise fail. */
5899 /* Return a tree representing the offset, in bytes, of the field referenced
5900 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5903 component_ref_field_offset (tree exp)
5905 tree aligned_offset = TREE_OPERAND (exp, 2);
5906 tree field = TREE_OPERAND (exp, 1);
5908 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5909 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5913 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5914 sizetype from another type of the same width and signedness. */
5915 if (TREE_TYPE (aligned_offset) != sizetype)
5916 aligned_offset = fold_convert (sizetype, aligned_offset);
5917 return size_binop (MULT_EXPR, aligned_offset,
5918 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5921 /* Otherwise, take the offset from that of the field. Substitute
5922 any PLACEHOLDER_EXPR that we have. */
5924 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5927 /* Return 1 if T is an expression that get_inner_reference handles. */
5930 handled_component_p (tree t)
5932 switch (TREE_CODE (t))
5937 case ARRAY_RANGE_REF:
5938 case VIEW_CONVERT_EXPR:
5948 /* Given an rtx VALUE that may contain additions and multiplications, return
5949 an equivalent value that just refers to a register, memory, or constant.
5950 This is done by generating instructions to perform the arithmetic and
5951 returning a pseudo-register containing the value.
5953 The returned value may be a REG, SUBREG, MEM or constant. */
5956 force_operand (rtx value, rtx target)
5959 /* Use subtarget as the target for operand 0 of a binary operation. */
5960 rtx subtarget = get_subtarget (target);
5961 enum rtx_code code = GET_CODE (value);
5963 /* Check for subreg applied to an expression produced by loop optimizer. */
5965 && !REG_P (SUBREG_REG (value))
5966 && !MEM_P (SUBREG_REG (value)))
5968 value = simplify_gen_subreg (GET_MODE (value),
5969 force_reg (GET_MODE (SUBREG_REG (value)),
5970 force_operand (SUBREG_REG (value),
5972 GET_MODE (SUBREG_REG (value)),
5973 SUBREG_BYTE (value));
5974 code = GET_CODE (value);
5977 /* Check for a PIC address load. */
5978 if ((code == PLUS || code == MINUS)
5979 && XEXP (value, 0) == pic_offset_table_rtx
5980 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5981 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5982 || GET_CODE (XEXP (value, 1)) == CONST))
5985 subtarget = gen_reg_rtx (GET_MODE (value));
5986 emit_move_insn (subtarget, value);
5990 if (ARITHMETIC_P (value))
5992 op2 = XEXP (value, 1);
5993 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5995 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5998 op2 = negate_rtx (GET_MODE (value), op2);
6001 /* Check for an addition with OP2 a constant integer and our first
6002 operand a PLUS of a virtual register and something else. In that
6003 case, we want to emit the sum of the virtual register and the
6004 constant first and then add the other value. This allows virtual
6005 register instantiation to simply modify the constant rather than
6006 creating another one around this addition. */
6007 if (code == PLUS && GET_CODE (op2) == CONST_INT
6008 && GET_CODE (XEXP (value, 0)) == PLUS
6009 && REG_P (XEXP (XEXP (value, 0), 0))
6010 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6011 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6013 rtx temp = expand_simple_binop (GET_MODE (value), code,
6014 XEXP (XEXP (value, 0), 0), op2,
6015 subtarget, 0, OPTAB_LIB_WIDEN);
6016 return expand_simple_binop (GET_MODE (value), code, temp,
6017 force_operand (XEXP (XEXP (value,
6019 target, 0, OPTAB_LIB_WIDEN);
6022 op1 = force_operand (XEXP (value, 0), subtarget);
6023 op2 = force_operand (op2, NULL_RTX);
6027 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6029 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6030 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6031 target, 1, OPTAB_LIB_WIDEN);
6033 return expand_divmod (0,
6034 FLOAT_MODE_P (GET_MODE (value))
6035 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6036 GET_MODE (value), op1, op2, target, 0);
6039 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6043 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6047 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6051 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6052 target, 0, OPTAB_LIB_WIDEN);
6055 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6056 target, 1, OPTAB_LIB_WIDEN);
6059 if (UNARY_P (value))
6062 target = gen_reg_rtx (GET_MODE (value));
6063 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6070 case FLOAT_TRUNCATE:
6071 convert_move (target, op1, code == ZERO_EXTEND);
6076 expand_fix (target, op1, code == UNSIGNED_FIX);
6080 case UNSIGNED_FLOAT:
6081 expand_float (target, op1, code == UNSIGNED_FLOAT);
6085 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6089 #ifdef INSN_SCHEDULING
6090 /* On machines that have insn scheduling, we want all memory reference to be
6091 explicit, so we need to deal with such paradoxical SUBREGs. */
6092 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6093 && (GET_MODE_SIZE (GET_MODE (value))
6094 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6096 = simplify_gen_subreg (GET_MODE (value),
6097 force_reg (GET_MODE (SUBREG_REG (value)),
6098 force_operand (SUBREG_REG (value),
6100 GET_MODE (SUBREG_REG (value)),
6101 SUBREG_BYTE (value));
6107 /* Subroutine of expand_expr: return nonzero iff there is no way that
6108 EXP can reference X, which is being modified. TOP_P is nonzero if this
6109 call is going to be used to determine whether we need a temporary
6110 for EXP, as opposed to a recursive call to this function.
6112 It is always safe for this routine to return zero since it merely
6113 searches for optimization opportunities. */
6116 safe_from_p (rtx x, tree exp, int top_p)
6122 /* If EXP has varying size, we MUST use a target since we currently
6123 have no way of allocating temporaries of variable size
6124 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6125 So we assume here that something at a higher level has prevented a
6126 clash. This is somewhat bogus, but the best we can do. Only
6127 do this when X is BLKmode and when we are at the top level. */
6128 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6129 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6130 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6131 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6132 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6134 && GET_MODE (x) == BLKmode)
6135 /* If X is in the outgoing argument area, it is always safe. */
6137 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6138 || (GET_CODE (XEXP (x, 0)) == PLUS
6139 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6142 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6143 find the underlying pseudo. */
6144 if (GET_CODE (x) == SUBREG)
6147 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6151 /* Now look at our tree code and possibly recurse. */
6152 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6154 case tcc_declaration:
6155 exp_rtl = DECL_RTL_IF_SET (exp);
6161 case tcc_exceptional:
6162 if (TREE_CODE (exp) == TREE_LIST)
6166 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6168 exp = TREE_CHAIN (exp);
6171 if (TREE_CODE (exp) != TREE_LIST)
6172 return safe_from_p (x, exp, 0);
6175 else if (TREE_CODE (exp) == CONSTRUCTOR)
6177 constructor_elt *ce;
6178 unsigned HOST_WIDE_INT idx;
6181 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6183 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6184 || !safe_from_p (x, ce->value, 0))
6188 else if (TREE_CODE (exp) == ERROR_MARK)
6189 return 1; /* An already-visited SAVE_EXPR? */
6194 /* The only case we look at here is the DECL_INITIAL inside a
6196 return (TREE_CODE (exp) != DECL_EXPR
6197 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6198 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6199 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6202 case tcc_comparison:
6203 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6208 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6210 case tcc_expression:
6212 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6213 the expression. If it is set, we conflict iff we are that rtx or
6214 both are in memory. Otherwise, we check all operands of the
6215 expression recursively. */
6217 switch (TREE_CODE (exp))
6220 /* If the operand is static or we are static, we can't conflict.
6221 Likewise if we don't conflict with the operand at all. */
6222 if (staticp (TREE_OPERAND (exp, 0))
6223 || TREE_STATIC (exp)
6224 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6227 /* Otherwise, the only way this can conflict is if we are taking
6228 the address of a DECL a that address if part of X, which is
6230 exp = TREE_OPERAND (exp, 0);
6233 if (!DECL_RTL_SET_P (exp)
6234 || !MEM_P (DECL_RTL (exp)))
6237 exp_rtl = XEXP (DECL_RTL (exp), 0);
6241 case MISALIGNED_INDIRECT_REF:
6242 case ALIGN_INDIRECT_REF:
6245 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6246 get_alias_set (exp)))
6251 /* Assume that the call will clobber all hard registers and
6252 all of memory. */
6253 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6258 case WITH_CLEANUP_EXPR:
6259 case CLEANUP_POINT_EXPR:
6260 /* Lowered by gimplify.c. */
6264 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6270 /* If we have an rtx, we do not need to scan our operands. */
6274 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6275 for (i = 0; i < nops; i++)
6276 if (TREE_OPERAND (exp, i) != 0
6277 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6280 /* If this is a language-specific tree code, it may require
6281 special handling. */
6282 if ((unsigned int) TREE_CODE (exp)
6283 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6284 && !lang_hooks.safe_from_p (x, exp))
6289 /* Should never get a type here. */
6293 /* If we have an rtl, find any enclosed object. Then see if we conflict
with it. */
6297 if (GET_CODE (exp_rtl) == SUBREG)
6299 exp_rtl = SUBREG_REG (exp_rtl);
6301 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6305 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6306 are memory and they conflict. */
6307 return ! (rtx_equal_p (x, exp_rtl)
6308 || (MEM_P (x) && MEM_P (exp_rtl)
6309 && true_dependence (exp_rtl, VOIDmode, x,
6310 rtx_addr_varies_p)));
6313 /* If we reach here, it is safe. */
6318 /* Return the highest power of two that EXP is known to be a multiple of.
6319 This is used in updating alignment of MEMs in array references. */
6321 unsigned HOST_WIDE_INT
6322 highest_pow2_factor (tree exp)
6324 unsigned HOST_WIDE_INT c0, c1;
6326 switch (TREE_CODE (exp))
6329 /* We can find the lowest bit that's a one. If the low
6330 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6331 We need to handle this case since we can find it in a COND_EXPR,
6332 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6333 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
later ICE. */
6335 if (TREE_CONSTANT_OVERFLOW (exp))
6336 return BIGGEST_ALIGNMENT;
6339 /* Note: tree_low_cst is intentionally not used here,
6340 we don't care about the upper bits. */
6341 c0 = TREE_INT_CST_LOW (exp);
6343 return c0 ? c0 : BIGGEST_ALIGNMENT;
6347 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6348 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6349 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6350 return MIN (c0, c1);
6352 case MULT_EXPR:
6353 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6354 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6355 return c0 * c1;
6357 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6359 if (integer_pow2p (TREE_OPERAND (exp, 1))
6360 && host_integerp (TREE_OPERAND (exp, 1), 1))
6362 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6363 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6364 return MAX (1, c0 / c1);
6368 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6370 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6373 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6376 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6377 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6378 return MIN (c0, c1);
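/* A standalone sketch (not part of GCC) of the arithmetic above, using a
   plain unsigned long in place of an INTEGER_CST: the largest power-of-two
   factor of a nonzero constant is its lowest set bit, and factors combine
   as MIN across PLUS/MINUS and as a product across MULT.  The names below
   are illustrative only.  */
#if 0
#include <stdio.h>

static unsigned long
pow2_factor (unsigned long c)
{
  return c ? (c & -c) : 0;          /* 24 -> 8, 20 -> 4; 0 means "unbounded" */
}

int
main (void)
{
  unsigned long a = pow2_factor (24), b = pow2_factor (20);
  printf ("%lu\n", a < b ? a : b);  /* 4: 24 + 20 == 44 == 4 * 11 */
  printf ("%lu\n", a * b);          /* 32: 24 * 20 == 480 == 32 * 15 */
  return 0;
}
#endif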
6387 /* Similar, except that the alignment requirements of TARGET are
6388 taken into account. Assume it is at least as aligned as its
6389 type, unless it is a COMPONENT_REF in which case the layout of
6390 the structure gives the alignment. */
6392 static unsigned HOST_WIDE_INT
6393 highest_pow2_factor_for_target (tree target, tree exp)
6395 unsigned HOST_WIDE_INT target_align, factor;
6397 factor = highest_pow2_factor (exp);
6398 if (TREE_CODE (target) == COMPONENT_REF)
6399 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6401 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6402 return MAX (factor, target_align);
6405 /* Expands variable VAR. */
6408 expand_var (tree var)
6410 if (DECL_EXTERNAL (var))
6413 if (TREE_STATIC (var))
6414 /* If this is an inlined copy of a static local variable,
6415 look up the original decl. */
6416 var = DECL_ORIGIN (var);
6418 if (TREE_STATIC (var)
6419 ? !TREE_ASM_WRITTEN (var)
6420 : !DECL_RTL_SET_P (var))
6422 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6423 /* Should be ignored. */;
6424 else if (lang_hooks.expand_decl (var))
6426 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6428 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6429 rest_of_decl_compilation (var, 0, 0);
6431 /* No expansion needed. */
6432 gcc_assert (TREE_CODE (var) == TYPE_DECL
6433 || TREE_CODE (var) == CONST_DECL
6434 || TREE_CODE (var) == FUNCTION_DECL
6435 || TREE_CODE (var) == LABEL_DECL);
6439 /* Subroutine of expand_expr. Expand the two operands of a binary
6440 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6441 The value may be stored in TARGET if TARGET is nonzero. The
6442 MODIFIER argument is as documented by expand_expr. */
6445 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6446 enum expand_modifier modifier)
6448 if (! safe_from_p (target, exp1, 1))
6450 if (operand_equal_p (exp0, exp1, 0))
6452 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6453 *op1 = copy_rtx (*op0);
6457 /* If we need to preserve evaluation order, copy exp0 into its own
6458 temporary variable so that it can't be clobbered by exp1. */
6459 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6460 exp0 = save_expr (exp0);
6461 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6462 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
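/* A standalone sketch (not part of GCC) of the ordering hazard the
   save_expr call above guards against: once the first operand has been
   latched in its own temporary, side effects of the second can no longer
   change it.  eval_sum and the callbacks are illustrative names only.  */
#if 0
static int g = 1;
static int read_g (void) { return g; }
static int bump_g (void) { return g += 10; }

static int
eval_sum (int (*e0) (void), int (*e1) (void))
{
  int op0 = e0 ();   /* evaluate exp0 first, into its own temporary */
  int op1 = e1 ();   /* exp1 may have side effects; op0 is already safe */
  return op0 + op1;  /* eval_sum (read_g, bump_g) == 1 + 11 == 12 */
}
#endif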
6467 /* Return a MEM that contains constant EXP. DEFER is as for
6468 output_constant_def and MODIFIER is as for expand_expr. */
6471 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6475 mem = output_constant_def (exp, defer);
6476 if (modifier != EXPAND_INITIALIZER)
6477 mem = use_anchored_address (mem);
6481 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6482 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6485 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6486 enum expand_modifier modifier)
6488 rtx result, subtarget;
6490 HOST_WIDE_INT bitsize, bitpos;
6491 int volatilep, unsignedp;
6492 enum machine_mode mode1;
6494 /* If we are taking the address of a constant and are at the top level,
6495 we have to use output_constant_def since we can't call force_const_mem
yet. */
6497 /* ??? This should be considered a front-end bug. We should not be
6498 generating ADDR_EXPR of something that isn't an LVALUE. The only
6499 exception here is STRING_CST. */
6500 if (TREE_CODE (exp) == CONSTRUCTOR
6501 || CONSTANT_CLASS_P (exp))
6502 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6504 /* Everything must be something allowed by is_gimple_addressable. */
6505 switch (TREE_CODE (exp))
6508 /* This case will happen via recursion for &a->b. */
6509 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6512 /* Recurse and make the output_constant_def clause above handle this. */
6513 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6517 /* The real part of the complex number is always first, therefore
6518 the address is the same as the address of the parent object. */
6521 inner = TREE_OPERAND (exp, 0);
6525 /* The imaginary part of the complex number is always second.
6526 The expression is therefore always offset by the size of the
scalar type. */
6529 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6530 inner = TREE_OPERAND (exp, 0);
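/* Illustration (not from this file): in C99 terms a complex value is laid
   out as { real, imag }, so with

       double _Complex z;
       double *p = (double *) &z;

   p[0] is the real part and p[1] the imaginary part, which is why the
   address of __imag z is the address of z plus the size of one part.  */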
6534 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6535 expand_expr, as that can have various side effects; LABEL_DECLs for
6536 example, may not have their DECL_RTL set yet. Assume language
6537 specific tree nodes can be expanded in some interesting way. */
6539 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6541 result = expand_expr (exp, target, tmode,
6542 modifier == EXPAND_INITIALIZER
6543 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6545 /* If the DECL isn't in memory, then the DECL wasn't properly
6546 marked TREE_ADDRESSABLE, which will be either a front-end
6547 or a tree optimizer bug. */
6548 gcc_assert (MEM_P (result));
6549 result = XEXP (result, 0);
6551 /* ??? Is this needed anymore? */
6552 if (DECL_P (exp) && ! TREE_USED (exp))
6554 assemble_external (exp);
6555 TREE_USED (exp) = 1;
6558 if (modifier != EXPAND_INITIALIZER
6559 && modifier != EXPAND_CONST_ADDRESS)
6560 result = force_operand (result, target);
6564 /* Pass FALSE as the last argument to get_inner_reference although
6565 we are expanding to RTL. The rationale is that we know how to
6566 handle "aligning nodes" here: we can just bypass them because
6567 they won't change the final object whose address will be returned
6568 (they actually exist only for that purpose). */
6569 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6570 &mode1, &unsignedp, &volatilep, false);
6574 /* We must have made progress. */
6575 gcc_assert (inner != exp);
6577 subtarget = offset || bitpos ? NULL_RTX : target;
6578 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6584 if (modifier != EXPAND_NORMAL)
6585 result = force_operand (result, NULL);
6586 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6588 result = convert_memory_address (tmode, result);
6589 tmp = convert_memory_address (tmode, tmp);
6591 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6592 result = gen_rtx_PLUS (tmode, result, tmp);
6595 subtarget = bitpos ? NULL_RTX : target;
6596 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6597 1, OPTAB_LIB_WIDEN);
6603 /* Someone beforehand should have rejected taking the address
6604 of such an object. */
6605 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6607 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6608 if (modifier < EXPAND_SUM)
6609 result = force_operand (result, target);
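/* Illustration: for &s.f, where the field f sits at byte offset 4,
   get_inner_reference returns s with bitpos == 32, the recursive call
   yields the address of s, and the plus_constant call above produces
   (plus (reg s_addr) (const_int 4)).  */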
6615 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6616 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6619 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6620 enum expand_modifier modifier)
6622 enum machine_mode rmode;
6625 /* Target mode of VOIDmode says "whatever's natural". */
6626 if (tmode == VOIDmode)
6627 tmode = TYPE_MODE (TREE_TYPE (exp));
6629 /* We can get called with some Weird Things if the user does silliness
6630 like "(short) &a". In that case, convert_memory_address won't do
6631 the right thing, so ignore the given target mode. */
6632 if (tmode != Pmode && tmode != ptr_mode)
6635 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6638 /* Despite expand_expr's claims concerning ignoring TMODE when not
6639 strictly convenient, stuff breaks if we don't honor it. Note
6640 that combined with the above, we only do this for pointer modes. */
6641 rmode = GET_MODE (result);
6642 if (rmode == VOIDmode)
6645 result = convert_memory_address (tmode, result);
6651 /* expand_expr: generate code for computing expression EXP.
6652 An rtx for the computed value is returned. The value is never null.
6653 In the case of a void EXP, const0_rtx is returned.
6655 The value may be stored in TARGET if TARGET is nonzero.
6656 TARGET is just a suggestion; callers must assume that
6657 the rtx returned may not be the same as TARGET.
6659 If TARGET is CONST0_RTX, it means that the value will be ignored.
6661 If TMODE is not VOIDmode, it suggests generating the
6662 result in mode TMODE. But this is done only when convenient.
6663 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6664 TMODE is just a suggestion; callers must assume that
6665 the rtx returned may not have mode TMODE.
6667 Note that TARGET may have neither TMODE nor MODE. In that case, it
6668 probably will not be used.
6670 If MODIFIER is EXPAND_SUM then when EXP is an addition
6671 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6672 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6673 products as above, or REG or MEM, or constant.
6674 Ordinarily in such cases we would output mul or add instructions
6675 and then return a pseudo reg containing the sum.
6677 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6678 it also marks a label as absolutely required (it can't be dead).
6679 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6680 This is used for outputting expressions used in initializers.
6682 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6683 with a constant address even if that address is not normally legitimate.
6684 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6686 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6687 a call parameter. Such targets require special care as we haven't yet
6688 marked TARGET so that it's safe from being trashed by libcalls. We
6689 don't want to use TARGET for anything but the final result;
6690 intermediate values must go elsewhere. Additionally, calls to
6691 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6693 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6694 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6695 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6696 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
recursively. */
6699 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6700 enum expand_modifier, rtx *);
6703 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6704 enum expand_modifier modifier, rtx *alt_rtl)
6707 rtx ret, last = NULL;
6709 /* Handle ERROR_MARK before anybody tries to access its type. */
6710 if (TREE_CODE (exp) == ERROR_MARK
6711 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6713 ret = CONST0_RTX (tmode);
6714 return ret ? ret : const0_rtx;
6717 if (flag_non_call_exceptions)
6719 rn = lookup_stmt_eh_region (exp);
6720 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6722 last = get_last_insn ();
6725 /* If this is an expression of some kind and it has an associated line
6726 number, then emit the line number before expanding the expression.
6728 We need to save and restore the file and line information so that
6729 errors discovered during expansion are emitted with the right
6730 information. It would be better if the diagnostic routines
6731 used the file/line information embedded in the tree nodes rather
6732 than globals. */
6733 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6735 location_t saved_location = input_location;
6736 input_location = EXPR_LOCATION (exp);
6737 emit_line_note (input_location);
6739 /* Record where the insns produced belong. */
6740 record_block_change (TREE_BLOCK (exp));
6742 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6744 input_location = saved_location;
6748 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6751 /* If using non-call exceptions, mark all insns that may trap.
6752 expand_call() will mark CALL_INSNs before we get to this code,
6753 but it doesn't handle libcalls, and these may trap. */
6757 for (insn = next_real_insn (last); insn;
6758 insn = next_real_insn (insn))
6760 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6761 /* If we want exceptions for non-call insns, any
6762 may_trap_p instruction may throw. */
6763 && GET_CODE (PATTERN (insn)) != CLOBBER
6764 && GET_CODE (PATTERN (insn)) != USE
6765 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6767 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6777 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6778 enum expand_modifier modifier, rtx *alt_rtl)
6780 rtx op0, op1, temp, decl_rtl;
6781 tree type = TREE_TYPE (exp);
6783 enum machine_mode mode;
6784 enum tree_code code = TREE_CODE (exp);
6786 rtx subtarget, original_target;
6788 tree context, subexp0, subexp1;
6789 bool reduce_bit_field = false;
6790 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6791 ? reduce_to_bit_field_precision ((expr), \
6796 mode = TYPE_MODE (type);
6797 unsignedp = TYPE_UNSIGNED (type);
6798 if (lang_hooks.reduce_bit_field_operations
6799 && TREE_CODE (type) == INTEGER_TYPE
6800 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6802 /* An operation in what may be a bit-field type needs the
6803 result to be reduced to the precision of the bit-field type,
6804 which is narrower than that of the type's mode. */
6805 reduce_bit_field = true;
6806 if (modifier == EXPAND_STACK_PARM)
6810 /* Use subtarget as the target for operand 0 of a binary operation. */
6811 subtarget = get_subtarget (target);
6812 original_target = target;
6813 ignore = (target == const0_rtx
6814 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6815 || code == CONVERT_EXPR || code == COND_EXPR
6816 || code == VIEW_CONVERT_EXPR)
6817 && TREE_CODE (type) == VOID_TYPE));
6819 /* If we are going to ignore this result, we need only do something
6820 if there is a side-effect somewhere in the expression. If there
6821 is, short-circuit the most common cases here. Note that we must
6822 not call expand_expr with anything but const0_rtx in case this
6823 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6827 if (! TREE_SIDE_EFFECTS (exp))
6830 /* Ensure we reference a volatile object even if value is ignored, but
6831 don't do this if all we are doing is taking its address. */
6832 if (TREE_THIS_VOLATILE (exp)
6833 && TREE_CODE (exp) != FUNCTION_DECL
6834 && mode != VOIDmode && mode != BLKmode
6835 && modifier != EXPAND_CONST_ADDRESS)
6837 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6839 temp = copy_to_reg (temp);
6843 if (TREE_CODE_CLASS (code) == tcc_unary
6844 || code == COMPONENT_REF || code == INDIRECT_REF)
6845 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6848 else if (TREE_CODE_CLASS (code) == tcc_binary
6849 || TREE_CODE_CLASS (code) == tcc_comparison
6850 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6852 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6853 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6856 else if (code == BIT_FIELD_REF)
6858 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6859 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6860 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6872 tree function = decl_function_context (exp);
6874 temp = label_rtx (exp);
6875 temp = gen_rtx_LABEL_REF (Pmode, temp);
6877 if (function != current_function_decl
6878 && function != 0)
6879 LABEL_REF_NONLOCAL_P (temp) = 1;
6881 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6886 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6891 /* If a static var's type was incomplete when the decl was written,
6892 but the type is complete now, lay out the decl now. */
6893 if (DECL_SIZE (exp) == 0
6894 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6895 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6896 layout_decl (exp, 0);
6898 /* ... fall through ... */
6902 decl_rtl = DECL_RTL (exp);
6903 gcc_assert (decl_rtl);
6905 /* Ensure variable marked as used even if it doesn't go through
6906 a parser. If it hasn't been used yet, write out an external
6907 definition. */
6908 if (! TREE_USED (exp))
6910 assemble_external (exp);
6911 TREE_USED (exp) = 1;
6914 /* Show we haven't gotten RTL for this yet. */
6917 /* Variables inherited from containing functions should have
6918 been lowered by this point. */
6919 context = decl_function_context (exp);
6920 gcc_assert (!context
6921 || context == current_function_decl
6922 || TREE_STATIC (exp)
6923 /* ??? C++ creates functions that are not TREE_STATIC. */
6924 || TREE_CODE (exp) == FUNCTION_DECL);
6926 /* This is the case of an array whose size is to be determined
6927 from its initializer, while the initializer is still being parsed.
6930 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6931 temp = validize_mem (decl_rtl);
6933 /* If DECL_RTL is memory, we are in the normal case; if either
6934 the address is not valid or it is not a register and -fforce-addr
6935 is specified, get the address into a register. */
6937 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6940 *alt_rtl = decl_rtl;
6941 decl_rtl = use_anchored_address (decl_rtl);
6942 if (modifier != EXPAND_CONST_ADDRESS
6943 && modifier != EXPAND_SUM
6944 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6945 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6946 temp = replace_equiv_address (decl_rtl,
6947 copy_rtx (XEXP (decl_rtl, 0)));
6950 /* If we got something, return it. But first, set the alignment
6951 if the address is a register. */
6954 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6955 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6960 /* If the mode of DECL_RTL does not match that of the decl, it
6961 must be a promoted value. We return a SUBREG of the wanted mode,
6962 but mark it so that we know that it was already extended. */
6964 if (REG_P (decl_rtl)
6965 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6967 enum machine_mode pmode;
6969 /* Get the signedness used for this variable. Ensure we get the
6970 same mode we got when the variable was declared. */
6971 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6972 (TREE_CODE (exp) == RESULT_DECL
6973 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6974 gcc_assert (GET_MODE (decl_rtl) == pmode);
6976 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6977 SUBREG_PROMOTED_VAR_P (temp) = 1;
6978 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
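/* Illustration: on a target that promotes narrow locals to word-size
   registers, a QImode 'char' variable comes back here as
   (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set, telling later
   code that the SImode register already holds a correctly extended
   value.  */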
6985 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6986 TREE_INT_CST_HIGH (exp), mode);
6988 /* ??? If overflow is set, fold will have done an incomplete job,
6989 which can result in (plus xx (const_int 0)), which can get
6990 simplified by validate_replace_rtx during virtual register
6991 instantiation, which can result in unrecognizable insns.
6992 Avoid this by forcing all overflows into registers. */
6993 if (TREE_CONSTANT_OVERFLOW (exp)
6994 && modifier != EXPAND_INITIALIZER)
6995 temp = force_reg (mode, temp);
7001 tree tmp = NULL_TREE;
7002 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7003 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7004 return const_vector_from_tree (exp);
7005 if (GET_MODE_CLASS (mode) == MODE_INT)
7007 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7009 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7011 if (!tmp)
7012 tmp = build_constructor_from_list (type,
7013 TREE_VECTOR_CST_ELTS (exp));
7014 return expand_expr (tmp, ignore ? const0_rtx : target,
7019 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7022 /* If optimized, generate immediate CONST_DOUBLE
7023 which will be turned into memory by reload if necessary.
7025 We used to force a register so that loop.c could see it. But
7026 this does not allow gen_* patterns to perform optimizations with
7027 the constants. It also produces two insns in cases like "x = 1.0;".
7028 On most machines, floating-point constants are not permitted in
7029 many insns, so we'd end up copying it to a register in any case.
7031 Now, we do the copying in expand_binop, if appropriate. */
7032 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7033 TYPE_MODE (TREE_TYPE (exp)));
7036 /* Handle evaluating a complex constant in a CONCAT target. */
7037 if (original_target && GET_CODE (original_target) == CONCAT)
7039 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7042 rtarg = XEXP (original_target, 0);
7043 itarg = XEXP (original_target, 1);
7045 /* Move the real and imaginary parts separately. */
7046 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7047 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7050 emit_move_insn (rtarg, op0);
7052 emit_move_insn (itarg, op1);
7054 return original_target;
7057 /* ... fall through ... */
7060 temp = expand_expr_constant (exp, 1, modifier);
7062 /* temp contains a constant address.
7063 On RISC machines where a constant address isn't valid,
7064 make some insns to get that address into a register. */
7065 if (modifier != EXPAND_CONST_ADDRESS
7066 && modifier != EXPAND_INITIALIZER
7067 && modifier != EXPAND_SUM
7068 && (! memory_address_p (mode, XEXP (temp, 0))
7069 || flag_force_addr))
7070 return replace_equiv_address (temp,
7071 copy_rtx (XEXP (temp, 0)));
7076 tree val = TREE_OPERAND (exp, 0);
7077 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7079 if (!SAVE_EXPR_RESOLVED_P (exp))
7081 /* We can indeed still hit this case, typically via builtin
7082 expanders calling save_expr immediately before expanding
7083 something. Assume this means that we only have to deal
7084 with non-BLKmode values. */
7085 gcc_assert (GET_MODE (ret) != BLKmode);
7087 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7088 DECL_ARTIFICIAL (val) = 1;
7089 DECL_IGNORED_P (val) = 1;
7090 TREE_OPERAND (exp, 0) = val;
7091 SAVE_EXPR_RESOLVED_P (exp) = 1;
7093 if (!CONSTANT_P (ret))
7094 ret = copy_to_reg (ret);
7095 SET_DECL_RTL (val, ret);
7102 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7103 expand_goto (TREE_OPERAND (exp, 0));
7105 expand_computed_goto (TREE_OPERAND (exp, 0));
7109 /* If we don't need the result, just ensure we evaluate any
subexpressions. */
7113 unsigned HOST_WIDE_INT idx;
7116 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7117 expand_expr (value, const0_rtx, VOIDmode, 0);
7122 /* Try to avoid creating a temporary at all. This is possible
7123 if all of the initializer is zero.
7124 FIXME: try to handle all [0..255] initializers we can handle
with memset. */
7126 else if (TREE_STATIC (exp)
7127 && !TREE_ADDRESSABLE (exp)
7128 && target != 0 && mode == BLKmode
7129 && all_zeros_p (exp))
7131 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7135 /* All elts simple constants => refer to a constant in memory. But
7136 if this is a non-BLKmode mode, let it store a field at a time
7137 since that should make a CONST_INT or CONST_DOUBLE when we
7138 fold. Likewise, if we have a target we can use, it is best to
7139 store directly into the target unless the type is large enough
7140 that memcpy will be used. If we are making an initializer and
7141 all operands are constant, put it in memory as well.
7143 FIXME: Avoid trying to fill vector constructors piece-meal.
7144 Output them with output_constant_def below unless we're sure
7145 they're zeros. This should go away when vector initializers
7146 are treated like VECTOR_CST instead of arrays.
7148 else if ((TREE_STATIC (exp)
7149 && ((mode == BLKmode
7150 && ! (target != 0 && safe_from_p (target, exp, 1)))
7151 || TREE_ADDRESSABLE (exp)
7152 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7153 && (! MOVE_BY_PIECES_P
7154 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7156 && ! mostly_zeros_p (exp))))
7157 || ((modifier == EXPAND_INITIALIZER
7158 || modifier == EXPAND_CONST_ADDRESS)
7159 && TREE_CONSTANT (exp)))
7161 rtx constructor = expand_expr_constant (exp, 1, modifier);
7163 if (modifier != EXPAND_CONST_ADDRESS
7164 && modifier != EXPAND_INITIALIZER
7165 && modifier != EXPAND_SUM)
7166 constructor = validize_mem (constructor);
7172 /* Handle calls that pass values in multiple non-contiguous
7173 locations. The Irix 6 ABI has examples of this. */
7174 if (target == 0 || ! safe_from_p (target, exp, 1)
7175 || GET_CODE (target) == PARALLEL
7176 || modifier == EXPAND_STACK_PARM)
7177 target
7178 = assign_temp (build_qualified_type (type,
7179 (TYPE_QUALS (type)
7180 | (TREE_READONLY (exp)
7181 * TYPE_QUAL_CONST))),
7182 0, TREE_ADDRESSABLE (exp), 1);
7184 store_constructor (exp, target, 0, int_expr_size (exp));
7188 case MISALIGNED_INDIRECT_REF:
7189 case ALIGN_INDIRECT_REF:
7192 tree exp1 = TREE_OPERAND (exp, 0);
7194 if (modifier != EXPAND_WRITE)
7198 t = fold_read_from_constant_string (exp);
7200 return expand_expr (t, target, tmode, modifier);
7203 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7204 op0 = memory_address (mode, op0);
7206 if (code == ALIGN_INDIRECT_REF)
7208 int align = TYPE_ALIGN_UNIT (type);
7209 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7210 op0 = memory_address (mode, op0);
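/* Illustration: for a power-of-two ALIGN, AND-ing the address with
   -ALIGN rounds it down to an ALIGN-byte boundary, since -ALIGN is the
   same bit pattern as ~(ALIGN - 1); e.g. 0x1007 & -16 == 0x1000.  */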
7213 temp = gen_rtx_MEM (mode, op0);
7215 set_mem_attributes (temp, exp, 0);
7217 /* Resolve the misalignment now, so that we don't have to remember
7218 to resolve it later. Of course, this only works for reads. */
7219 /* ??? When we get around to supporting writes, we'll have to handle
7220 this in store_expr directly. The vectorizer isn't generating
7221 those yet, however. */
7222 if (code == MISALIGNED_INDIRECT_REF)
7227 gcc_assert (modifier == EXPAND_NORMAL
7228 || modifier == EXPAND_STACK_PARM);
7230 /* The vectorizer should have already checked the mode. */
7231 icode = movmisalign_optab->handlers[mode].insn_code;
7232 gcc_assert (icode != CODE_FOR_nothing);
7234 /* We've already validated the memory, and we're creating a
7235 new pseudo destination. The predicates really can't fail. */
7236 reg = gen_reg_rtx (mode);
7238 /* Nor can the insn generator. */
7239 insn = GEN_FCN (icode) (reg, temp);
7248 case TARGET_MEM_REF:
7250 struct mem_address addr;
7252 get_address_description (exp, &addr);
7253 op0 = addr_for_mem_ref (&addr, true);
7254 op0 = memory_address (mode, op0);
7255 temp = gen_rtx_MEM (mode, op0);
7256 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7263 tree array = TREE_OPERAND (exp, 0);
7264 tree index = TREE_OPERAND (exp, 1);
7266 /* Fold an expression like: "foo"[2].
7267 This is not done in fold so it won't happen inside &.
7268 Don't fold if this is for wide characters since it's too
7269 difficult to do correctly and this is a very rare case. */
7271 if (modifier != EXPAND_CONST_ADDRESS
7272 && modifier != EXPAND_INITIALIZER
7273 && modifier != EXPAND_MEMORY)
7275 tree t = fold_read_from_constant_string (exp);
7278 return expand_expr (t, target, tmode, modifier);
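/* Illustration: given  char c = "foo"[2];  the fold above replaces the
   indexed load with the character constant 'o' at compile time.  */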
7281 /* If this is a constant index into a constant array,
7282 just get the value from the array. Handle both the cases when
7283 we have an explicit constructor and when our operand is a variable
7284 that was declared const. */
7286 if (modifier != EXPAND_CONST_ADDRESS
7287 && modifier != EXPAND_INITIALIZER
7288 && modifier != EXPAND_MEMORY
7289 && TREE_CODE (array) == CONSTRUCTOR
7290 && ! TREE_SIDE_EFFECTS (array)
7291 && TREE_CODE (index) == INTEGER_CST)
7293 unsigned HOST_WIDE_INT ix;
7296 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7298 if (tree_int_cst_equal (field, index))
7300 if (!TREE_SIDE_EFFECTS (value))
7301 return expand_expr (fold (value), target, tmode, modifier);
7306 else if (optimize >= 1
7307 && modifier != EXPAND_CONST_ADDRESS
7308 && modifier != EXPAND_INITIALIZER
7309 && modifier != EXPAND_MEMORY
7310 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7311 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7312 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7313 && targetm.binds_local_p (array))
7315 if (TREE_CODE (index) == INTEGER_CST)
7317 tree init = DECL_INITIAL (array);
7319 if (TREE_CODE (init) == CONSTRUCTOR)
7321 unsigned HOST_WIDE_INT ix;
7324 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7326 if (tree_int_cst_equal (field, index))
7328 if (!TREE_SIDE_EFFECTS (value))
7329 return expand_expr (fold (value), target, tmode,
7334 else if (TREE_CODE (init) == STRING_CST)
7336 tree index1 = index;
7337 tree low_bound = array_ref_low_bound (exp);
7338 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7340 /* Optimize the special-case of a zero lower bound.
7342 We convert the low_bound to sizetype to avoid some problems
7343 with constant folding. (E.g. suppose the lower bound is 1,
7344 and its mode is QI. Without the conversion, (ARRAY
7345 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7346 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7348 if (! integer_zerop (low_bound))
7349 index1 = size_diffop (index1, fold_convert (sizetype,
7352 if (0 > compare_tree_int (index1,
7353 TREE_STRING_LENGTH (init)))
7355 tree type = TREE_TYPE (TREE_TYPE (init));
7356 enum machine_mode mode = TYPE_MODE (type);
7358 if (GET_MODE_CLASS (mode) == MODE_INT
7359 && GET_MODE_SIZE (mode) == 1)
7360 return gen_int_mode (TREE_STRING_POINTER (init)
7361 [TREE_INT_CST_LOW (index1)],
7368 goto normal_inner_ref;
7371 /* If the operand is a CONSTRUCTOR, we can just extract the
7372 appropriate field if it is present. */
7373 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7375 unsigned HOST_WIDE_INT idx;
7378 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7380 if (field == TREE_OPERAND (exp, 1)
7381 /* We can normally use the value of the field in the
7382 CONSTRUCTOR. However, if this is a bitfield in
7383 an integral mode that we can fit in a HOST_WIDE_INT,
7384 we must mask only the number of bits in the bitfield,
7385 since this is done implicitly by the constructor. If
7386 the bitfield does not meet either of those conditions,
7387 we can't do this optimization. */
7388 && (! DECL_BIT_FIELD (field)
7389 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7390 && (GET_MODE_BITSIZE (DECL_MODE (field))
7391 <= HOST_BITS_PER_WIDE_INT))))
7393 if (DECL_BIT_FIELD (field)
7394 && modifier == EXPAND_STACK_PARM)
7396 op0 = expand_expr (value, target, tmode, modifier);
7397 if (DECL_BIT_FIELD (field))
7399 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7400 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7402 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7404 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7405 op0 = expand_and (imode, op0, op1, target);
7410 = build_int_cst (NULL_TREE,
7411 GET_MODE_BITSIZE (imode) - bitsize);
7413 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7415 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7423 goto normal_inner_ref;
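/* Illustration of the masking above, for 0 < BITSIZE < the word width:
   an unsigned field is reduced with  value & ((1 << bitsize) - 1),
   while a signed field uses the shift pair
   (value << (width - bitsize)) >> (width - bitsize), relying on the
   arithmetic right shift that expand_shift produces for signed modes.  */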
7426 case ARRAY_RANGE_REF:
7429 enum machine_mode mode1;
7430 HOST_WIDE_INT bitsize, bitpos;
7433 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7434 &mode1, &unsignedp, &volatilep, true);
7437 /* If we got back the original object, something is wrong. Perhaps
7438 we are evaluating an expression too early. In any event, don't
7439 infinitely recurse. */
7440 gcc_assert (tem != exp);
7442 /* If TEM's type is a union of variable size, pass TARGET to the inner
7443 computation, since it will need a temporary and TARGET is known
7444 to suffice. This occurs in unchecked conversion in Ada. */
7448 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7449 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7451 && modifier != EXPAND_STACK_PARM
7452 ? target : NULL_RTX),
7454 (modifier == EXPAND_INITIALIZER
7455 || modifier == EXPAND_CONST_ADDRESS
7456 || modifier == EXPAND_STACK_PARM)
7457 ? modifier : EXPAND_NORMAL);
7459 /* If this is a constant, put it into a register if it is a legitimate
7460 constant, OFFSET is 0, and we won't try to extract outside the
7461 register (in case we were passed a partially uninitialized object
7462 or a view_conversion to a larger size). Force the constant to
7463 memory otherwise. */
7464 if (CONSTANT_P (op0))
7466 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7467 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7469 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7470 op0 = force_reg (mode, op0);
7472 op0 = validize_mem (force_const_mem (mode, op0));
7475 /* Otherwise, if this object is not in memory and we either have an
7476 offset, a BLKmode result, or a reference outside the object, put it
7477 there. Such cases can occur in Ada if we have unchecked conversion
7478 of an expression from a scalar type to an array or record type or
7479 for an ARRAY_RANGE_REF whose type is BLKmode. */
7480 else if (!MEM_P (op0)
7482 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7483 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7485 tree nt = build_qualified_type (TREE_TYPE (tem),
7486 (TYPE_QUALS (TREE_TYPE (tem))
7487 | TYPE_QUAL_CONST));
7488 rtx memloc = assign_temp (nt, 1, 1, 1);
7490 emit_move_insn (memloc, op0);
7496 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7499 gcc_assert (MEM_P (op0));
7501 #ifdef POINTERS_EXTEND_UNSIGNED
7502 if (GET_MODE (offset_rtx) != Pmode)
7503 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7505 if (GET_MODE (offset_rtx) != ptr_mode)
7506 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7509 if (GET_MODE (op0) == BLKmode
7510 /* A constant address in OP0 can have VOIDmode; we must
7511 not try to call force_reg in that case. */
7512 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7514 && (bitpos % bitsize) == 0
7515 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7516 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7518 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7522 op0 = offset_address (op0, offset_rtx,
7523 highest_pow2_factor (offset));
7526 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7527 record its alignment as BIGGEST_ALIGNMENT. */
7528 if (MEM_P (op0) && bitpos == 0 && offset != 0
7529 && is_aligning_offset (offset, tem))
7530 set_mem_align (op0, BIGGEST_ALIGNMENT);
7532 /* Don't forget about volatility even if this is a bitfield. */
7533 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7535 if (op0 == orig_op0)
7536 op0 = copy_rtx (op0);
7538 MEM_VOLATILE_P (op0) = 1;
7541 /* The following code doesn't handle CONCAT.
7542 Assume only bitpos == 0 can be used for CONCAT, due to
7543 one-element arrays having the same mode as their element. */
7544 if (GET_CODE (op0) == CONCAT)
7546 gcc_assert (bitpos == 0
7547 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7551 /* In cases where an aligned union has an unaligned object
7552 as a field, we might be extracting a BLKmode value from
7553 an integer-mode (e.g., SImode) object. Handle this case
7554 by doing the extract into an object as wide as the field
7555 (which we know to be the width of a basic mode), then
7556 storing into memory, and changing the mode to BLKmode. */
7557 if (mode1 == VOIDmode
7558 || REG_P (op0) || GET_CODE (op0) == SUBREG
7559 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7560 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7561 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7562 && modifier != EXPAND_CONST_ADDRESS
7563 && modifier != EXPAND_INITIALIZER)
7564 /* If the field isn't aligned enough to fetch as a memref,
7565 fetch it as a bit field. */
7566 || (mode1 != BLKmode
7567 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7568 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7570 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7571 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7572 && ((modifier == EXPAND_CONST_ADDRESS
7573 || modifier == EXPAND_INITIALIZER)
7575 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7576 || (bitpos % BITS_PER_UNIT != 0)))
7577 /* If the type and the field are a constant size and the
7578 size of the type isn't the same size as the bitfield,
7579 we must use bitfield operations. */
7581 && TYPE_SIZE (TREE_TYPE (exp))
7582 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7583 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7586 enum machine_mode ext_mode = mode;
7588 if (ext_mode == BLKmode
7589 && ! (target != 0 && MEM_P (op0)
7591 && bitpos % BITS_PER_UNIT == 0))
7592 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7594 if (ext_mode == BLKmode)
7597 target = assign_temp (type, 0, 1, 1);
7602 /* In this case, BITPOS must start at a byte boundary and
7603 TARGET, if specified, must be a MEM. */
7604 gcc_assert (MEM_P (op0)
7605 && (!target || MEM_P (target))
7606 && !(bitpos % BITS_PER_UNIT));
7608 emit_block_move (target,
7609 adjust_address (op0, VOIDmode,
7610 bitpos / BITS_PER_UNIT),
7611 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7613 (modifier == EXPAND_STACK_PARM
7614 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7619 op0 = validize_mem (op0);
7621 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7622 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7624 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7625 (modifier == EXPAND_STACK_PARM
7626 ? NULL_RTX : target),
7627 ext_mode, ext_mode);
7629 /* If the result is a record type and BITSIZE is narrower than
7630 the mode of OP0, an integral mode, and this is a big endian
7631 machine, we must put the field into the high-order bits. */
7632 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7633 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7634 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7635 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7636 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7640 /* If the result type is BLKmode, store the data into a temporary
7641 of the appropriate type, but with the mode corresponding to the
7642 mode for the data we have (op0's mode). It's tempting to make
7643 this a constant type, since we know it's only being stored once,
7644 but that can cause problems if we are taking the address of this
7645 COMPONENT_REF because the MEM of any reference via that address
7646 will have flags corresponding to the type, which will not
7647 necessarily be constant. */
7648 if (mode == BLKmode)
7651 = assign_stack_temp_for_type
7652 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7654 emit_move_insn (new, op0);
7655 op0 = copy_rtx (new);
7656 PUT_MODE (op0, BLKmode);
7657 set_mem_attributes (op0, exp, 1);
7663 /* If the result is BLKmode, use that to access the object
as well. */
7665 if (mode == BLKmode)
7668 /* Get a reference to just this component. */
7669 if (modifier == EXPAND_CONST_ADDRESS
7670 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7671 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7673 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7675 if (op0 == orig_op0)
7676 op0 = copy_rtx (op0);
7678 set_mem_attributes (op0, exp, 0);
7679 if (REG_P (XEXP (op0, 0)))
7680 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7682 MEM_VOLATILE_P (op0) |= volatilep;
7683 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7684 || modifier == EXPAND_CONST_ADDRESS
7685 || modifier == EXPAND_INITIALIZER)
7687 else if (target == 0)
7688 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7690 convert_move (target, op0, unsignedp);
7695 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7698 /* Check for a built-in function. */
7699 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7700 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7702 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7704 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7705 == BUILT_IN_FRONTEND)
7706 return lang_hooks.expand_expr (exp, original_target,
7710 return expand_builtin (exp, target, subtarget, tmode, ignore);
7713 return expand_call (exp, target, ignore);
7715 case NON_LVALUE_EXPR:
7718 if (TREE_OPERAND (exp, 0) == error_mark_node)
7721 if (TREE_CODE (type) == UNION_TYPE)
7723 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7725 /* If both input and output are BLKmode, this conversion isn't doing
7726 anything except possibly changing memory attributes. */
7727 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7729 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7732 result = copy_rtx (result);
7733 set_mem_attributes (result, exp, 0);
7739 if (TYPE_MODE (type) != BLKmode)
7740 target = gen_reg_rtx (TYPE_MODE (type));
7742 target = assign_temp (type, 0, 1, 1);
7746 /* Store data into beginning of memory target. */
7747 store_expr (TREE_OPERAND (exp, 0),
7748 adjust_address (target, TYPE_MODE (valtype), 0),
7749 modifier == EXPAND_STACK_PARM);
7753 gcc_assert (REG_P (target));
7755 /* Store this field into a union of the proper type. */
7756 store_field (target,
7757 MIN ((int_size_in_bytes (TREE_TYPE
7758 (TREE_OPERAND (exp, 0)))
7760 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7761 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7765 /* Return the entire union. */
7769 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7771 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7774 /* If the signedness of the conversion differs and OP0 is
7775 a promoted SUBREG, clear that indication since we now
7776 have to do the proper extension. */
7777 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7778 && GET_CODE (op0) == SUBREG)
7779 SUBREG_PROMOTED_VAR_P (op0) = 0;
7781 return REDUCE_BIT_FIELD (op0);
7784 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7785 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7786 if (GET_MODE (op0) == mode)
7789 /* If OP0 is a constant, just convert it into the proper mode. */
7790 else if (CONSTANT_P (op0))
7792 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7793 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7795 if (modifier == EXPAND_INITIALIZER)
7796 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7797 subreg_lowpart_offset (mode,
7800 op0 = convert_modes (mode, inner_mode, op0,
7801 TYPE_UNSIGNED (inner_type));
7804 else if (modifier == EXPAND_INITIALIZER)
7805 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7807 else if (target == 0)
7808 op0 = convert_to_mode (mode, op0,
7809 TYPE_UNSIGNED (TREE_TYPE
7810 (TREE_OPERAND (exp, 0))));
7813 convert_move (target, op0,
7814 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7818 return REDUCE_BIT_FIELD (op0);
7820 case VIEW_CONVERT_EXPR:
7821 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7823 /* If the input and output modes are both the same, we are done. */
7824 if (TYPE_MODE (type) == GET_MODE (op0))
7826 /* If neither mode is BLKmode, and both modes are the same size,
7827 then we can use gen_lowpart. */
7828 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7829 && GET_MODE_SIZE (TYPE_MODE (type))
7830 == GET_MODE_SIZE (GET_MODE (op0)))
7832 if (GET_CODE (op0) == SUBREG)
7833 op0 = force_reg (GET_MODE (op0), op0);
7834 op0 = gen_lowpart (TYPE_MODE (type), op0);
7836 /* If both modes are integral, then we can convert from one to the
other. */
7838 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7839 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7840 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7841 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7842 /* As a last resort, spill op0 to memory, and reload it in a
different mode. */
7844 else if (!MEM_P (op0))
7846 /* If the operand is not a MEM, force it into memory. Since we
7847 are going to be changing the mode of the MEM, don't call
7848 force_const_mem for constants because we don't allow pool
7849 constants to change mode. */
7850 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7852 gcc_assert (!TREE_ADDRESSABLE (exp));
7854 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7856 = assign_stack_temp_for_type
7857 (TYPE_MODE (inner_type),
7858 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7860 emit_move_insn (target, op0);
7864 /* At this point, OP0 is in the correct mode. If the output type is such
7865 that the operand is known to be aligned, indicate that it is.
7866 Otherwise, we need only be concerned about alignment for non-BLKmode
results. */
7870 op0 = copy_rtx (op0);
7872 if (TYPE_ALIGN_OK (type))
7873 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7874 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7875 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7877 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7878 HOST_WIDE_INT temp_size
7879 = MAX (int_size_in_bytes (inner_type),
7880 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7881 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7882 temp_size, 0, type);
7883 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7885 gcc_assert (!TREE_ADDRESSABLE (exp));
7887 if (GET_MODE (op0) == BLKmode)
7888 emit_block_move (new_with_op0_mode, op0,
7889 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7890 (modifier == EXPAND_STACK_PARM
7891 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7893 emit_move_insn (new_with_op0_mode, op0);
7898 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7904 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7905 something else, make sure we add the register to the constant and
7906 then to the other thing. This case can occur during strength
7907 reduction and doing it this way will produce better code if the
7908 frame pointer or argument pointer is eliminated.
7910 fold-const.c will ensure that the constant is always in the inner
7911 PLUS_EXPR, so the only case we need to do anything about is if
7912 sp, ap, or fp is our second argument, in which case we must swap
7913 the innermost first argument and our second argument. */
7915 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7916 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7917 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7918 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7919 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7920 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7922 tree t = TREE_OPERAND (exp, 1);
7924 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7925 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7928 /* If the result is to be ptr_mode and we are adding an integer to
7929 something, we might be forming a constant. So try to use
7930 plus_constant. If it produces a sum and we can't accept it,
7931 use force_operand. This allows P = &ARR[const] to generate
7932 efficient code on machines where a SYMBOL_REF is not a valid address.
7935 If this is an EXPAND_SUM call, always return the sum. */
7936 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7937 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7939 if (modifier == EXPAND_STACK_PARM)
7941 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7942 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7943 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7947 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7949 /* Use immed_double_const to ensure that the constant is
7950 truncated according to the mode of OP1, then sign extended
7951 to a HOST_WIDE_INT. Using the constant directly can result
7952 in non-canonical RTL in a 64x32 cross compile. */
7954 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7956 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7957 op1 = plus_constant (op1, INTVAL (constant_part));
7958 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7959 op1 = force_operand (op1, target);
7960 return REDUCE_BIT_FIELD (op1);
7963 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7964 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7965 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7969 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7970 (modifier == EXPAND_INITIALIZER
7971 ? EXPAND_INITIALIZER : EXPAND_SUM));
7972 if (! CONSTANT_P (op0))
7974 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7975 VOIDmode, modifier);
7976 /* Return a PLUS if modifier says it's OK. */
7977 if (modifier == EXPAND_SUM
7978 || modifier == EXPAND_INITIALIZER)
7979 return simplify_gen_binary (PLUS, mode, op0, op1);
7982 /* Use immed_double_const to ensure that the constant is
7983 truncated according to the mode of OP1, then sign extended
7984 to a HOST_WIDE_INT. Using the constant directly can result
7985 in non-canonical RTL in a 64x32 cross compile. */
7987 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7989 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7990 op0 = plus_constant (op0, INTVAL (constant_part));
7991 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7992 op0 = force_operand (op0, target);
7993 return REDUCE_BIT_FIELD (op0);
7997 /* No sense saving up arithmetic to be done
7998 if it's all in the wrong mode to form part of an address.
7999 And force_operand won't know whether to sign-extend or
zero-extend. */
8001 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8002 || mode != ptr_mode)
8004 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8005 subtarget, &op0, &op1, 0);
8006 if (op0 == const0_rtx)
8008 if (op1 == const0_rtx)
8013 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8014 subtarget, &op0, &op1, modifier);
8015 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
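/* Illustration: with MODIFIER == EXPAND_SUM, expanding &arr[3] for an
   'int arr[]' may come back as (plus (symbol_ref arr) (const_int 12))
   instead of an add instruction into a pseudo, so the caller can fold
   the whole sum into a single memory address.  */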
8018 /* For initializers, we are allowed to return a MINUS of two
8019 symbolic constants. Here we handle all cases when both operands
are constant. */
8021 /* Handle difference of two symbolic constants,
8022 for the sake of an initializer. */
8023 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8024 && really_constant_p (TREE_OPERAND (exp, 0))
8025 && really_constant_p (TREE_OPERAND (exp, 1)))
8027 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8028 NULL_RTX, &op0, &op1, modifier);
8030 /* If the last operand is a CONST_INT, use plus_constant of
8031 the negated constant. Else make the MINUS. */
8032 if (GET_CODE (op1) == CONST_INT)
8033 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8035 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8038 /* No sense saving up arithmetic to be done
8039 if it's all in the wrong mode to form part of an address.
8040 And force_operand won't know whether to sign-extend or
zero-extend. */
8042 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8043 || mode != ptr_mode)
8046 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8047 subtarget, &op0, &op1, modifier);
8049 /* Convert A - const to A + (-const). */
8050 if (GET_CODE (op1) == CONST_INT)
8052 op1 = negate_rtx (mode, op1);
8053 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8059 /* If first operand is constant, swap them.
8060 Thus the following special case checks need only
8061 check the second operand. */
8062 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8064 tree t1 = TREE_OPERAND (exp, 0);
8065 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8066 TREE_OPERAND (exp, 1) = t1;
8069 /* Attempt to return something suitable for generating an
8070 indexed address, for machines that support that. */
8072 if (modifier == EXPAND_SUM && mode == ptr_mode
8073 && host_integerp (TREE_OPERAND (exp, 1), 0))
8075 tree exp1 = TREE_OPERAND (exp, 1);
8077 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8081 op0 = force_operand (op0, NULL_RTX);
8083 op0 = copy_to_mode_reg (mode, op0);
8085 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8086 gen_int_mode (tree_low_cst (exp1, 0),
8087 TYPE_MODE (TREE_TYPE (exp1)))));
8090 if (modifier == EXPAND_STACK_PARM)
8093 /* Check for multiplying things that have been extended
8094 from a narrower type. If this machine supports multiplying
8095 in that narrower type with a result in the desired type,
8096 do it that way, and avoid the explicit type-conversion. */
8098 subexp0 = TREE_OPERAND (exp, 0);
8099 subexp1 = TREE_OPERAND (exp, 1);
8100 /* First, check if we have a multiplication of one signed and one
8101 unsigned operand. */
8102 if (TREE_CODE (subexp0) == NOP_EXPR
8103 && TREE_CODE (subexp1) == NOP_EXPR
8104 && TREE_CODE (type) == INTEGER_TYPE
8105 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8106 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8107 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8108 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8109 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8110 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8112 enum machine_mode innermode
8113 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8114 this_optab = usmul_widen_optab;
8115 if (mode == GET_MODE_WIDER_MODE (innermode))
8117 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8119 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8120 expand_operands (TREE_OPERAND (subexp0, 0),
8121 TREE_OPERAND (subexp1, 0),
8122 NULL_RTX, &op0, &op1, 0);
8124 expand_operands (TREE_OPERAND (subexp0, 0),
8125 TREE_OPERAND (subexp1, 0),
8126 NULL_RTX, &op1, &op0, 0);
      /* Check for a multiplication with matching signedness.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE
				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TYPE_UNSIGNED (TREE_TYPE
				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_UNSIGNED (TREE_TYPE
				     (TREE_OPERAND
				      (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, EXPAND_NORMAL);
		  goto binop3;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_normal (TREE_OPERAND (exp, 1)),
					 unsignedp);
		  else
		    op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
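      /* Example: given "short a, b; ... (int) a * (int) b", both operands
	 are NOP_EXPRs widening HImode values, so the code above can select
	 smul_widen_optab (a mulhisi3-style pattern) instead of a full
	 SImode multiply; mixed signedness similarly maps to
	 usmul_widen_optab.  (Illustrative; pattern availability is
	 target-dependent.)  */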
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      goto binop;
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type),
			  op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (! REG_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}

      /* We generate better code and avoid problems with op1 mentioning
	 target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
	op1 = force_reg (mode, op1);

      {
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;

	if (code == MAX_EXPR)
	  comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;

	/* Canonicalize to comparisons against 0.  */
	if (op1 == const1_rtx)
	  {
	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
	       or (a != 0 ? a : 1) for unsigned.
	       For MIN we are safe converting (a <= 1 ? a : 1)
	       into (a <= 0 ? a : 1)  */
	    cmpop1 = const0_rtx;
	    if (code == MAX_EXPR)
	      comparison_code = unsignedp ? NE : GT;
	  }
	if (op1 == constm1_rtx && !unsignedp)
	  {
	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
	    cmpop1 = const0_rtx;
	    if (code == MIN_EXPR)
	      comparison_code = LT;
	  }
#ifdef HAVE_conditional_move
	/* Use a conditional move if possible.  */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;

	    /* ??? Same problem as in expmed.c: emit_conditional_move
	       forces a stack adjustment via compare_from_rtx, and we
	       lose the stack adjustment if the sequence we are about
	       to create is discarded.  */
	    do_pending_stack_adjust ();

	    start_sequence ();

	    /* Try to emit the conditional move.  */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);

	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }

	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);

	temp = gen_label_rtx ();
	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
				 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
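      /* Example of the canonicalization above: for unsigned A, "MAX (A, 1)",
	 i.e. "a >= 1 ? a : 1", is tested as "a != 0", and the signed
	 "MIN (A, -1)" is tested as "a < 0"; comparisons with zero are
	 cheaper than comparisons against 1 or -1 on most targets.
	 (Illustrative only.)  */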
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */
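      /* Example: for "r = a && b" where neither operand has side effects,
	 the TRUTH_AND_EXPR path below evaluates both A and B as 0-or-1
	 values and ANDs them, trading the branch that TRUTH_ANDIF_EXPR
	 would emit for straight-line code.  (Illustrative only.)  */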
    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
      /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */

      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
    case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;

	gcc_assert (ignore);

	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
	 conditional jump and is handled in
	 expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
	 are required to be constructed to contain assignments of
	 a temporary variable, so that we can evaluate them here
	 for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
		  && !ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
	 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary and see if we can copy the value there without
	 using it as an intermediate target.  */

      if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
	  && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
	  && (! can_conditionally_move_p (mode)
	      || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
	temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;
    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;
    case MODIFY_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	gcc_assert (ignore);

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
	if (TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label);
	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	expand_assignment (lhs, rhs);

	return const0_rtx;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      op1 = expand_normal (TREE_OPERAND (exp, 1));

      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;

    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }

    case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);

	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
	this_optab = optab_for_tree_code (code, type);
	temp = expand_unop (mode, this_optab, op0, target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
	target = expand_vec_shift_expr (exp, target);
	return target;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  /* For constant values, reduce using build_int_cst_type.  */
  if (GET_CODE (exp) == CONST_INT)
    {
      HOST_WIDE_INT value = INTVAL (exp);
      tree t = build_int_cst_type (type, value);
      return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
    }
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
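#if 0
/* Illustrative sketch only, not part of GCC: the unsigned-mask and
   shift-pair cases above are the standard scalar idioms for reducing a
   value to PREC significant bits.  A stand-alone version for a value
   already held in a HOST_WIDE_INT, assuming PREC <= HOST_BITS_PER_WIDE_INT
   and an arithmetic right shift (which the RTL expansion above guarantees,
   but plain C does not).  */

static HOST_WIDE_INT
reduce_precision_sketch (HOST_WIDE_INT value, int prec)
{
  int shift = HOST_BITS_PER_WIDE_INT - prec;

  /* Left-justify the low PREC bits (done unsigned to avoid overflow),
     then shift back so bit PREC-1 is replicated through the upper bits.  */
  return ((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) value << shift)) >> shift;
}
#endif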
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
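#if 0
/* Illustrative sketch only, not part of GCC: C source whose expansion
   produces the NEGATE_EXPR/BIT_AND_EXPR offset shape recognized above.
   Here A stands for any power-of-2 alignment larger than
   BIGGEST_ALIGNMENT, and the helper name is hypothetical.  */

#define A 256
static char buf[1024 + A];

static char *
aligned_start_sketch (void)
{
  /* ((- (unsigned long) buf) & (A - 1)) is exactly the offset that rounds
     the address of BUF up to the next multiple of A.  */
  return buf + ((- (unsigned long) buf) & (A - 1));
}
#endif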
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* If they are read-only, non-volatile and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If variable is bigger than the string literal, OFFSET must be constant
	 and inside of the bounds of the string literal.  */
      offset = fold_convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
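/* Example: for

       static const char greet[] = "hello";
       ... strlen (greet + 1) ...

   ARG is the PLUS_EXPR "greet + 1"; the VAR_DECL branch above returns the
   STRING_CST "hello" from DECL_INITIAL and sets *PTR_OFFSET to 1, letting
   the builtin fold to 4.  (Illustrative only.)  */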
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR: code = UNORDERED; break;
    case ORDERED_EXPR:   code = ORDERED;   break;
    case UNLT_EXPR:      code = UNLT;      break;
    case UNLE_EXPR:      code = UNLE;      break;
    case UNGT_EXPR:      code = UNGT;      break;
    case UNGE_EXPR:      code = UNGE;      break;
    case UNEQ_EXPR:      code = UNEQ;      break;
    case LTGT_EXPR:      code = LTGT;      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
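  /* Example: "(x & 8) != 0" is folded to the equivalent of "(x >> 3) & 1",
     one shift plus one mask, with "(x & 8) == 0" additionally XORed with 1.
     (Illustrative only.)  */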
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_normal (minval);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_normal (range);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
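  /* Example: for "switch (i)" with case range LO..HI, INDEX holds i - LO
     here, so the single unsigned test

	 (unsigned) (i - LO) > (unsigned) (HI - LO)

     also catches i < LO, since the subtraction wraps to a large value.
     (Illustrative only.)  */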
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
			    fold_convert (index_type, index_expr),
			    fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_normal (range),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"